{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.0,
  "global_step": 32735,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.08,
      "learning_rate": 1.9697571406751184e-05,
      "loss": 0.4892,
      "step": 500
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.9392087979227128e-05,
      "loss": 0.4021,
      "step": 1000
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.908660455170307e-05,
      "loss": 0.3816,
      "step": 1500
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.8781121124179017e-05,
      "loss": 0.3486,
      "step": 2000
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.8475637696654957e-05,
      "loss": 0.335,
      "step": 2500
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.81701542691309e-05,
      "loss": 0.3268,
      "step": 3000
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.786528180846189e-05,
      "loss": 0.3362,
      "step": 3500
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.7559798380937837e-05,
      "loss": 0.3123,
      "step": 4000
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.7254314953413777e-05,
      "loss": 0.3114,
      "step": 4500
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.694883152588972e-05,
      "loss": 0.2938,
      "step": 5000
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.6643348098365667e-05,
      "loss": 0.297,
      "step": 5500
    },
    {
      "epoch": 0.92,
      "learning_rate": 1.6337864670841607e-05,
      "loss": 0.2818,
      "step": 6000
    },
    {
      "epoch": 0.99,
      "learning_rate": 1.60329922101726e-05,
      "loss": 0.2854,
      "step": 6500
    },
    {
      "epoch": 1.07,
      "learning_rate": 1.5727508782648543e-05,
      "loss": 0.2273,
      "step": 7000
    },
    {
      "epoch": 1.15,
      "learning_rate": 1.5422025355124486e-05,
      "loss": 0.2193,
      "step": 7500
    },
    {
      "epoch": 1.22,
      "learning_rate": 1.511654192760043e-05,
      "loss": 0.2276,
      "step": 8000
    },
    {
      "epoch": 1.3,
      "learning_rate": 1.4811058500076371e-05,
      "loss": 0.2151,
      "step": 8500
    },
    {
      "epoch": 1.37,
      "learning_rate": 1.4505575072552316e-05,
      "loss": 0.2193,
      "step": 9000
    },
    {
      "epoch": 1.45,
      "learning_rate": 1.4200702611883306e-05,
      "loss": 0.2363,
      "step": 9500
    },
    {
      "epoch": 1.53,
      "learning_rate": 1.3897052084924394e-05,
      "loss": 0.2273,
      "step": 10000
    },
    {
      "epoch": 1.6,
      "learning_rate": 1.3591568657400339e-05,
      "loss": 0.2275,
      "step": 10500
    },
    {
      "epoch": 1.68,
      "learning_rate": 1.328608522987628e-05,
      "loss": 0.2247,
      "step": 11000
    },
    {
      "epoch": 1.76,
      "learning_rate": 1.2980601802352224e-05,
      "loss": 0.2133,
      "step": 11500
    },
    {
      "epoch": 1.83,
      "learning_rate": 1.2675118374828165e-05,
      "loss": 0.2222,
      "step": 12000
    },
    {
      "epoch": 1.91,
      "learning_rate": 1.236963494730411e-05,
      "loss": 0.2136,
      "step": 12500
    },
    {
      "epoch": 1.99,
      "learning_rate": 1.2064151519780052e-05,
      "loss": 0.2185,
      "step": 13000
    },
    {
      "epoch": 2.06,
      "learning_rate": 1.1758668092255997e-05,
      "loss": 0.1531,
      "step": 13500
    },
    {
      "epoch": 2.14,
      "learning_rate": 1.145318466473194e-05,
      "loss": 0.1496,
      "step": 14000
    },
    {
      "epoch": 2.21,
      "learning_rate": 1.1147701237207882e-05,
      "loss": 0.1467,
      "step": 14500
    },
    {
      "epoch": 2.29,
      "learning_rate": 1.0842217809683826e-05,
      "loss": 0.1674,
      "step": 15000
    },
    {
      "epoch": 2.37,
      "learning_rate": 1.0536734382159768e-05,
      "loss": 0.1505,
      "step": 15500
    },
    {
      "epoch": 2.44,
      "learning_rate": 1.0231250954635711e-05,
      "loss": 0.1447,
      "step": 16000
    },
    {
      "epoch": 2.52,
      "learning_rate": 9.925767527111655e-06,
      "loss": 0.1407,
      "step": 16500
    },
    {
      "epoch": 2.6,
      "learning_rate": 9.620284099587598e-06,
      "loss": 0.153,
      "step": 17000
    },
    {
      "epoch": 2.67,
      "learning_rate": 9.314800672063541e-06,
      "loss": 0.1534,
      "step": 17500
    },
    {
      "epoch": 2.75,
      "learning_rate": 9.009317244539484e-06,
      "loss": 0.1499,
      "step": 18000
    },
    {
      "epoch": 2.83,
      "learning_rate": 8.703833817015428e-06,
      "loss": 0.1605,
      "step": 18500
    },
    {
      "epoch": 2.9,
      "learning_rate": 8.398350389491371e-06,
      "loss": 0.1674,
      "step": 19000
    },
    {
      "epoch": 2.98,
      "learning_rate": 8.093477928822362e-06,
      "loss": 0.1524,
      "step": 19500
    },
    {
      "epoch": 3.05,
      "learning_rate": 7.788605468153352e-06,
      "loss": 0.0993,
      "step": 20000
    },
    {
      "epoch": 3.13,
      "learning_rate": 7.483122040629296e-06,
      "loss": 0.081,
      "step": 20500
    },
    {
      "epoch": 3.21,
      "learning_rate": 7.17763861310524e-06,
      "loss": 0.0811,
      "step": 21000
    },
    {
      "epoch": 3.28,
      "learning_rate": 6.872155185581183e-06,
      "loss": 0.0825,
      "step": 21500
    },
    {
      "epoch": 3.36,
      "learning_rate": 6.566671758057126e-06,
      "loss": 0.0823,
      "step": 22000
    },
    {
      "epoch": 3.44,
      "learning_rate": 6.261188330533069e-06,
      "loss": 0.0808,
      "step": 22500
    },
    {
      "epoch": 3.51,
      "learning_rate": 5.955704903009012e-06,
      "loss": 0.078,
      "step": 23000
    },
    {
      "epoch": 3.59,
      "learning_rate": 5.650221475484955e-06,
      "loss": 0.0764,
      "step": 23500
    },
    {
      "epoch": 3.67,
      "learning_rate": 5.344738047960899e-06,
      "loss": 0.0769,
      "step": 24000
    },
    {
      "epoch": 3.74,
      "learning_rate": 5.040476554146938e-06,
      "loss": 0.0861,
      "step": 24500
    },
    {
      "epoch": 3.82,
      "learning_rate": 4.734993126622881e-06,
      "loss": 0.0956,
      "step": 25000
    },
    {
      "epoch": 3.89,
      "learning_rate": 4.429509699098824e-06,
      "loss": 0.0827,
      "step": 25500
    },
    {
      "epoch": 3.97,
      "learning_rate": 4.124026271574768e-06,
      "loss": 0.0836,
      "step": 26000
    },
    {
      "epoch": 4.05,
      "learning_rate": 3.81854284405071e-06,
      "loss": 0.0529,
      "step": 26500
    },
    {
      "epoch": 4.12,
      "learning_rate": 3.5130594165266534e-06,
      "loss": 0.0371,
      "step": 27000
    },
    {
      "epoch": 4.2,
      "learning_rate": 3.208797922712693e-06,
      "loss": 0.0408,
      "step": 27500
    },
    {
      "epoch": 4.28,
      "learning_rate": 2.903314495188636e-06,
      "loss": 0.044,
      "step": 28000
    },
    {
      "epoch": 4.35,
      "learning_rate": 2.5978310676645796e-06,
      "loss": 0.034,
      "step": 28500
    },
    {
      "epoch": 4.43,
      "learning_rate": 2.2923476401405224e-06,
      "loss": 0.041,
      "step": 29000
    },
    {
      "epoch": 4.51,
      "learning_rate": 1.9868642126164657e-06,
      "loss": 0.0402,
      "step": 29500
    },
    {
      "epoch": 4.58,
      "learning_rate": 1.6813807850924087e-06,
      "loss": 0.0321,
      "step": 30000
    },
    {
      "epoch": 4.66,
      "learning_rate": 1.375897357568352e-06,
      "loss": 0.0349,
      "step": 30500
    },
    {
      "epoch": 4.73,
      "learning_rate": 1.0704139300442952e-06,
      "loss": 0.0422,
      "step": 31000
    },
    {
      "epoch": 4.81,
      "learning_rate": 7.649305025202383e-07,
      "loss": 0.041,
      "step": 31500
    },
    {
      "epoch": 4.89,
      "learning_rate": 4.594470749961815e-07,
      "loss": 0.0445,
      "step": 32000
    },
    {
      "epoch": 4.96,
      "learning_rate": 1.5396364747212466e-07,
      "loss": 0.0376,
      "step": 32500
    },
    {
      "epoch": 5.0,
      "step": 32735,
      "total_flos": 1.2201660848633088e+17,
      "train_loss": 0.16656628906298943,
      "train_runtime": 2957.7214,
      "train_samples_per_second": 177.067,
      "train_steps_per_second": 11.068
    }
  ],
  "max_steps": 32735,
  "num_train_epochs": 5,
  "total_flos": 1.2201660848633088e+17,
  "trial_name": null,
  "trial_params": null
}