{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 1028,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.019464720194647202,
      "grad_norm": 419.45176209681534,
      "learning_rate": 1.7475728155339808e-06,
      "loss": 7.5347,
      "step": 10
    },
    {
      "epoch": 0.038929440389294405,
      "grad_norm": 440.7543770201198,
      "learning_rate": 3.689320388349515e-06,
      "loss": 5.088,
      "step": 20
    },
    {
      "epoch": 0.058394160583941604,
      "grad_norm": 78.99835155456897,
      "learning_rate": 5.631067961165049e-06,
      "loss": 4.2568,
      "step": 30
    },
    {
      "epoch": 0.07785888077858881,
      "grad_norm": 70.58925751405357,
      "learning_rate": 7.572815533980583e-06,
      "loss": 3.4786,
      "step": 40
    },
    {
      "epoch": 0.09732360097323602,
      "grad_norm": 40.09311796360936,
      "learning_rate": 9.514563106796117e-06,
      "loss": 3.161,
      "step": 50
    },
    {
      "epoch": 0.11678832116788321,
      "grad_norm": 41.72458199935773,
      "learning_rate": 1.145631067961165e-05,
      "loss": 3.104,
      "step": 60
    },
    {
      "epoch": 0.1362530413625304,
      "grad_norm": 149.8142811415225,
      "learning_rate": 1.3398058252427187e-05,
      "loss": 3.5738,
      "step": 70
    },
    {
      "epoch": 0.15571776155717762,
      "grad_norm": 72.86685673927337,
      "learning_rate": 1.533980582524272e-05,
      "loss": 3.0172,
      "step": 80
    },
    {
      "epoch": 0.17518248175182483,
      "grad_norm": 25.03536602970884,
      "learning_rate": 1.7281553398058253e-05,
      "loss": 3.3168,
      "step": 90
    },
    {
      "epoch": 0.19464720194647203,
      "grad_norm": 59.627261461817,
      "learning_rate": 1.922330097087379e-05,
      "loss": 3.4559,
      "step": 100
    },
    {
      "epoch": 0.2141119221411192,
      "grad_norm": 69.6146294827494,
      "learning_rate": 1.999792377815462e-05,
      "loss": 3.0347,
      "step": 110
    },
    {
      "epoch": 0.23357664233576642,
      "grad_norm": 72.85844217141502,
      "learning_rate": 1.9985238877782747e-05,
      "loss": 3.1156,
      "step": 120
    },
    {
      "epoch": 0.25304136253041365,
      "grad_norm": 33.23078781184279,
      "learning_rate": 1.9961037146461106e-05,
      "loss": 3.151,
      "step": 130
    },
    {
      "epoch": 0.2725060827250608,
      "grad_norm": 81.22302004701628,
      "learning_rate": 1.992534649811862e-05,
      "loss": 2.9322,
      "step": 140
    },
    {
      "epoch": 0.291970802919708,
      "grad_norm": 47.33360495853381,
      "learning_rate": 1.9878208097835908e-05,
      "loss": 2.8629,
      "step": 150
    },
    {
      "epoch": 0.31143552311435524,
      "grad_norm": 51.597242465258795,
      "learning_rate": 1.9819676314366084e-05,
      "loss": 2.9709,
      "step": 160
    },
    {
      "epoch": 0.3309002433090024,
      "grad_norm": 82.70654992211028,
      "learning_rate": 1.974981865742661e-05,
      "loss": 2.8736,
      "step": 170
    },
    {
      "epoch": 0.35036496350364965,
      "grad_norm": 129.437045502628,
      "learning_rate": 1.9668715699834553e-05,
      "loss": 3.064,
      "step": 180
    },
    {
      "epoch": 0.36982968369829683,
      "grad_norm": 41.951916677490715,
      "learning_rate": 1.957646098457507e-05,
      "loss": 2.7225,
      "step": 190
    },
    {
      "epoch": 0.38929440389294406,
      "grad_norm": 47.177248106803845,
      "learning_rate": 1.9473160916910268e-05,
      "loss": 2.9907,
      "step": 200
    },
    {
      "epoch": 0.40875912408759124,
      "grad_norm": 28.20808923970595,
      "learning_rate": 1.9358934641652897e-05,
      "loss": 2.8968,
      "step": 210
    },
    {
      "epoch": 0.4282238442822384,
      "grad_norm": 60.66162586721256,
      "learning_rate": 1.923391390574645e-05,
      "loss": 2.9422,
      "step": 220
    },
    {
      "epoch": 0.44768856447688565,
      "grad_norm": 29.677036481098472,
      "learning_rate": 1.909824290631012e-05,
      "loss": 3.0416,
      "step": 230
    },
    {
      "epoch": 0.46715328467153283,
      "grad_norm": 57.8380142267369,
      "learning_rate": 1.8952078124323922e-05,
      "loss": 2.8864,
      "step": 240
    },
    {
      "epoch": 0.48661800486618007,
      "grad_norm": 23.875475400629053,
      "learning_rate": 1.8795588144145784e-05,
      "loss": 2.6229,
      "step": 250
    },
    {
      "epoch": 0.5060827250608273,
      "grad_norm": 25.63414075808507,
      "learning_rate": 1.8628953459068766e-05,
      "loss": 2.764,
      "step": 260
    },
    {
      "epoch": 0.5255474452554745,
      "grad_norm": 50.33174840522443,
      "learning_rate": 1.8452366263142694e-05,
      "loss": 2.711,
      "step": 270
    },
    {
      "epoch": 0.5450121654501217,
      "grad_norm": 40.94875404333496,
      "learning_rate": 1.8266030229500307e-05,
      "loss": 2.8135,
      "step": 280
    },
    {
      "epoch": 0.5644768856447688,
      "grad_norm": 23.116839479892334,
      "learning_rate": 1.807016027544359e-05,
      "loss": 2.782,
      "step": 290
    },
    {
      "epoch": 0.583941605839416,
      "grad_norm": 18.82174925977683,
      "learning_rate": 1.786498231456125e-05,
      "loss": 2.6532,
      "step": 300
    },
    {
      "epoch": 0.6034063260340633,
      "grad_norm": 22.880354016784672,
      "learning_rate": 1.7650732996163246e-05,
      "loss": 2.8259,
      "step": 310
    },
    {
      "epoch": 0.6228710462287105,
      "grad_norm": 28.332068157479885,
      "learning_rate": 1.7427659432332844e-05,
      "loss": 2.7381,
      "step": 320
    },
    {
      "epoch": 0.6423357664233577,
      "grad_norm": 28.026764464260065,
      "learning_rate": 1.7196018912911126e-05,
      "loss": 2.7222,
      "step": 330
    },
    {
      "epoch": 0.6618004866180048,
      "grad_norm": 21.57301570810096,
      "learning_rate": 1.6956078608742567e-05,
      "loss": 2.722,
      "step": 340
    },
    {
      "epoch": 0.681265206812652,
      "grad_norm": 24.388668227839457,
      "learning_rate": 1.6708115263524047e-05,
      "loss": 2.6107,
      "step": 350
    },
    {
      "epoch": 0.7007299270072993,
      "grad_norm": 18.703736650144812,
      "learning_rate": 1.6452414874612608e-05,
      "loss": 2.593,
      "step": 360
    },
    {
      "epoch": 0.7201946472019465,
      "grad_norm": 31.332283196695716,
      "learning_rate": 1.618927236316026e-05,
      "loss": 2.7911,
      "step": 370
    },
    {
      "epoch": 0.7396593673965937,
      "grad_norm": 23.25616532139509,
      "learning_rate": 1.5918991233956145e-05,
      "loss": 2.7352,
      "step": 380
    },
    {
      "epoch": 0.7591240875912408,
      "grad_norm": 12.936266532050945,
      "learning_rate": 1.5641883225368468e-05,
      "loss": 2.7952,
      "step": 390
    },
    {
      "epoch": 0.7785888077858881,
      "grad_norm": 86.36443076979923,
      "learning_rate": 1.5358267949789968e-05,
      "loss": 3.0855,
      "step": 400
    },
    {
      "epoch": 0.7980535279805353,
      "grad_norm": 46.98416378181883,
      "learning_rate": 1.5068472525001554e-05,
      "loss": 2.8366,
      "step": 410
    },
    {
      "epoch": 0.8175182481751825,
      "grad_norm": 26.85730980671752,
      "learning_rate": 1.4772831196879383e-05,
      "loss": 2.5443,
      "step": 420
    },
    {
      "epoch": 0.8369829683698297,
      "grad_norm": 21.408351349543143,
      "learning_rate": 1.4471684953880458e-05,
      "loss": 2.542,
      "step": 430
    },
    {
      "epoch": 0.8564476885644768,
      "grad_norm": 19.42979950360618,
      "learning_rate": 1.416538113375145e-05,
      "loss": 2.6807,
      "step": 440
    },
    {
      "epoch": 0.8759124087591241,
      "grad_norm": 54.322602669364706,
      "learning_rate": 1.3854273022914333e-05,
      "loss": 2.7384,
      "step": 450
    },
    {
      "epoch": 0.8953771289537713,
      "grad_norm": 29.215560058723682,
      "learning_rate": 1.3538719448990905e-05,
      "loss": 2.6412,
      "step": 460
    },
    {
      "epoch": 0.9148418491484185,
      "grad_norm": 25.527754957600752,
      "learning_rate": 1.3219084366936172e-05,
      "loss": 2.5258,
      "step": 470
    },
    {
      "epoch": 0.9343065693430657,
      "grad_norm": 48.82663237736622,
      "learning_rate": 1.2895736439257933e-05,
      "loss": 2.5599,
      "step": 480
    },
    {
      "epoch": 0.9537712895377128,
      "grad_norm": 27.743882727321346,
      "learning_rate": 1.256904861080674e-05,
      "loss": 2.6751,
      "step": 490
    },
    {
      "epoch": 0.9732360097323601,
      "grad_norm": 27.354218263639577,
      "learning_rate": 1.223939767862668e-05,
      "loss": 2.4962,
      "step": 500
    },
    {
      "epoch": 0.9927007299270073,
      "grad_norm": 25.900686408023237,
      "learning_rate": 1.190716385736307e-05,
      "loss": 2.5036,
      "step": 510
    },
    {
      "epoch": 1.0116788321167882,
      "grad_norm": 19.413128620243246,
      "learning_rate": 1.1572730340728362e-05,
      "loss": 2.0859,
      "step": 520
    },
    {
      "epoch": 1.0311435523114356,
      "grad_norm": 16.59211624117448,
      "learning_rate": 1.1236482859532019e-05,
      "loss": 2.0009,
      "step": 530
    },
    {
      "epoch": 1.0506082725060828,
      "grad_norm": 22.403659201349214,
      "learning_rate": 1.0898809236784152e-05,
      "loss": 2.089,
      "step": 540
    },
    {
      "epoch": 1.07007299270073,
      "grad_norm": 25.224065033796418,
      "learning_rate": 1.0560098940386028e-05,
      "loss": 1.8724,
      "step": 550
    },
    {
      "epoch": 1.0895377128953772,
      "grad_norm": 14.859356677964838,
      "learning_rate": 1.0220742633923393e-05,
      "loss": 1.8878,
      "step": 560
    },
    {
      "epoch": 1.1090024330900243,
      "grad_norm": 17.04142136353786,
      "learning_rate": 9.88113172608072e-06,
      "loss": 1.8977,
      "step": 570
    },
    {
      "epoch": 1.1284671532846715,
      "grad_norm": 13.713070262842116,
      "learning_rate": 9.541657919196049e-06,
      "loss": 1.9069,
      "step": 580
    },
    {
      "epoch": 1.1479318734793187,
      "grad_norm": 17.789458089215415,
      "learning_rate": 9.202712757477145e-06,
      "loss": 1.8461,
      "step": 590
    },
    {
      "epoch": 1.1673965936739659,
      "grad_norm": 19.610482780070903,
      "learning_rate": 8.864687175400045e-06,
      "loss": 1.9158,
      "step": 600
    },
    {
      "epoch": 1.186861313868613,
      "grad_norm": 12.010210734055757,
      "learning_rate": 8.527971046810845e-06,
      "loss": 1.8679,
      "step": 610
    },
    {
      "epoch": 1.2063260340632604,
      "grad_norm": 22.335816412732463,
      "learning_rate": 8.192952735250815e-06,
      "loss": 1.9524,
      "step": 620
    },
    {
      "epoch": 1.2257907542579076,
      "grad_norm": 10.686579448514223,
      "learning_rate": 7.86001864602348e-06,
      "loss": 1.8423,
      "step": 630
    },
    {
      "epoch": 1.2452554744525548,
      "grad_norm": 51.109105653823455,
      "learning_rate": 7.529552780520292e-06,
      "loss": 1.8348,
      "step": 640
    },
    {
      "epoch": 1.264720194647202,
      "grad_norm": 33.402753904185154,
      "learning_rate": 7.201936293318946e-06,
      "loss": 1.8627,
      "step": 650
    },
    {
      "epoch": 1.2841849148418492,
      "grad_norm": 32.98747845535257,
      "learning_rate": 6.877547052565177e-06,
      "loss": 1.9406,
      "step": 660
    },
    {
      "epoch": 1.3036496350364963,
      "grad_norm": 21.46620027744224,
      "learning_rate": 6.556759204145069e-06,
      "loss": 1.9661,
      "step": 670
    },
    {
      "epoch": 1.3231143552311435,
      "grad_norm": 17.85715273280427,
      "learning_rate": 6.239942740150571e-06,
      "loss": 1.941,
      "step": 680
    },
    {
      "epoch": 1.3425790754257907,
      "grad_norm": 16.397413728215973,
      "learning_rate": 5.927463072135936e-06,
      "loss": 1.8711,
      "step": 690
    },
    {
      "epoch": 1.3620437956204379,
      "grad_norm": 36.03813133441638,
      "learning_rate": 5.619680609657294e-06,
      "loss": 1.9032,
      "step": 700
    },
    {
      "epoch": 1.381508515815085,
      "grad_norm": 22.89930004578536,
      "learning_rate": 5.316950344581439e-06,
      "loss": 1.8234,
      "step": 710
    },
    {
      "epoch": 1.4009732360097322,
      "grad_norm": 10.308090714098702,
      "learning_rate": 5.019621441643336e-06,
      "loss": 1.8646,
      "step": 720
    },
    {
      "epoch": 1.4204379562043796,
      "grad_norm": 28.844231344171455,
      "learning_rate": 4.728036835724512e-06,
      "loss": 1.8324,
      "step": 730
    },
    {
      "epoch": 1.4399026763990268,
      "grad_norm": 21.377632574293145,
      "learning_rate": 4.442532836316909e-06,
      "loss": 1.862,
      "step": 740
    },
    {
      "epoch": 1.459367396593674,
      "grad_norm": 17.303686682340725,
      "learning_rate": 4.163438739628359e-06,
      "loss": 1.9171,
      "step": 750
    },
    {
      "epoch": 1.4788321167883212,
      "grad_norm": 37.72983556916591,
      "learning_rate": 3.891076448777046e-06,
      "loss": 1.87,
      "step": 760
    },
    {
      "epoch": 1.4982968369829683,
      "grad_norm": 14.826567789437357,
      "learning_rate": 3.625760102513103e-06,
      "loss": 1.8535,
      "step": 770
    },
    {
      "epoch": 1.5177615571776155,
      "grad_norm": 18.51444546988277,
      "learning_rate": 3.367795712895483e-06,
      "loss": 1.8713,
      "step": 780
    },
    {
      "epoch": 1.537226277372263,
      "grad_norm": 5.383086567424804,
      "learning_rate": 3.117480812342054e-06,
      "loss": 1.7639,
      "step": 790
    },
    {
      "epoch": 1.55669099756691,
      "grad_norm": 26.01516191764228,
      "learning_rate": 2.8751041104599818e-06,
      "loss": 1.757,
      "step": 800
    },
    {
      "epoch": 1.5761557177615573,
      "grad_norm": 25.971694889888745,
      "learning_rate": 2.6409451610522287e-06,
      "loss": 1.8712,
      "step": 810
    },
    {
      "epoch": 1.5956204379562045,
      "grad_norm": 16.826359696774247,
      "learning_rate": 2.4152740396842044e-06,
      "loss": 1.8375,
      "step": 820
    },
    {
      "epoch": 1.6150851581508516,
      "grad_norm": 20.178970467961914,
      "learning_rate": 2.1983510321825053e-06,
      "loss": 1.7858,
      "step": 830
    },
    {
      "epoch": 1.6345498783454988,
      "grad_norm": 17.10427518401472,
      "learning_rate": 1.9904263344249743e-06,
      "loss": 1.7688,
      "step": 840
    },
    {
      "epoch": 1.654014598540146,
      "grad_norm": 26.71032233185063,
      "learning_rate": 1.7917397637683799e-06,
      "loss": 1.7905,
      "step": 850
    },
    {
      "epoch": 1.6734793187347932,
      "grad_norm": 14.410090356585053,
      "learning_rate": 1.602520482446519e-06,
      "loss": 1.8363,
      "step": 860
    },
    {
      "epoch": 1.6929440389294403,
      "grad_norm": 20.353833687299634,
      "learning_rate": 1.4229867332577962e-06,
      "loss": 1.8553,
      "step": 870
    },
    {
      "epoch": 1.7124087591240875,
      "grad_norm": 19.078090551260555,
      "learning_rate": 1.2533455878471158e-06,
      "loss": 1.8013,
      "step": 880
    },
    {
      "epoch": 1.7318734793187347,
      "grad_norm": 15.961285499721651,
      "learning_rate": 1.0937927078724242e-06,
      "loss": 1.7987,
      "step": 890
    },
    {
      "epoch": 1.7513381995133819,
      "grad_norm": 23.255794765895036,
      "learning_rate": 9.445121193313678e-07,
      "loss": 1.7438,
      "step": 900
    },
    {
      "epoch": 1.770802919708029,
      "grad_norm": 15.026924673840433,
      "learning_rate": 8.056760003083519e-07,
      "loss": 1.8305,
      "step": 910
    },
    {
      "epoch": 1.7902676399026762,
      "grad_norm": 33.518276552309146,
      "learning_rate": 6.774444823868153e-07,
      "loss": 1.8236,
      "step": 920
    },
    {
      "epoch": 1.8097323600973236,
      "grad_norm": 19.766136489150675,
      "learning_rate": 5.59965465955763e-07,
      "loss": 1.8234,
      "step": 930
    },
    {
      "epoch": 1.8291970802919708,
      "grad_norm": 33.636630644284125,
      "learning_rate": 4.533744496235859e-07,
      "loss": 1.868,
      "step": 940
    },
    {
      "epoch": 1.848661800486618,
      "grad_norm": 11.42056049503142,
      "learning_rate": 3.577943739359102e-07,
      "loss": 1.7457,
      "step": 950
    },
    {
      "epoch": 1.8681265206812652,
      "grad_norm": 15.746160617813796,
      "learning_rate": 2.7333547957774545e-07,
      "loss": 1.8388,
      "step": 960
    },
    {
      "epoch": 1.8875912408759126,
      "grad_norm": 14.925251495002405,
      "learning_rate": 2.0009518022346075e-07,
      "loss": 1.8665,
      "step": 970
    },
    {
      "epoch": 1.9070559610705597,
      "grad_norm": 16.423759367972707,
      "learning_rate": 1.3815795018125133e-07,
      "loss": 1.8027,
      "step": 980
    },
    {
      "epoch": 1.926520681265207,
      "grad_norm": 7.929471875775863,
      "learning_rate": 8.759522696168865e-08,
      "loss": 1.7531,
      "step": 990
    },
    {
      "epoch": 1.945985401459854,
      "grad_norm": 6.194804501228712,
      "learning_rate": 4.846532888272304e-08,
      "loss": 1.8579,
      "step": 1000
    },
    {
      "epoch": 1.9654501216545013,
      "grad_norm": 14.714124220198908,
      "learning_rate": 2.081338780617337e-08,
      "loss": 1.8148,
      "step": 1010
    },
    {
      "epoch": 1.9849148418491485,
      "grad_norm": 15.437155062866172,
      "learning_rate": 4.671297083285176e-09,
      "loss": 1.8327,
      "step": 1020
    }
  ],
  "logging_steps": 10,
  "max_steps": 1028,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 225241411682304.0,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}