{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 2.0, |
|
"eval_steps": 500, |
|
"global_step": 3492, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0057273768613974796, |
|
"grad_norm": 16.379655838012695, |
|
"learning_rate": 2.9999392970762296e-06, |
|
"loss": 1.5958, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.011454753722794959, |
|
"grad_norm": 13.56667423248291, |
|
"learning_rate": 2.999757193218044e-06, |
|
"loss": 1.3928, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.01718213058419244, |
|
"grad_norm": 10.130936622619629, |
|
"learning_rate": 2.999453703164426e-06, |
|
"loss": 1.2605, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.022909507445589918, |
|
"grad_norm": 10.301068305969238, |
|
"learning_rate": 2.9990288514790206e-06, |
|
"loss": 1.3193, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.0286368843069874, |
|
"grad_norm": 8.71345043182373, |
|
"learning_rate": 2.9984826725481468e-06, |
|
"loss": 1.1754, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.03436426116838488, |
|
"grad_norm": 12.313104629516602, |
|
"learning_rate": 2.9978152105780155e-06, |
|
"loss": 1.3135, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.04009163802978236, |
|
"grad_norm": 12.800464630126953, |
|
"learning_rate": 2.9970265195911502e-06, |
|
"loss": 1.2516, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.045819014891179836, |
|
"grad_norm": 10.99730396270752, |
|
"learning_rate": 2.9961166634220168e-06, |
|
"loss": 1.203, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.05154639175257732, |
|
"grad_norm": 13.54122257232666, |
|
"learning_rate": 2.9950857157118544e-06, |
|
"loss": 1.2237, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.0572737686139748, |
|
"grad_norm": 12.151561737060547, |
|
"learning_rate": 2.9939337599027165e-06, |
|
"loss": 1.2715, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.06300114547537228, |
|
"grad_norm": 19.66217803955078, |
|
"learning_rate": 2.992660889230718e-06, |
|
"loss": 1.2601, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.06872852233676977, |
|
"grad_norm": 9.985322952270508, |
|
"learning_rate": 2.9912672067184863e-06, |
|
"loss": 1.1591, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.07445589919816724, |
|
"grad_norm": 10.843229293823242, |
|
"learning_rate": 2.9897528251668274e-06, |
|
"loss": 1.1064, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.08018327605956473, |
|
"grad_norm": 13.866814613342285, |
|
"learning_rate": 2.9881178671455905e-06, |
|
"loss": 1.1918, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.0859106529209622, |
|
"grad_norm": 13.748754501342773, |
|
"learning_rate": 2.9863624649837522e-06, |
|
"loss": 1.2726, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.09163802978235967, |
|
"grad_norm": 9.494117736816406, |
|
"learning_rate": 2.9844867607587037e-06, |
|
"loss": 1.1832, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.09736540664375716, |
|
"grad_norm": 9.150392532348633, |
|
"learning_rate": 2.982490906284753e-06, |
|
"loss": 1.1274, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.10309278350515463, |
|
"grad_norm": 11.971685409545898, |
|
"learning_rate": 2.980375063100836e-06, |
|
"loss": 1.1399, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.10882016036655212, |
|
"grad_norm": 8.963068962097168, |
|
"learning_rate": 2.9781394024574426e-06, |
|
"loss": 1.1726, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.1145475372279496, |
|
"grad_norm": 9.128679275512695, |
|
"learning_rate": 2.975784105302756e-06, |
|
"loss": 1.1972, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.12027491408934708, |
|
"grad_norm": 12.733792304992676, |
|
"learning_rate": 2.973309362268008e-06, |
|
"loss": 1.1732, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.12600229095074456, |
|
"grad_norm": 10.524323463439941, |
|
"learning_rate": 2.970715373652049e-06, |
|
"loss": 1.2613, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.13172966781214204, |
|
"grad_norm": 7.896759510040283, |
|
"learning_rate": 2.9680023494051364e-06, |
|
"loss": 1.1437, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.13745704467353953, |
|
"grad_norm": 7.099399089813232, |
|
"learning_rate": 2.9651705091119422e-06, |
|
"loss": 1.0787, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.143184421534937, |
|
"grad_norm": 9.507390975952148, |
|
"learning_rate": 2.962220081973781e-06, |
|
"loss": 1.0828, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.14891179839633448, |
|
"grad_norm": 10.189469337463379, |
|
"learning_rate": 2.959151306790057e-06, |
|
"loss": 1.1324, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.15463917525773196, |
|
"grad_norm": 10.902261734008789, |
|
"learning_rate": 2.955964431938939e-06, |
|
"loss": 1.2347, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.16036655211912945, |
|
"grad_norm": 10.873947143554688, |
|
"learning_rate": 2.952659715357254e-06, |
|
"loss": 1.1749, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.1660939289805269, |
|
"grad_norm": 8.8741455078125, |
|
"learning_rate": 2.9492374245196148e-06, |
|
"loss": 1.2453, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.1718213058419244, |
|
"grad_norm": 9.256343841552734, |
|
"learning_rate": 2.9456978364167667e-06, |
|
"loss": 1.2202, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.1775486827033219, |
|
"grad_norm": 13.664665222167969, |
|
"learning_rate": 2.9420412375331737e-06, |
|
"loss": 1.1165, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.18327605956471935, |
|
"grad_norm": 12.664402961730957, |
|
"learning_rate": 2.938267923823825e-06, |
|
"loss": 1.1933, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.18900343642611683, |
|
"grad_norm": 10.518999099731445, |
|
"learning_rate": 2.934378200690288e-06, |
|
"loss": 1.1306, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.19473081328751432, |
|
"grad_norm": 11.800782203674316, |
|
"learning_rate": 2.9303723829559846e-06, |
|
"loss": 1.1013, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.2004581901489118, |
|
"grad_norm": 13.071691513061523, |
|
"learning_rate": 2.9262507948407135e-06, |
|
"loss": 1.2304, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.20618556701030927, |
|
"grad_norm": 9.5667142868042, |
|
"learning_rate": 2.9220137699344057e-06, |
|
"loss": 1.1543, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.21191294387170675, |
|
"grad_norm": 11.27755069732666, |
|
"learning_rate": 2.917661651170129e-06, |
|
"loss": 1.1647, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.21764032073310424, |
|
"grad_norm": 12.141744613647461, |
|
"learning_rate": 2.913194790796327e-06, |
|
"loss": 1.1378, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.22336769759450173, |
|
"grad_norm": 14.818400382995605, |
|
"learning_rate": 2.9086135503483137e-06, |
|
"loss": 1.1353, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.2290950744558992, |
|
"grad_norm": 10.42490005493164, |
|
"learning_rate": 2.903918300619008e-06, |
|
"loss": 1.2457, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.23482245131729668, |
|
"grad_norm": 11.577737808227539, |
|
"learning_rate": 2.8991094216289254e-06, |
|
"loss": 1.1824, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.24054982817869416, |
|
"grad_norm": 9.93144416809082, |
|
"learning_rate": 2.8941873025954193e-06, |
|
"loss": 1.2134, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.24627720504009165, |
|
"grad_norm": 8.588950157165527, |
|
"learning_rate": 2.8891523419011774e-06, |
|
"loss": 1.2043, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.2520045819014891, |
|
"grad_norm": 9.073149681091309, |
|
"learning_rate": 2.884004947061981e-06, |
|
"loss": 1.2291, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.25773195876288657, |
|
"grad_norm": 7.688851833343506, |
|
"learning_rate": 2.8787455346937186e-06, |
|
"loss": 1.2401, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.2634593356242841, |
|
"grad_norm": 10.9544677734375, |
|
"learning_rate": 2.873374530478667e-06, |
|
"loss": 1.2467, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.26918671248568155, |
|
"grad_norm": 9.605713844299316, |
|
"learning_rate": 2.8678923691310392e-06, |
|
"loss": 1.1678, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.27491408934707906, |
|
"grad_norm": 11.065582275390625, |
|
"learning_rate": 2.8622994943617984e-06, |
|
"loss": 1.1364, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.2806414662084765, |
|
"grad_norm": 9.69209098815918, |
|
"learning_rate": 2.8565963588427457e-06, |
|
"loss": 1.0762, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.286368843069874, |
|
"grad_norm": 10.874582290649414, |
|
"learning_rate": 2.8507834241698817e-06, |
|
"loss": 1.1281, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.286368843069874, |
|
"eval_loss": 1.1230082511901855, |
|
"eval_runtime": 5.3115, |
|
"eval_samples_per_second": 67.778, |
|
"eval_steps_per_second": 16.944, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.2920962199312715, |
|
"grad_norm": 16.209104537963867, |
|
"learning_rate": 2.844861160826047e-06, |
|
"loss": 1.158, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.29782359679266895, |
|
"grad_norm": 11.166873931884766, |
|
"learning_rate": 2.8388300481428423e-06, |
|
"loss": 1.0916, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.3035509736540664, |
|
"grad_norm": 11.725543022155762, |
|
"learning_rate": 2.8326905742618318e-06, |
|
"loss": 1.2, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.30927835051546393, |
|
"grad_norm": 10.414682388305664, |
|
"learning_rate": 2.8264432360950353e-06, |
|
"loss": 1.1615, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.3150057273768614, |
|
"grad_norm": 9.764941215515137, |
|
"learning_rate": 2.8200885392847106e-06, |
|
"loss": 1.175, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.3207331042382589, |
|
"grad_norm": 16.335607528686523, |
|
"learning_rate": 2.8136269981624244e-06, |
|
"loss": 1.0646, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.32646048109965636, |
|
"grad_norm": 8.465163230895996, |
|
"learning_rate": 2.8070591357074282e-06, |
|
"loss": 1.2205, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.3321878579610538, |
|
"grad_norm": 11.596179008483887, |
|
"learning_rate": 2.800385483504327e-06, |
|
"loss": 1.2421, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.33791523482245134, |
|
"grad_norm": 11.226552963256836, |
|
"learning_rate": 2.7936065817000565e-06, |
|
"loss": 1.1326, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.3436426116838488, |
|
"grad_norm": 11.108230590820312, |
|
"learning_rate": 2.7867229789601615e-06, |
|
"loss": 1.2161, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.34936998854524626, |
|
"grad_norm": 12.34344482421875, |
|
"learning_rate": 2.7797352324243925e-06, |
|
"loss": 1.149, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.3550973654066438, |
|
"grad_norm": 7.934173583984375, |
|
"learning_rate": 2.772643907661609e-06, |
|
"loss": 1.1019, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.36082474226804123, |
|
"grad_norm": 7.608425140380859, |
|
"learning_rate": 2.765449578624007e-06, |
|
"loss": 1.0918, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.3665521191294387, |
|
"grad_norm": 11.046185493469238, |
|
"learning_rate": 2.7581528276006626e-06, |
|
"loss": 1.0596, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.3722794959908362, |
|
"grad_norm": 12.330936431884766, |
|
"learning_rate": 2.7507542451704043e-06, |
|
"loss": 1.282, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.37800687285223367, |
|
"grad_norm": 14.420117378234863, |
|
"learning_rate": 2.743254430154012e-06, |
|
"loss": 1.215, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.3837342497136312, |
|
"grad_norm": 10.501232147216797, |
|
"learning_rate": 2.7356539895657513e-06, |
|
"loss": 1.1786, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.38946162657502864, |
|
"grad_norm": 10.232950210571289, |
|
"learning_rate": 2.727953538564244e-06, |
|
"loss": 1.1201, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.3951890034364261, |
|
"grad_norm": 13.221607208251953, |
|
"learning_rate": 2.7201537004026756e-06, |
|
"loss": 1.1669, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.4009163802978236, |
|
"grad_norm": 9.83636474609375, |
|
"learning_rate": 2.7122551063783555e-06, |
|
"loss": 1.2779, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.4066437571592211, |
|
"grad_norm": 10.758344650268555, |
|
"learning_rate": 2.704258395781618e-06, |
|
"loss": 1.1545, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.41237113402061853, |
|
"grad_norm": 15.233297348022461, |
|
"learning_rate": 2.696164215844081e-06, |
|
"loss": 1.0843, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.41809851088201605, |
|
"grad_norm": 9.102065086364746, |
|
"learning_rate": 2.687973221686263e-06, |
|
"loss": 1.0776, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.4238258877434135, |
|
"grad_norm": 9.260092735290527, |
|
"learning_rate": 2.679686076264554e-06, |
|
"loss": 1.1087, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.42955326460481097, |
|
"grad_norm": 19.753076553344727, |
|
"learning_rate": 2.6713034503175643e-06, |
|
"loss": 1.1055, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.4352806414662085, |
|
"grad_norm": 7.540209770202637, |
|
"learning_rate": 2.662826022311832e-06, |
|
"loss": 1.1342, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.44100801832760594, |
|
"grad_norm": 12.86109733581543, |
|
"learning_rate": 2.654254478386912e-06, |
|
"loss": 1.186, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.44673539518900346, |
|
"grad_norm": 9.657477378845215, |
|
"learning_rate": 2.6455895122998405e-06, |
|
"loss": 1.1337, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.4524627720504009, |
|
"grad_norm": 11.697619438171387, |
|
"learning_rate": 2.636831825368986e-06, |
|
"loss": 1.1123, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.4581901489117984, |
|
"grad_norm": 9.250412940979004, |
|
"learning_rate": 2.627982126417284e-06, |
|
"loss": 1.0457, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.4639175257731959, |
|
"grad_norm": 12.023123741149902, |
|
"learning_rate": 2.619041131714869e-06, |
|
"loss": 1.0837, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.46964490263459335, |
|
"grad_norm": 8.030861854553223, |
|
"learning_rate": 2.6100095649211017e-06, |
|
"loss": 1.1827, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.4753722794959908, |
|
"grad_norm": 10.623430252075195, |
|
"learning_rate": 2.600888157025995e-06, |
|
"loss": 1.0982, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.48109965635738833, |
|
"grad_norm": 8.662152290344238, |
|
"learning_rate": 2.591677646291054e-06, |
|
"loss": 1.2227, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.4868270332187858, |
|
"grad_norm": 11.325638771057129, |
|
"learning_rate": 2.5823787781895197e-06, |
|
"loss": 0.9943, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.4925544100801833, |
|
"grad_norm": 9.88836669921875, |
|
"learning_rate": 2.5729923053460347e-06, |
|
"loss": 1.2253, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.49828178694158076, |
|
"grad_norm": 11.135693550109863, |
|
"learning_rate": 2.5635189874757255e-06, |
|
"loss": 1.2906, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.5040091638029782, |
|
"grad_norm": 8.991135597229004, |
|
"learning_rate": 2.553959591322716e-06, |
|
"loss": 1.0484, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.5097365406643757, |
|
"grad_norm": 8.861202239990234, |
|
"learning_rate": 2.544314890598067e-06, |
|
"loss": 1.0301, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.5154639175257731, |
|
"grad_norm": 13.207406997680664, |
|
"learning_rate": 2.5345856659171565e-06, |
|
"loss": 1.2542, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.5211912943871707, |
|
"grad_norm": 9.178120613098145, |
|
"learning_rate": 2.5247727047364956e-06, |
|
"loss": 1.085, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.5269186712485682, |
|
"grad_norm": 10.854491233825684, |
|
"learning_rate": 2.5148768012899977e-06, |
|
"loss": 1.2201, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.5326460481099656, |
|
"grad_norm": 10.438577651977539, |
|
"learning_rate": 2.5048987565246934e-06, |
|
"loss": 1.1778, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.5383734249713631, |
|
"grad_norm": 10.084646224975586, |
|
"learning_rate": 2.4948393780359025e-06, |
|
"loss": 1.1588, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.5441008018327605, |
|
"grad_norm": 10.475250244140625, |
|
"learning_rate": 2.484699480001873e-06, |
|
"loss": 1.138, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.5498281786941581, |
|
"grad_norm": 11.831788063049316, |
|
"learning_rate": 2.4744798831178817e-06, |
|
"loss": 1.1661, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.5555555555555556, |
|
"grad_norm": 11.067156791687012, |
|
"learning_rate": 2.464181414529809e-06, |
|
"loss": 1.047, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.561282932416953, |
|
"grad_norm": 12.043890953063965, |
|
"learning_rate": 2.4538049077671936e-06, |
|
"loss": 1.2494, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.5670103092783505, |
|
"grad_norm": 12.70505142211914, |
|
"learning_rate": 2.4433512026757667e-06, |
|
"loss": 1.1073, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.572737686139748, |
|
"grad_norm": 12.398516654968262, |
|
"learning_rate": 2.4328211453494805e-06, |
|
"loss": 1.183, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.572737686139748, |
|
"eval_loss": 1.1016738414764404, |
|
"eval_runtime": 5.3796, |
|
"eval_samples_per_second": 66.92, |
|
"eval_steps_per_second": 16.73, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.5784650630011455, |
|
"grad_norm": 12.058919906616211, |
|
"learning_rate": 2.4222155880620232e-06, |
|
"loss": 1.1006, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.584192439862543, |
|
"grad_norm": 9.806292533874512, |
|
"learning_rate": 2.4115353891978432e-06, |
|
"loss": 1.1667, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.5899198167239404, |
|
"grad_norm": 8.650660514831543, |
|
"learning_rate": 2.4007814131826705e-06, |
|
"loss": 1.1308, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.5956471935853379, |
|
"grad_norm": 11.571444511413574, |
|
"learning_rate": 2.3899545304135533e-06, |
|
"loss": 1.2669, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.6013745704467354, |
|
"grad_norm": 10.114933013916016, |
|
"learning_rate": 2.3790556171884107e-06, |
|
"loss": 1.1049, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.6071019473081328, |
|
"grad_norm": 11.771845817565918, |
|
"learning_rate": 2.3680855556351075e-06, |
|
"loss": 1.1428, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.6128293241695304, |
|
"grad_norm": 10.653964042663574, |
|
"learning_rate": 2.357045233640057e-06, |
|
"loss": 1.1645, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.6185567010309279, |
|
"grad_norm": 7.707538604736328, |
|
"learning_rate": 2.34593554477636e-06, |
|
"loss": 1.0534, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.6242840778923253, |
|
"grad_norm": 13.67249870300293, |
|
"learning_rate": 2.3347573882314764e-06, |
|
"loss": 1.1389, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.6300114547537228, |
|
"grad_norm": 11.033611297607422, |
|
"learning_rate": 2.323511668734453e-06, |
|
"loss": 1.2522, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.6357388316151202, |
|
"grad_norm": 10.89904499053955, |
|
"learning_rate": 2.312199296482695e-06, |
|
"loss": 1.2145, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.6414662084765178, |
|
"grad_norm": 15.723979949951172, |
|
"learning_rate": 2.3008211870682958e-06, |
|
"loss": 1.1635, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.6471935853379153, |
|
"grad_norm": 9.854010581970215, |
|
"learning_rate": 2.2893782614039325e-06, |
|
"loss": 1.1548, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.6529209621993127, |
|
"grad_norm": 7.649772644042969, |
|
"learning_rate": 2.277871445648332e-06, |
|
"loss": 1.1, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.6586483390607102, |
|
"grad_norm": 10.99400806427002, |
|
"learning_rate": 2.266301671131307e-06, |
|
"loss": 1.0816, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.6643757159221076, |
|
"grad_norm": 10.330283164978027, |
|
"learning_rate": 2.2546698742783778e-06, |
|
"loss": 1.0854, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.6701030927835051, |
|
"grad_norm": 10.694668769836426, |
|
"learning_rate": 2.2429769965349817e-06, |
|
"loss": 1.1252, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.6758304696449027, |
|
"grad_norm": 8.67434310913086, |
|
"learning_rate": 2.231223984290273e-06, |
|
"loss": 1.2776, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.6815578465063001, |
|
"grad_norm": 16.908462524414062, |
|
"learning_rate": 2.219411788800528e-06, |
|
"loss": 1.1514, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.6872852233676976, |
|
"grad_norm": 9.732044219970703, |
|
"learning_rate": 2.2075413661121492e-06, |
|
"loss": 1.1616, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.693012600229095, |
|
"grad_norm": 12.07868766784668, |
|
"learning_rate": 2.1956136769842885e-06, |
|
"loss": 1.0862, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.6987399770904925, |
|
"grad_norm": 15.745189666748047, |
|
"learning_rate": 2.1836296868110836e-06, |
|
"loss": 1.172, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.7044673539518901, |
|
"grad_norm": 7.780303478240967, |
|
"learning_rate": 2.1715903655435243e-06, |
|
"loss": 1.1839, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.7101947308132875, |
|
"grad_norm": 10.862656593322754, |
|
"learning_rate": 2.1594966876109456e-06, |
|
"loss": 1.2164, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.715922107674685, |
|
"grad_norm": 11.16341495513916, |
|
"learning_rate": 2.1473496318421603e-06, |
|
"loss": 1.1294, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.7216494845360825, |
|
"grad_norm": 6.848244667053223, |
|
"learning_rate": 2.135150181386236e-06, |
|
"loss": 1.1519, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.7273768613974799, |
|
"grad_norm": 11.13603401184082, |
|
"learning_rate": 2.122899323632919e-06, |
|
"loss": 1.1782, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.7331042382588774, |
|
"grad_norm": 7.1757402420043945, |
|
"learning_rate": 2.1105980501327234e-06, |
|
"loss": 1.0794, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.738831615120275, |
|
"grad_norm": 8.15458869934082, |
|
"learning_rate": 2.0982473565166727e-06, |
|
"loss": 1.0031, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.7445589919816724, |
|
"grad_norm": 11.527698516845703, |
|
"learning_rate": 2.0858482424157164e-06, |
|
"loss": 1.1078, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.7502863688430699, |
|
"grad_norm": 7.272953510284424, |
|
"learning_rate": 2.073401711379827e-06, |
|
"loss": 1.2212, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.7560137457044673, |
|
"grad_norm": 8.701218605041504, |
|
"learning_rate": 2.060908770796769e-06, |
|
"loss": 1.1787, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.7617411225658648, |
|
"grad_norm": 8.603687286376953, |
|
"learning_rate": 2.0483704318105704e-06, |
|
"loss": 1.0969, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.7674684994272624, |
|
"grad_norm": 8.430140495300293, |
|
"learning_rate": 2.0357877092396775e-06, |
|
"loss": 1.1568, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.7731958762886598, |
|
"grad_norm": 10.301761627197266, |
|
"learning_rate": 2.023161621494823e-06, |
|
"loss": 1.1787, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.7789232531500573, |
|
"grad_norm": 10.218490600585938, |
|
"learning_rate": 2.0104931904965965e-06, |
|
"loss": 1.0406, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.7846506300114547, |
|
"grad_norm": 11.949338912963867, |
|
"learning_rate": 1.9977834415927326e-06, |
|
"loss": 1.1459, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.7903780068728522, |
|
"grad_norm": 9.224528312683105, |
|
"learning_rate": 1.985033403475123e-06, |
|
"loss": 1.1283, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.7961053837342497, |
|
"grad_norm": 9.380345344543457, |
|
"learning_rate": 1.9722441080965565e-06, |
|
"loss": 1.0648, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.8018327605956472, |
|
"grad_norm": 12.056816101074219, |
|
"learning_rate": 1.9594165905871968e-06, |
|
"loss": 1.1021, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.8075601374570447, |
|
"grad_norm": 11.585986137390137, |
|
"learning_rate": 1.946551889170801e-06, |
|
"loss": 1.0863, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.8132875143184422, |
|
"grad_norm": 10.934039115905762, |
|
"learning_rate": 1.9336510450806875e-06, |
|
"loss": 1.1676, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.8190148911798396, |
|
"grad_norm": 8.694523811340332, |
|
"learning_rate": 1.920715102475464e-06, |
|
"loss": 1.033, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.8247422680412371, |
|
"grad_norm": 9.625450134277344, |
|
"learning_rate": 1.9077451083545143e-06, |
|
"loss": 1.1275, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.8304696449026346, |
|
"grad_norm": 7.567831516265869, |
|
"learning_rate": 1.8947421124732576e-06, |
|
"loss": 1.2054, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.8361970217640321, |
|
"grad_norm": 10.554939270019531, |
|
"learning_rate": 1.8817071672581839e-06, |
|
"loss": 1.1276, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.8419243986254296, |
|
"grad_norm": 10.357826232910156, |
|
"learning_rate": 1.868641327721675e-06, |
|
"loss": 1.2319, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.847651775486827, |
|
"grad_norm": 16.142457962036133, |
|
"learning_rate": 1.8555456513766118e-06, |
|
"loss": 1.1066, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.8533791523482245, |
|
"grad_norm": 9.302567481994629, |
|
"learning_rate": 1.8424211981507858e-06, |
|
"loss": 0.9964, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.8591065292096219, |
|
"grad_norm": 13.468149185180664, |
|
"learning_rate": 1.8292690303011076e-06, |
|
"loss": 1.1009, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.8591065292096219, |
|
"eval_loss": 1.086885690689087, |
|
"eval_runtime": 5.2843, |
|
"eval_samples_per_second": 68.127, |
|
"eval_steps_per_second": 17.032, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.8648339060710195, |
|
"grad_norm": 9.433229446411133, |
|
"learning_rate": 1.816090212327634e-06, |
|
"loss": 1.0945, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.870561282932417, |
|
"grad_norm": 12.731361389160156, |
|
"learning_rate": 1.8028858108874094e-06, |
|
"loss": 1.0626, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.8762886597938144, |
|
"grad_norm": 10.284618377685547, |
|
"learning_rate": 1.7896568947081322e-06, |
|
"loss": 1.1405, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.8820160366552119, |
|
"grad_norm": 11.09489631652832, |
|
"learning_rate": 1.776404534501656e-06, |
|
"loss": 1.1558, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.8877434135166093, |
|
"grad_norm": 11.1835298538208, |
|
"learning_rate": 1.7631298028773296e-06, |
|
"loss": 1.2162, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.8934707903780069, |
|
"grad_norm": 8.714800834655762, |
|
"learning_rate": 1.7498337742551817e-06, |
|
"loss": 1.0309, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.8991981672394044, |
|
"grad_norm": 13.366388320922852, |
|
"learning_rate": 1.7365175247789623e-06, |
|
"loss": 1.0123, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.9049255441008018, |
|
"grad_norm": 8.921965599060059, |
|
"learning_rate": 1.7231821322290399e-06, |
|
"loss": 1.1375, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.9106529209621993, |
|
"grad_norm": 11.560654640197754, |
|
"learning_rate": 1.7098286759351717e-06, |
|
"loss": 1.1892, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.9163802978235968, |
|
"grad_norm": 7.3394670486450195, |
|
"learning_rate": 1.6964582366891427e-06, |
|
"loss": 1.2526, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.9221076746849943, |
|
"grad_norm": 8.516718864440918, |
|
"learning_rate": 1.6830718966572928e-06, |
|
"loss": 1.0736, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.9278350515463918, |
|
"grad_norm": 10.884456634521484, |
|
"learning_rate": 1.6696707392929268e-06, |
|
"loss": 1.1471, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.9335624284077892, |
|
"grad_norm": 11.866485595703125, |
|
"learning_rate": 1.6562558492486226e-06, |
|
"loss": 1.0814, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 0.9392898052691867, |
|
"grad_norm": 10.002748489379883, |
|
"learning_rate": 1.6428283122884437e-06, |
|
"loss": 1.0393, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 0.9450171821305842, |
|
"grad_norm": 10.888050079345703, |
|
"learning_rate": 1.6293892152000613e-06, |
|
"loss": 1.1509, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.9507445589919816, |
|
"grad_norm": 10.640623092651367, |
|
"learning_rate": 1.6159396457067895e-06, |
|
"loss": 1.2364, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 0.9564719358533792, |
|
"grad_norm": 10.190733909606934, |
|
"learning_rate": 1.60248069237955e-06, |
|
"loss": 1.203, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 0.9621993127147767, |
|
"grad_norm": 9.43203353881836, |
|
"learning_rate": 1.5890134445487679e-06, |
|
"loss": 1.1801, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.9679266895761741, |
|
"grad_norm": 11.540404319763184, |
|
"learning_rate": 1.5755389922162003e-06, |
|
"loss": 1.097, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 0.9736540664375716, |
|
"grad_norm": 10.006051063537598, |
|
"learning_rate": 1.5620584259667175e-06, |
|
"loss": 1.1071, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.979381443298969, |
|
"grad_norm": 11.146818161010742, |
|
"learning_rate": 1.548572836880034e-06, |
|
"loss": 1.297, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.9851088201603666, |
|
"grad_norm": 15.32827091217041, |
|
"learning_rate": 1.5350833164423974e-06, |
|
"loss": 1.1533, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 0.9908361970217641, |
|
"grad_norm": 13.463982582092285, |
|
"learning_rate": 1.5215909564582499e-06, |
|
"loss": 1.1084, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 0.9965635738831615, |
|
"grad_norm": 10.163274765014648, |
|
"learning_rate": 1.5080968489618567e-06, |
|
"loss": 1.0932, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 1.002290950744559, |
|
"grad_norm": 8.775249481201172, |
|
"learning_rate": 1.494602086128923e-06, |
|
"loss": 0.9301, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 1.0080183276059564, |
|
"grad_norm": 7.848943710327148, |
|
"learning_rate": 1.4811077601881953e-06, |
|
"loss": 0.6965, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 1.013745704467354, |
|
"grad_norm": 13.538870811462402, |
|
"learning_rate": 1.4676149633330583e-06, |
|
"loss": 0.6828, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 1.0194730813287514, |
|
"grad_norm": 10.99616527557373, |
|
"learning_rate": 1.4541247876331374e-06, |
|
"loss": 0.6853, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 1.0252004581901488, |
|
"grad_norm": 8.934946060180664, |
|
"learning_rate": 1.440638324945909e-06, |
|
"loss": 0.759, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 1.0309278350515463, |
|
"grad_norm": 8.413115501403809, |
|
"learning_rate": 1.4271566668283281e-06, |
|
"loss": 0.8167, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 1.036655211912944, |
|
"grad_norm": 12.693419456481934, |
|
"learning_rate": 1.413680904448481e-06, |
|
"loss": 0.7765, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 1.0423825887743414, |
|
"grad_norm": 18.639657974243164, |
|
"learning_rate": 1.4002121284972705e-06, |
|
"loss": 0.6784, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 1.0481099656357389, |
|
"grad_norm": 9.834324836730957, |
|
"learning_rate": 1.3867514291001363e-06, |
|
"loss": 0.6933, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 1.0538373424971363, |
|
"grad_norm": 10.812657356262207, |
|
"learning_rate": 1.373299895728824e-06, |
|
"loss": 0.7267, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 1.0595647193585338, |
|
"grad_norm": 10.644402503967285, |
|
"learning_rate": 1.3598586171132068e-06, |
|
"loss": 0.7609, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 1.0652920962199313, |
|
"grad_norm": 10.04351806640625, |
|
"learning_rate": 1.3464286811531663e-06, |
|
"loss": 0.6473, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 1.0710194730813287, |
|
"grad_norm": 8.527778625488281, |
|
"learning_rate": 1.3330111748305409e-06, |
|
"loss": 0.6419, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 1.0767468499427262, |
|
"grad_norm": 10.766478538513184, |
|
"learning_rate": 1.3196071841211486e-06, |
|
"loss": 0.707, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 1.0824742268041236, |
|
"grad_norm": 9.123445510864258, |
|
"learning_rate": 1.3062177939068908e-06, |
|
"loss": 0.7146, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 1.088201603665521, |
|
"grad_norm": 13.148359298706055, |
|
"learning_rate": 1.292844087887946e-06, |
|
"loss": 0.7023, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 1.0939289805269188, |
|
"grad_norm": 9.834714889526367, |
|
"learning_rate": 1.279487148495056e-06, |
|
"loss": 0.6432, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 1.0996563573883162, |
|
"grad_norm": 8.148127555847168, |
|
"learning_rate": 1.2661480568019203e-06, |
|
"loss": 0.6387, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 1.1053837342497137, |
|
"grad_norm": 14.112127304077148, |
|
"learning_rate": 1.2528278924376933e-06, |
|
"loss": 0.7409, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 1.1111111111111112, |
|
"grad_norm": 12.995244979858398, |
|
"learning_rate": 1.2395277334996047e-06, |
|
"loss": 0.7186, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 1.1168384879725086, |
|
"grad_norm": 11.058789253234863, |
|
"learning_rate": 1.2262486564656995e-06, |
|
"loss": 0.7758, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 1.122565864833906, |
|
"grad_norm": 11.76444149017334, |
|
"learning_rate": 1.2129917361077125e-06, |
|
"loss": 0.6512, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 1.1282932416953035, |
|
"grad_norm": 12.445648193359375, |
|
"learning_rate": 1.1997580454040784e-06, |
|
"loss": 0.6401, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 1.134020618556701, |
|
"grad_norm": 12.5171537399292, |
|
"learning_rate": 1.1865486554530874e-06, |
|
"loss": 0.7075, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 1.1397479954180985, |
|
"grad_norm": 11.082114219665527, |
|
"learning_rate": 1.1733646353861951e-06, |
|
"loss": 0.7184, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 1.145475372279496, |
|
"grad_norm": 8.820939064025879, |
|
"learning_rate": 1.1602070522814882e-06, |
|
"loss": 0.6901, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 1.145475372279496, |
|
"eval_loss": 1.1485834121704102, |
|
"eval_runtime": 5.46, |
|
"eval_samples_per_second": 65.934, |
|
"eval_steps_per_second": 16.484, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 1.1512027491408934, |
|
"grad_norm": 12.333765983581543, |
|
"learning_rate": 1.1470769710773189e-06, |
|
"loss": 0.7145, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 1.1569301260022908, |
|
"grad_norm": 9.632255554199219, |
|
"learning_rate": 1.1339754544861111e-06, |
|
"loss": 0.6381, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 1.1626575028636885, |
|
"grad_norm": 10.103231430053711, |
|
"learning_rate": 1.1209035629083498e-06, |
|
"loss": 0.6353, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 1.168384879725086, |
|
"grad_norm": 11.000112533569336, |
|
"learning_rate": 1.1078623543467518e-06, |
|
"loss": 0.676, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 1.1741122565864834, |
|
"grad_norm": 11.119033813476562, |
|
"learning_rate": 1.0948528843206357e-06, |
|
"loss": 0.6746, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 1.179839633447881, |
|
"grad_norm": 11.437407493591309, |
|
"learning_rate": 1.081876205780492e-06, |
|
"loss": 0.6734, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 1.1855670103092784, |
|
"grad_norm": 8.372086524963379, |
|
"learning_rate": 1.068933369022758e-06, |
|
"loss": 0.7758, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 1.1912943871706758, |
|
"grad_norm": 7.501805305480957, |
|
"learning_rate": 1.0560254216048105e-06, |
|
"loss": 0.8655, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 1.1970217640320733, |
|
"grad_norm": 11.55257797241211, |
|
"learning_rate": 1.0431534082601814e-06, |
|
"loss": 0.7701, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 1.2027491408934707, |
|
"grad_norm": 13.704492568969727, |
|
"learning_rate": 1.0303183708139966e-06, |
|
"loss": 0.7325, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 1.2084765177548682, |
|
"grad_norm": 9.895234107971191, |
|
"learning_rate": 1.0175213480986553e-06, |
|
"loss": 0.7678, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 1.2142038946162657, |
|
"grad_norm": 11.093770027160645, |
|
"learning_rate": 1.0047633758697507e-06, |
|
"loss": 0.7074, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 1.2199312714776633, |
|
"grad_norm": 7.835501194000244, |
|
"learning_rate": 9.920454867222362e-07, |
|
"loss": 0.6939, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 1.2256586483390608, |
|
"grad_norm": 12.765395164489746, |
|
"learning_rate": 9.793687100068537e-07, |
|
"loss": 0.6601, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 1.2313860252004583, |
|
"grad_norm": 8.465944290161133, |
|
"learning_rate": 9.667340717468165e-07, |
|
"loss": 0.6601, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 1.2371134020618557, |
|
"grad_norm": 8.568673133850098, |
|
"learning_rate": 9.54142594554769e-07, |
|
"loss": 0.6536, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 1.2428407789232532, |
|
"grad_norm": 11.03416633605957, |
|
"learning_rate": 9.415952975500177e-07, |
|
"loss": 0.7267, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 1.2485681557846506, |
|
"grad_norm": 12.343921661376953, |
|
"learning_rate": 9.290931962760477e-07, |
|
"loss": 0.7921, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 1.254295532646048, |
|
"grad_norm": 12.319411277770996, |
|
"learning_rate": 9.166373026183269e-07, |
|
"loss": 0.6843, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 1.2600229095074456, |
|
"grad_norm": 11.100428581237793, |
|
"learning_rate": 9.042286247224054e-07, |
|
"loss": 0.7535, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 1.265750286368843, |
|
"grad_norm": 10.030117988586426, |
|
"learning_rate": 8.918681669123218e-07, |
|
"loss": 0.6571, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 1.2714776632302405, |
|
"grad_norm": 10.269767761230469, |
|
"learning_rate": 8.795569296093133e-07, |
|
"loss": 0.7255, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 1.277205040091638, |
|
"grad_norm": 11.503005027770996, |
|
"learning_rate": 8.67295909250846e-07, |
|
"loss": 0.7021, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 1.2829324169530354, |
|
"grad_norm": 19.0956974029541, |
|
"learning_rate": 8.55086098209965e-07, |
|
"loss": 0.644, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 1.2886597938144329, |
|
"grad_norm": 11.60425090789795, |
|
"learning_rate": 8.429284847149765e-07, |
|
"loss": 0.7289, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 1.2943871706758305, |
|
"grad_norm": 11.15966510772705, |
|
"learning_rate": 8.308240527694602e-07, |
|
"loss": 0.6789, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 1.300114547537228, |
|
"grad_norm": 11.775574684143066, |
|
"learning_rate": 8.187737820726293e-07, |
|
"loss": 0.7226, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 1.3058419243986255, |
|
"grad_norm": 10.303918838500977, |
|
"learning_rate": 8.067786479400346e-07, |
|
"loss": 0.6405, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 1.311569301260023, |
|
"grad_norm": 11.979586601257324, |
|
"learning_rate": 7.948396212246273e-07, |
|
"loss": 0.6847, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 1.3172966781214204, |
|
"grad_norm": 8.767427444458008, |
|
"learning_rate": 7.829576682381782e-07, |
|
"loss": 0.681, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 1.3230240549828178, |
|
"grad_norm": 12.378206253051758, |
|
"learning_rate": 7.711337506730695e-07, |
|
"loss": 0.7108, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 1.3287514318442153, |
|
"grad_norm": 9.478968620300293, |
|
"learning_rate": 7.593688255244566e-07, |
|
"loss": 0.6728, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 1.3344788087056128, |
|
"grad_norm": 13.136493682861328, |
|
"learning_rate": 7.476638450128123e-07, |
|
"loss": 0.6783, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 1.3402061855670104, |
|
"grad_norm": 12.096550941467285, |
|
"learning_rate": 7.360197565068561e-07, |
|
"loss": 0.6398, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 1.345933562428408, |
|
"grad_norm": 9.796936988830566, |
|
"learning_rate": 7.244375024468765e-07, |
|
"loss": 0.659, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 1.3516609392898054, |
|
"grad_norm": 12.897764205932617, |
|
"learning_rate": 7.129180202684554e-07, |
|
"loss": 0.6226, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 1.3573883161512028, |
|
"grad_norm": 13.480414390563965, |
|
"learning_rate": 7.0146224232659e-07, |
|
"loss": 0.7311, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 1.3631156930126003, |
|
"grad_norm": 9.178197860717773, |
|
"learning_rate": 6.900710958202341e-07, |
|
"loss": 0.7167, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 1.3688430698739977, |
|
"grad_norm": 7.676974773406982, |
|
"learning_rate": 6.787455027172515e-07, |
|
"loss": 0.7085, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 1.3745704467353952, |
|
"grad_norm": 19.0358829498291, |
|
"learning_rate": 6.674863796797954e-07, |
|
"loss": 0.6221, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 1.3802978235967927, |
|
"grad_norm": 10.022867202758789, |
|
"learning_rate": 6.562946379901158e-07, |
|
"loss": 0.7337, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 1.38602520045819, |
|
"grad_norm": 11.5038480758667, |
|
"learning_rate": 6.451711834768026e-07, |
|
"loss": 0.6849, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 1.3917525773195876, |
|
"grad_norm": 10.91701889038086, |
|
"learning_rate": 6.341169164414712e-07, |
|
"loss": 0.6061, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 1.397479954180985, |
|
"grad_norm": 11.305731773376465, |
|
"learning_rate": 6.23132731585894e-07, |
|
"loss": 0.6072, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 1.4032073310423825, |
|
"grad_norm": 11.2603178024292, |
|
"learning_rate": 6.122195179395854e-07, |
|
"loss": 0.7589, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 1.40893470790378, |
|
"grad_norm": 10.891215324401855, |
|
"learning_rate": 6.013781587878464e-07, |
|
"loss": 0.6253, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 1.4146620847651776, |
|
"grad_norm": 13.905660629272461, |
|
"learning_rate": 5.906095316002755e-07, |
|
"loss": 0.7618, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 1.420389461626575, |
|
"grad_norm": 8.74496078491211, |
|
"learning_rate": 5.799145079597458e-07, |
|
"loss": 0.6866, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 1.4261168384879725, |
|
"grad_norm": 10.08084487915039, |
|
"learning_rate": 5.69293953491864e-07, |
|
"loss": 0.6584, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 1.43184421534937, |
|
"grad_norm": 9.269981384277344, |
|
"learning_rate": 5.587487277949075e-07, |
|
"loss": 0.7377, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 1.43184421534937, |
|
"eval_loss": 1.1487523317337036, |
|
"eval_runtime": 5.3152, |
|
"eval_samples_per_second": 67.73, |
|
"eval_steps_per_second": 16.933, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 1.4375715922107675, |
|
"grad_norm": 8.877992630004883, |
|
"learning_rate": 5.48279684370252e-07, |
|
"loss": 0.7111, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 1.443298969072165, |
|
"grad_norm": 24.23348045349121, |
|
"learning_rate": 5.378876705532904e-07, |
|
"loss": 0.62, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 1.4490263459335624, |
|
"grad_norm": 9.67536735534668, |
|
"learning_rate": 5.275735274448529e-07, |
|
"loss": 0.7702, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 1.4547537227949598, |
|
"grad_norm": 10.30777359008789, |
|
"learning_rate": 5.173380898431305e-07, |
|
"loss": 0.6682, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 1.4604810996563573, |
|
"grad_norm": 12.1168212890625, |
|
"learning_rate": 5.071821861761074e-07, |
|
"loss": 0.6713, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 1.466208476517755, |
|
"grad_norm": 7.113183975219727, |
|
"learning_rate": 4.97106638434512e-07, |
|
"loss": 0.7582, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 1.4719358533791524, |
|
"grad_norm": 13.592216491699219, |
|
"learning_rate": 4.87112262105286e-07, |
|
"loss": 0.6917, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 1.47766323024055, |
|
"grad_norm": 10.175348281860352, |
|
"learning_rate": 4.771998661055823e-07, |
|
"loss": 0.7341, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 1.4833906071019474, |
|
"grad_norm": 10.051101684570312, |
|
"learning_rate": 4.673702527172919e-07, |
|
"loss": 0.5795, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 1.4891179839633448, |
|
"grad_norm": 9.961878776550293, |
|
"learning_rate": 4.576242175221121e-07, |
|
"loss": 0.6895, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 1.4948453608247423, |
|
"grad_norm": 10.214471817016602, |
|
"learning_rate": 4.479625493371512e-07, |
|
"loss": 0.6552, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 1.5005727376861397, |
|
"grad_norm": 16.537036895751953, |
|
"learning_rate": 4.3838603015108537e-07, |
|
"loss": 0.6821, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 1.5063001145475372, |
|
"grad_norm": 8.615083694458008, |
|
"learning_rate": 4.288954350608672e-07, |
|
"loss": 0.6581, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 1.5120274914089347, |
|
"grad_norm": 12.914610862731934, |
|
"learning_rate": 4.1949153220898987e-07, |
|
"loss": 0.7688, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 1.5177548682703321, |
|
"grad_norm": 10.495301246643066, |
|
"learning_rate": 4.1017508272131794e-07, |
|
"loss": 0.6831, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 1.5234822451317296, |
|
"grad_norm": 11.30919075012207, |
|
"learning_rate": 4.009468406454818e-07, |
|
"loss": 0.796, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 1.529209621993127, |
|
"grad_norm": 9.941025733947754, |
|
"learning_rate": 3.9180755288984837e-07, |
|
"loss": 0.7486, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 1.5349369988545245, |
|
"grad_norm": 11.294404983520508, |
|
"learning_rate": 3.8275795916306827e-07, |
|
"loss": 0.6736, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 1.540664375715922, |
|
"grad_norm": 12.294686317443848, |
|
"learning_rate": 3.737987919142061e-07, |
|
"loss": 0.6431, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 1.5463917525773194, |
|
"grad_norm": 11.265865325927734, |
|
"learning_rate": 3.64930776273457e-07, |
|
"loss": 0.8467, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 1.552119129438717, |
|
"grad_norm": 7.701663970947266, |
|
"learning_rate": 3.561546299934577e-07, |
|
"loss": 0.7307, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 1.5578465063001146, |
|
"grad_norm": 14.973838806152344, |
|
"learning_rate": 3.4747106339119276e-07, |
|
"loss": 0.7451, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 1.563573883161512, |
|
"grad_norm": 23.487585067749023, |
|
"learning_rate": 3.388807792905045e-07, |
|
"loss": 0.6253, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 1.5693012600229095, |
|
"grad_norm": 10.245492935180664, |
|
"learning_rate": 3.303844729652073e-07, |
|
"loss": 0.7833, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 1.575028636884307, |
|
"grad_norm": 10.44719123840332, |
|
"learning_rate": 3.219828320828148e-07, |
|
"loss": 0.6858, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 1.5807560137457046, |
|
"grad_norm": 8.665092468261719, |
|
"learning_rate": 3.1367653664888173e-07, |
|
"loss": 0.6299, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 1.586483390607102, |
|
"grad_norm": 11.613511085510254, |
|
"learning_rate": 3.054662589519661e-07, |
|
"loss": 0.8032, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 1.5922107674684995, |
|
"grad_norm": 7.786969184875488, |
|
"learning_rate": 2.973526635092163e-07, |
|
"loss": 0.7187, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 1.597938144329897, |
|
"grad_norm": 13.719048500061035, |
|
"learning_rate": 2.8933640701258597e-07, |
|
"loss": 0.7349, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 1.6036655211912945, |
|
"grad_norm": 11.509296417236328, |
|
"learning_rate": 2.8141813827568525e-07, |
|
"loss": 0.6635, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 1.609392898052692, |
|
"grad_norm": 10.817230224609375, |
|
"learning_rate": 2.7359849818126533e-07, |
|
"loss": 0.7161, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 1.6151202749140894, |
|
"grad_norm": 13.700976371765137, |
|
"learning_rate": 2.6587811962934823e-07, |
|
"loss": 0.7205, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 1.6208476517754868, |
|
"grad_norm": 10.548551559448242, |
|
"learning_rate": 2.582576274860015e-07, |
|
"loss": 0.6782, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 1.6265750286368843, |
|
"grad_norm": 9.971668243408203, |
|
"learning_rate": 2.507376385327634e-07, |
|
"loss": 0.7375, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 1.6323024054982818, |
|
"grad_norm": 11.734025001525879, |
|
"learning_rate": 2.4331876141672204e-07, |
|
"loss": 0.6543, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 1.6380297823596792, |
|
"grad_norm": 10.10330581665039, |
|
"learning_rate": 2.360015966012537e-07, |
|
"loss": 0.7008, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 1.6437571592210767, |
|
"grad_norm": 8.163260459899902, |
|
"learning_rate": 2.2878673631742236e-07, |
|
"loss": 0.758, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 1.6494845360824741, |
|
"grad_norm": 9.96818733215332, |
|
"learning_rate": 2.2167476451604624e-07, |
|
"loss": 0.7625, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 1.6552119129438716, |
|
"grad_norm": 11.303291320800781, |
|
"learning_rate": 2.1466625682043516e-07, |
|
"loss": 0.6977, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 1.660939289805269, |
|
"grad_norm": 11.4495210647583, |
|
"learning_rate": 2.0776178047979978e-07, |
|
"loss": 0.7007, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 1.6666666666666665, |
|
"grad_norm": 13.028532981872559, |
|
"learning_rate": 2.0096189432334195e-07, |
|
"loss": 0.6004, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 1.6723940435280642, |
|
"grad_norm": 10.361539840698242, |
|
"learning_rate": 1.9426714871502292e-07, |
|
"loss": 0.7078, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 1.6781214203894617, |
|
"grad_norm": 10.60468864440918, |
|
"learning_rate": 1.8767808550901911e-07, |
|
"loss": 0.7244, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 1.6838487972508591, |
|
"grad_norm": 9.203190803527832, |
|
"learning_rate": 1.811952380058657e-07, |
|
"loss": 0.636, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 1.6895761741122566, |
|
"grad_norm": 11.939068794250488, |
|
"learning_rate": 1.7481913090929314e-07, |
|
"loss": 0.7098, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 1.695303550973654, |
|
"grad_norm": 9.831012725830078, |
|
"learning_rate": 1.6855028028375897e-07, |
|
"loss": 0.6872, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 1.7010309278350515, |
|
"grad_norm": 18.38079261779785, |
|
"learning_rate": 1.623891935126786e-07, |
|
"loss": 0.6811, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 1.7067583046964492, |
|
"grad_norm": 9.422789573669434, |
|
"learning_rate": 1.5633636925735933e-07, |
|
"loss": 0.6251, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 1.7124856815578466, |
|
"grad_norm": 9.562675476074219, |
|
"learning_rate": 1.503922974166406e-07, |
|
"loss": 0.7189, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 1.718213058419244, |
|
"grad_norm": 9.7070894241333, |
|
"learning_rate": 1.4455745908724226e-07, |
|
"loss": 0.6947, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 1.718213058419244, |
|
"eval_loss": 1.150073528289795, |
|
"eval_runtime": 5.3064, |
|
"eval_samples_per_second": 67.842, |
|
"eval_steps_per_second": 16.961, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 1.7239404352806416, |
|
"grad_norm": 12.27785873413086, |
|
"learning_rate": 1.3883232652482553e-07, |
|
"loss": 0.8112, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 1.729667812142039, |
|
"grad_norm": 12.872515678405762, |
|
"learning_rate": 1.3321736310577164e-07, |
|
"loss": 0.7575, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 1.7353951890034365, |
|
"grad_norm": 11.842426300048828, |
|
"learning_rate": 1.2771302328967577e-07, |
|
"loss": 0.7448, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 1.741122565864834, |
|
"grad_norm": 11.257376670837402, |
|
"learning_rate": 1.2231975258256462e-07, |
|
"loss": 0.6947, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 1.7468499427262314, |
|
"grad_norm": 12.237911224365234, |
|
"learning_rate": 1.1703798750083938e-07, |
|
"loss": 0.684, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 1.7525773195876289, |
|
"grad_norm": 11.303350448608398, |
|
"learning_rate": 1.1186815553594382e-07, |
|
"loss": 0.7047, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 1.7583046964490263, |
|
"grad_norm": 10.056689262390137, |
|
"learning_rate": 1.068106751197659e-07, |
|
"loss": 0.6862, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 1.7640320733104238, |
|
"grad_norm": 9.765776634216309, |
|
"learning_rate": 1.0186595559076939e-07, |
|
"loss": 0.7131, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 1.7697594501718212, |
|
"grad_norm": 10.987778663635254, |
|
"learning_rate": 9.703439716086493e-08, |
|
"loss": 0.6012, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 1.7754868270332187, |
|
"grad_norm": 14.675382614135742, |
|
"learning_rate": 9.231639088301635e-08, |
|
"loss": 0.6843, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 1.7812142038946162, |
|
"grad_norm": 15.27701187133789, |
|
"learning_rate": 8.771231861959106e-08, |
|
"loss": 0.7452, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 1.7869415807560136, |
|
"grad_norm": 12.017073631286621, |
|
"learning_rate": 8.322255301145204e-08, |
|
"loss": 0.7111, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 1.792668957617411, |
|
"grad_norm": 8.804431915283203, |
|
"learning_rate": 7.884745744779937e-08, |
|
"loss": 0.7718, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 1.7983963344788088, |
|
"grad_norm": 10.447242736816406, |
|
"learning_rate": 7.458738603675602e-08, |
|
"loss": 0.8338, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 1.8041237113402062, |
|
"grad_norm": 13.058491706848145, |
|
"learning_rate": 7.044268357670869e-08, |
|
"loss": 0.5838, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 1.8098510882016037, |
|
"grad_norm": 11.285879135131836, |
|
"learning_rate": 6.641368552840088e-08, |
|
"loss": 0.7741, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 1.8155784650630011, |
|
"grad_norm": 8.553770065307617, |
|
"learning_rate": 6.250071798778117e-08, |
|
"loss": 0.7114, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 1.8213058419243986, |
|
"grad_norm": 8.297019004821777, |
|
"learning_rate": 5.870409765960966e-08, |
|
"loss": 0.761, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 1.827033218785796, |
|
"grad_norm": 9.877187728881836, |
|
"learning_rate": 5.502413183182592e-08, |
|
"loss": 0.7156, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 1.8327605956471937, |
|
"grad_norm": 11.764557838439941, |
|
"learning_rate": 5.146111835067646e-08, |
|
"loss": 0.6779, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 1.8384879725085912, |
|
"grad_norm": 11.132621765136719, |
|
"learning_rate": 4.8015345596608805e-08, |
|
"loss": 0.7472, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 1.8442153493699887, |
|
"grad_norm": 11.750364303588867, |
|
"learning_rate": 4.468709246093133e-08, |
|
"loss": 0.6553, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 1.8499427262313861, |
|
"grad_norm": 14.244782447814941, |
|
"learning_rate": 4.147662832323884e-08, |
|
"loss": 0.8058, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 1.8556701030927836, |
|
"grad_norm": 10.789249420166016, |
|
"learning_rate": 3.838421302961098e-08, |
|
"loss": 0.7955, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 1.861397479954181, |
|
"grad_norm": 9.885890007019043, |
|
"learning_rate": 3.541009687158109e-08, |
|
"loss": 0.6453, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 1.8671248568155785, |
|
"grad_norm": 9.926796913146973, |
|
"learning_rate": 3.255452056587754e-08, |
|
"loss": 0.7055, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 1.872852233676976, |
|
"grad_norm": 12.368999481201172, |
|
"learning_rate": 2.98177152349417e-08, |
|
"loss": 0.6359, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 1.8785796105383734, |
|
"grad_norm": 10.849148750305176, |
|
"learning_rate": 2.7199902388220343e-08, |
|
"loss": 0.6735, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 1.8843069873997709, |
|
"grad_norm": 12.963729858398438, |
|
"learning_rate": 2.47012939042387e-08, |
|
"loss": 0.6878, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 1.8900343642611683, |
|
"grad_norm": 10.135659217834473, |
|
"learning_rate": 2.2322092013450313e-08, |
|
"loss": 0.657, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 1.8957617411225658, |
|
"grad_norm": 16.199338912963867, |
|
"learning_rate": 2.0062489281870156e-08, |
|
"loss": 0.8076, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 1.9014891179839633, |
|
"grad_norm": 12.090662002563477, |
|
"learning_rate": 1.7922668595487744e-08, |
|
"loss": 0.7334, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 1.9072164948453607, |
|
"grad_norm": 10.486949920654297, |
|
"learning_rate": 1.5902803145465994e-08, |
|
"loss": 0.7883, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 1.9129438717067582, |
|
"grad_norm": 8.531800270080566, |
|
"learning_rate": 1.4003056414122583e-08, |
|
"loss": 0.6773, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 1.9186712485681556, |
|
"grad_norm": 10.624215126037598, |
|
"learning_rate": 1.222358216169922e-08, |
|
"loss": 0.6701, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 1.9243986254295533, |
|
"grad_norm": 12.858296394348145, |
|
"learning_rate": 1.0564524413915422e-08, |
|
"loss": 0.6576, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 1.9301260022909508, |
|
"grad_norm": 10.357311248779297, |
|
"learning_rate": 9.026017450312673e-09, |
|
"loss": 0.5779, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 1.9358533791523482, |
|
"grad_norm": 8.941463470458984, |
|
"learning_rate": 7.608185793385458e-09, |
|
"loss": 0.7369, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 1.9415807560137457, |
|
"grad_norm": 8.527290344238281, |
|
"learning_rate": 6.311144198503316e-09, |
|
"loss": 0.6539, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 1.9473081328751431, |
|
"grad_norm": 9.527642250061035, |
|
"learning_rate": 5.134997644622108e-09, |
|
"loss": 0.6612, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 1.9530355097365406, |
|
"grad_norm": 9.823323249816895, |
|
"learning_rate": 4.079841325787981e-09, |
|
"loss": 0.7576, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 1.9587628865979383, |
|
"grad_norm": 12.560805320739746, |
|
"learning_rate": 3.145760643432527e-09, |
|
"loss": 0.6648, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 1.9644902634593358, |
|
"grad_norm": 11.428935050964355, |
|
"learning_rate": 2.33283119946015e-09, |
|
"loss": 0.7002, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 1.9702176403207332, |
|
"grad_norm": 9.424127578735352, |
|
"learning_rate": 1.6411187901297919e-09, |
|
"loss": 0.6452, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 1.9759450171821307, |
|
"grad_norm": 10.48491096496582, |
|
"learning_rate": 1.0706794007290243e-09, |
|
"loss": 0.5779, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 1.9816723940435281, |
|
"grad_norm": 9.892742156982422, |
|
"learning_rate": 6.215592010426763e-10, |
|
"loss": 0.7057, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 1.9873997709049256, |
|
"grad_norm": 10.53469181060791, |
|
"learning_rate": 2.937945416166543e-10, |
|
"loss": 0.684, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 1.993127147766323, |
|
"grad_norm": 10.819581031799316, |
|
"learning_rate": 8.741195081479747e-11, |
|
"loss": 0.6306, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 1.9988545246277205, |
|
"grad_norm": 13.76382064819336, |
|
"learning_rate": 2.428132672926964e-12, |
|
"loss": 0.6657, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"step": 3492, |
|
"total_flos": 8.351047625159475e+16, |
|
"train_loss": 0.9289274918937465, |
|
"train_runtime": 3314.9598, |
|
"train_samples_per_second": 4.212, |
|
"train_steps_per_second": 1.053 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 3492, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 2, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 8.351047625159475e+16, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |