{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 1045,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0009569377990430622,
      "grad_norm": 948.5667114257812,
      "learning_rate": 2.8571428571428573e-06,
      "loss": 46.4048,
      "step": 1
    },
    {
      "epoch": 0.004784688995215311,
      "grad_norm": 579.7166137695312,
      "learning_rate": 1.4285714285714284e-05,
      "loss": 42.2625,
      "step": 5
    },
    {
      "epoch": 0.009569377990430622,
      "grad_norm": 96.71955871582031,
      "learning_rate": 2.8571428571428567e-05,
      "loss": 24.8784,
      "step": 10
    },
    {
      "epoch": 0.014354066985645933,
      "grad_norm": 14.768364906311035,
      "learning_rate": 4.285714285714285e-05,
      "loss": 20.4002,
      "step": 15
    },
    {
      "epoch": 0.019138755980861243,
      "grad_norm": 12.786796569824219,
      "learning_rate": 5.7142857142857135e-05,
      "loss": 18.6753,
      "step": 20
    },
    {
      "epoch": 0.023923444976076555,
      "grad_norm": 46.72217559814453,
      "learning_rate": 7.142857142857142e-05,
      "loss": 15.9881,
      "step": 25
    },
    {
      "epoch": 0.028708133971291867,
      "grad_norm": 16.096284866333008,
      "learning_rate": 8.57142857142857e-05,
      "loss": 7.4899,
      "step": 30
    },
    {
      "epoch": 0.03349282296650718,
      "grad_norm": 6.774743556976318,
      "learning_rate": 9.999999999999999e-05,
      "loss": 2.28,
      "step": 35
    },
    {
      "epoch": 0.03827751196172249,
      "grad_norm": 25.32003402709961,
      "learning_rate": 0.00011428571428571427,
      "loss": 1.816,
      "step": 40
    },
    {
      "epoch": 0.0430622009569378,
      "grad_norm": 4.912317276000977,
      "learning_rate": 0.00012857142857142855,
      "loss": 1.6463,
      "step": 45
    },
    {
      "epoch": 0.04784688995215311,
      "grad_norm": 3.605388879776001,
      "learning_rate": 0.00014285714285714284,
      "loss": 1.5122,
      "step": 50
    },
    {
      "epoch": 0.05263157894736842,
      "grad_norm": 5.295993328094482,
      "learning_rate": 0.00015714285714285713,
      "loss": 1.419,
      "step": 55
    },
    {
      "epoch": 0.05741626794258373,
      "grad_norm": 9.990426063537598,
      "learning_rate": 0.0001714285714285714,
      "loss": 1.3221,
      "step": 60
    },
    {
      "epoch": 0.06220095693779904,
      "grad_norm": 28.818571090698242,
      "learning_rate": 0.00018571428571428572,
      "loss": 1.2352,
      "step": 65
    },
    {
      "epoch": 0.06698564593301436,
      "grad_norm": 5.248025417327881,
      "learning_rate": 0.00019999999999999998,
      "loss": 1.2013,
      "step": 70
    },
    {
      "epoch": 0.07177033492822966,
      "grad_norm": 4.098865509033203,
      "learning_rate": 0.00021428571428571427,
      "loss": 1.1641,
      "step": 75
    },
    {
      "epoch": 0.07655502392344497,
      "grad_norm": 1.8723022937774658,
      "learning_rate": 0.00022857142857142854,
      "loss": 1.1404,
      "step": 80
    },
    {
      "epoch": 0.08133971291866028,
      "grad_norm": 20.416040420532227,
      "learning_rate": 0.00024285714285714283,
      "loss": 1.1758,
      "step": 85
    },
    {
      "epoch": 0.0861244019138756,
      "grad_norm": 2.548046112060547,
      "learning_rate": 0.0002571428571428571,
      "loss": 1.1864,
      "step": 90
    },
    {
      "epoch": 0.09090909090909091,
      "grad_norm": 10.036776542663574,
      "learning_rate": 0.0002714285714285714,
      "loss": 1.1193,
      "step": 95
    },
    {
      "epoch": 0.09569377990430622,
      "grad_norm": 7.473161697387695,
      "learning_rate": 0.0002857142857142857,
      "loss": 1.1753,
      "step": 100
    },
    {
      "epoch": 0.10047846889952153,
      "grad_norm": 1.7201554775238037,
      "learning_rate": 0.0003,
      "loss": 1.1511,
      "step": 105
    },
    {
      "epoch": 0.10526315789473684,
      "grad_norm": 22.742843627929688,
      "learning_rate": 0.00029997905717787856,
      "loss": 1.1194,
      "step": 110
    },
    {
      "epoch": 0.11004784688995216,
      "grad_norm": 4.5689191818237305,
      "learning_rate": 0.00029991623455953814,
      "loss": 1.1401,
      "step": 115
    },
    {
      "epoch": 0.11483253588516747,
      "grad_norm": 4.430986404418945,
      "learning_rate": 0.00029981154968741785,
      "loss": 1.1312,
      "step": 120
    },
    {
      "epoch": 0.11961722488038277,
      "grad_norm": 9.391127586364746,
      "learning_rate": 0.000299665031793473,
      "loss": 1.0676,
      "step": 125
    },
    {
      "epoch": 0.12440191387559808,
      "grad_norm": 10.183479309082031,
      "learning_rate": 0.0002994767217910127,
      "loss": 1.1184,
      "step": 130
    },
    {
      "epoch": 0.1291866028708134,
      "grad_norm": 1.511924147605896,
      "learning_rate": 0.00029924667226327557,
      "loss": 1.0999,
      "step": 135
    },
    {
      "epoch": 0.1339712918660287,
      "grad_norm": 8.494267463684082,
      "learning_rate": 0.0002989749474487461,
      "loss": 1.0913,
      "step": 140
    },
    {
      "epoch": 0.13875598086124402,
      "grad_norm": 2.4349634647369385,
      "learning_rate": 0.000298661623223217,
      "loss": 1.0802,
      "step": 145
    },
    {
      "epoch": 0.14354066985645933,
      "grad_norm": 1.914871096611023,
      "learning_rate": 0.0002983067870786019,
      "loss": 1.0589,
      "step": 150
    },
    {
      "epoch": 0.14832535885167464,
      "grad_norm": 8.084524154663086,
      "learning_rate": 0.00029791053809850426,
      "loss": 1.0661,
      "step": 155
    },
    {
      "epoch": 0.15311004784688995,
      "grad_norm": 15.332684516906738,
      "learning_rate": 0.0002974729869305495,
      "loss": 1.0688,
      "step": 160
    },
    {
      "epoch": 0.15789473684210525,
      "grad_norm": 30.3629207611084,
      "learning_rate": 0.000296994255755488,
      "loss": 1.0531,
      "step": 165
    },
    {
      "epoch": 0.16267942583732056,
      "grad_norm": 6.318498611450195,
      "learning_rate": 0.0002964744782530777,
      "loss": 1.0664,
      "step": 170
    },
    {
      "epoch": 0.1674641148325359,
      "grad_norm": 7.628419399261475,
      "learning_rate": 0.0002959137995647556,
      "loss": 1.1012,
      "step": 175
    },
    {
      "epoch": 0.1722488038277512,
      "grad_norm": 6.398703098297119,
      "learning_rate": 0.0002953123762531088,
      "loss": 1.1012,
      "step": 180
    },
    {
      "epoch": 0.17703349282296652,
      "grad_norm": 1.5758147239685059,
      "learning_rate": 0.00029467037625815644,
      "loss": 1.0655,
      "step": 185
    },
    {
      "epoch": 0.18181818181818182,
      "grad_norm": 3.354144811630249,
      "learning_rate": 0.0002939879788504546,
      "loss": 1.0405,
      "step": 190
    },
    {
      "epoch": 0.18660287081339713,
      "grad_norm": 7.777205467224121,
      "learning_rate": 0.00029326537458103683,
      "loss": 1.0575,
      "step": 195
    },
    {
      "epoch": 0.19138755980861244,
      "grad_norm": 3.233966588973999,
      "learning_rate": 0.0002925027652282056,
      "loss": 1.0519,
      "step": 200
    },
    {
      "epoch": 0.19617224880382775,
      "grad_norm": 5.466601848602295,
      "learning_rate": 0.00029170036374118777,
      "loss": 1.0377,
      "step": 205
    },
    {
      "epoch": 0.20095693779904306,
      "grad_norm": 2.6383893489837646,
      "learning_rate": 0.0002908583941806717,
      "loss": 1.0435,
      "step": 210
    },
    {
      "epoch": 0.20574162679425836,
      "grad_norm": 1.6238977909088135,
      "learning_rate": 0.00028997709165624034,
      "loss": 1.0098,
      "step": 215
    },
    {
      "epoch": 0.21052631578947367,
      "grad_norm": 4.616154193878174,
      "learning_rate": 0.0002890567022607206,
      "loss": 1.007,
      "step": 220
    },
    {
      "epoch": 0.215311004784689,
      "grad_norm": 3.3014893531799316,
      "learning_rate": 0.0002880974830014643,
      "loss": 1.0311,
      "step": 225
    },
    {
      "epoch": 0.22009569377990432,
      "grad_norm": 1.7747435569763184,
      "learning_rate": 0.0002870997017285824,
      "loss": 0.9898,
      "step": 230
    },
    {
      "epoch": 0.22488038277511962,
      "grad_norm": 2.502115488052368,
      "learning_rate": 0.0002860636370601511,
      "loss": 1.0031,
      "step": 235
    },
    {
      "epoch": 0.22966507177033493,
      "grad_norm": 2.33097767829895,
      "learning_rate": 0.00028498957830441117,
      "loss": 1.0175,
      "step": 240
    },
    {
      "epoch": 0.23444976076555024,
      "grad_norm": 2.667079448699951,
      "learning_rate": 0.00028387782537898215,
      "loss": 1.0102,
      "step": 245
    },
    {
      "epoch": 0.23923444976076555,
      "grad_norm": 5.493870735168457,
      "learning_rate": 0.0002827286887271143,
      "loss": 0.9815,
      "step": 250
    },
    {
      "epoch": 0.24401913875598086,
      "grad_norm": 1.950860619544983,
      "learning_rate": 0.0002815424892310007,
      "loss": 0.9865,
      "step": 255
    },
    {
      "epoch": 0.24880382775119617,
      "grad_norm": 1.8500947952270508,
      "learning_rate": 0.00028031955812217544,
      "loss": 0.9819,
      "step": 260
    },
    {
      "epoch": 0.2535885167464115,
      "grad_norm": 4.860908508300781,
      "learning_rate": 0.0002790602368890209,
      "loss": 0.9728,
      "step": 265
    },
    {
      "epoch": 0.2583732057416268,
      "grad_norm": 2.0686490535736084,
      "learning_rate": 0.0002777648771814114,
      "loss": 1.02,
      "step": 270
    },
    {
      "epoch": 0.2631578947368421,
      "grad_norm": 3.445263624191284,
      "learning_rate": 0.00027643384071251954,
      "loss": 1.0279,
      "step": 275
    },
    {
      "epoch": 0.2679425837320574,
      "grad_norm": 1.4115147590637207,
      "learning_rate": 0.000275067499157812,
      "loss": 0.9813,
      "step": 280
    },
    {
      "epoch": 0.2727272727272727,
      "grad_norm": 1.2224127054214478,
      "learning_rate": 0.00027366623405126404,
      "loss": 0.9903,
      "step": 285
    },
    {
      "epoch": 0.27751196172248804,
      "grad_norm": 1.8986269235610962,
      "learning_rate": 0.0002722304366788205,
      "loss": 1.0094,
      "step": 290
    },
    {
      "epoch": 0.2822966507177033,
      "grad_norm": 4.729682445526123,
      "learning_rate": 0.0002707605079691344,
      "loss": 1.0046,
      "step": 295
    },
    {
      "epoch": 0.28708133971291866,
      "grad_norm": 2.3022851943969727,
      "learning_rate": 0.0002692568583816124,
      "loss": 1.0305,
      "step": 300
    },
    {
      "epoch": 0.291866028708134,
      "grad_norm": 2.677311658859253,
      "learning_rate": 0.0002677199077917991,
      "loss": 1.0308,
      "step": 305
    },
    {
      "epoch": 0.2966507177033493,
      "grad_norm": 1.3505252599716187,
      "learning_rate": 0.000266150085374132,
      "loss": 0.9911,
      "step": 310
    },
    {
      "epoch": 0.3014354066985646,
      "grad_norm": 1.311340093612671,
      "learning_rate": 0.00026454782948209983,
      "loss": 0.9897,
      "step": 315
    },
    {
      "epoch": 0.3062200956937799,
      "grad_norm": 1.3043484687805176,
      "learning_rate": 0.00026291358752583764,
      "loss": 0.9659,
      "step": 320
    },
    {
      "epoch": 0.31100478468899523,
      "grad_norm": 1.1022688150405884,
      "learning_rate": 0.0002612478158471936,
      "loss": 0.9852,
      "step": 325
    },
    {
      "epoch": 0.3157894736842105,
      "grad_norm": 1.8880759477615356,
      "learning_rate": 0.0002595509795923004,
      "loss": 0.9707,
      "step": 330
    },
    {
      "epoch": 0.32057416267942584,
      "grad_norm": 4.946929454803467,
      "learning_rate": 0.0002578235525816894,
      "loss": 1.0037,
      "step": 335
    },
    {
      "epoch": 0.3253588516746411,
      "grad_norm": 1.5874648094177246,
      "learning_rate": 0.00025606601717798207,
      "loss": 1.013,
      "step": 340
    },
    {
      "epoch": 0.33014354066985646,
      "grad_norm": 1.1188175678253174,
      "learning_rate": 0.0002542788641511963,
      "loss": 0.9814,
      "step": 345
    },
    {
      "epoch": 0.3349282296650718,
      "grad_norm": 1.3674702644348145,
      "learning_rate": 0.0002524625925417046,
      "loss": 0.9924,
      "step": 350
    },
    {
      "epoch": 0.3397129186602871,
      "grad_norm": 1.3748054504394531,
      "learning_rate": 0.0002506177095208835,
      "loss": 0.9952,
      "step": 355
    },
    {
      "epoch": 0.3444976076555024,
      "grad_norm": 1.4951032400131226,
      "learning_rate": 0.00024874473024949224,
      "loss": 0.9859,
      "step": 360
    },
    {
      "epoch": 0.3492822966507177,
      "grad_norm": 1.5338605642318726,
      "learning_rate": 0.0002468441777338203,
      "loss": 0.9986,
      "step": 365
    },
    {
      "epoch": 0.35406698564593303,
      "grad_norm": 2.292693614959717,
      "learning_rate": 0.00024491658267964474,
      "loss": 0.9815,
      "step": 370
    },
    {
      "epoch": 0.3588516746411483,
      "grad_norm": 3.6766607761383057,
      "learning_rate": 0.00024296248334403672,
      "loss": 0.9696,
      "step": 375
    },
    {
      "epoch": 0.36363636363636365,
      "grad_norm": 1.8351837396621704,
      "learning_rate": 0.00024098242538506007,
      "loss": 0.9806,
      "step": 380
    },
    {
      "epoch": 0.3684210526315789,
      "grad_norm": 3.7850887775421143,
      "learning_rate": 0.00023897696170940326,
      "loss": 0.9922,
      "step": 385
    },
    {
      "epoch": 0.37320574162679426,
      "grad_norm": 1.6278636455535889,
      "learning_rate": 0.0002369466523179866,
      "loss": 0.9524,
      "step": 390
    },
    {
      "epoch": 0.37799043062200954,
      "grad_norm": 2.634073495864868,
      "learning_rate": 0.0002348920641495893,
      "loss": 0.9721,
      "step": 395
    },
    {
      "epoch": 0.3827751196172249,
      "grad_norm": 1.0519402027130127,
      "learning_rate": 0.0002328137709225385,
      "loss": 0.9433,
      "step": 400
    },
    {
      "epoch": 0.3875598086124402,
      "grad_norm": 1.6269092559814453,
      "learning_rate": 0.00023071235297450588,
      "loss": 0.965,
      "step": 405
    },
    {
      "epoch": 0.3923444976076555,
      "grad_norm": 1.0608420372009277,
      "learning_rate": 0.0002285883971004553,
      "loss": 0.9458,
      "step": 410
    },
    {
      "epoch": 0.39712918660287083,
      "grad_norm": 1.1753120422363281,
      "learning_rate": 0.00022644249638878762,
      "loss": 0.9708,
      "step": 415
    },
    {
      "epoch": 0.4019138755980861,
      "grad_norm": 1.1925132274627686,
      "learning_rate": 0.000224275250055728,
      "loss": 0.9419,
      "step": 420
    },
    {
      "epoch": 0.40669856459330145,
      "grad_norm": 2.7282474040985107,
      "learning_rate": 0.00022208726327800255,
      "loss": 0.9776,
      "step": 425
    },
    {
      "epoch": 0.41148325358851673,
      "grad_norm": 1.2128546237945557,
      "learning_rate": 0.0002198791470238497,
      "loss": 0.9726,
      "step": 430
    },
    {
      "epoch": 0.41626794258373206,
      "grad_norm": 1.216680884361267,
      "learning_rate": 0.00021765151788241525,
      "loss": 0.965,
      "step": 435
    },
    {
      "epoch": 0.42105263157894735,
      "grad_norm": 1.2095612287521362,
      "learning_rate": 0.0002154049978915774,
      "loss": 0.9734,
      "step": 440
    },
    {
      "epoch": 0.4258373205741627,
      "grad_norm": 2.2863378524780273,
      "learning_rate": 0.00021314021436425024,
      "loss": 0.9668,
      "step": 445
    },
    {
      "epoch": 0.430622009569378,
      "grad_norm": 1.6475805044174194,
      "learning_rate": 0.00021085779971321454,
      "loss": 0.953,
      "step": 450
    },
    {
      "epoch": 0.4354066985645933,
      "grad_norm": 2.581031560897827,
      "learning_rate": 0.0002085583912745242,
      "loss": 0.9481,
      "step": 455
    },
    {
      "epoch": 0.44019138755980863,
      "grad_norm": 1.6886945962905884,
      "learning_rate": 0.00020624263112953815,
      "loss": 0.9521,
      "step": 460
    },
    {
      "epoch": 0.4449760765550239,
      "grad_norm": 1.6784495115280151,
      "learning_rate": 0.0002039111659256269,
      "loss": 0.9448,
      "step": 465
    },
    {
      "epoch": 0.44976076555023925,
      "grad_norm": 1.741603970527649,
      "learning_rate": 0.00020156464669560448,
      "loss": 0.9396,
      "step": 470
    },
    {
      "epoch": 0.45454545454545453,
      "grad_norm": 1.4577537775039673,
      "learning_rate": 0.00019920372867593533,
      "loss": 0.9381,
      "step": 475
    },
    {
      "epoch": 0.45933014354066987,
      "grad_norm": 1.2143363952636719,
      "learning_rate": 0.00019682907112376796,
      "loss": 0.942,
      "step": 480
    },
    {
      "epoch": 0.46411483253588515,
      "grad_norm": 1.133268117904663,
      "learning_rate": 0.0001944413371328451,
      "loss": 0.9307,
      "step": 485
    },
    {
      "epoch": 0.4688995215311005,
      "grad_norm": 1.3096511363983154,
      "learning_rate": 0.0001920411934483434,
      "loss": 0.9489,
      "step": 490
    },
    {
      "epoch": 0.47368421052631576,
      "grad_norm": 1.1326684951782227,
      "learning_rate": 0.00018962931028069293,
      "loss": 0.9481,
      "step": 495
    },
    {
      "epoch": 0.4784688995215311,
      "grad_norm": 1.435521125793457,
      "learning_rate": 0.00018720636111842898,
      "loss": 0.941,
      "step": 500
    },
    {
      "epoch": 0.48325358851674644,
      "grad_norm": 1.5249536037445068,
      "learning_rate": 0.00018477302254012924,
      "loss": 0.9466,
      "step": 505
    },
    {
      "epoch": 0.4880382775119617,
      "grad_norm": 1.5078822374343872,
      "learning_rate": 0.000182329974025487,
      "loss": 0.9407,
      "step": 510
    },
    {
      "epoch": 0.49282296650717705,
      "grad_norm": 1.1270455121994019,
      "learning_rate": 0.0001798778977655754,
      "loss": 0.9342,
      "step": 515
    },
    {
      "epoch": 0.49760765550239233,
      "grad_norm": 1.8892922401428223,
      "learning_rate": 0.00017741747847235353,
      "loss": 0.9215,
      "step": 520
    },
    {
      "epoch": 0.5023923444976076,
      "grad_norm": 1.1376285552978516,
      "learning_rate": 0.0001749494031874695,
      "loss": 0.941,
      "step": 525
    },
    {
      "epoch": 0.507177033492823,
      "grad_norm": 1.3463551998138428,
      "learning_rate": 0.0001724743610904122,
      "loss": 0.9494,
      "step": 530
    },
    {
      "epoch": 0.5119617224880383,
      "grad_norm": 2.9610071182250977,
      "learning_rate": 0.00016999304330606683,
      "loss": 0.9342,
      "step": 535
    },
    {
      "epoch": 0.5167464114832536,
      "grad_norm": 1.1553057432174683,
      "learning_rate": 0.00016750614271172643,
      "loss": 0.9421,
      "step": 540
    },
    {
      "epoch": 0.5215311004784688,
      "grad_norm": 1.025996446609497,
      "learning_rate": 0.00016501435374361475,
      "loss": 0.9165,
      "step": 545
    },
    {
      "epoch": 0.5263157894736842,
      "grad_norm": 1.1247053146362305,
      "learning_rate": 0.00016251837220297347,
      "loss": 0.9382,
      "step": 550
    },
    {
      "epoch": 0.5311004784688995,
      "grad_norm": 1.027742624282837,
      "learning_rate": 0.00016001889506176824,
      "loss": 0.9461,
      "step": 555
    },
    {
      "epoch": 0.5358851674641149,
      "grad_norm": 1.2423522472381592,
      "learning_rate": 0.00015751662026806788,
      "loss": 0.9411,
      "step": 560
    },
    {
      "epoch": 0.5406698564593302,
      "grad_norm": 1.5147805213928223,
      "learning_rate": 0.00015501224655115118,
      "loss": 0.9334,
      "step": 565
    },
    {
      "epoch": 0.5454545454545454,
      "grad_norm": 1.2580386400222778,
      "learning_rate": 0.00015250647322639512,
      "loss": 0.9258,
      "step": 570
    },
    {
      "epoch": 0.5502392344497608,
      "grad_norm": 1.1148898601531982,
      "learning_rate": 0.00015,
      "loss": 0.92,
      "step": 575
    },
    {
      "epoch": 0.5550239234449761,
      "grad_norm": 1.6557159423828125,
      "learning_rate": 0.00014749352677360483,
      "loss": 0.9124,
      "step": 580
    },
    {
      "epoch": 0.5598086124401914,
      "grad_norm": 1.3142054080963135,
      "learning_rate": 0.00014498775344884884,
      "loss": 0.9292,
      "step": 585
    },
    {
      "epoch": 0.5645933014354066,
      "grad_norm": 1.3109759092330933,
      "learning_rate": 0.00014248337973193212,
      "loss": 0.9075,
      "step": 590
    },
    {
      "epoch": 0.569377990430622,
      "grad_norm": 1.8519434928894043,
      "learning_rate": 0.00013998110493823176,
      "loss": 0.9334,
      "step": 595
    },
    {
      "epoch": 0.5741626794258373,
      "grad_norm": 1.467532992362976,
      "learning_rate": 0.00013748162779702653,
      "loss": 0.9038,
      "step": 600
    },
    {
      "epoch": 0.5789473684210527,
      "grad_norm": 1.2827484607696533,
      "learning_rate": 0.00013498564625638522,
      "loss": 0.9209,
      "step": 605
    },
    {
      "epoch": 0.583732057416268,
      "grad_norm": 2.0187387466430664,
      "learning_rate": 0.00013249385728827357,
      "loss": 0.909,
      "step": 610
    },
    {
      "epoch": 0.5885167464114832,
      "grad_norm": 1.0019465684890747,
      "learning_rate": 0.0001300069566939332,
      "loss": 0.9324,
      "step": 615
    },
    {
      "epoch": 0.5933014354066986,
      "grad_norm": 1.1702934503555298,
      "learning_rate": 0.00012752563890958778,
      "loss": 0.8818,
      "step": 620
    },
    {
      "epoch": 0.5980861244019139,
      "grad_norm": 1.1036059856414795,
      "learning_rate": 0.0001250505968125305,
      "loss": 0.9082,
      "step": 625
    },
    {
      "epoch": 0.6028708133971292,
      "grad_norm": 1.3649473190307617,
      "learning_rate": 0.00012258252152764647,
      "loss": 0.9077,
      "step": 630
    },
    {
      "epoch": 0.6076555023923444,
      "grad_norm": 1.2819594144821167,
      "learning_rate": 0.0001201221022344246,
      "loss": 0.899,
      "step": 635
    },
    {
      "epoch": 0.6124401913875598,
      "grad_norm": 1.1259446144104004,
      "learning_rate": 0.00011767002597451296,
      "loss": 0.8888,
      "step": 640
    },
    {
      "epoch": 0.6172248803827751,
      "grad_norm": 1.0908528566360474,
      "learning_rate": 0.00011522697745987075,
      "loss": 0.8954,
      "step": 645
    },
    {
      "epoch": 0.6220095693779905,
      "grad_norm": 1.0630327463150024,
      "learning_rate": 0.000112793638881571,
      "loss": 0.8974,
      "step": 650
    },
    {
      "epoch": 0.6267942583732058,
      "grad_norm": 1.2224838733673096,
      "learning_rate": 0.00011037068971930709,
      "loss": 0.8945,
      "step": 655
    },
    {
      "epoch": 0.631578947368421,
      "grad_norm": 1.2856760025024414,
      "learning_rate": 0.00010795880655165656,
      "loss": 0.8994,
      "step": 660
    },
    {
      "epoch": 0.6363636363636364,
      "grad_norm": 1.0856343507766724,
      "learning_rate": 0.0001055586628671549,
      "loss": 0.8883,
      "step": 665
    },
    {
      "epoch": 0.6411483253588517,
      "grad_norm": 1.063491940498352,
      "learning_rate": 0.00010317092887623205,
      "loss": 0.8866,
      "step": 670
    },
    {
      "epoch": 0.645933014354067,
      "grad_norm": 1.2210334539413452,
      "learning_rate": 0.00010079627132406462,
      "loss": 0.8902,
      "step": 675
    },
    {
      "epoch": 0.6507177033492823,
      "grad_norm": 0.9613766074180603,
      "learning_rate": 9.843535330439555e-05,
      "loss": 0.8748,
      "step": 680
    },
    {
      "epoch": 0.6555023923444976,
      "grad_norm": 0.9286556839942932,
      "learning_rate": 9.608883407437309e-05,
      "loss": 0.8824,
      "step": 685
    },
    {
      "epoch": 0.6602870813397129,
      "grad_norm": 1.13881254196167,
      "learning_rate": 9.375736887046185e-05,
      "loss": 0.8779,
      "step": 690
    },
    {
      "epoch": 0.6650717703349283,
      "grad_norm": 1.128664255142212,
      "learning_rate": 9.144160872547578e-05,
      "loss": 0.8623,
      "step": 695
    },
    {
      "epoch": 0.6698564593301436,
      "grad_norm": 1.3374972343444824,
      "learning_rate": 8.914220028678546e-05,
      "loss": 0.8831,
      "step": 700
    },
    {
      "epoch": 0.6746411483253588,
      "grad_norm": 1.5959270000457764,
      "learning_rate": 8.685978563574976e-05,
      "loss": 0.8956,
      "step": 705
    },
    {
      "epoch": 0.6794258373205742,
      "grad_norm": 1.582465410232544,
      "learning_rate": 8.459500210842261e-05,
      "loss": 0.8804,
      "step": 710
    },
    {
      "epoch": 0.6842105263157895,
      "grad_norm": 1.3153728246688843,
      "learning_rate": 8.23484821175847e-05,
      "loss": 0.8776,
      "step": 715
    },
    {
      "epoch": 0.6889952153110048,
      "grad_norm": 1.1769953966140747,
      "learning_rate": 8.012085297615027e-05,
      "loss": 0.8777,
      "step": 720
    },
    {
      "epoch": 0.69377990430622,
      "grad_norm": 1.1776398420333862,
      "learning_rate": 7.791273672199742e-05,
      "loss": 0.8866,
      "step": 725
    },
    {
      "epoch": 0.6985645933014354,
      "grad_norm": 1.0515143871307373,
      "learning_rate": 7.572474994427197e-05,
      "loss": 0.8887,
      "step": 730
    },
    {
      "epoch": 0.7033492822966507,
      "grad_norm": 1.0670396089553833,
      "learning_rate": 7.355750361121237e-05,
      "loss": 0.8631,
      "step": 735
    },
    {
      "epoch": 0.7081339712918661,
      "grad_norm": 1.3679234981536865,
      "learning_rate": 7.141160289954471e-05,
      "loss": 0.8701,
      "step": 740
    },
    {
      "epoch": 0.7129186602870813,
      "grad_norm": 1.0791184902191162,
      "learning_rate": 6.92876470254941e-05,
      "loss": 0.879,
      "step": 745
    },
    {
      "epoch": 0.7177033492822966,
      "grad_norm": 1.0610390901565552,
      "learning_rate": 6.718622907746151e-05,
      "loss": 0.8617,
      "step": 750
    },
    {
      "epoch": 0.722488038277512,
      "grad_norm": 1.062545657157898,
      "learning_rate": 6.51079358504107e-05,
      "loss": 0.8743,
      "step": 755
    },
    {
      "epoch": 0.7272727272727273,
      "grad_norm": 0.8820713758468628,
      "learning_rate": 6.305334768201336e-05,
      "loss": 0.8553,
      "step": 760
    },
    {
      "epoch": 0.7320574162679426,
      "grad_norm": 0.9072192311286926,
      "learning_rate": 6.1023038290596715e-05,
      "loss": 0.8613,
      "step": 765
    },
    {
      "epoch": 0.7368421052631579,
      "grad_norm": 1.0919402837753296,
      "learning_rate": 5.901757461493988e-05,
      "loss": 0.8679,
      "step": 770
    },
    {
      "epoch": 0.7416267942583732,
      "grad_norm": 1.0981807708740234,
      "learning_rate": 5.703751665596328e-05,
      "loss": 0.8665,
      "step": 775
    },
    {
      "epoch": 0.7464114832535885,
      "grad_norm": 1.0239495038986206,
      "learning_rate": 5.50834173203552e-05,
      "loss": 0.8819,
      "step": 780
    },
    {
      "epoch": 0.7511961722488039,
      "grad_norm": 1.0707833766937256,
      "learning_rate": 5.315582226617963e-05,
      "loss": 0.8571,
      "step": 785
    },
    {
      "epoch": 0.7559808612440191,
      "grad_norm": 1.0176221132278442,
      "learning_rate": 5.125526975050776e-05,
      "loss": 0.8718,
      "step": 790
    },
    {
      "epoch": 0.7607655502392344,
      "grad_norm": 0.9414092898368835,
      "learning_rate": 4.938229047911651e-05,
      "loss": 0.8547,
      "step": 795
    },
    {
      "epoch": 0.7655502392344498,
      "grad_norm": 0.8967716097831726,
      "learning_rate": 4.753740745829537e-05,
      "loss": 0.8385,
      "step": 800
    },
    {
      "epoch": 0.7703349282296651,
      "grad_norm": 1.160251498222351,
      "learning_rate": 4.5721135848803653e-05,
      "loss": 0.8471,
      "step": 805
    },
    {
      "epoch": 0.7751196172248804,
      "grad_norm": 0.9440346360206604,
      "learning_rate": 4.3933982822017876e-05,
      "loss": 0.8526,
      "step": 810
    },
    {
      "epoch": 0.7799043062200957,
      "grad_norm": 0.9394163489341736,
      "learning_rate": 4.217644741831062e-05,
      "loss": 0.8661,
      "step": 815
    },
    {
      "epoch": 0.784688995215311,
      "grad_norm": 0.9960381388664246,
      "learning_rate": 4.044902040769963e-05,
      "loss": 0.8848,
      "step": 820
    },
    {
      "epoch": 0.7894736842105263,
      "grad_norm": 0.9392317533493042,
      "learning_rate": 3.875218415280636e-05,
      "loss": 0.8562,
      "step": 825
    },
    {
      "epoch": 0.7942583732057417,
      "grad_norm": 1.0181559324264526,
      "learning_rate": 3.708641247416229e-05,
      "loss": 0.854,
      "step": 830
    },
    {
      "epoch": 0.7990430622009569,
      "grad_norm": 1.1440843343734741,
      "learning_rate": 3.54521705179002e-05,
      "loss": 0.8597,
      "step": 835
    },
    {
      "epoch": 0.8038277511961722,
      "grad_norm": 1.0670745372772217,
      "learning_rate": 3.3849914625868004e-05,
      "loss": 0.8847,
      "step": 840
    },
    {
      "epoch": 0.8086124401913876,
      "grad_norm": 0.9774266481399536,
      "learning_rate": 3.228009220820085e-05,
      "loss": 0.8673,
      "step": 845
    },
    {
      "epoch": 0.8133971291866029,
      "grad_norm": 0.9691421389579773,
      "learning_rate": 3.074314161838754e-05,
      "loss": 0.8315,
      "step": 850
    },
    {
      "epoch": 0.8181818181818182,
      "grad_norm": 0.9607205986976624,
      "learning_rate": 2.923949203086557e-05,
      "loss": 0.862,
      "step": 855
    },
    {
      "epoch": 0.8229665071770335,
      "grad_norm": 1.0411772727966309,
      "learning_rate": 2.77695633211795e-05,
      "loss": 0.8659,
      "step": 860
    },
    {
      "epoch": 0.8277511961722488,
      "grad_norm": 1.090226173400879,
      "learning_rate": 2.6333765948735986e-05,
      "loss": 0.8491,
      "step": 865
    },
    {
      "epoch": 0.8325358851674641,
      "grad_norm": 0.8712325096130371,
      "learning_rate": 2.493250084218795e-05,
      "loss": 0.8626,
      "step": 870
    },
    {
      "epoch": 0.8373205741626795,
      "grad_norm": 0.8832561373710632,
      "learning_rate": 2.3566159287480447e-05,
      "loss": 0.8479,
      "step": 875
    },
    {
      "epoch": 0.8421052631578947,
      "grad_norm": 0.9606965184211731,
      "learning_rate": 2.2235122818588565e-05,
      "loss": 0.8625,
      "step": 880
    },
    {
      "epoch": 0.84688995215311,
      "grad_norm": 1.056420087814331,
      "learning_rate": 2.0939763110979125e-05,
      "loss": 0.863,
      "step": 885
    },
    {
      "epoch": 0.8516746411483254,
      "grad_norm": 0.9405905604362488,
      "learning_rate": 1.9680441877824527e-05,
      "loss": 0.853,
      "step": 890
    },
    {
      "epoch": 0.8564593301435407,
      "grad_norm": 0.8905633687973022,
      "learning_rate": 1.8457510768999273e-05,
      "loss": 0.8431,
      "step": 895
    },
    {
      "epoch": 0.861244019138756,
      "grad_norm": 0.981576144695282,
      "learning_rate": 1.727131127288572e-05,
      "loss": 0.8477,
      "step": 900
    },
    {
      "epoch": 0.8660287081339713,
      "grad_norm": 1.0129060745239258,
      "learning_rate": 1.612217462101783e-05,
      "loss": 0.8384,
      "step": 905
    },
    {
      "epoch": 0.8708133971291866,
      "grad_norm": 0.9811695218086243,
      "learning_rate": 1.5010421695588825e-05,
      "loss": 0.8573,
      "step": 910
    },
    {
      "epoch": 0.8755980861244019,
      "grad_norm": 0.9269863963127136,
      "learning_rate": 1.3936362939848893e-05,
      "loss": 0.8561,
      "step": 915
    },
    {
      "epoch": 0.8803827751196173,
      "grad_norm": 0.8773042559623718,
      "learning_rate": 1.2900298271417592e-05,
      "loss": 0.8203,
      "step": 920
    },
    {
      "epoch": 0.8851674641148325,
      "grad_norm": 0.9804287552833557,
      "learning_rate": 1.1902516998535666e-05,
      "loss": 0.8382,
      "step": 925
    },
    {
      "epoch": 0.8899521531100478,
      "grad_norm": 0.8934711217880249,
      "learning_rate": 1.0943297739279395e-05,
      "loss": 0.8416,
      "step": 930
    },
    {
      "epoch": 0.8947368421052632,
      "grad_norm": 1.0209717750549316,
      "learning_rate": 1.0022908343759616e-05,
      "loss": 0.8595,
      "step": 935
    },
    {
      "epoch": 0.8995215311004785,
      "grad_norm": 0.8881580829620361,
      "learning_rate": 9.141605819328318e-06,
      "loss": 0.8332,
      "step": 940
    },
    {
      "epoch": 0.9043062200956937,
      "grad_norm": 0.8664154410362244,
      "learning_rate": 8.299636258812197e-06,
      "loss": 0.8437,
      "step": 945
    },
    {
      "epoch": 0.9090909090909091,
      "grad_norm": 0.8912498950958252,
      "learning_rate": 7.4972347717943916e-06,
      "loss": 0.8485,
      "step": 950
    },
    {
      "epoch": 0.9138755980861244,
      "grad_norm": 0.9088477492332458,
      "learning_rate": 6.734625418963135e-06,
      "loss": 0.8472,
      "step": 955
    },
    {
      "epoch": 0.9186602870813397,
      "grad_norm": 0.9696795344352722,
      "learning_rate": 6.012021149545382e-06,
      "loss": 0.8548,
      "step": 960
    },
    {
      "epoch": 0.9234449760765551,
      "grad_norm": 0.9158973097801208,
      "learning_rate": 5.329623741843531e-06,
      "loss": 0.8333,
      "step": 965
    },
    {
      "epoch": 0.9282296650717703,
      "grad_norm": 0.9025459289550781,
      "learning_rate": 4.6876237468912e-06,
      "loss": 0.8422,
      "step": 970
    },
    {
      "epoch": 0.9330143540669856,
      "grad_norm": 0.8703954815864563,
      "learning_rate": 4.086200435244441e-06,
      "loss": 0.8265,
      "step": 975
    },
    {
      "epoch": 0.937799043062201,
      "grad_norm": 0.9033318161964417,
      "learning_rate": 3.5255217469222942e-06,
      "loss": 0.8203,
      "step": 980
    },
    {
      "epoch": 0.9425837320574163,
      "grad_norm": 1.0247541666030884,
      "learning_rate": 3.0057442445119872e-06,
      "loss": 0.8296,
      "step": 985
    },
    {
      "epoch": 0.9473684210526315,
      "grad_norm": 0.9569628238677979,
      "learning_rate": 2.5270130694504733e-06,
      "loss": 0.8644,
      "step": 990
    },
    {
      "epoch": 0.9521531100478469,
      "grad_norm": 0.8948321342468262,
      "learning_rate": 2.089461901495715e-06,
      "loss": 0.8278,
      "step": 995
    },
    {
      "epoch": 0.9569377990430622,
      "grad_norm": 0.969680666923523,
      "learning_rate": 1.6932129213980793e-06,
      "loss": 0.8452,
      "step": 1000
    },
    {
      "epoch": 0.9617224880382775,
      "grad_norm": 0.9193556904792786,
      "learning_rate": 1.3383767767829956e-06,
      "loss": 0.8341,
      "step": 1005
    },
    {
      "epoch": 0.9665071770334929,
      "grad_norm": 0.9120749235153198,
      "learning_rate": 1.0250525512538855e-06,
      "loss": 0.8359,
      "step": 1010
    },
    {
      "epoch": 0.9712918660287081,
      "grad_norm": 0.8678485155105591,
      "learning_rate": 7.533277367243795e-07,
      "loss": 0.8187,
      "step": 1015
    },
    {
      "epoch": 0.9760765550239234,
      "grad_norm": 1.054465889930725,
      "learning_rate": 5.232782089872601e-07,
      "loss": 0.8487,
      "step": 1020
    },
    {
      "epoch": 0.9808612440191388,
      "grad_norm": 0.8861187696456909,
      "learning_rate": 3.349682065270254e-07,
      "loss": 0.8504,
      "step": 1025
    },
    {
      "epoch": 0.9856459330143541,
      "grad_norm": 0.9375335574150085,
      "learning_rate": 1.8845031258213905e-07,
      "loss": 0.8459,
      "step": 1030
    },
    {
      "epoch": 0.9904306220095693,
      "grad_norm": 0.9127489924430847,
      "learning_rate": 8.376544046180822e-08,
      "loss": 0.8447,
      "step": 1035
    },
    {
      "epoch": 0.9952153110047847,
      "grad_norm": 0.8907783031463623,
      "learning_rate": 2.0942822121439164e-08,
      "loss": 0.8246,
      "step": 1040
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.9073578715324402,
      "learning_rate": 0.0,
      "loss": 0.8382,
      "step": 1045
    },
    {
      "epoch": 1.0,
      "eval_loss": 2.4202542304992676,
      "eval_runtime": 1.1602,
      "eval_samples_per_second": 8.619,
      "eval_steps_per_second": 0.862,
      "step": 1045
    },
    {
      "epoch": 1.0,
      "step": 1045,
      "total_flos": 6.049215074407547e+17,
      "train_loss": 1.5597172723432478,
      "train_runtime": 7161.753,
      "train_samples_per_second": 1.751,
      "train_steps_per_second": 0.146
    }
  ],
  "logging_steps": 5,
  "max_steps": 1045,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 6.049215074407547e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}