diff --git "a/finetune-1.1b-llava-tiny-llama-eva640/trainer_state.json" "b/finetune-1.1b-llava-tiny-llama-eva640/trainer_state.json" new file mode 100644--- /dev/null +++ "b/finetune-1.1b-llava-tiny-llama-eva640/trainer_state.json" @@ -0,0 +1,50904 @@ +{ + "best_metric": null, + "best_model_checkpoint": null, + "epoch": 1.0, + "eval_steps": 500, + "global_step": 7266, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.00013762730525736306, + "grad_norm": 125.19473920812882, + "learning_rate": 9.174311926605506e-08, + "loss": 9.1743, + "step": 1 + }, + { + "epoch": 0.0002752546105147261, + "grad_norm": 122.30798929948465, + "learning_rate": 1.8348623853211012e-07, + "loss": 9.1505, + "step": 2 + }, + { + "epoch": 0.00041288191577208916, + "grad_norm": 127.96765390189857, + "learning_rate": 2.752293577981652e-07, + "loss": 9.1093, + "step": 3 + }, + { + "epoch": 0.0005505092210294523, + "grad_norm": 113.35082853293781, + "learning_rate": 3.6697247706422023e-07, + "loss": 9.1053, + "step": 4 + }, + { + "epoch": 0.0006881365262868153, + "grad_norm": 109.99694948301452, + "learning_rate": 4.587155963302753e-07, + "loss": 9.1692, + "step": 5 + }, + { + "epoch": 0.0008257638315441783, + "grad_norm": 69.78117222147398, + "learning_rate": 5.504587155963304e-07, + "loss": 8.6909, + "step": 6 + }, + { + "epoch": 0.0009633911368015414, + "grad_norm": 57.80940552084882, + "learning_rate": 6.422018348623854e-07, + "loss": 8.5063, + "step": 7 + }, + { + "epoch": 0.0011010184420589045, + "grad_norm": 67.77576466371379, + "learning_rate": 7.339449541284405e-07, + "loss": 8.5205, + "step": 8 + }, + { + "epoch": 0.0012386457473162675, + "grad_norm": 70.46469666692839, + "learning_rate": 8.256880733944956e-07, + "loss": 8.2066, + "step": 9 + }, + { + "epoch": 0.0013762730525736307, + "grad_norm": 57.423476779940195, + "learning_rate": 9.174311926605506e-07, + "loss": 7.907, + "step": 10 + }, + { + "epoch": 0.0015139003578309937, + "grad_norm": 46.52489442826151, + "learning_rate": 1.0091743119266057e-06, + "loss": 7.3497, + "step": 11 + }, + { + "epoch": 0.0016515276630883566, + "grad_norm": 42.39966056483745, + "learning_rate": 1.1009174311926608e-06, + "loss": 6.988, + "step": 12 + }, + { + "epoch": 0.0017891549683457198, + "grad_norm": 33.67260907893102, + "learning_rate": 1.1926605504587159e-06, + "loss": 6.9455, + "step": 13 + }, + { + "epoch": 0.0019267822736030828, + "grad_norm": 41.146175167699894, + "learning_rate": 1.2844036697247707e-06, + "loss": 6.297, + "step": 14 + }, + { + "epoch": 0.002064409578860446, + "grad_norm": 28.177944155862917, + "learning_rate": 1.3761467889908258e-06, + "loss": 6.4059, + "step": 15 + }, + { + "epoch": 0.002202036884117809, + "grad_norm": 26.610618628606783, + "learning_rate": 1.467889908256881e-06, + "loss": 5.8543, + "step": 16 + }, + { + "epoch": 0.002339664189375172, + "grad_norm": 31.837106689327197, + "learning_rate": 1.559633027522936e-06, + "loss": 6.1293, + "step": 17 + }, + { + "epoch": 0.002477291494632535, + "grad_norm": 29.090116611244447, + "learning_rate": 1.6513761467889911e-06, + "loss": 5.7072, + "step": 18 + }, + { + "epoch": 0.002614918799889898, + "grad_norm": 26.22572277646329, + "learning_rate": 1.743119266055046e-06, + "loss": 5.5007, + "step": 19 + }, + { + "epoch": 0.0027525461051472614, + "grad_norm": 26.77430720735284, + "learning_rate": 1.8348623853211011e-06, + "loss": 5.5339, + "step": 20 + }, + { + "epoch": 0.002890173410404624, + "grad_norm": 
24.538818580717084, + "learning_rate": 1.9266055045871564e-06, + "loss": 5.1773, + "step": 21 + }, + { + "epoch": 0.0030278007156619873, + "grad_norm": 25.789682554194393, + "learning_rate": 2.0183486238532113e-06, + "loss": 4.9759, + "step": 22 + }, + { + "epoch": 0.0031654280209193505, + "grad_norm": 26.77784988960033, + "learning_rate": 2.110091743119266e-06, + "loss": 4.7808, + "step": 23 + }, + { + "epoch": 0.0033030553261767133, + "grad_norm": 30.783704917578614, + "learning_rate": 2.2018348623853215e-06, + "loss": 4.5444, + "step": 24 + }, + { + "epoch": 0.0034406826314340765, + "grad_norm": 35.40576005126993, + "learning_rate": 2.2935779816513764e-06, + "loss": 4.3848, + "step": 25 + }, + { + "epoch": 0.0035783099366914397, + "grad_norm": 29.629770483842115, + "learning_rate": 2.3853211009174317e-06, + "loss": 4.3358, + "step": 26 + }, + { + "epoch": 0.0037159372419488025, + "grad_norm": 19.958437560482693, + "learning_rate": 2.4770642201834866e-06, + "loss": 3.9298, + "step": 27 + }, + { + "epoch": 0.0038535645472061657, + "grad_norm": 18.113189543914967, + "learning_rate": 2.5688073394495415e-06, + "loss": 3.9833, + "step": 28 + }, + { + "epoch": 0.003991191852463528, + "grad_norm": 19.629459014814753, + "learning_rate": 2.6605504587155968e-06, + "loss": 3.9207, + "step": 29 + }, + { + "epoch": 0.004128819157720892, + "grad_norm": 17.91583591372297, + "learning_rate": 2.7522935779816517e-06, + "loss": 3.8832, + "step": 30 + }, + { + "epoch": 0.004266446462978255, + "grad_norm": 18.46401475377401, + "learning_rate": 2.844036697247707e-06, + "loss": 3.6923, + "step": 31 + }, + { + "epoch": 0.004404073768235618, + "grad_norm": 18.72285351538884, + "learning_rate": 2.935779816513762e-06, + "loss": 3.4699, + "step": 32 + }, + { + "epoch": 0.004541701073492981, + "grad_norm": 16.259640150416583, + "learning_rate": 3.0275229357798168e-06, + "loss": 3.3905, + "step": 33 + }, + { + "epoch": 0.004679328378750344, + "grad_norm": 14.570187923394842, + "learning_rate": 3.119266055045872e-06, + "loss": 3.3477, + "step": 34 + }, + { + "epoch": 0.004816955684007707, + "grad_norm": 16.939322171144347, + "learning_rate": 3.211009174311927e-06, + "loss": 3.2639, + "step": 35 + }, + { + "epoch": 0.00495458298926507, + "grad_norm": 16.966681420183825, + "learning_rate": 3.3027522935779823e-06, + "loss": 3.5091, + "step": 36 + }, + { + "epoch": 0.005092210294522433, + "grad_norm": 13.209059958726078, + "learning_rate": 3.394495412844037e-06, + "loss": 3.0526, + "step": 37 + }, + { + "epoch": 0.005229837599779796, + "grad_norm": 15.651744192367802, + "learning_rate": 3.486238532110092e-06, + "loss": 2.9199, + "step": 38 + }, + { + "epoch": 0.0053674649050371595, + "grad_norm": 14.311846143454474, + "learning_rate": 3.5779816513761473e-06, + "loss": 2.9452, + "step": 39 + }, + { + "epoch": 0.005505092210294523, + "grad_norm": 12.613915749020137, + "learning_rate": 3.6697247706422022e-06, + "loss": 2.7292, + "step": 40 + }, + { + "epoch": 0.005642719515551885, + "grad_norm": 13.223492123273966, + "learning_rate": 3.7614678899082575e-06, + "loss": 2.4475, + "step": 41 + }, + { + "epoch": 0.005780346820809248, + "grad_norm": 14.336826364729564, + "learning_rate": 3.853211009174313e-06, + "loss": 2.6571, + "step": 42 + }, + { + "epoch": 0.0059179741260666115, + "grad_norm": 10.383910530550867, + "learning_rate": 3.944954128440367e-06, + "loss": 2.4292, + "step": 43 + }, + { + "epoch": 0.006055601431323975, + "grad_norm": 8.99565021368346, + "learning_rate": 4.036697247706423e-06, + "loss": 2.3244, + "step": 
44 + }, + { + "epoch": 0.006193228736581338, + "grad_norm": 8.817102013735807, + "learning_rate": 4.128440366972478e-06, + "loss": 2.1943, + "step": 45 + }, + { + "epoch": 0.006330856041838701, + "grad_norm": 9.702298784362961, + "learning_rate": 4.220183486238532e-06, + "loss": 2.1952, + "step": 46 + }, + { + "epoch": 0.006468483347096064, + "grad_norm": 8.88701992027617, + "learning_rate": 4.311926605504588e-06, + "loss": 2.0957, + "step": 47 + }, + { + "epoch": 0.006606110652353427, + "grad_norm": 9.05653946287146, + "learning_rate": 4.403669724770643e-06, + "loss": 1.9548, + "step": 48 + }, + { + "epoch": 0.00674373795761079, + "grad_norm": 9.945685017558295, + "learning_rate": 4.4954128440366975e-06, + "loss": 2.1238, + "step": 49 + }, + { + "epoch": 0.006881365262868153, + "grad_norm": 9.86954527309786, + "learning_rate": 4.587155963302753e-06, + "loss": 1.8804, + "step": 50 + }, + { + "epoch": 0.007018992568125516, + "grad_norm": 11.147524766075072, + "learning_rate": 4.678899082568808e-06, + "loss": 1.7962, + "step": 51 + }, + { + "epoch": 0.007156619873382879, + "grad_norm": 12.103687673579824, + "learning_rate": 4.770642201834863e-06, + "loss": 1.7479, + "step": 52 + }, + { + "epoch": 0.007294247178640243, + "grad_norm": 11.28196444108274, + "learning_rate": 4.862385321100918e-06, + "loss": 1.7689, + "step": 53 + }, + { + "epoch": 0.007431874483897605, + "grad_norm": 8.318397086615121, + "learning_rate": 4.954128440366973e-06, + "loss": 1.6661, + "step": 54 + }, + { + "epoch": 0.007569501789154968, + "grad_norm": 7.8950793538117665, + "learning_rate": 5.045871559633028e-06, + "loss": 1.7077, + "step": 55 + }, + { + "epoch": 0.007707129094412331, + "grad_norm": 5.5054801936451545, + "learning_rate": 5.137614678899083e-06, + "loss": 1.5939, + "step": 56 + }, + { + "epoch": 0.007844756399669695, + "grad_norm": 4.169025037965987, + "learning_rate": 5.229357798165137e-06, + "loss": 1.5904, + "step": 57 + }, + { + "epoch": 0.007982383704927057, + "grad_norm": 4.897484074363938, + "learning_rate": 5.3211009174311936e-06, + "loss": 1.6256, + "step": 58 + }, + { + "epoch": 0.008120011010184421, + "grad_norm": 6.909386381109011, + "learning_rate": 5.412844036697248e-06, + "loss": 1.5304, + "step": 59 + }, + { + "epoch": 0.008257638315441783, + "grad_norm": 3.8966671981572865, + "learning_rate": 5.504587155963303e-06, + "loss": 1.6455, + "step": 60 + }, + { + "epoch": 0.008395265620699147, + "grad_norm": 4.3631520694277395, + "learning_rate": 5.596330275229358e-06, + "loss": 1.6049, + "step": 61 + }, + { + "epoch": 0.00853289292595651, + "grad_norm": 3.7637060529275987, + "learning_rate": 5.688073394495414e-06, + "loss": 1.6034, + "step": 62 + }, + { + "epoch": 0.008670520231213872, + "grad_norm": 5.80413139969054, + "learning_rate": 5.7798165137614684e-06, + "loss": 1.5927, + "step": 63 + }, + { + "epoch": 0.008808147536471236, + "grad_norm": 6.399309128340181, + "learning_rate": 5.871559633027524e-06, + "loss": 1.6421, + "step": 64 + }, + { + "epoch": 0.008945774841728598, + "grad_norm": 3.5967425904318255, + "learning_rate": 5.963302752293578e-06, + "loss": 1.6728, + "step": 65 + }, + { + "epoch": 0.009083402146985962, + "grad_norm": 4.164940529668355, + "learning_rate": 6.0550458715596335e-06, + "loss": 1.5774, + "step": 66 + }, + { + "epoch": 0.009221029452243325, + "grad_norm": 3.942303098856934, + "learning_rate": 6.146788990825688e-06, + "loss": 1.4309, + "step": 67 + }, + { + "epoch": 0.009358656757500689, + "grad_norm": 3.6098390288887434, + "learning_rate": 6.238532110091744e-06, 
+ "loss": 1.5521, + "step": 68 + }, + { + "epoch": 0.009496284062758051, + "grad_norm": 3.613790576808798, + "learning_rate": 6.330275229357799e-06, + "loss": 1.4476, + "step": 69 + }, + { + "epoch": 0.009633911368015413, + "grad_norm": 3.6054707610689527, + "learning_rate": 6.422018348623854e-06, + "loss": 1.5639, + "step": 70 + }, + { + "epoch": 0.009771538673272778, + "grad_norm": 3.7183900147459004, + "learning_rate": 6.513761467889908e-06, + "loss": 1.5219, + "step": 71 + }, + { + "epoch": 0.00990916597853014, + "grad_norm": 3.1318978731203426, + "learning_rate": 6.6055045871559645e-06, + "loss": 1.5748, + "step": 72 + }, + { + "epoch": 0.010046793283787504, + "grad_norm": 2.980705402594747, + "learning_rate": 6.697247706422019e-06, + "loss": 1.5212, + "step": 73 + }, + { + "epoch": 0.010184420589044866, + "grad_norm": 2.5719692786623063, + "learning_rate": 6.788990825688074e-06, + "loss": 1.6141, + "step": 74 + }, + { + "epoch": 0.01032204789430223, + "grad_norm": 2.9602160150446353, + "learning_rate": 6.880733944954129e-06, + "loss": 1.5481, + "step": 75 + }, + { + "epoch": 0.010459675199559593, + "grad_norm": 4.6484036208468895, + "learning_rate": 6.972477064220184e-06, + "loss": 1.5382, + "step": 76 + }, + { + "epoch": 0.010597302504816955, + "grad_norm": 2.6422519125904738, + "learning_rate": 7.0642201834862385e-06, + "loss": 1.5199, + "step": 77 + }, + { + "epoch": 0.010734929810074319, + "grad_norm": 2.2462609269807463, + "learning_rate": 7.155963302752295e-06, + "loss": 1.4589, + "step": 78 + }, + { + "epoch": 0.010872557115331681, + "grad_norm": 2.5958416602354526, + "learning_rate": 7.247706422018349e-06, + "loss": 1.4196, + "step": 79 + }, + { + "epoch": 0.011010184420589045, + "grad_norm": 2.4322557882552434, + "learning_rate": 7.3394495412844045e-06, + "loss": 1.5169, + "step": 80 + }, + { + "epoch": 0.011147811725846408, + "grad_norm": 2.3721454375389763, + "learning_rate": 7.431192660550459e-06, + "loss": 1.4884, + "step": 81 + }, + { + "epoch": 0.01128543903110377, + "grad_norm": 2.517517646959331, + "learning_rate": 7.522935779816515e-06, + "loss": 1.5131, + "step": 82 + }, + { + "epoch": 0.011423066336361134, + "grad_norm": 2.1111513768983947, + "learning_rate": 7.6146788990825695e-06, + "loss": 1.5024, + "step": 83 + }, + { + "epoch": 0.011560693641618497, + "grad_norm": 2.19144633613799, + "learning_rate": 7.706422018348626e-06, + "loss": 1.5059, + "step": 84 + }, + { + "epoch": 0.01169832094687586, + "grad_norm": 2.649489817808233, + "learning_rate": 7.79816513761468e-06, + "loss": 1.437, + "step": 85 + }, + { + "epoch": 0.011835948252133223, + "grad_norm": 2.215611260494801, + "learning_rate": 7.889908256880735e-06, + "loss": 1.4768, + "step": 86 + }, + { + "epoch": 0.011973575557390587, + "grad_norm": 2.223805042274753, + "learning_rate": 7.981651376146789e-06, + "loss": 1.4697, + "step": 87 + }, + { + "epoch": 0.01211120286264795, + "grad_norm": 3.0018648238797803, + "learning_rate": 8.073394495412845e-06, + "loss": 1.6319, + "step": 88 + }, + { + "epoch": 0.012248830167905312, + "grad_norm": 2.0243528943519937, + "learning_rate": 8.1651376146789e-06, + "loss": 1.4016, + "step": 89 + }, + { + "epoch": 0.012386457473162676, + "grad_norm": 2.386818589224275, + "learning_rate": 8.256880733944956e-06, + "loss": 1.4194, + "step": 90 + }, + { + "epoch": 0.012524084778420038, + "grad_norm": 2.0395702062949073, + "learning_rate": 8.34862385321101e-06, + "loss": 1.5033, + "step": 91 + }, + { + "epoch": 0.012661712083677402, + "grad_norm": 2.633921139273025, + 
"learning_rate": 8.440366972477065e-06, + "loss": 1.5427, + "step": 92 + }, + { + "epoch": 0.012799339388934764, + "grad_norm": 2.276591928540183, + "learning_rate": 8.53211009174312e-06, + "loss": 1.4927, + "step": 93 + }, + { + "epoch": 0.012936966694192129, + "grad_norm": 2.288218912762329, + "learning_rate": 8.623853211009175e-06, + "loss": 1.4559, + "step": 94 + }, + { + "epoch": 0.01307459399944949, + "grad_norm": 2.1128719242838665, + "learning_rate": 8.71559633027523e-06, + "loss": 1.4829, + "step": 95 + }, + { + "epoch": 0.013212221304706853, + "grad_norm": 2.2711246581766913, + "learning_rate": 8.807339449541286e-06, + "loss": 1.4727, + "step": 96 + }, + { + "epoch": 0.013349848609964217, + "grad_norm": 2.1130231906451664, + "learning_rate": 8.89908256880734e-06, + "loss": 1.4609, + "step": 97 + }, + { + "epoch": 0.01348747591522158, + "grad_norm": 2.220821852164481, + "learning_rate": 8.990825688073395e-06, + "loss": 1.3396, + "step": 98 + }, + { + "epoch": 0.013625103220478944, + "grad_norm": 2.3633420704730743, + "learning_rate": 9.08256880733945e-06, + "loss": 1.5284, + "step": 99 + }, + { + "epoch": 0.013762730525736306, + "grad_norm": 2.7357866827984103, + "learning_rate": 9.174311926605506e-06, + "loss": 1.5814, + "step": 100 + }, + { + "epoch": 0.013900357830993668, + "grad_norm": 2.5183026028685274, + "learning_rate": 9.26605504587156e-06, + "loss": 1.4183, + "step": 101 + }, + { + "epoch": 0.014037985136251032, + "grad_norm": 2.220648136823632, + "learning_rate": 9.357798165137616e-06, + "loss": 1.3791, + "step": 102 + }, + { + "epoch": 0.014175612441508395, + "grad_norm": 2.620225939456079, + "learning_rate": 9.44954128440367e-06, + "loss": 1.5901, + "step": 103 + }, + { + "epoch": 0.014313239746765759, + "grad_norm": 2.373693059130836, + "learning_rate": 9.541284403669727e-06, + "loss": 1.3812, + "step": 104 + }, + { + "epoch": 0.014450867052023121, + "grad_norm": 2.290956215731559, + "learning_rate": 9.633027522935781e-06, + "loss": 1.5668, + "step": 105 + }, + { + "epoch": 0.014588494357280485, + "grad_norm": 2.151827990042943, + "learning_rate": 9.724770642201836e-06, + "loss": 1.4094, + "step": 106 + }, + { + "epoch": 0.014726121662537848, + "grad_norm": 2.25935072143266, + "learning_rate": 9.81651376146789e-06, + "loss": 1.389, + "step": 107 + }, + { + "epoch": 0.01486374896779521, + "grad_norm": 1.9427811247759121, + "learning_rate": 9.908256880733946e-06, + "loss": 1.3752, + "step": 108 + }, + { + "epoch": 0.015001376273052574, + "grad_norm": 2.670354810474395, + "learning_rate": 1e-05, + "loss": 1.3999, + "step": 109 + }, + { + "epoch": 0.015139003578309936, + "grad_norm": 2.6207542392337064, + "learning_rate": 1.0091743119266055e-05, + "loss": 1.4274, + "step": 110 + }, + { + "epoch": 0.0152766308835673, + "grad_norm": 2.032814746469394, + "learning_rate": 1.018348623853211e-05, + "loss": 1.4037, + "step": 111 + }, + { + "epoch": 0.015414258188824663, + "grad_norm": 2.3553013514581376, + "learning_rate": 1.0275229357798166e-05, + "loss": 1.3847, + "step": 112 + }, + { + "epoch": 0.015551885494082027, + "grad_norm": 2.3604510534366185, + "learning_rate": 1.036697247706422e-05, + "loss": 1.4721, + "step": 113 + }, + { + "epoch": 0.01568951279933939, + "grad_norm": 1.990776274775859, + "learning_rate": 1.0458715596330275e-05, + "loss": 1.4307, + "step": 114 + }, + { + "epoch": 0.01582714010459675, + "grad_norm": 2.4366239781037335, + "learning_rate": 1.055045871559633e-05, + "loss": 1.3784, + "step": 115 + }, + { + "epoch": 0.015964767409854114, + "grad_norm": 
2.1530688662663025, + "learning_rate": 1.0642201834862387e-05, + "loss": 1.5301, + "step": 116 + }, + { + "epoch": 0.01610239471511148, + "grad_norm": 2.4856890182030273, + "learning_rate": 1.0733944954128442e-05, + "loss": 1.5627, + "step": 117 + }, + { + "epoch": 0.016240022020368842, + "grad_norm": 2.4715849958308707, + "learning_rate": 1.0825688073394496e-05, + "loss": 1.5468, + "step": 118 + }, + { + "epoch": 0.016377649325626204, + "grad_norm": 2.535683782971731, + "learning_rate": 1.091743119266055e-05, + "loss": 1.4756, + "step": 119 + }, + { + "epoch": 0.016515276630883566, + "grad_norm": 2.4541634820421754, + "learning_rate": 1.1009174311926607e-05, + "loss": 1.5382, + "step": 120 + }, + { + "epoch": 0.01665290393614093, + "grad_norm": 2.1539947467318554, + "learning_rate": 1.1100917431192661e-05, + "loss": 1.358, + "step": 121 + }, + { + "epoch": 0.016790531241398295, + "grad_norm": 1.9474224783077931, + "learning_rate": 1.1192660550458716e-05, + "loss": 1.3763, + "step": 122 + }, + { + "epoch": 0.016928158546655657, + "grad_norm": 2.078875556516513, + "learning_rate": 1.128440366972477e-05, + "loss": 1.3807, + "step": 123 + }, + { + "epoch": 0.01706578585191302, + "grad_norm": 2.4055668082267108, + "learning_rate": 1.1376146788990828e-05, + "loss": 1.3872, + "step": 124 + }, + { + "epoch": 0.01720341315717038, + "grad_norm": 2.3132588920774912, + "learning_rate": 1.1467889908256882e-05, + "loss": 1.4327, + "step": 125 + }, + { + "epoch": 0.017341040462427744, + "grad_norm": 2.4527793924936936, + "learning_rate": 1.1559633027522937e-05, + "loss": 1.5077, + "step": 126 + }, + { + "epoch": 0.01747866776768511, + "grad_norm": 2.0978600503608504, + "learning_rate": 1.1651376146788991e-05, + "loss": 1.3669, + "step": 127 + }, + { + "epoch": 0.017616295072942472, + "grad_norm": 2.1239701609371626, + "learning_rate": 1.1743119266055047e-05, + "loss": 1.401, + "step": 128 + }, + { + "epoch": 0.017753922378199834, + "grad_norm": 2.5224577529066083, + "learning_rate": 1.1834862385321102e-05, + "loss": 1.4933, + "step": 129 + }, + { + "epoch": 0.017891549683457197, + "grad_norm": 1.9091441052093827, + "learning_rate": 1.1926605504587156e-05, + "loss": 1.47, + "step": 130 + }, + { + "epoch": 0.018029176988714563, + "grad_norm": 2.117237280408027, + "learning_rate": 1.2018348623853211e-05, + "loss": 1.5108, + "step": 131 + }, + { + "epoch": 0.018166804293971925, + "grad_norm": 2.2082410149915574, + "learning_rate": 1.2110091743119267e-05, + "loss": 1.4637, + "step": 132 + }, + { + "epoch": 0.018304431599229287, + "grad_norm": 2.6122172487718007, + "learning_rate": 1.2201834862385321e-05, + "loss": 1.5086, + "step": 133 + }, + { + "epoch": 0.01844205890448665, + "grad_norm": 2.008839943937337, + "learning_rate": 1.2293577981651376e-05, + "loss": 1.2705, + "step": 134 + }, + { + "epoch": 0.018579686209744012, + "grad_norm": 2.3695876697774083, + "learning_rate": 1.238532110091743e-05, + "loss": 1.4219, + "step": 135 + }, + { + "epoch": 0.018717313515001378, + "grad_norm": 2.2118887087092185, + "learning_rate": 1.2477064220183488e-05, + "loss": 1.4856, + "step": 136 + }, + { + "epoch": 0.01885494082025874, + "grad_norm": 2.189617479512794, + "learning_rate": 1.2568807339449543e-05, + "loss": 1.4182, + "step": 137 + }, + { + "epoch": 0.018992568125516102, + "grad_norm": 1.9338747414395177, + "learning_rate": 1.2660550458715597e-05, + "loss": 1.3458, + "step": 138 + }, + { + "epoch": 0.019130195430773465, + "grad_norm": 2.5686958772824613, + "learning_rate": 1.2752293577981652e-05, + "loss": 
1.3838, + "step": 139 + }, + { + "epoch": 0.019267822736030827, + "grad_norm": 2.030881709271741, + "learning_rate": 1.2844036697247708e-05, + "loss": 1.3908, + "step": 140 + }, + { + "epoch": 0.019405450041288193, + "grad_norm": 2.563599227740635, + "learning_rate": 1.2935779816513762e-05, + "loss": 1.4337, + "step": 141 + }, + { + "epoch": 0.019543077346545555, + "grad_norm": 1.8176109793242443, + "learning_rate": 1.3027522935779817e-05, + "loss": 1.3158, + "step": 142 + }, + { + "epoch": 0.019680704651802917, + "grad_norm": 1.8019193031728145, + "learning_rate": 1.3119266055045871e-05, + "loss": 1.3311, + "step": 143 + }, + { + "epoch": 0.01981833195706028, + "grad_norm": 2.698646522727441, + "learning_rate": 1.3211009174311929e-05, + "loss": 1.5612, + "step": 144 + }, + { + "epoch": 0.019955959262317642, + "grad_norm": 1.9171636115670228, + "learning_rate": 1.3302752293577984e-05, + "loss": 1.305, + "step": 145 + }, + { + "epoch": 0.020093586567575008, + "grad_norm": 1.7563775578267051, + "learning_rate": 1.3394495412844038e-05, + "loss": 1.346, + "step": 146 + }, + { + "epoch": 0.02023121387283237, + "grad_norm": 1.683948752339103, + "learning_rate": 1.3486238532110092e-05, + "loss": 1.3838, + "step": 147 + }, + { + "epoch": 0.020368841178089733, + "grad_norm": 1.9218370036130723, + "learning_rate": 1.3577981651376149e-05, + "loss": 1.3623, + "step": 148 + }, + { + "epoch": 0.020506468483347095, + "grad_norm": 1.9968951763915517, + "learning_rate": 1.3669724770642203e-05, + "loss": 1.3466, + "step": 149 + }, + { + "epoch": 0.02064409578860446, + "grad_norm": 1.7657209630970832, + "learning_rate": 1.3761467889908258e-05, + "loss": 1.335, + "step": 150 + }, + { + "epoch": 0.020781723093861823, + "grad_norm": 2.275067067517579, + "learning_rate": 1.3853211009174312e-05, + "loss": 1.3795, + "step": 151 + }, + { + "epoch": 0.020919350399119185, + "grad_norm": 2.0946969906583455, + "learning_rate": 1.3944954128440368e-05, + "loss": 1.4477, + "step": 152 + }, + { + "epoch": 0.021056977704376548, + "grad_norm": 1.94844802962327, + "learning_rate": 1.4036697247706423e-05, + "loss": 1.2773, + "step": 153 + }, + { + "epoch": 0.02119460500963391, + "grad_norm": 1.895306116339568, + "learning_rate": 1.4128440366972477e-05, + "loss": 1.3181, + "step": 154 + }, + { + "epoch": 0.021332232314891276, + "grad_norm": 2.367957867670617, + "learning_rate": 1.4220183486238533e-05, + "loss": 1.2996, + "step": 155 + }, + { + "epoch": 0.021469859620148638, + "grad_norm": 2.275921389073251, + "learning_rate": 1.431192660550459e-05, + "loss": 1.3412, + "step": 156 + }, + { + "epoch": 0.021607486925406, + "grad_norm": 2.311174722296487, + "learning_rate": 1.4403669724770644e-05, + "loss": 1.4006, + "step": 157 + }, + { + "epoch": 0.021745114230663363, + "grad_norm": 1.9058240934259199, + "learning_rate": 1.4495412844036698e-05, + "loss": 1.3754, + "step": 158 + }, + { + "epoch": 0.021882741535920725, + "grad_norm": 2.0591383358940933, + "learning_rate": 1.4587155963302753e-05, + "loss": 1.3629, + "step": 159 + }, + { + "epoch": 0.02202036884117809, + "grad_norm": 2.2042816428987124, + "learning_rate": 1.4678899082568809e-05, + "loss": 1.4356, + "step": 160 + }, + { + "epoch": 0.022157996146435453, + "grad_norm": 2.324548086443444, + "learning_rate": 1.4770642201834863e-05, + "loss": 1.3237, + "step": 161 + }, + { + "epoch": 0.022295623451692816, + "grad_norm": 2.297329627871107, + "learning_rate": 1.4862385321100918e-05, + "loss": 1.3938, + "step": 162 + }, + { + "epoch": 0.022433250756950178, + "grad_norm": 
1.845311250165973, + "learning_rate": 1.4954128440366972e-05, + "loss": 1.3465, + "step": 163 + }, + { + "epoch": 0.02257087806220754, + "grad_norm": 2.3928422276791292, + "learning_rate": 1.504587155963303e-05, + "loss": 1.2567, + "step": 164 + }, + { + "epoch": 0.022708505367464906, + "grad_norm": 2.0869370512078005, + "learning_rate": 1.5137614678899085e-05, + "loss": 1.354, + "step": 165 + }, + { + "epoch": 0.02284613267272227, + "grad_norm": 2.2948529551834866, + "learning_rate": 1.5229357798165139e-05, + "loss": 1.3956, + "step": 166 + }, + { + "epoch": 0.02298375997797963, + "grad_norm": 2.1356579266269633, + "learning_rate": 1.5321100917431192e-05, + "loss": 1.3029, + "step": 167 + }, + { + "epoch": 0.023121387283236993, + "grad_norm": 1.879315448535232, + "learning_rate": 1.541284403669725e-05, + "loss": 1.4359, + "step": 168 + }, + { + "epoch": 0.02325901458849436, + "grad_norm": 2.2985390452183383, + "learning_rate": 1.5504587155963304e-05, + "loss": 1.428, + "step": 169 + }, + { + "epoch": 0.02339664189375172, + "grad_norm": 2.1614017756521218, + "learning_rate": 1.559633027522936e-05, + "loss": 1.362, + "step": 170 + }, + { + "epoch": 0.023534269199009084, + "grad_norm": 1.9571233839509083, + "learning_rate": 1.5688073394495413e-05, + "loss": 1.3408, + "step": 171 + }, + { + "epoch": 0.023671896504266446, + "grad_norm": 2.204507265231331, + "learning_rate": 1.577981651376147e-05, + "loss": 1.3756, + "step": 172 + }, + { + "epoch": 0.023809523809523808, + "grad_norm": 1.8598561696357072, + "learning_rate": 1.5871559633027525e-05, + "loss": 1.3254, + "step": 173 + }, + { + "epoch": 0.023947151114781174, + "grad_norm": 2.028593408652933, + "learning_rate": 1.5963302752293578e-05, + "loss": 1.3713, + "step": 174 + }, + { + "epoch": 0.024084778420038536, + "grad_norm": 2.068922340732782, + "learning_rate": 1.6055045871559634e-05, + "loss": 1.4301, + "step": 175 + }, + { + "epoch": 0.0242224057252959, + "grad_norm": 2.6177132341150635, + "learning_rate": 1.614678899082569e-05, + "loss": 1.3865, + "step": 176 + }, + { + "epoch": 0.02436003303055326, + "grad_norm": 1.9257037964088355, + "learning_rate": 1.6238532110091743e-05, + "loss": 1.3321, + "step": 177 + }, + { + "epoch": 0.024497660335810623, + "grad_norm": 2.089865470637852, + "learning_rate": 1.63302752293578e-05, + "loss": 1.3191, + "step": 178 + }, + { + "epoch": 0.02463528764106799, + "grad_norm": 2.2931036240406115, + "learning_rate": 1.6422018348623852e-05, + "loss": 1.4369, + "step": 179 + }, + { + "epoch": 0.02477291494632535, + "grad_norm": 2.0069970933947836, + "learning_rate": 1.6513761467889912e-05, + "loss": 1.3773, + "step": 180 + }, + { + "epoch": 0.024910542251582714, + "grad_norm": 2.17213886306559, + "learning_rate": 1.6605504587155964e-05, + "loss": 1.434, + "step": 181 + }, + { + "epoch": 0.025048169556840076, + "grad_norm": 1.8345577373199526, + "learning_rate": 1.669724770642202e-05, + "loss": 1.292, + "step": 182 + }, + { + "epoch": 0.02518579686209744, + "grad_norm": 2.2382844643998583, + "learning_rate": 1.6788990825688073e-05, + "loss": 1.3798, + "step": 183 + }, + { + "epoch": 0.025323424167354804, + "grad_norm": 2.3380349507564033, + "learning_rate": 1.688073394495413e-05, + "loss": 1.329, + "step": 184 + }, + { + "epoch": 0.025461051472612167, + "grad_norm": 1.802959440174109, + "learning_rate": 1.6972477064220186e-05, + "loss": 1.308, + "step": 185 + }, + { + "epoch": 0.02559867877786953, + "grad_norm": 2.152666761656666, + "learning_rate": 1.706422018348624e-05, + "loss": 1.2755, + "step": 186 + 
}, + { + "epoch": 0.02573630608312689, + "grad_norm": 1.8448538889980963, + "learning_rate": 1.7155963302752295e-05, + "loss": 1.3412, + "step": 187 + }, + { + "epoch": 0.025873933388384257, + "grad_norm": 1.8126166741608833, + "learning_rate": 1.724770642201835e-05, + "loss": 1.3208, + "step": 188 + }, + { + "epoch": 0.02601156069364162, + "grad_norm": 2.468183706025089, + "learning_rate": 1.7339449541284407e-05, + "loss": 1.3522, + "step": 189 + }, + { + "epoch": 0.02614918799889898, + "grad_norm": 2.0889631328324145, + "learning_rate": 1.743119266055046e-05, + "loss": 1.303, + "step": 190 + }, + { + "epoch": 0.026286815304156344, + "grad_norm": 1.8802549683018899, + "learning_rate": 1.7522935779816516e-05, + "loss": 1.3033, + "step": 191 + }, + { + "epoch": 0.026424442609413706, + "grad_norm": 2.102205226581823, + "learning_rate": 1.7614678899082572e-05, + "loss": 1.3699, + "step": 192 + }, + { + "epoch": 0.026562069914671072, + "grad_norm": 1.9127372886810032, + "learning_rate": 1.7706422018348625e-05, + "loss": 1.3024, + "step": 193 + }, + { + "epoch": 0.026699697219928435, + "grad_norm": 2.263119460489956, + "learning_rate": 1.779816513761468e-05, + "loss": 1.3812, + "step": 194 + }, + { + "epoch": 0.026837324525185797, + "grad_norm": 3.2316683630938847, + "learning_rate": 1.7889908256880734e-05, + "loss": 1.449, + "step": 195 + }, + { + "epoch": 0.02697495183044316, + "grad_norm": 2.2282560974129475, + "learning_rate": 1.798165137614679e-05, + "loss": 1.3174, + "step": 196 + }, + { + "epoch": 0.02711257913570052, + "grad_norm": 2.0650109257707827, + "learning_rate": 1.8073394495412846e-05, + "loss": 1.401, + "step": 197 + }, + { + "epoch": 0.027250206440957887, + "grad_norm": 1.9541818117446426, + "learning_rate": 1.81651376146789e-05, + "loss": 1.3052, + "step": 198 + }, + { + "epoch": 0.02738783374621525, + "grad_norm": 2.29522795249014, + "learning_rate": 1.8256880733944955e-05, + "loss": 1.4411, + "step": 199 + }, + { + "epoch": 0.027525461051472612, + "grad_norm": 2.0593967064852485, + "learning_rate": 1.834862385321101e-05, + "loss": 1.3792, + "step": 200 + }, + { + "epoch": 0.027663088356729974, + "grad_norm": 1.9422640470971306, + "learning_rate": 1.8440366972477067e-05, + "loss": 1.2357, + "step": 201 + }, + { + "epoch": 0.027800715661987337, + "grad_norm": 1.8175882378871986, + "learning_rate": 1.853211009174312e-05, + "loss": 1.3464, + "step": 202 + }, + { + "epoch": 0.027938342967244702, + "grad_norm": 1.8652650357485816, + "learning_rate": 1.8623853211009176e-05, + "loss": 1.3388, + "step": 203 + }, + { + "epoch": 0.028075970272502065, + "grad_norm": 1.9321739266272018, + "learning_rate": 1.8715596330275232e-05, + "loss": 1.4048, + "step": 204 + }, + { + "epoch": 0.028213597577759427, + "grad_norm": 1.9395894514827925, + "learning_rate": 1.8807339449541285e-05, + "loss": 1.3342, + "step": 205 + }, + { + "epoch": 0.02835122488301679, + "grad_norm": 2.1963289957192593, + "learning_rate": 1.889908256880734e-05, + "loss": 1.3347, + "step": 206 + }, + { + "epoch": 0.028488852188274155, + "grad_norm": 2.3476467008195225, + "learning_rate": 1.8990825688073394e-05, + "loss": 1.3726, + "step": 207 + }, + { + "epoch": 0.028626479493531518, + "grad_norm": 1.7420923036134865, + "learning_rate": 1.9082568807339454e-05, + "loss": 1.3369, + "step": 208 + }, + { + "epoch": 0.02876410679878888, + "grad_norm": 1.939565410980372, + "learning_rate": 1.9174311926605506e-05, + "loss": 1.31, + "step": 209 + }, + { + "epoch": 0.028901734104046242, + "grad_norm": 2.1394287453368537, + 
"learning_rate": 1.9266055045871563e-05, + "loss": 1.3769, + "step": 210 + }, + { + "epoch": 0.029039361409303605, + "grad_norm": 2.071505816789772, + "learning_rate": 1.9357798165137615e-05, + "loss": 1.339, + "step": 211 + }, + { + "epoch": 0.02917698871456097, + "grad_norm": 2.4395853297564636, + "learning_rate": 1.944954128440367e-05, + "loss": 1.3921, + "step": 212 + }, + { + "epoch": 0.029314616019818333, + "grad_norm": 1.9367232795084275, + "learning_rate": 1.9541284403669728e-05, + "loss": 1.3692, + "step": 213 + }, + { + "epoch": 0.029452243325075695, + "grad_norm": 2.1528438766735403, + "learning_rate": 1.963302752293578e-05, + "loss": 1.4347, + "step": 214 + }, + { + "epoch": 0.029589870630333057, + "grad_norm": 2.0232019543566793, + "learning_rate": 1.9724770642201837e-05, + "loss": 1.3108, + "step": 215 + }, + { + "epoch": 0.02972749793559042, + "grad_norm": 2.048476375562197, + "learning_rate": 1.9816513761467893e-05, + "loss": 1.3485, + "step": 216 + }, + { + "epoch": 0.029865125240847785, + "grad_norm": 1.8724633867335398, + "learning_rate": 1.9908256880733945e-05, + "loss": 1.3775, + "step": 217 + }, + { + "epoch": 0.030002752546105148, + "grad_norm": 1.9466022384594104, + "learning_rate": 2e-05, + "loss": 1.3636, + "step": 218 + }, + { + "epoch": 0.03014037985136251, + "grad_norm": 1.7548298294927687, + "learning_rate": 1.9999999006568445e-05, + "loss": 1.3478, + "step": 219 + }, + { + "epoch": 0.030278007156619872, + "grad_norm": 1.8347380541503355, + "learning_rate": 1.999999602627397e-05, + "loss": 1.3415, + "step": 220 + }, + { + "epoch": 0.030415634461877235, + "grad_norm": 1.9335671410050412, + "learning_rate": 1.9999991059117167e-05, + "loss": 1.2799, + "step": 221 + }, + { + "epoch": 0.0305532617671346, + "grad_norm": 2.00152236637167, + "learning_rate": 1.999998410509903e-05, + "loss": 1.279, + "step": 222 + }, + { + "epoch": 0.030690889072391963, + "grad_norm": 1.8593056101521477, + "learning_rate": 1.999997516422093e-05, + "loss": 1.355, + "step": 223 + }, + { + "epoch": 0.030828516377649325, + "grad_norm": 2.0407496049002467, + "learning_rate": 1.9999964236484656e-05, + "loss": 1.297, + "step": 224 + }, + { + "epoch": 0.030966143682906688, + "grad_norm": 2.198753152129754, + "learning_rate": 1.9999951321892372e-05, + "loss": 1.3341, + "step": 225 + }, + { + "epoch": 0.031103770988164053, + "grad_norm": 1.9251066337928342, + "learning_rate": 1.9999936420446643e-05, + "loss": 1.2439, + "step": 226 + }, + { + "epoch": 0.031241398293421416, + "grad_norm": 2.0392613455325375, + "learning_rate": 1.9999919532150435e-05, + "loss": 1.362, + "step": 227 + }, + { + "epoch": 0.03137902559867878, + "grad_norm": 2.0012281863675416, + "learning_rate": 1.9999900657007094e-05, + "loss": 1.3278, + "step": 228 + }, + { + "epoch": 0.03151665290393614, + "grad_norm": 2.091729274288602, + "learning_rate": 1.999987979502038e-05, + "loss": 1.2744, + "step": 229 + }, + { + "epoch": 0.0316542802091935, + "grad_norm": 1.9583046678445861, + "learning_rate": 1.9999856946194435e-05, + "loss": 1.3289, + "step": 230 + }, + { + "epoch": 0.031791907514450865, + "grad_norm": 2.2321874709738494, + "learning_rate": 1.9999832110533794e-05, + "loss": 1.4639, + "step": 231 + }, + { + "epoch": 0.03192953481970823, + "grad_norm": 1.9568634380870933, + "learning_rate": 1.9999805288043398e-05, + "loss": 1.404, + "step": 232 + }, + { + "epoch": 0.03206716212496559, + "grad_norm": 2.0794142752238804, + "learning_rate": 1.9999776478728574e-05, + "loss": 1.2842, + "step": 233 + }, + { + "epoch": 
0.03220478943022296, + "grad_norm": 2.03025781353407, + "learning_rate": 1.9999745682595044e-05, + "loss": 1.2752, + "step": 234 + }, + { + "epoch": 0.03234241673548032, + "grad_norm": 1.8111188733288912, + "learning_rate": 1.999971289964893e-05, + "loss": 1.3175, + "step": 235 + }, + { + "epoch": 0.032480044040737684, + "grad_norm": 1.9569011952305575, + "learning_rate": 1.9999678129896742e-05, + "loss": 1.3261, + "step": 236 + }, + { + "epoch": 0.032617671345995046, + "grad_norm": 1.9990194066635878, + "learning_rate": 1.9999641373345394e-05, + "loss": 1.336, + "step": 237 + }, + { + "epoch": 0.03275529865125241, + "grad_norm": 1.8364956102738534, + "learning_rate": 1.999960263000218e-05, + "loss": 1.255, + "step": 238 + }, + { + "epoch": 0.03289292595650977, + "grad_norm": 1.929709663665978, + "learning_rate": 1.9999561899874807e-05, + "loss": 1.3415, + "step": 239 + }, + { + "epoch": 0.03303055326176713, + "grad_norm": 1.8032861518293837, + "learning_rate": 1.9999519182971364e-05, + "loss": 1.3253, + "step": 240 + }, + { + "epoch": 0.033168180567024495, + "grad_norm": 2.0279852481671714, + "learning_rate": 1.9999474479300335e-05, + "loss": 1.2381, + "step": 241 + }, + { + "epoch": 0.03330580787228186, + "grad_norm": 1.792431506777118, + "learning_rate": 1.9999427788870607e-05, + "loss": 1.2443, + "step": 242 + }, + { + "epoch": 0.03344343517753923, + "grad_norm": 2.3241923034830894, + "learning_rate": 1.9999379111691455e-05, + "loss": 1.3475, + "step": 243 + }, + { + "epoch": 0.03358106248279659, + "grad_norm": 2.0510620159686885, + "learning_rate": 1.999932844777255e-05, + "loss": 1.339, + "step": 244 + }, + { + "epoch": 0.03371868978805395, + "grad_norm": 2.1677207215823655, + "learning_rate": 1.9999275797123955e-05, + "loss": 1.3016, + "step": 245 + }, + { + "epoch": 0.033856317093311314, + "grad_norm": 1.7919750093120932, + "learning_rate": 1.9999221159756137e-05, + "loss": 1.3405, + "step": 246 + }, + { + "epoch": 0.033993944398568676, + "grad_norm": 1.9042317968595934, + "learning_rate": 1.9999164535679948e-05, + "loss": 1.2821, + "step": 247 + }, + { + "epoch": 0.03413157170382604, + "grad_norm": 3.6777603979460642, + "learning_rate": 1.9999105924906642e-05, + "loss": 1.3485, + "step": 248 + }, + { + "epoch": 0.0342691990090834, + "grad_norm": 1.9428729713164685, + "learning_rate": 1.999904532744786e-05, + "loss": 1.3255, + "step": 249 + }, + { + "epoch": 0.03440682631434076, + "grad_norm": 2.383411066942213, + "learning_rate": 1.9998982743315643e-05, + "loss": 1.35, + "step": 250 + }, + { + "epoch": 0.034544453619598126, + "grad_norm": 2.2379833061766097, + "learning_rate": 1.9998918172522422e-05, + "loss": 1.4816, + "step": 251 + }, + { + "epoch": 0.03468208092485549, + "grad_norm": 1.923733923959379, + "learning_rate": 1.9998851615081037e-05, + "loss": 1.3208, + "step": 252 + }, + { + "epoch": 0.03481970823011286, + "grad_norm": 2.0879502477647005, + "learning_rate": 1.9998783071004704e-05, + "loss": 1.4089, + "step": 253 + }, + { + "epoch": 0.03495733553537022, + "grad_norm": 1.965937217365374, + "learning_rate": 1.999871254030704e-05, + "loss": 1.3721, + "step": 254 + }, + { + "epoch": 0.03509496284062758, + "grad_norm": 2.1222299354082796, + "learning_rate": 1.9998640023002065e-05, + "loss": 1.3264, + "step": 255 + }, + { + "epoch": 0.035232590145884944, + "grad_norm": 1.9253363012862292, + "learning_rate": 1.9998565519104182e-05, + "loss": 1.3766, + "step": 256 + }, + { + "epoch": 0.035370217451142306, + "grad_norm": 2.1348805530495536, + "learning_rate": 
1.9998489028628197e-05, + "loss": 1.3228, + "step": 257 + }, + { + "epoch": 0.03550784475639967, + "grad_norm": 2.044301356776676, + "learning_rate": 1.9998410551589308e-05, + "loss": 1.3664, + "step": 258 + }, + { + "epoch": 0.03564547206165703, + "grad_norm": 1.9081403517070847, + "learning_rate": 1.99983300880031e-05, + "loss": 1.3123, + "step": 259 + }, + { + "epoch": 0.03578309936691439, + "grad_norm": 2.3317066324035514, + "learning_rate": 1.9998247637885574e-05, + "loss": 1.4008, + "step": 260 + }, + { + "epoch": 0.035920726672171756, + "grad_norm": 1.8314659800631017, + "learning_rate": 1.9998163201253097e-05, + "loss": 1.2814, + "step": 261 + }, + { + "epoch": 0.036058353977429125, + "grad_norm": 2.0508282899685923, + "learning_rate": 1.999807677812245e-05, + "loss": 1.3333, + "step": 262 + }, + { + "epoch": 0.03619598128268649, + "grad_norm": 2.299867489692615, + "learning_rate": 1.9997988368510812e-05, + "loss": 1.2614, + "step": 263 + }, + { + "epoch": 0.03633360858794385, + "grad_norm": 1.8252336045175042, + "learning_rate": 1.9997897972435744e-05, + "loss": 1.3035, + "step": 264 + }, + { + "epoch": 0.03647123589320121, + "grad_norm": 2.48587944963577, + "learning_rate": 1.99978055899152e-05, + "loss": 1.4024, + "step": 265 + }, + { + "epoch": 0.036608863198458574, + "grad_norm": 2.2016066964884375, + "learning_rate": 1.999771122096754e-05, + "loss": 1.3368, + "step": 266 + }, + { + "epoch": 0.03674649050371594, + "grad_norm": 2.257269395715839, + "learning_rate": 1.9997614865611515e-05, + "loss": 1.3245, + "step": 267 + }, + { + "epoch": 0.0368841178089733, + "grad_norm": 1.9392772728395122, + "learning_rate": 1.9997516523866273e-05, + "loss": 1.3736, + "step": 268 + }, + { + "epoch": 0.03702174511423066, + "grad_norm": 1.800868859241569, + "learning_rate": 1.9997416195751343e-05, + "loss": 1.2669, + "step": 269 + }, + { + "epoch": 0.037159372419488024, + "grad_norm": 1.8835866861637278, + "learning_rate": 1.999731388128667e-05, + "loss": 1.3274, + "step": 270 + }, + { + "epoch": 0.037296999724745386, + "grad_norm": 1.86509997187314, + "learning_rate": 1.999720958049257e-05, + "loss": 1.2804, + "step": 271 + }, + { + "epoch": 0.037434627030002755, + "grad_norm": 1.9535121874766557, + "learning_rate": 1.999710329338978e-05, + "loss": 1.1958, + "step": 272 + }, + { + "epoch": 0.03757225433526012, + "grad_norm": 1.8226933365592513, + "learning_rate": 1.9996995019999408e-05, + "loss": 1.2574, + "step": 273 + }, + { + "epoch": 0.03770988164051748, + "grad_norm": 2.1225172773983725, + "learning_rate": 1.999688476034297e-05, + "loss": 1.2712, + "step": 274 + }, + { + "epoch": 0.03784750894577484, + "grad_norm": 1.8854002368178748, + "learning_rate": 1.9996772514442372e-05, + "loss": 1.3378, + "step": 275 + }, + { + "epoch": 0.037985136251032205, + "grad_norm": 1.9575746838816686, + "learning_rate": 1.999665828231992e-05, + "loss": 1.326, + "step": 276 + }, + { + "epoch": 0.03812276355628957, + "grad_norm": 1.992758874242281, + "learning_rate": 1.9996542063998302e-05, + "loss": 1.3026, + "step": 277 + }, + { + "epoch": 0.03826039086154693, + "grad_norm": 2.0898539554785738, + "learning_rate": 1.999642385950061e-05, + "loss": 1.243, + "step": 278 + }, + { + "epoch": 0.03839801816680429, + "grad_norm": 2.0479644406210724, + "learning_rate": 1.9996303668850343e-05, + "loss": 1.2691, + "step": 279 + }, + { + "epoch": 0.038535645472061654, + "grad_norm": 1.7496656826647126, + "learning_rate": 1.9996181492071365e-05, + "loss": 1.2616, + "step": 280 + }, + { + "epoch": 0.03867327277731902, 
+ "grad_norm": 1.9329945252568672, + "learning_rate": 1.999605732918796e-05, + "loss": 1.2337, + "step": 281 + }, + { + "epoch": 0.038810900082576386, + "grad_norm": 1.897288648846803, + "learning_rate": 1.9995931180224794e-05, + "loss": 1.3319, + "step": 282 + }, + { + "epoch": 0.03894852738783375, + "grad_norm": 1.971862006752804, + "learning_rate": 1.9995803045206932e-05, + "loss": 1.2937, + "step": 283 + }, + { + "epoch": 0.03908615469309111, + "grad_norm": 1.97870082633005, + "learning_rate": 1.9995672924159833e-05, + "loss": 1.3127, + "step": 284 + }, + { + "epoch": 0.03922378199834847, + "grad_norm": 1.9065868730995048, + "learning_rate": 1.9995540817109348e-05, + "loss": 1.377, + "step": 285 + }, + { + "epoch": 0.039361409303605835, + "grad_norm": 1.9755311239559321, + "learning_rate": 1.999540672408173e-05, + "loss": 1.2757, + "step": 286 + }, + { + "epoch": 0.0394990366088632, + "grad_norm": 1.8140810813211568, + "learning_rate": 1.9995270645103612e-05, + "loss": 1.2681, + "step": 287 + }, + { + "epoch": 0.03963666391412056, + "grad_norm": 2.0146856861227542, + "learning_rate": 1.9995132580202043e-05, + "loss": 1.323, + "step": 288 + }, + { + "epoch": 0.03977429121937792, + "grad_norm": 2.01051832955701, + "learning_rate": 1.9994992529404448e-05, + "loss": 1.2374, + "step": 289 + }, + { + "epoch": 0.039911918524635284, + "grad_norm": 1.8492282150780337, + "learning_rate": 1.9994850492738653e-05, + "loss": 1.2018, + "step": 290 + }, + { + "epoch": 0.040049545829892653, + "grad_norm": 1.9389421531955668, + "learning_rate": 1.9994706470232878e-05, + "loss": 1.3533, + "step": 291 + }, + { + "epoch": 0.040187173135150016, + "grad_norm": 2.172373919946752, + "learning_rate": 1.999456046191574e-05, + "loss": 1.3005, + "step": 292 + }, + { + "epoch": 0.04032480044040738, + "grad_norm": 1.9029291943778814, + "learning_rate": 1.9994412467816252e-05, + "loss": 1.3316, + "step": 293 + }, + { + "epoch": 0.04046242774566474, + "grad_norm": 2.036563530414646, + "learning_rate": 1.9994262487963814e-05, + "loss": 1.3751, + "step": 294 + }, + { + "epoch": 0.0406000550509221, + "grad_norm": 2.0099481046515573, + "learning_rate": 1.9994110522388227e-05, + "loss": 1.3179, + "step": 295 + }, + { + "epoch": 0.040737682356179465, + "grad_norm": 1.9347221479381318, + "learning_rate": 1.999395657111968e-05, + "loss": 1.3262, + "step": 296 + }, + { + "epoch": 0.04087530966143683, + "grad_norm": 1.8746567728621637, + "learning_rate": 1.999380063418877e-05, + "loss": 1.2625, + "step": 297 + }, + { + "epoch": 0.04101293696669419, + "grad_norm": 2.000673507533951, + "learning_rate": 1.999364271162647e-05, + "loss": 1.2587, + "step": 298 + }, + { + "epoch": 0.04115056427195155, + "grad_norm": 2.1470349843525955, + "learning_rate": 1.9993482803464163e-05, + "loss": 1.3166, + "step": 299 + }, + { + "epoch": 0.04128819157720892, + "grad_norm": 2.082712481226252, + "learning_rate": 1.9993320909733618e-05, + "loss": 1.2266, + "step": 300 + }, + { + "epoch": 0.041425818882466284, + "grad_norm": 1.8518462224302648, + "learning_rate": 1.9993157030467002e-05, + "loss": 1.2869, + "step": 301 + }, + { + "epoch": 0.041563446187723646, + "grad_norm": 1.9955519906665373, + "learning_rate": 1.9992991165696875e-05, + "loss": 1.1822, + "step": 302 + }, + { + "epoch": 0.04170107349298101, + "grad_norm": 2.2311341822797583, + "learning_rate": 1.9992823315456197e-05, + "loss": 1.3345, + "step": 303 + }, + { + "epoch": 0.04183870079823837, + "grad_norm": 2.0750759989995444, + "learning_rate": 1.9992653479778307e-05, + "loss": 
1.2559, + "step": 304 + }, + { + "epoch": 0.04197632810349573, + "grad_norm": 1.9139735838239553, + "learning_rate": 1.9992481658696957e-05, + "loss": 1.2901, + "step": 305 + }, + { + "epoch": 0.042113955408753095, + "grad_norm": 1.9785785414243133, + "learning_rate": 1.9992307852246283e-05, + "loss": 1.3835, + "step": 306 + }, + { + "epoch": 0.04225158271401046, + "grad_norm": 1.950811371080704, + "learning_rate": 1.9992132060460824e-05, + "loss": 1.2565, + "step": 307 + }, + { + "epoch": 0.04238921001926782, + "grad_norm": 1.8806893274339707, + "learning_rate": 1.9991954283375494e-05, + "loss": 1.2616, + "step": 308 + }, + { + "epoch": 0.04252683732452518, + "grad_norm": 1.9627813991714094, + "learning_rate": 1.9991774521025625e-05, + "loss": 1.285, + "step": 309 + }, + { + "epoch": 0.04266446462978255, + "grad_norm": 2.1000262070869478, + "learning_rate": 1.9991592773446934e-05, + "loss": 1.2997, + "step": 310 + }, + { + "epoch": 0.042802091935039914, + "grad_norm": 2.2443392965099385, + "learning_rate": 1.999140904067553e-05, + "loss": 1.3041, + "step": 311 + }, + { + "epoch": 0.042939719240297276, + "grad_norm": 1.904198006761273, + "learning_rate": 1.9991223322747914e-05, + "loss": 1.3205, + "step": 312 + }, + { + "epoch": 0.04307734654555464, + "grad_norm": 2.037733430757215, + "learning_rate": 1.9991035619700988e-05, + "loss": 1.3453, + "step": 313 + }, + { + "epoch": 0.043214973850812, + "grad_norm": 2.0271634868244717, + "learning_rate": 1.9990845931572048e-05, + "loss": 1.3254, + "step": 314 + }, + { + "epoch": 0.04335260115606936, + "grad_norm": 2.056518991454578, + "learning_rate": 1.999065425839878e-05, + "loss": 1.2907, + "step": 315 + }, + { + "epoch": 0.043490228461326726, + "grad_norm": 1.9450411910085335, + "learning_rate": 1.999046060021927e-05, + "loss": 1.246, + "step": 316 + }, + { + "epoch": 0.04362785576658409, + "grad_norm": 2.3013272256160477, + "learning_rate": 1.9990264957071996e-05, + "loss": 1.3439, + "step": 317 + }, + { + "epoch": 0.04376548307184145, + "grad_norm": 1.7469933983299086, + "learning_rate": 1.999006732899582e-05, + "loss": 1.4132, + "step": 318 + }, + { + "epoch": 0.04390311037709882, + "grad_norm": 1.9843759897211115, + "learning_rate": 1.998986771603002e-05, + "loss": 1.2616, + "step": 319 + }, + { + "epoch": 0.04404073768235618, + "grad_norm": 2.0912297698851816, + "learning_rate": 1.998966611821425e-05, + "loss": 1.2584, + "step": 320 + }, + { + "epoch": 0.044178364987613544, + "grad_norm": 1.9979224115725074, + "learning_rate": 1.9989462535588563e-05, + "loss": 1.2447, + "step": 321 + }, + { + "epoch": 0.04431599229287091, + "grad_norm": 1.8046442671301806, + "learning_rate": 1.9989256968193412e-05, + "loss": 1.305, + "step": 322 + }, + { + "epoch": 0.04445361959812827, + "grad_norm": 1.8304603195229936, + "learning_rate": 1.9989049416069643e-05, + "loss": 1.2999, + "step": 323 + }, + { + "epoch": 0.04459124690338563, + "grad_norm": 1.6554488060689538, + "learning_rate": 1.998883987925849e-05, + "loss": 1.1887, + "step": 324 + }, + { + "epoch": 0.044728874208642994, + "grad_norm": 2.2301382251330946, + "learning_rate": 1.998862835780158e-05, + "loss": 1.3089, + "step": 325 + }, + { + "epoch": 0.044866501513900356, + "grad_norm": 2.012467783875781, + "learning_rate": 1.9988414851740948e-05, + "loss": 1.336, + "step": 326 + }, + { + "epoch": 0.04500412881915772, + "grad_norm": 2.039803083423073, + "learning_rate": 1.9988199361119012e-05, + "loss": 1.2423, + "step": 327 + }, + { + "epoch": 0.04514175612441508, + "grad_norm": 
1.7328938639393288, + "learning_rate": 1.9987981885978587e-05, + "loss": 1.1952, + "step": 328 + }, + { + "epoch": 0.04527938342967245, + "grad_norm": 1.8719596794351048, + "learning_rate": 1.998776242636288e-05, + "loss": 1.3441, + "step": 329 + }, + { + "epoch": 0.04541701073492981, + "grad_norm": 2.1170910662433444, + "learning_rate": 1.9987540982315497e-05, + "loss": 1.3259, + "step": 330 + }, + { + "epoch": 0.045554638040187174, + "grad_norm": 1.8939646339648109, + "learning_rate": 1.9987317553880435e-05, + "loss": 1.2743, + "step": 331 + }, + { + "epoch": 0.04569226534544454, + "grad_norm": 2.0732442964571827, + "learning_rate": 1.9987092141102088e-05, + "loss": 1.3028, + "step": 332 + }, + { + "epoch": 0.0458298926507019, + "grad_norm": 1.9756422826790319, + "learning_rate": 1.998686474402524e-05, + "loss": 1.2877, + "step": 333 + }, + { + "epoch": 0.04596751995595926, + "grad_norm": 2.081035273139465, + "learning_rate": 1.998663536269507e-05, + "loss": 1.3367, + "step": 334 + }, + { + "epoch": 0.046105147261216624, + "grad_norm": 1.9192226676341722, + "learning_rate": 1.998640399715716e-05, + "loss": 1.3373, + "step": 335 + }, + { + "epoch": 0.046242774566473986, + "grad_norm": 1.9271005799805245, + "learning_rate": 1.9986170647457473e-05, + "loss": 1.3318, + "step": 336 + }, + { + "epoch": 0.04638040187173135, + "grad_norm": 2.19044500789315, + "learning_rate": 1.998593531364238e-05, + "loss": 1.2437, + "step": 337 + }, + { + "epoch": 0.04651802917698872, + "grad_norm": 1.9494024594105408, + "learning_rate": 1.9985697995758623e-05, + "loss": 1.2268, + "step": 338 + }, + { + "epoch": 0.04665565648224608, + "grad_norm": 1.8829341282836296, + "learning_rate": 1.9985458693853367e-05, + "loss": 1.2664, + "step": 339 + }, + { + "epoch": 0.04679328378750344, + "grad_norm": 1.8870065990956553, + "learning_rate": 1.9985217407974157e-05, + "loss": 1.218, + "step": 340 + }, + { + "epoch": 0.046930911092760805, + "grad_norm": 2.060396980479203, + "learning_rate": 1.9984974138168927e-05, + "loss": 1.2898, + "step": 341 + }, + { + "epoch": 0.04706853839801817, + "grad_norm": 1.9419054971590057, + "learning_rate": 1.998472888448602e-05, + "loss": 1.3088, + "step": 342 + }, + { + "epoch": 0.04720616570327553, + "grad_norm": 1.6749626290520405, + "learning_rate": 1.9984481646974156e-05, + "loss": 1.1737, + "step": 343 + }, + { + "epoch": 0.04734379300853289, + "grad_norm": 2.07319264303262, + "learning_rate": 1.998423242568246e-05, + "loss": 1.3571, + "step": 344 + }, + { + "epoch": 0.047481420313790254, + "grad_norm": 2.049637865300209, + "learning_rate": 1.9983981220660453e-05, + "loss": 1.2898, + "step": 345 + }, + { + "epoch": 0.047619047619047616, + "grad_norm": 1.7279163765246852, + "learning_rate": 1.9983728031958045e-05, + "loss": 1.3704, + "step": 346 + }, + { + "epoch": 0.04775667492430498, + "grad_norm": 2.19329447436634, + "learning_rate": 1.9983472859625538e-05, + "loss": 1.2793, + "step": 347 + }, + { + "epoch": 0.04789430222956235, + "grad_norm": 2.0000656809954114, + "learning_rate": 1.998321570371363e-05, + "loss": 1.3677, + "step": 348 + }, + { + "epoch": 0.04803192953481971, + "grad_norm": 2.1140808385113994, + "learning_rate": 1.998295656427342e-05, + "loss": 1.2437, + "step": 349 + }, + { + "epoch": 0.04816955684007707, + "grad_norm": 2.2620320368775313, + "learning_rate": 1.998269544135639e-05, + "loss": 1.1969, + "step": 350 + }, + { + "epoch": 0.048307184145334435, + "grad_norm": 2.035491051074993, + "learning_rate": 1.9982432335014424e-05, + "loss": 1.2543, + "step": 351 + 
}, + { + "epoch": 0.0484448114505918, + "grad_norm": 1.9188178767874544, + "learning_rate": 1.99821672452998e-05, + "loss": 1.3715, + "step": 352 + }, + { + "epoch": 0.04858243875584916, + "grad_norm": 1.773103606908032, + "learning_rate": 1.9981900172265183e-05, + "loss": 1.2937, + "step": 353 + }, + { + "epoch": 0.04872006606110652, + "grad_norm": 1.8255602342492092, + "learning_rate": 1.998163111596364e-05, + "loss": 1.2485, + "step": 354 + }, + { + "epoch": 0.048857693366363884, + "grad_norm": 1.8177790018006474, + "learning_rate": 1.998136007644863e-05, + "loss": 1.2067, + "step": 355 + }, + { + "epoch": 0.04899532067162125, + "grad_norm": 1.8523525631451863, + "learning_rate": 1.9981087053774e-05, + "loss": 1.2702, + "step": 356 + }, + { + "epoch": 0.049132947976878616, + "grad_norm": 1.9180926138060057, + "learning_rate": 1.9980812047993998e-05, + "loss": 1.2702, + "step": 357 + }, + { + "epoch": 0.04927057528213598, + "grad_norm": 1.8398547657295286, + "learning_rate": 1.9980535059163268e-05, + "loss": 1.3007, + "step": 358 + }, + { + "epoch": 0.04940820258739334, + "grad_norm": 2.248073413784736, + "learning_rate": 1.9980256087336837e-05, + "loss": 1.374, + "step": 359 + }, + { + "epoch": 0.0495458298926507, + "grad_norm": 2.1565660311421055, + "learning_rate": 1.9979975132570136e-05, + "loss": 1.3018, + "step": 360 + }, + { + "epoch": 0.049683457197908065, + "grad_norm": 2.1246831299298248, + "learning_rate": 1.997969219491899e-05, + "loss": 1.264, + "step": 361 + }, + { + "epoch": 0.04982108450316543, + "grad_norm": 1.914329865097868, + "learning_rate": 1.997940727443961e-05, + "loss": 1.2822, + "step": 362 + }, + { + "epoch": 0.04995871180842279, + "grad_norm": 1.8992103495887487, + "learning_rate": 1.997912037118861e-05, + "loss": 1.3134, + "step": 363 + }, + { + "epoch": 0.05009633911368015, + "grad_norm": 1.9877935298192693, + "learning_rate": 1.997883148522299e-05, + "loss": 1.3024, + "step": 364 + }, + { + "epoch": 0.050233966418937515, + "grad_norm": 2.0183664107663164, + "learning_rate": 1.997854061660015e-05, + "loss": 1.3124, + "step": 365 + }, + { + "epoch": 0.05037159372419488, + "grad_norm": 2.100563135989665, + "learning_rate": 1.997824776537788e-05, + "loss": 1.3131, + "step": 366 + }, + { + "epoch": 0.050509221029452246, + "grad_norm": 1.9323042709870288, + "learning_rate": 1.9977952931614367e-05, + "loss": 1.3546, + "step": 367 + }, + { + "epoch": 0.05064684833470961, + "grad_norm": 2.0135447926682417, + "learning_rate": 1.9977656115368188e-05, + "loss": 1.2968, + "step": 368 + }, + { + "epoch": 0.05078447563996697, + "grad_norm": 2.0423576289706395, + "learning_rate": 1.997735731669832e-05, + "loss": 1.3582, + "step": 369 + }, + { + "epoch": 0.05092210294522433, + "grad_norm": 1.93104028666782, + "learning_rate": 1.9977056535664126e-05, + "loss": 1.2859, + "step": 370 + }, + { + "epoch": 0.051059730250481696, + "grad_norm": 2.0062015604170735, + "learning_rate": 1.997675377232537e-05, + "loss": 1.338, + "step": 371 + }, + { + "epoch": 0.05119735755573906, + "grad_norm": 1.7190018801986924, + "learning_rate": 1.9976449026742207e-05, + "loss": 1.1972, + "step": 372 + }, + { + "epoch": 0.05133498486099642, + "grad_norm": 2.0383342953000696, + "learning_rate": 1.9976142298975185e-05, + "loss": 1.3037, + "step": 373 + }, + { + "epoch": 0.05147261216625378, + "grad_norm": 1.9237636324072152, + "learning_rate": 1.9975833589085248e-05, + "loss": 1.2522, + "step": 374 + }, + { + "epoch": 0.051610239471511145, + "grad_norm": 2.019093419845557, + "learning_rate": 
1.997552289713373e-05, + "loss": 1.3249, + "step": 375 + }, + { + "epoch": 0.051747866776768514, + "grad_norm": 2.0185035479057434, + "learning_rate": 1.9975210223182362e-05, + "loss": 1.274, + "step": 376 + }, + { + "epoch": 0.051885494082025876, + "grad_norm": 2.1183820265903948, + "learning_rate": 1.997489556729327e-05, + "loss": 1.2689, + "step": 377 + }, + { + "epoch": 0.05202312138728324, + "grad_norm": 1.9445286126627803, + "learning_rate": 1.997457892952897e-05, + "loss": 1.2651, + "step": 378 + }, + { + "epoch": 0.0521607486925406, + "grad_norm": 1.8633943800092068, + "learning_rate": 1.997426030995237e-05, + "loss": 1.1868, + "step": 379 + }, + { + "epoch": 0.05229837599779796, + "grad_norm": 1.8001205037987675, + "learning_rate": 1.9973939708626783e-05, + "loss": 1.2185, + "step": 380 + }, + { + "epoch": 0.052436003303055326, + "grad_norm": 1.691984854979059, + "learning_rate": 1.9973617125615905e-05, + "loss": 1.2676, + "step": 381 + }, + { + "epoch": 0.05257363060831269, + "grad_norm": 1.9956581709026606, + "learning_rate": 1.9973292560983824e-05, + "loss": 1.3274, + "step": 382 + }, + { + "epoch": 0.05271125791357005, + "grad_norm": 1.9127609189649284, + "learning_rate": 1.9972966014795037e-05, + "loss": 1.2833, + "step": 383 + }, + { + "epoch": 0.05284888521882741, + "grad_norm": 2.36956331830658, + "learning_rate": 1.997263748711441e-05, + "loss": 1.3796, + "step": 384 + }, + { + "epoch": 0.052986512524084775, + "grad_norm": 1.8591358110423712, + "learning_rate": 1.997230697800723e-05, + "loss": 1.206, + "step": 385 + }, + { + "epoch": 0.053124139829342144, + "grad_norm": 1.7971418097910214, + "learning_rate": 1.9971974487539157e-05, + "loss": 1.2734, + "step": 386 + }, + { + "epoch": 0.05326176713459951, + "grad_norm": 2.162818982720296, + "learning_rate": 1.997164001577626e-05, + "loss": 1.3393, + "step": 387 + }, + { + "epoch": 0.05339939443985687, + "grad_norm": 1.9010844236248636, + "learning_rate": 1.997130356278498e-05, + "loss": 1.2105, + "step": 388 + }, + { + "epoch": 0.05353702174511423, + "grad_norm": 1.8862243986294585, + "learning_rate": 1.9970965128632182e-05, + "loss": 1.299, + "step": 389 + }, + { + "epoch": 0.053674649050371594, + "grad_norm": 1.8811403580279635, + "learning_rate": 1.9970624713385098e-05, + "loss": 1.1756, + "step": 390 + }, + { + "epoch": 0.053812276355628956, + "grad_norm": 1.934476324240299, + "learning_rate": 1.9970282317111366e-05, + "loss": 1.3337, + "step": 391 + }, + { + "epoch": 0.05394990366088632, + "grad_norm": 1.9113824359188867, + "learning_rate": 1.9969937939879015e-05, + "loss": 1.2909, + "step": 392 + }, + { + "epoch": 0.05408753096614368, + "grad_norm": 2.17083176906069, + "learning_rate": 1.996959158175647e-05, + "loss": 1.2969, + "step": 393 + }, + { + "epoch": 0.05422515827140104, + "grad_norm": 1.863157109046354, + "learning_rate": 1.9969243242812548e-05, + "loss": 1.3732, + "step": 394 + }, + { + "epoch": 0.05436278557665841, + "grad_norm": 1.918559551273086, + "learning_rate": 1.9968892923116458e-05, + "loss": 1.2147, + "step": 395 + }, + { + "epoch": 0.054500412881915775, + "grad_norm": 1.8970874447218284, + "learning_rate": 1.99685406227378e-05, + "loss": 1.2886, + "step": 396 + }, + { + "epoch": 0.05463804018717314, + "grad_norm": 1.7106394486847707, + "learning_rate": 1.9968186341746577e-05, + "loss": 1.2718, + "step": 397 + }, + { + "epoch": 0.0547756674924305, + "grad_norm": 2.194502873483654, + "learning_rate": 1.9967830080213177e-05, + "loss": 1.3638, + "step": 398 + }, + { + "epoch": 0.05491329479768786, + 
"grad_norm": 1.8064799309541488, + "learning_rate": 1.9967471838208384e-05, + "loss": 1.2916, + "step": 399 + }, + { + "epoch": 0.055050922102945224, + "grad_norm": 1.8271244373348412, + "learning_rate": 1.9967111615803375e-05, + "loss": 1.1705, + "step": 400 + }, + { + "epoch": 0.055188549408202586, + "grad_norm": 1.6791195988031211, + "learning_rate": 1.9966749413069725e-05, + "loss": 1.2967, + "step": 401 + }, + { + "epoch": 0.05532617671345995, + "grad_norm": 2.2396720681627222, + "learning_rate": 1.9966385230079398e-05, + "loss": 1.2927, + "step": 402 + }, + { + "epoch": 0.05546380401871731, + "grad_norm": 2.0996442757188944, + "learning_rate": 1.996601906690475e-05, + "loss": 1.4073, + "step": 403 + }, + { + "epoch": 0.05560143132397467, + "grad_norm": 1.8849503810783812, + "learning_rate": 1.9965650923618533e-05, + "loss": 1.2971, + "step": 404 + }, + { + "epoch": 0.05573905862923204, + "grad_norm": 1.7862736226937408, + "learning_rate": 1.996528080029389e-05, + "loss": 1.1044, + "step": 405 + }, + { + "epoch": 0.055876685934489405, + "grad_norm": 1.7587660367767275, + "learning_rate": 1.9964908697004365e-05, + "loss": 1.2746, + "step": 406 + }, + { + "epoch": 0.05601431323974677, + "grad_norm": 1.9919955565245953, + "learning_rate": 1.9964534613823887e-05, + "loss": 1.2618, + "step": 407 + }, + { + "epoch": 0.05615194054500413, + "grad_norm": 2.2063945668263165, + "learning_rate": 1.996415855082678e-05, + "loss": 1.2899, + "step": 408 + }, + { + "epoch": 0.05628956785026149, + "grad_norm": 1.8569914802032457, + "learning_rate": 1.9963780508087762e-05, + "loss": 1.2451, + "step": 409 + }, + { + "epoch": 0.056427195155518854, + "grad_norm": 1.9653190955473436, + "learning_rate": 1.996340048568195e-05, + "loss": 1.2956, + "step": 410 + }, + { + "epoch": 0.056564822460776217, + "grad_norm": 1.8139149050748382, + "learning_rate": 1.996301848368484e-05, + "loss": 1.2045, + "step": 411 + }, + { + "epoch": 0.05670244976603358, + "grad_norm": 1.564938337794532, + "learning_rate": 1.9962634502172337e-05, + "loss": 1.237, + "step": 412 + }, + { + "epoch": 0.05684007707129094, + "grad_norm": 1.934555588251456, + "learning_rate": 1.9962248541220736e-05, + "loss": 1.2473, + "step": 413 + }, + { + "epoch": 0.05697770437654831, + "grad_norm": 1.6613692931490067, + "learning_rate": 1.9961860600906715e-05, + "loss": 1.209, + "step": 414 + }, + { + "epoch": 0.05711533168180567, + "grad_norm": 1.7323167090809082, + "learning_rate": 1.9961470681307356e-05, + "loss": 1.1927, + "step": 415 + }, + { + "epoch": 0.057252958987063035, + "grad_norm": 1.9121359938175644, + "learning_rate": 1.996107878250013e-05, + "loss": 1.2966, + "step": 416 + }, + { + "epoch": 0.0573905862923204, + "grad_norm": 2.276850724118106, + "learning_rate": 1.9960684904562906e-05, + "loss": 1.3582, + "step": 417 + }, + { + "epoch": 0.05752821359757776, + "grad_norm": 1.8421745621957972, + "learning_rate": 1.9960289047573937e-05, + "loss": 1.3685, + "step": 418 + }, + { + "epoch": 0.05766584090283512, + "grad_norm": 1.9317876497639175, + "learning_rate": 1.9959891211611873e-05, + "loss": 1.2558, + "step": 419 + }, + { + "epoch": 0.057803468208092484, + "grad_norm": 1.7312446850719045, + "learning_rate": 1.995949139675576e-05, + "loss": 1.2059, + "step": 420 + }, + { + "epoch": 0.05794109551334985, + "grad_norm": 1.876212419182311, + "learning_rate": 1.9959089603085043e-05, + "loss": 1.2748, + "step": 421 + }, + { + "epoch": 0.05807872281860721, + "grad_norm": 1.9622129284643401, + "learning_rate": 1.995868583067954e-05, + "loss": 
1.2435, + "step": 422 + }, + { + "epoch": 0.05821635012386457, + "grad_norm": 1.9174901875998451, + "learning_rate": 1.995828007961949e-05, + "loss": 1.3094, + "step": 423 + }, + { + "epoch": 0.05835397742912194, + "grad_norm": 2.110436852907964, + "learning_rate": 1.9957872349985497e-05, + "loss": 1.1854, + "step": 424 + }, + { + "epoch": 0.0584916047343793, + "grad_norm": 1.8515117355286672, + "learning_rate": 1.9957462641858578e-05, + "loss": 1.3051, + "step": 425 + }, + { + "epoch": 0.058629232039636665, + "grad_norm": 2.0012891728170987, + "learning_rate": 1.9957050955320132e-05, + "loss": 1.3385, + "step": 426 + }, + { + "epoch": 0.05876685934489403, + "grad_norm": 1.84818568117921, + "learning_rate": 1.995663729045196e-05, + "loss": 1.2645, + "step": 427 + }, + { + "epoch": 0.05890448665015139, + "grad_norm": 1.6990181462479568, + "learning_rate": 1.995622164733625e-05, + "loss": 1.302, + "step": 428 + }, + { + "epoch": 0.05904211395540875, + "grad_norm": 2.0195448260835747, + "learning_rate": 1.9955804026055588e-05, + "loss": 1.357, + "step": 429 + }, + { + "epoch": 0.059179741260666115, + "grad_norm": 2.2826671523874134, + "learning_rate": 1.995538442669294e-05, + "loss": 1.2609, + "step": 430 + }, + { + "epoch": 0.05931736856592348, + "grad_norm": 1.855375433612593, + "learning_rate": 1.9954962849331683e-05, + "loss": 1.3405, + "step": 431 + }, + { + "epoch": 0.05945499587118084, + "grad_norm": 1.8944896973759686, + "learning_rate": 1.995453929405558e-05, + "loss": 1.2659, + "step": 432 + }, + { + "epoch": 0.05959262317643821, + "grad_norm": 2.0665824320121193, + "learning_rate": 1.995411376094878e-05, + "loss": 1.3329, + "step": 433 + }, + { + "epoch": 0.05973025048169557, + "grad_norm": 1.890664622189023, + "learning_rate": 1.9953686250095832e-05, + "loss": 1.2831, + "step": 434 + }, + { + "epoch": 0.05986787778695293, + "grad_norm": 1.9210316852899385, + "learning_rate": 1.9953256761581676e-05, + "loss": 1.2864, + "step": 435 + }, + { + "epoch": 0.060005505092210296, + "grad_norm": 2.0807789068763256, + "learning_rate": 1.9952825295491646e-05, + "loss": 1.2978, + "step": 436 + }, + { + "epoch": 0.06014313239746766, + "grad_norm": 1.9289002736800502, + "learning_rate": 1.9952391851911472e-05, + "loss": 1.2796, + "step": 437 + }, + { + "epoch": 0.06028075970272502, + "grad_norm": 1.7939765493175006, + "learning_rate": 1.995195643092727e-05, + "loss": 1.1806, + "step": 438 + }, + { + "epoch": 0.06041838700798238, + "grad_norm": 1.929164866322231, + "learning_rate": 1.9951519032625553e-05, + "loss": 1.233, + "step": 439 + }, + { + "epoch": 0.060556014313239745, + "grad_norm": 1.743479810393273, + "learning_rate": 1.9951079657093224e-05, + "loss": 1.2521, + "step": 440 + }, + { + "epoch": 0.06069364161849711, + "grad_norm": 2.0613972824025306, + "learning_rate": 1.9950638304417586e-05, + "loss": 1.2734, + "step": 441 + }, + { + "epoch": 0.06083126892375447, + "grad_norm": 1.8358539949381785, + "learning_rate": 1.9950194974686323e-05, + "loss": 1.2735, + "step": 442 + }, + { + "epoch": 0.06096889622901184, + "grad_norm": 1.8588496519748607, + "learning_rate": 1.994974966798752e-05, + "loss": 1.2849, + "step": 443 + }, + { + "epoch": 0.0611065235342692, + "grad_norm": 1.9323398411409636, + "learning_rate": 1.9949302384409658e-05, + "loss": 1.207, + "step": 444 + }, + { + "epoch": 0.061244150839526564, + "grad_norm": 1.8837049212364023, + "learning_rate": 1.9948853124041605e-05, + "loss": 1.2625, + "step": 445 + }, + { + "epoch": 0.061381778144783926, + "grad_norm": 2.083906517694679, 
+ "learning_rate": 1.994840188697262e-05, + "loss": 1.3255, + "step": 446 + }, + { + "epoch": 0.06151940545004129, + "grad_norm": 2.118115994699614, + "learning_rate": 1.9947948673292355e-05, + "loss": 1.226, + "step": 447 + }, + { + "epoch": 0.06165703275529865, + "grad_norm": 1.9495937007307207, + "learning_rate": 1.9947493483090868e-05, + "loss": 1.275, + "step": 448 + }, + { + "epoch": 0.06179466006055601, + "grad_norm": 1.8613733424514662, + "learning_rate": 1.9947036316458586e-05, + "loss": 1.2404, + "step": 449 + }, + { + "epoch": 0.061932287365813375, + "grad_norm": 2.00336335993132, + "learning_rate": 1.994657717348635e-05, + "loss": 1.2119, + "step": 450 + }, + { + "epoch": 0.06206991467107074, + "grad_norm": 1.7603164089522412, + "learning_rate": 1.994611605426538e-05, + "loss": 1.2477, + "step": 451 + }, + { + "epoch": 0.06220754197632811, + "grad_norm": 1.9460811460971439, + "learning_rate": 1.9945652958887307e-05, + "loss": 1.2729, + "step": 452 + }, + { + "epoch": 0.06234516928158547, + "grad_norm": 2.158714337051103, + "learning_rate": 1.9945187887444125e-05, + "loss": 1.2744, + "step": 453 + }, + { + "epoch": 0.06248279658684283, + "grad_norm": 2.0849629537107606, + "learning_rate": 1.994472084002825e-05, + "loss": 1.2764, + "step": 454 + }, + { + "epoch": 0.0626204238921002, + "grad_norm": 1.8921686259368096, + "learning_rate": 1.9944251816732465e-05, + "loss": 1.2631, + "step": 455 + }, + { + "epoch": 0.06275805119735756, + "grad_norm": 1.8139476210515333, + "learning_rate": 1.994378081764997e-05, + "loss": 1.2492, + "step": 456 + }, + { + "epoch": 0.06289567850261492, + "grad_norm": 1.658948647418169, + "learning_rate": 1.9943307842874343e-05, + "loss": 1.265, + "step": 457 + }, + { + "epoch": 0.06303330580787228, + "grad_norm": 1.957374565040711, + "learning_rate": 1.9942832892499554e-05, + "loss": 1.3416, + "step": 458 + }, + { + "epoch": 0.06317093311312964, + "grad_norm": 1.784477911383927, + "learning_rate": 1.9942355966619973e-05, + "loss": 1.1849, + "step": 459 + }, + { + "epoch": 0.063308560418387, + "grad_norm": 1.7841123823226424, + "learning_rate": 1.994187706533036e-05, + "loss": 1.2017, + "step": 460 + }, + { + "epoch": 0.06344618772364437, + "grad_norm": 2.023149833569769, + "learning_rate": 1.994139618872586e-05, + "loss": 1.2725, + "step": 461 + }, + { + "epoch": 0.06358381502890173, + "grad_norm": 1.6274094999384983, + "learning_rate": 1.994091333690202e-05, + "loss": 1.2145, + "step": 462 + }, + { + "epoch": 0.06372144233415909, + "grad_norm": 1.9408764622805104, + "learning_rate": 1.994042850995478e-05, + "loss": 1.2045, + "step": 463 + }, + { + "epoch": 0.06385906963941645, + "grad_norm": 1.8871658537140907, + "learning_rate": 1.9939941707980462e-05, + "loss": 1.2864, + "step": 464 + }, + { + "epoch": 0.06399669694467382, + "grad_norm": 2.1552974071740807, + "learning_rate": 1.993945293107579e-05, + "loss": 1.3313, + "step": 465 + }, + { + "epoch": 0.06413432424993118, + "grad_norm": 1.8076542039299266, + "learning_rate": 1.9938962179337877e-05, + "loss": 1.2683, + "step": 466 + }, + { + "epoch": 0.06427195155518856, + "grad_norm": 2.0413606219325544, + "learning_rate": 1.9938469452864228e-05, + "loss": 1.1935, + "step": 467 + }, + { + "epoch": 0.06440957886044592, + "grad_norm": 1.8034038379822015, + "learning_rate": 1.9937974751752742e-05, + "loss": 1.2596, + "step": 468 + }, + { + "epoch": 0.06454720616570328, + "grad_norm": 2.192878316769907, + "learning_rate": 1.993747807610171e-05, + "loss": 1.2511, + "step": 469 + }, + { + "epoch": 
0.06468483347096064, + "grad_norm": 1.9280308180215493, + "learning_rate": 1.993697942600981e-05, + "loss": 1.2295, + "step": 470 + }, + { + "epoch": 0.064822460776218, + "grad_norm": 2.0030590524892764, + "learning_rate": 1.9936478801576126e-05, + "loss": 1.2074, + "step": 471 + }, + { + "epoch": 0.06496008808147537, + "grad_norm": 1.9156370956774154, + "learning_rate": 1.9935976202900113e-05, + "loss": 1.3269, + "step": 472 + }, + { + "epoch": 0.06509771538673273, + "grad_norm": 1.8945829849145701, + "learning_rate": 1.9935471630081643e-05, + "loss": 1.1089, + "step": 473 + }, + { + "epoch": 0.06523534269199009, + "grad_norm": 2.0557742392810447, + "learning_rate": 1.9934965083220962e-05, + "loss": 1.2919, + "step": 474 + }, + { + "epoch": 0.06537296999724745, + "grad_norm": 1.895026334766847, + "learning_rate": 1.993445656241871e-05, + "loss": 1.2658, + "step": 475 + }, + { + "epoch": 0.06551059730250482, + "grad_norm": 2.085726631951332, + "learning_rate": 1.9933946067775926e-05, + "loss": 1.3042, + "step": 476 + }, + { + "epoch": 0.06564822460776218, + "grad_norm": 2.1898164350676605, + "learning_rate": 1.9933433599394044e-05, + "loss": 1.2352, + "step": 477 + }, + { + "epoch": 0.06578585191301954, + "grad_norm": 1.7912662915920923, + "learning_rate": 1.9932919157374875e-05, + "loss": 1.2668, + "step": 478 + }, + { + "epoch": 0.0659234792182769, + "grad_norm": 1.751634242571438, + "learning_rate": 1.993240274182064e-05, + "loss": 1.2506, + "step": 479 + }, + { + "epoch": 0.06606110652353427, + "grad_norm": 1.7499311679189105, + "learning_rate": 1.993188435283394e-05, + "loss": 1.2484, + "step": 480 + }, + { + "epoch": 0.06619873382879163, + "grad_norm": 1.8954821340679753, + "learning_rate": 1.993136399051777e-05, + "loss": 1.3322, + "step": 481 + }, + { + "epoch": 0.06633636113404899, + "grad_norm": 1.99824946164007, + "learning_rate": 1.993084165497552e-05, + "loss": 1.2665, + "step": 482 + }, + { + "epoch": 0.06647398843930635, + "grad_norm": 1.9107533598514703, + "learning_rate": 1.9930317346310973e-05, + "loss": 1.2524, + "step": 483 + }, + { + "epoch": 0.06661161574456372, + "grad_norm": 1.8697004034047635, + "learning_rate": 1.9929791064628298e-05, + "loss": 1.2589, + "step": 484 + }, + { + "epoch": 0.06674924304982108, + "grad_norm": 1.8787892591244844, + "learning_rate": 1.9929262810032066e-05, + "loss": 1.1634, + "step": 485 + }, + { + "epoch": 0.06688687035507845, + "grad_norm": 1.7249769684886362, + "learning_rate": 1.9928732582627227e-05, + "loss": 1.2129, + "step": 486 + }, + { + "epoch": 0.06702449766033582, + "grad_norm": 1.9434460654529309, + "learning_rate": 1.9928200382519137e-05, + "loss": 1.3121, + "step": 487 + }, + { + "epoch": 0.06716212496559318, + "grad_norm": 1.8087670749345492, + "learning_rate": 1.9927666209813533e-05, + "loss": 1.2442, + "step": 488 + }, + { + "epoch": 0.06729975227085054, + "grad_norm": 1.9667890540167725, + "learning_rate": 1.9927130064616545e-05, + "loss": 1.2028, + "step": 489 + }, + { + "epoch": 0.0674373795761079, + "grad_norm": 1.9770404104985562, + "learning_rate": 1.9926591947034703e-05, + "loss": 1.296, + "step": 490 + }, + { + "epoch": 0.06757500688136527, + "grad_norm": 1.9943108838405652, + "learning_rate": 1.9926051857174918e-05, + "loss": 1.1825, + "step": 491 + }, + { + "epoch": 0.06771263418662263, + "grad_norm": 1.8889648957124738, + "learning_rate": 1.9925509795144507e-05, + "loss": 1.2721, + "step": 492 + }, + { + "epoch": 0.06785026149187999, + "grad_norm": 1.7832558671605818, + "learning_rate": 
1.9924965761051162e-05, + "loss": 1.3489, + "step": 493 + }, + { + "epoch": 0.06798788879713735, + "grad_norm": 1.9964227718191707, + "learning_rate": 1.992441975500298e-05, + "loss": 1.1739, + "step": 494 + }, + { + "epoch": 0.06812551610239471, + "grad_norm": 1.7785734577726646, + "learning_rate": 1.9923871777108444e-05, + "loss": 1.3104, + "step": 495 + }, + { + "epoch": 0.06826314340765208, + "grad_norm": 1.854080157068782, + "learning_rate": 1.9923321827476424e-05, + "loss": 1.2536, + "step": 496 + }, + { + "epoch": 0.06840077071290944, + "grad_norm": 1.8296990592019273, + "learning_rate": 1.9922769906216197e-05, + "loss": 1.2576, + "step": 497 + }, + { + "epoch": 0.0685383980181668, + "grad_norm": 1.9168431893427422, + "learning_rate": 1.9922216013437416e-05, + "loss": 1.2094, + "step": 498 + }, + { + "epoch": 0.06867602532342416, + "grad_norm": 1.884493633168247, + "learning_rate": 1.9921660149250136e-05, + "loss": 1.2731, + "step": 499 + }, + { + "epoch": 0.06881365262868153, + "grad_norm": 1.7962972392112313, + "learning_rate": 1.9921102313764792e-05, + "loss": 1.2245, + "step": 500 + }, + { + "epoch": 0.06895127993393889, + "grad_norm": 1.913754930363172, + "learning_rate": 1.9920542507092224e-05, + "loss": 1.1765, + "step": 501 + }, + { + "epoch": 0.06908890723919625, + "grad_norm": 2.058309558958913, + "learning_rate": 1.991998072934366e-05, + "loss": 1.3113, + "step": 502 + }, + { + "epoch": 0.06922653454445361, + "grad_norm": 1.8918350737789915, + "learning_rate": 1.9919416980630716e-05, + "loss": 1.2009, + "step": 503 + }, + { + "epoch": 0.06936416184971098, + "grad_norm": 1.6294278475860247, + "learning_rate": 1.9918851261065396e-05, + "loss": 1.1632, + "step": 504 + }, + { + "epoch": 0.06950178915496835, + "grad_norm": 2.3358958648407957, + "learning_rate": 1.9918283570760106e-05, + "loss": 1.2233, + "step": 505 + }, + { + "epoch": 0.06963941646022571, + "grad_norm": 1.9964956885839813, + "learning_rate": 1.991771390982764e-05, + "loss": 1.1868, + "step": 506 + }, + { + "epoch": 0.06977704376548308, + "grad_norm": 2.1455556527906774, + "learning_rate": 1.9917142278381174e-05, + "loss": 1.3121, + "step": 507 + }, + { + "epoch": 0.06991467107074044, + "grad_norm": 1.8536691004070176, + "learning_rate": 1.9916568676534293e-05, + "loss": 1.3062, + "step": 508 + }, + { + "epoch": 0.0700522983759978, + "grad_norm": 1.8421357833064747, + "learning_rate": 1.9915993104400956e-05, + "loss": 1.264, + "step": 509 + }, + { + "epoch": 0.07018992568125516, + "grad_norm": 1.704082348208926, + "learning_rate": 1.9915415562095526e-05, + "loss": 1.0597, + "step": 510 + }, + { + "epoch": 0.07032755298651253, + "grad_norm": 1.7081094380669708, + "learning_rate": 1.991483604973275e-05, + "loss": 1.2118, + "step": 511 + }, + { + "epoch": 0.07046518029176989, + "grad_norm": 2.137867699063183, + "learning_rate": 1.991425456742777e-05, + "loss": 1.2689, + "step": 512 + }, + { + "epoch": 0.07060280759702725, + "grad_norm": 2.1243982146673672, + "learning_rate": 1.991367111529612e-05, + "loss": 1.2886, + "step": 513 + }, + { + "epoch": 0.07074043490228461, + "grad_norm": 1.8434831560047957, + "learning_rate": 1.991308569345372e-05, + "loss": 1.1534, + "step": 514 + }, + { + "epoch": 0.07087806220754198, + "grad_norm": 2.0962906673915844, + "learning_rate": 1.991249830201689e-05, + "loss": 1.2563, + "step": 515 + }, + { + "epoch": 0.07101568951279934, + "grad_norm": 1.7898084007142196, + "learning_rate": 1.9911908941102336e-05, + "loss": 1.2567, + "step": 516 + }, + { + "epoch": 0.0711533168180567, + 
"grad_norm": 1.8977713607965296, + "learning_rate": 1.991131761082715e-05, + "loss": 1.1845, + "step": 517 + }, + { + "epoch": 0.07129094412331406, + "grad_norm": 2.016993814550598, + "learning_rate": 1.991072431130883e-05, + "loss": 1.2505, + "step": 518 + }, + { + "epoch": 0.07142857142857142, + "grad_norm": 1.9529632818952092, + "learning_rate": 1.9910129042665253e-05, + "loss": 1.2945, + "step": 519 + }, + { + "epoch": 0.07156619873382879, + "grad_norm": 2.0760690048294643, + "learning_rate": 1.9909531805014686e-05, + "loss": 1.2622, + "step": 520 + }, + { + "epoch": 0.07170382603908615, + "grad_norm": 2.091813347100997, + "learning_rate": 1.99089325984758e-05, + "loss": 1.3105, + "step": 521 + }, + { + "epoch": 0.07184145334434351, + "grad_norm": 1.9985380056672712, + "learning_rate": 1.9908331423167644e-05, + "loss": 1.343, + "step": 522 + }, + { + "epoch": 0.07197908064960087, + "grad_norm": 1.9039271946062237, + "learning_rate": 1.9907728279209665e-05, + "loss": 1.2964, + "step": 523 + }, + { + "epoch": 0.07211670795485825, + "grad_norm": 1.925355312411513, + "learning_rate": 1.99071231667217e-05, + "loss": 1.1624, + "step": 524 + }, + { + "epoch": 0.07225433526011561, + "grad_norm": 1.9208508557748165, + "learning_rate": 1.9906516085823974e-05, + "loss": 1.18, + "step": 525 + }, + { + "epoch": 0.07239196256537297, + "grad_norm": 1.8401921667012449, + "learning_rate": 1.9905907036637103e-05, + "loss": 1.2058, + "step": 526 + }, + { + "epoch": 0.07252958987063034, + "grad_norm": 1.8185225338885393, + "learning_rate": 1.9905296019282105e-05, + "loss": 1.2032, + "step": 527 + }, + { + "epoch": 0.0726672171758877, + "grad_norm": 2.0907999865265614, + "learning_rate": 1.990468303388038e-05, + "loss": 1.2156, + "step": 528 + }, + { + "epoch": 0.07280484448114506, + "grad_norm": 1.9473419406852845, + "learning_rate": 1.990406808055371e-05, + "loss": 1.2697, + "step": 529 + }, + { + "epoch": 0.07294247178640242, + "grad_norm": 1.8795191621729275, + "learning_rate": 1.990345115942429e-05, + "loss": 1.2713, + "step": 530 + }, + { + "epoch": 0.07308009909165979, + "grad_norm": 1.7089408163592847, + "learning_rate": 1.9902832270614683e-05, + "loss": 1.2537, + "step": 531 + }, + { + "epoch": 0.07321772639691715, + "grad_norm": 1.938789752281339, + "learning_rate": 1.9902211414247864e-05, + "loss": 1.273, + "step": 532 + }, + { + "epoch": 0.07335535370217451, + "grad_norm": 1.9324931804949115, + "learning_rate": 1.990158859044718e-05, + "loss": 1.3781, + "step": 533 + }, + { + "epoch": 0.07349298100743187, + "grad_norm": 1.864103670510535, + "learning_rate": 1.990096379933638e-05, + "loss": 1.2843, + "step": 534 + }, + { + "epoch": 0.07363060831268924, + "grad_norm": 1.8999361286453935, + "learning_rate": 1.9900337041039605e-05, + "loss": 1.2507, + "step": 535 + }, + { + "epoch": 0.0737682356179466, + "grad_norm": 1.6643939919544746, + "learning_rate": 1.989970831568138e-05, + "loss": 1.237, + "step": 536 + }, + { + "epoch": 0.07390586292320396, + "grad_norm": 1.6263359925684833, + "learning_rate": 1.9899077623386628e-05, + "loss": 1.1924, + "step": 537 + }, + { + "epoch": 0.07404349022846132, + "grad_norm": 1.7678926810869806, + "learning_rate": 1.989844496428065e-05, + "loss": 1.245, + "step": 538 + }, + { + "epoch": 0.07418111753371869, + "grad_norm": 1.854960029020722, + "learning_rate": 1.9897810338489157e-05, + "loss": 1.1983, + "step": 539 + }, + { + "epoch": 0.07431874483897605, + "grad_norm": 1.7955959752445425, + "learning_rate": 1.9897173746138234e-05, + "loss": 1.3088, + "step": 540 
+ }, + { + "epoch": 0.07445637214423341, + "grad_norm": 1.7420834283480413, + "learning_rate": 1.9896535187354365e-05, + "loss": 1.2503, + "step": 541 + }, + { + "epoch": 0.07459399944949077, + "grad_norm": 1.7211647391789828, + "learning_rate": 1.9895894662264423e-05, + "loss": 1.2186, + "step": 542 + }, + { + "epoch": 0.07473162675474815, + "grad_norm": 1.8325303616295099, + "learning_rate": 1.9895252170995675e-05, + "loss": 1.2486, + "step": 543 + }, + { + "epoch": 0.07486925406000551, + "grad_norm": 1.8518483511369566, + "learning_rate": 1.9894607713675765e-05, + "loss": 1.2479, + "step": 544 + }, + { + "epoch": 0.07500688136526287, + "grad_norm": 1.7080146845786037, + "learning_rate": 1.989396129043275e-05, + "loss": 1.241, + "step": 545 + }, + { + "epoch": 0.07514450867052024, + "grad_norm": 1.787776166443195, + "learning_rate": 1.989331290139506e-05, + "loss": 1.2329, + "step": 546 + }, + { + "epoch": 0.0752821359757776, + "grad_norm": 1.9675816082582034, + "learning_rate": 1.989266254669152e-05, + "loss": 1.2143, + "step": 547 + }, + { + "epoch": 0.07541976328103496, + "grad_norm": 2.0264806515707265, + "learning_rate": 1.9892010226451347e-05, + "loss": 1.2665, + "step": 548 + }, + { + "epoch": 0.07555739058629232, + "grad_norm": 1.8518089805026696, + "learning_rate": 1.989135594080415e-05, + "loss": 1.267, + "step": 549 + }, + { + "epoch": 0.07569501789154968, + "grad_norm": 1.9321483022536319, + "learning_rate": 1.9890699689879926e-05, + "loss": 1.2446, + "step": 550 + }, + { + "epoch": 0.07583264519680705, + "grad_norm": 1.8052040599963532, + "learning_rate": 1.989004147380906e-05, + "loss": 1.1975, + "step": 551 + }, + { + "epoch": 0.07597027250206441, + "grad_norm": 1.7004916155362677, + "learning_rate": 1.9889381292722336e-05, + "loss": 1.1435, + "step": 552 + }, + { + "epoch": 0.07610789980732177, + "grad_norm": 1.8804445637446154, + "learning_rate": 1.9888719146750918e-05, + "loss": 1.2393, + "step": 553 + }, + { + "epoch": 0.07624552711257913, + "grad_norm": 1.967804620888914, + "learning_rate": 1.988805503602637e-05, + "loss": 1.2418, + "step": 554 + }, + { + "epoch": 0.0763831544178365, + "grad_norm": 1.7138058240798248, + "learning_rate": 1.9887388960680638e-05, + "loss": 1.2663, + "step": 555 + }, + { + "epoch": 0.07652078172309386, + "grad_norm": 2.113844612507439, + "learning_rate": 1.9886720920846063e-05, + "loss": 1.3577, + "step": 556 + }, + { + "epoch": 0.07665840902835122, + "grad_norm": 1.8325053943133653, + "learning_rate": 1.9886050916655375e-05, + "loss": 1.2186, + "step": 557 + }, + { + "epoch": 0.07679603633360858, + "grad_norm": 1.8909385711887048, + "learning_rate": 1.9885378948241696e-05, + "loss": 1.275, + "step": 558 + }, + { + "epoch": 0.07693366363886595, + "grad_norm": 1.883956694389189, + "learning_rate": 1.9884705015738536e-05, + "loss": 1.2713, + "step": 559 + }, + { + "epoch": 0.07707129094412331, + "grad_norm": 1.8226106789412526, + "learning_rate": 1.9884029119279794e-05, + "loss": 1.2087, + "step": 560 + }, + { + "epoch": 0.07720891824938067, + "grad_norm": 2.003347655718789, + "learning_rate": 1.9883351258999764e-05, + "loss": 1.2244, + "step": 561 + }, + { + "epoch": 0.07734654555463805, + "grad_norm": 1.8504277193618903, + "learning_rate": 1.988267143503313e-05, + "loss": 1.234, + "step": 562 + }, + { + "epoch": 0.07748417285989541, + "grad_norm": 1.9751309965127748, + "learning_rate": 1.988198964751496e-05, + "loss": 1.2329, + "step": 563 + }, + { + "epoch": 0.07762180016515277, + "grad_norm": 1.7120810852198987, + "learning_rate": 
1.9881305896580715e-05, + "loss": 1.2759, + "step": 564 + }, + { + "epoch": 0.07775942747041013, + "grad_norm": 1.7660846849413923, + "learning_rate": 1.988062018236625e-05, + "loss": 1.2106, + "step": 565 + }, + { + "epoch": 0.0778970547756675, + "grad_norm": 2.1765335761884814, + "learning_rate": 1.9879932505007803e-05, + "loss": 1.3522, + "step": 566 + }, + { + "epoch": 0.07803468208092486, + "grad_norm": 1.8089315794528318, + "learning_rate": 1.9879242864642013e-05, + "loss": 1.2475, + "step": 567 + }, + { + "epoch": 0.07817230938618222, + "grad_norm": 2.1708236031325034, + "learning_rate": 1.9878551261405893e-05, + "loss": 1.4144, + "step": 568 + }, + { + "epoch": 0.07830993669143958, + "grad_norm": 2.001722612405579, + "learning_rate": 1.9877857695436863e-05, + "loss": 1.3721, + "step": 569 + }, + { + "epoch": 0.07844756399669695, + "grad_norm": 1.79750491041061, + "learning_rate": 1.9877162166872727e-05, + "loss": 1.2368, + "step": 570 + }, + { + "epoch": 0.07858519130195431, + "grad_norm": 1.8581748444348096, + "learning_rate": 1.9876464675851665e-05, + "loss": 1.2423, + "step": 571 + }, + { + "epoch": 0.07872281860721167, + "grad_norm": 2.018617395741517, + "learning_rate": 1.9875765222512267e-05, + "loss": 1.1859, + "step": 572 + }, + { + "epoch": 0.07886044591246903, + "grad_norm": 1.9573830078171723, + "learning_rate": 1.9875063806993508e-05, + "loss": 1.2288, + "step": 573 + }, + { + "epoch": 0.0789980732177264, + "grad_norm": 1.780591420553638, + "learning_rate": 1.987436042943474e-05, + "loss": 1.2473, + "step": 574 + }, + { + "epoch": 0.07913570052298376, + "grad_norm": 1.7854249175916004, + "learning_rate": 1.9873655089975722e-05, + "loss": 1.2346, + "step": 575 + }, + { + "epoch": 0.07927332782824112, + "grad_norm": 1.7471304396664455, + "learning_rate": 1.9872947788756592e-05, + "loss": 1.2789, + "step": 576 + }, + { + "epoch": 0.07941095513349848, + "grad_norm": 1.9086477608673977, + "learning_rate": 1.9872238525917885e-05, + "loss": 1.3156, + "step": 577 + }, + { + "epoch": 0.07954858243875584, + "grad_norm": 1.74526336497638, + "learning_rate": 1.9871527301600516e-05, + "loss": 1.3196, + "step": 578 + }, + { + "epoch": 0.0796862097440132, + "grad_norm": 1.7796989086839412, + "learning_rate": 1.98708141159458e-05, + "loss": 1.2096, + "step": 579 + }, + { + "epoch": 0.07982383704927057, + "grad_norm": 1.7409836389829947, + "learning_rate": 1.987009896909544e-05, + "loss": 1.1783, + "step": 580 + }, + { + "epoch": 0.07996146435452794, + "grad_norm": 2.0288717992342704, + "learning_rate": 1.9869381861191518e-05, + "loss": 1.362, + "step": 581 + }, + { + "epoch": 0.08009909165978531, + "grad_norm": 2.22104670935202, + "learning_rate": 1.9868662792376518e-05, + "loss": 1.1934, + "step": 582 + }, + { + "epoch": 0.08023671896504267, + "grad_norm": 1.6938462514595205, + "learning_rate": 1.9867941762793307e-05, + "loss": 1.1924, + "step": 583 + }, + { + "epoch": 0.08037434627030003, + "grad_norm": 1.7501484004589887, + "learning_rate": 1.9867218772585146e-05, + "loss": 1.1451, + "step": 584 + }, + { + "epoch": 0.0805119735755574, + "grad_norm": 1.8104157588693333, + "learning_rate": 1.9866493821895684e-05, + "loss": 1.2997, + "step": 585 + }, + { + "epoch": 0.08064960088081476, + "grad_norm": 1.881624799711022, + "learning_rate": 1.9865766910868954e-05, + "loss": 1.3111, + "step": 586 + }, + { + "epoch": 0.08078722818607212, + "grad_norm": 1.9013486114759162, + "learning_rate": 1.9865038039649386e-05, + "loss": 1.1747, + "step": 587 + }, + { + "epoch": 0.08092485549132948, + 
"grad_norm": 1.8162016199870816, + "learning_rate": 1.9864307208381802e-05, + "loss": 1.3138, + "step": 588 + }, + { + "epoch": 0.08106248279658684, + "grad_norm": 1.8299051952470389, + "learning_rate": 1.98635744172114e-05, + "loss": 1.2967, + "step": 589 + }, + { + "epoch": 0.0812001101018442, + "grad_norm": 1.6327750679709911, + "learning_rate": 1.986283966628378e-05, + "loss": 1.1681, + "step": 590 + }, + { + "epoch": 0.08133773740710157, + "grad_norm": 1.7536784820951652, + "learning_rate": 1.9862102955744923e-05, + "loss": 1.2247, + "step": 591 + }, + { + "epoch": 0.08147536471235893, + "grad_norm": 1.7359732299869188, + "learning_rate": 1.986136428574121e-05, + "loss": 1.0886, + "step": 592 + }, + { + "epoch": 0.08161299201761629, + "grad_norm": 1.7604144586515782, + "learning_rate": 1.98606236564194e-05, + "loss": 1.162, + "step": 593 + }, + { + "epoch": 0.08175061932287365, + "grad_norm": 1.8099089635305472, + "learning_rate": 1.9859881067926646e-05, + "loss": 1.2731, + "step": 594 + }, + { + "epoch": 0.08188824662813102, + "grad_norm": 1.7795524106857699, + "learning_rate": 1.985913652041049e-05, + "loss": 1.1829, + "step": 595 + }, + { + "epoch": 0.08202587393338838, + "grad_norm": 1.9820053136758584, + "learning_rate": 1.985839001401887e-05, + "loss": 1.2816, + "step": 596 + }, + { + "epoch": 0.08216350123864574, + "grad_norm": 1.7325962212105188, + "learning_rate": 1.985764154890009e-05, + "loss": 1.2378, + "step": 597 + }, + { + "epoch": 0.0823011285439031, + "grad_norm": 1.951390479975755, + "learning_rate": 1.9856891125202876e-05, + "loss": 1.2897, + "step": 598 + }, + { + "epoch": 0.08243875584916047, + "grad_norm": 1.8575623540278854, + "learning_rate": 1.9856138743076324e-05, + "loss": 1.1912, + "step": 599 + }, + { + "epoch": 0.08257638315441784, + "grad_norm": 1.9755032706377214, + "learning_rate": 1.9855384402669918e-05, + "loss": 1.2628, + "step": 600 + }, + { + "epoch": 0.0827140104596752, + "grad_norm": 1.7478271222634836, + "learning_rate": 1.9854628104133537e-05, + "loss": 1.2031, + "step": 601 + }, + { + "epoch": 0.08285163776493257, + "grad_norm": 1.8133089383753362, + "learning_rate": 1.9853869847617446e-05, + "loss": 1.1753, + "step": 602 + }, + { + "epoch": 0.08298926507018993, + "grad_norm": 1.7121916845828211, + "learning_rate": 1.9853109633272304e-05, + "loss": 1.2198, + "step": 603 + }, + { + "epoch": 0.08312689237544729, + "grad_norm": 1.832810934571967, + "learning_rate": 1.9852347461249147e-05, + "loss": 1.2359, + "step": 604 + }, + { + "epoch": 0.08326451968070465, + "grad_norm": 1.9455698035097133, + "learning_rate": 1.985158333169942e-05, + "loss": 1.2741, + "step": 605 + }, + { + "epoch": 0.08340214698596202, + "grad_norm": 1.7785525766174886, + "learning_rate": 1.985081724477493e-05, + "loss": 1.2948, + "step": 606 + }, + { + "epoch": 0.08353977429121938, + "grad_norm": 1.8250397292848948, + "learning_rate": 1.98500492006279e-05, + "loss": 1.1642, + "step": 607 + }, + { + "epoch": 0.08367740159647674, + "grad_norm": 1.877612555176493, + "learning_rate": 1.984927919941093e-05, + "loss": 1.2648, + "step": 608 + }, + { + "epoch": 0.0838150289017341, + "grad_norm": 2.0300331667181064, + "learning_rate": 1.9848507241277e-05, + "loss": 1.2384, + "step": 609 + }, + { + "epoch": 0.08395265620699147, + "grad_norm": 1.8324365552440949, + "learning_rate": 1.9847733326379498e-05, + "loss": 1.2175, + "step": 610 + }, + { + "epoch": 0.08409028351224883, + "grad_norm": 1.7009951015478637, + "learning_rate": 1.9846957454872184e-05, + "loss": 1.1513, + "step": 
611 + }, + { + "epoch": 0.08422791081750619, + "grad_norm": 1.8723007199515214, + "learning_rate": 1.984617962690921e-05, + "loss": 1.1792, + "step": 612 + }, + { + "epoch": 0.08436553812276355, + "grad_norm": 1.6333198481656388, + "learning_rate": 1.9845399842645122e-05, + "loss": 1.2576, + "step": 613 + }, + { + "epoch": 0.08450316542802092, + "grad_norm": 1.9587193634240436, + "learning_rate": 1.984461810223486e-05, + "loss": 1.197, + "step": 614 + }, + { + "epoch": 0.08464079273327828, + "grad_norm": 1.7852430846973624, + "learning_rate": 1.9843834405833737e-05, + "loss": 1.2481, + "step": 615 + }, + { + "epoch": 0.08477842003853564, + "grad_norm": 1.885921597105817, + "learning_rate": 1.9843048753597465e-05, + "loss": 1.3181, + "step": 616 + }, + { + "epoch": 0.084916047343793, + "grad_norm": 1.6990015177886433, + "learning_rate": 1.984226114568214e-05, + "loss": 1.22, + "step": 617 + }, + { + "epoch": 0.08505367464905036, + "grad_norm": 1.8980496281782069, + "learning_rate": 1.9841471582244255e-05, + "loss": 1.2611, + "step": 618 + }, + { + "epoch": 0.08519130195430774, + "grad_norm": 1.7915875141063173, + "learning_rate": 1.9840680063440677e-05, + "loss": 1.1858, + "step": 619 + }, + { + "epoch": 0.0853289292595651, + "grad_norm": 1.8936549672848455, + "learning_rate": 1.9839886589428677e-05, + "loss": 1.2093, + "step": 620 + }, + { + "epoch": 0.08546655656482247, + "grad_norm": 1.758208040838905, + "learning_rate": 1.9839091160365905e-05, + "loss": 1.2658, + "step": 621 + }, + { + "epoch": 0.08560418387007983, + "grad_norm": 1.7564253481193826, + "learning_rate": 1.9838293776410403e-05, + "loss": 1.1366, + "step": 622 + }, + { + "epoch": 0.08574181117533719, + "grad_norm": 2.243469023207016, + "learning_rate": 1.98374944377206e-05, + "loss": 1.2652, + "step": 623 + }, + { + "epoch": 0.08587943848059455, + "grad_norm": 1.7147136350412064, + "learning_rate": 1.983669314445531e-05, + "loss": 1.2963, + "step": 624 + }, + { + "epoch": 0.08601706578585191, + "grad_norm": 1.9159372904227485, + "learning_rate": 1.9835889896773745e-05, + "loss": 1.279, + "step": 625 + }, + { + "epoch": 0.08615469309110928, + "grad_norm": 1.9872855549008928, + "learning_rate": 1.9835084694835494e-05, + "loss": 1.3619, + "step": 626 + }, + { + "epoch": 0.08629232039636664, + "grad_norm": 1.731810190424292, + "learning_rate": 1.9834277538800542e-05, + "loss": 1.2183, + "step": 627 + }, + { + "epoch": 0.086429947701624, + "grad_norm": 1.9784673080145145, + "learning_rate": 1.983346842882926e-05, + "loss": 1.2106, + "step": 628 + }, + { + "epoch": 0.08656757500688136, + "grad_norm": 1.7866826871725954, + "learning_rate": 1.9832657365082405e-05, + "loss": 1.2775, + "step": 629 + }, + { + "epoch": 0.08670520231213873, + "grad_norm": 1.8406817147451717, + "learning_rate": 1.983184434772113e-05, + "loss": 1.2212, + "step": 630 + }, + { + "epoch": 0.08684282961739609, + "grad_norm": 1.9065171842861885, + "learning_rate": 1.9831029376906964e-05, + "loss": 1.2547, + "step": 631 + }, + { + "epoch": 0.08698045692265345, + "grad_norm": 1.7619078995339281, + "learning_rate": 1.9830212452801833e-05, + "loss": 1.2579, + "step": 632 + }, + { + "epoch": 0.08711808422791081, + "grad_norm": 1.8872222608480584, + "learning_rate": 1.9829393575568047e-05, + "loss": 1.2933, + "step": 633 + }, + { + "epoch": 0.08725571153316818, + "grad_norm": 1.8093014240197023, + "learning_rate": 1.9828572745368308e-05, + "loss": 1.1833, + "step": 634 + }, + { + "epoch": 0.08739333883842554, + "grad_norm": 1.7917366553721097, + "learning_rate": 
1.9827749962365703e-05, + "loss": 1.1602, + "step": 635 + }, + { + "epoch": 0.0875309661436829, + "grad_norm": 1.7988128028268695, + "learning_rate": 1.982692522672371e-05, + "loss": 1.2033, + "step": 636 + }, + { + "epoch": 0.08766859344894026, + "grad_norm": 1.8909545385019588, + "learning_rate": 1.982609853860619e-05, + "loss": 1.2734, + "step": 637 + }, + { + "epoch": 0.08780622075419764, + "grad_norm": 1.918998366977536, + "learning_rate": 1.9825269898177393e-05, + "loss": 1.261, + "step": 638 + }, + { + "epoch": 0.087943848059455, + "grad_norm": 1.9258087631701541, + "learning_rate": 1.9824439305601962e-05, + "loss": 1.2354, + "step": 639 + }, + { + "epoch": 0.08808147536471236, + "grad_norm": 2.0900426506399303, + "learning_rate": 1.982360676104492e-05, + "loss": 1.2465, + "step": 640 + }, + { + "epoch": 0.08821910266996973, + "grad_norm": 1.948945569543138, + "learning_rate": 1.9822772264671686e-05, + "loss": 1.2968, + "step": 641 + }, + { + "epoch": 0.08835672997522709, + "grad_norm": 1.7982632978073727, + "learning_rate": 1.9821935816648066e-05, + "loss": 1.1876, + "step": 642 + }, + { + "epoch": 0.08849435728048445, + "grad_norm": 1.9484754134540905, + "learning_rate": 1.9821097417140244e-05, + "loss": 1.1904, + "step": 643 + }, + { + "epoch": 0.08863198458574181, + "grad_norm": 1.7276502992932183, + "learning_rate": 1.9820257066314802e-05, + "loss": 1.2259, + "step": 644 + }, + { + "epoch": 0.08876961189099918, + "grad_norm": 1.5885474147554055, + "learning_rate": 1.9819414764338703e-05, + "loss": 1.1687, + "step": 645 + }, + { + "epoch": 0.08890723919625654, + "grad_norm": 1.7560298017509959, + "learning_rate": 1.9818570511379308e-05, + "loss": 1.1898, + "step": 646 + }, + { + "epoch": 0.0890448665015139, + "grad_norm": 1.7793076010414537, + "learning_rate": 1.9817724307604352e-05, + "loss": 1.1515, + "step": 647 + }, + { + "epoch": 0.08918249380677126, + "grad_norm": 1.8593857721543998, + "learning_rate": 1.9816876153181966e-05, + "loss": 1.2463, + "step": 648 + }, + { + "epoch": 0.08932012111202862, + "grad_norm": 1.7594835472864392, + "learning_rate": 1.9816026048280667e-05, + "loss": 1.2119, + "step": 649 + }, + { + "epoch": 0.08945774841728599, + "grad_norm": 1.5350244591404703, + "learning_rate": 1.9815173993069358e-05, + "loss": 1.1324, + "step": 650 + }, + { + "epoch": 0.08959537572254335, + "grad_norm": 2.076378114591709, + "learning_rate": 1.9814319987717334e-05, + "loss": 1.2727, + "step": 651 + }, + { + "epoch": 0.08973300302780071, + "grad_norm": 1.8855989007056961, + "learning_rate": 1.9813464032394268e-05, + "loss": 1.267, + "step": 652 + }, + { + "epoch": 0.08987063033305807, + "grad_norm": 2.108034190234174, + "learning_rate": 1.9812606127270235e-05, + "loss": 1.1576, + "step": 653 + }, + { + "epoch": 0.09000825763831544, + "grad_norm": 1.9859441124596537, + "learning_rate": 1.981174627251568e-05, + "loss": 1.2892, + "step": 654 + }, + { + "epoch": 0.0901458849435728, + "grad_norm": 1.8345461010068997, + "learning_rate": 1.9810884468301447e-05, + "loss": 1.3095, + "step": 655 + }, + { + "epoch": 0.09028351224883016, + "grad_norm": 1.8743731884736723, + "learning_rate": 1.981002071479877e-05, + "loss": 1.1875, + "step": 656 + }, + { + "epoch": 0.09042113955408754, + "grad_norm": 1.794317154941314, + "learning_rate": 1.9809155012179263e-05, + "loss": 1.2998, + "step": 657 + }, + { + "epoch": 0.0905587668593449, + "grad_norm": 1.7443226042588642, + "learning_rate": 1.980828736061493e-05, + "loss": 1.2207, + "step": 658 + }, + { + "epoch": 0.09069639416460226, + 
"grad_norm": 1.5814225235293542, + "learning_rate": 1.9807417760278155e-05, + "loss": 1.1775, + "step": 659 + }, + { + "epoch": 0.09083402146985962, + "grad_norm": 1.8304642950928993, + "learning_rate": 1.980654621134172e-05, + "loss": 1.2466, + "step": 660 + }, + { + "epoch": 0.09097164877511699, + "grad_norm": 2.3211483028102315, + "learning_rate": 1.9805672713978792e-05, + "loss": 1.2705, + "step": 661 + }, + { + "epoch": 0.09110927608037435, + "grad_norm": 1.6929008555449716, + "learning_rate": 1.9804797268362917e-05, + "loss": 1.2016, + "step": 662 + }, + { + "epoch": 0.09124690338563171, + "grad_norm": 1.8285618639988, + "learning_rate": 1.980391987466804e-05, + "loss": 1.2918, + "step": 663 + }, + { + "epoch": 0.09138453069088907, + "grad_norm": 1.7383522645955118, + "learning_rate": 1.9803040533068486e-05, + "loss": 1.1953, + "step": 664 + }, + { + "epoch": 0.09152215799614644, + "grad_norm": 1.6763602778235207, + "learning_rate": 1.9802159243738965e-05, + "loss": 1.1899, + "step": 665 + }, + { + "epoch": 0.0916597853014038, + "grad_norm": 1.6983935385806757, + "learning_rate": 1.980127600685458e-05, + "loss": 1.1132, + "step": 666 + }, + { + "epoch": 0.09179741260666116, + "grad_norm": 1.8739719072599466, + "learning_rate": 1.980039082259082e-05, + "loss": 1.1821, + "step": 667 + }, + { + "epoch": 0.09193503991191852, + "grad_norm": 1.7070208391799184, + "learning_rate": 1.979950369112355e-05, + "loss": 1.2924, + "step": 668 + }, + { + "epoch": 0.09207266721717589, + "grad_norm": 1.7080292769872887, + "learning_rate": 1.9798614612629046e-05, + "loss": 1.2166, + "step": 669 + }, + { + "epoch": 0.09221029452243325, + "grad_norm": 1.7157127820778475, + "learning_rate": 1.979772358728394e-05, + "loss": 1.2743, + "step": 670 + }, + { + "epoch": 0.09234792182769061, + "grad_norm": 1.727731542149226, + "learning_rate": 1.9796830615265277e-05, + "loss": 1.2061, + "step": 671 + }, + { + "epoch": 0.09248554913294797, + "grad_norm": 1.7332766679140847, + "learning_rate": 1.979593569675047e-05, + "loss": 1.1293, + "step": 672 + }, + { + "epoch": 0.09262317643820533, + "grad_norm": 2.0207110871890643, + "learning_rate": 1.9795038831917336e-05, + "loss": 1.1878, + "step": 673 + }, + { + "epoch": 0.0927608037434627, + "grad_norm": 2.0379833263438303, + "learning_rate": 1.9794140020944065e-05, + "loss": 1.1672, + "step": 674 + }, + { + "epoch": 0.09289843104872006, + "grad_norm": 1.8842495706527753, + "learning_rate": 1.979323926400924e-05, + "loss": 1.2484, + "step": 675 + }, + { + "epoch": 0.09303605835397744, + "grad_norm": 1.8724062474041343, + "learning_rate": 1.9792336561291826e-05, + "loss": 1.1874, + "step": 676 + }, + { + "epoch": 0.0931736856592348, + "grad_norm": 1.7144139868458128, + "learning_rate": 1.979143191297118e-05, + "loss": 1.2042, + "step": 677 + }, + { + "epoch": 0.09331131296449216, + "grad_norm": 1.7374984338058037, + "learning_rate": 1.9790525319227044e-05, + "loss": 1.2094, + "step": 678 + }, + { + "epoch": 0.09344894026974952, + "grad_norm": 1.8866441966630032, + "learning_rate": 1.9789616780239546e-05, + "loss": 1.2202, + "step": 679 + }, + { + "epoch": 0.09358656757500688, + "grad_norm": 1.7595446646295216, + "learning_rate": 1.9788706296189196e-05, + "loss": 1.2561, + "step": 680 + }, + { + "epoch": 0.09372419488026425, + "grad_norm": 1.9510244119012567, + "learning_rate": 1.9787793867256897e-05, + "loss": 1.1178, + "step": 681 + }, + { + "epoch": 0.09386182218552161, + "grad_norm": 1.880639658252601, + "learning_rate": 1.978687949362394e-05, + "loss": 1.2798, + 
"step": 682 + }, + { + "epoch": 0.09399944949077897, + "grad_norm": 1.944500901743983, + "learning_rate": 1.9785963175471995e-05, + "loss": 1.2167, + "step": 683 + }, + { + "epoch": 0.09413707679603633, + "grad_norm": 1.9942131359447002, + "learning_rate": 1.978504491298312e-05, + "loss": 1.262, + "step": 684 + }, + { + "epoch": 0.0942747041012937, + "grad_norm": 1.697629518782316, + "learning_rate": 1.9784124706339763e-05, + "loss": 1.2152, + "step": 685 + }, + { + "epoch": 0.09441233140655106, + "grad_norm": 1.8902814321213357, + "learning_rate": 1.978320255572476e-05, + "loss": 1.2812, + "step": 686 + }, + { + "epoch": 0.09454995871180842, + "grad_norm": 1.848342889841255, + "learning_rate": 1.9782278461321324e-05, + "loss": 1.2622, + "step": 687 + }, + { + "epoch": 0.09468758601706578, + "grad_norm": 1.9161678038484973, + "learning_rate": 1.9781352423313066e-05, + "loss": 1.118, + "step": 688 + }, + { + "epoch": 0.09482521332232315, + "grad_norm": 1.9036310641252565, + "learning_rate": 1.9780424441883973e-05, + "loss": 1.1664, + "step": 689 + }, + { + "epoch": 0.09496284062758051, + "grad_norm": 1.765079882881183, + "learning_rate": 1.977949451721842e-05, + "loss": 1.2104, + "step": 690 + }, + { + "epoch": 0.09510046793283787, + "grad_norm": 1.8390557654507544, + "learning_rate": 1.9778562649501178e-05, + "loss": 1.1319, + "step": 691 + }, + { + "epoch": 0.09523809523809523, + "grad_norm": 1.845007639060345, + "learning_rate": 1.9777628838917384e-05, + "loss": 1.2095, + "step": 692 + }, + { + "epoch": 0.0953757225433526, + "grad_norm": 1.9765014798328004, + "learning_rate": 1.9776693085652586e-05, + "loss": 1.309, + "step": 693 + }, + { + "epoch": 0.09551334984860996, + "grad_norm": 1.6779809099621654, + "learning_rate": 1.97757553898927e-05, + "loss": 1.1817, + "step": 694 + }, + { + "epoch": 0.09565097715386733, + "grad_norm": 1.6871863828426394, + "learning_rate": 1.9774815751824032e-05, + "loss": 1.2152, + "step": 695 + }, + { + "epoch": 0.0957886044591247, + "grad_norm": 1.742631167516675, + "learning_rate": 1.977387417163328e-05, + "loss": 1.205, + "step": 696 + }, + { + "epoch": 0.09592623176438206, + "grad_norm": 1.723068237013323, + "learning_rate": 1.9772930649507517e-05, + "loss": 1.1354, + "step": 697 + }, + { + "epoch": 0.09606385906963942, + "grad_norm": 1.9852375974753866, + "learning_rate": 1.9771985185634208e-05, + "loss": 1.1616, + "step": 698 + }, + { + "epoch": 0.09620148637489678, + "grad_norm": 2.4106687258470183, + "learning_rate": 1.977103778020121e-05, + "loss": 1.3333, + "step": 699 + }, + { + "epoch": 0.09633911368015415, + "grad_norm": 1.7530300741770577, + "learning_rate": 1.9770088433396757e-05, + "loss": 1.2055, + "step": 700 + }, + { + "epoch": 0.09647674098541151, + "grad_norm": 1.8967411690939935, + "learning_rate": 1.9769137145409466e-05, + "loss": 1.3456, + "step": 701 + }, + { + "epoch": 0.09661436829066887, + "grad_norm": 1.8158234589562774, + "learning_rate": 1.9768183916428353e-05, + "loss": 1.2027, + "step": 702 + }, + { + "epoch": 0.09675199559592623, + "grad_norm": 1.8892584425949879, + "learning_rate": 1.976722874664281e-05, + "loss": 1.2815, + "step": 703 + }, + { + "epoch": 0.0968896229011836, + "grad_norm": 1.7515117308536774, + "learning_rate": 1.9766271636242606e-05, + "loss": 1.223, + "step": 704 + }, + { + "epoch": 0.09702725020644096, + "grad_norm": 1.7168746036644345, + "learning_rate": 1.976531258541792e-05, + "loss": 1.132, + "step": 705 + }, + { + "epoch": 0.09716487751169832, + "grad_norm": 1.8208770612527814, + "learning_rate": 
1.9764351594359295e-05, + "loss": 1.2828, + "step": 706 + }, + { + "epoch": 0.09730250481695568, + "grad_norm": 1.7735239200978077, + "learning_rate": 1.9763388663257665e-05, + "loss": 1.1977, + "step": 707 + }, + { + "epoch": 0.09744013212221304, + "grad_norm": 1.8943610560320723, + "learning_rate": 1.9762423792304355e-05, + "loss": 1.2475, + "step": 708 + }, + { + "epoch": 0.0975777594274704, + "grad_norm": 1.7374472409077133, + "learning_rate": 1.976145698169107e-05, + "loss": 1.1813, + "step": 709 + }, + { + "epoch": 0.09771538673272777, + "grad_norm": 1.8365119293894638, + "learning_rate": 1.9760488231609903e-05, + "loss": 1.2114, + "step": 710 + }, + { + "epoch": 0.09785301403798513, + "grad_norm": 1.7555311528848458, + "learning_rate": 1.975951754225333e-05, + "loss": 1.1959, + "step": 711 + }, + { + "epoch": 0.0979906413432425, + "grad_norm": 1.9808748840913646, + "learning_rate": 1.975854491381422e-05, + "loss": 1.1611, + "step": 712 + }, + { + "epoch": 0.09812826864849986, + "grad_norm": 1.5992386760621216, + "learning_rate": 1.9757570346485807e-05, + "loss": 1.1579, + "step": 713 + }, + { + "epoch": 0.09826589595375723, + "grad_norm": 1.701042550628502, + "learning_rate": 1.975659384046174e-05, + "loss": 1.207, + "step": 714 + }, + { + "epoch": 0.0984035232590146, + "grad_norm": 1.9661190987595265, + "learning_rate": 1.9755615395936026e-05, + "loss": 1.2784, + "step": 715 + }, + { + "epoch": 0.09854115056427196, + "grad_norm": 1.9156973558469255, + "learning_rate": 1.9754635013103072e-05, + "loss": 1.2755, + "step": 716 + }, + { + "epoch": 0.09867877786952932, + "grad_norm": 1.9986525269566224, + "learning_rate": 1.975365269215767e-05, + "loss": 1.2136, + "step": 717 + }, + { + "epoch": 0.09881640517478668, + "grad_norm": 1.8910712486238896, + "learning_rate": 1.9752668433294992e-05, + "loss": 1.1378, + "step": 718 + }, + { + "epoch": 0.09895403248004404, + "grad_norm": 2.057682622037617, + "learning_rate": 1.975168223671059e-05, + "loss": 1.2798, + "step": 719 + }, + { + "epoch": 0.0990916597853014, + "grad_norm": 1.9251974933693452, + "learning_rate": 1.975069410260042e-05, + "loss": 1.183, + "step": 720 + }, + { + "epoch": 0.09922928709055877, + "grad_norm": 1.637918280432621, + "learning_rate": 1.97497040311608e-05, + "loss": 1.1388, + "step": 721 + }, + { + "epoch": 0.09936691439581613, + "grad_norm": 1.6647602186504662, + "learning_rate": 1.9748712022588453e-05, + "loss": 1.1798, + "step": 722 + }, + { + "epoch": 0.09950454170107349, + "grad_norm": 1.959191109201414, + "learning_rate": 1.974771807708047e-05, + "loss": 1.3067, + "step": 723 + }, + { + "epoch": 0.09964216900633086, + "grad_norm": 1.8601822721385304, + "learning_rate": 1.9746722194834334e-05, + "loss": 1.2668, + "step": 724 + }, + { + "epoch": 0.09977979631158822, + "grad_norm": 1.8000742464283541, + "learning_rate": 1.974572437604792e-05, + "loss": 1.1486, + "step": 725 + }, + { + "epoch": 0.09991742361684558, + "grad_norm": 1.9399314993453969, + "learning_rate": 1.9744724620919474e-05, + "loss": 1.2432, + "step": 726 + }, + { + "epoch": 0.10005505092210294, + "grad_norm": 1.7784740151465612, + "learning_rate": 1.9743722929647634e-05, + "loss": 1.2243, + "step": 727 + }, + { + "epoch": 0.1001926782273603, + "grad_norm": 2.042182417173032, + "learning_rate": 1.974271930243143e-05, + "loss": 1.1788, + "step": 728 + }, + { + "epoch": 0.10033030553261767, + "grad_norm": 1.917460258303292, + "learning_rate": 1.9741713739470263e-05, + "loss": 1.2277, + "step": 729 + }, + { + "epoch": 0.10046793283787503, + 
"grad_norm": 1.8038060517866223, + "learning_rate": 1.974070624096392e-05, + "loss": 1.1389, + "step": 730 + }, + { + "epoch": 0.10060556014313239, + "grad_norm": 1.8977446811372967, + "learning_rate": 1.9739696807112592e-05, + "loss": 1.2265, + "step": 731 + }, + { + "epoch": 0.10074318744838975, + "grad_norm": 1.7868537758492014, + "learning_rate": 1.9738685438116825e-05, + "loss": 1.2211, + "step": 732 + }, + { + "epoch": 0.10088081475364713, + "grad_norm": 2.0106842945571124, + "learning_rate": 1.973767213417757e-05, + "loss": 1.2148, + "step": 733 + }, + { + "epoch": 0.10101844205890449, + "grad_norm": 1.6712782823250862, + "learning_rate": 1.9736656895496157e-05, + "loss": 1.1554, + "step": 734 + }, + { + "epoch": 0.10115606936416185, + "grad_norm": 1.5643888114139286, + "learning_rate": 1.97356397222743e-05, + "loss": 1.1379, + "step": 735 + }, + { + "epoch": 0.10129369666941922, + "grad_norm": 2.114090341942714, + "learning_rate": 1.9734620614714094e-05, + "loss": 1.2676, + "step": 736 + }, + { + "epoch": 0.10143132397467658, + "grad_norm": 1.7656795822169238, + "learning_rate": 1.9733599573018027e-05, + "loss": 1.234, + "step": 737 + }, + { + "epoch": 0.10156895127993394, + "grad_norm": 2.0119765189067524, + "learning_rate": 1.973257659738896e-05, + "loss": 1.2152, + "step": 738 + }, + { + "epoch": 0.1017065785851913, + "grad_norm": 1.7799545473837703, + "learning_rate": 1.9731551688030147e-05, + "loss": 1.2504, + "step": 739 + }, + { + "epoch": 0.10184420589044867, + "grad_norm": 2.0072899277008935, + "learning_rate": 1.973052484514523e-05, + "loss": 1.2618, + "step": 740 + }, + { + "epoch": 0.10198183319570603, + "grad_norm": 1.7754229916643216, + "learning_rate": 1.9729496068938216e-05, + "loss": 1.2147, + "step": 741 + }, + { + "epoch": 0.10211946050096339, + "grad_norm": 1.7440777510285383, + "learning_rate": 1.972846535961352e-05, + "loss": 1.2779, + "step": 742 + }, + { + "epoch": 0.10225708780622075, + "grad_norm": 1.75526064957948, + "learning_rate": 1.9727432717375924e-05, + "loss": 1.1549, + "step": 743 + }, + { + "epoch": 0.10239471511147812, + "grad_norm": 1.6249605696946492, + "learning_rate": 1.97263981424306e-05, + "loss": 1.1791, + "step": 744 + }, + { + "epoch": 0.10253234241673548, + "grad_norm": 1.6986864781343542, + "learning_rate": 1.9725361634983103e-05, + "loss": 1.2508, + "step": 745 + }, + { + "epoch": 0.10266996972199284, + "grad_norm": 1.93764499091909, + "learning_rate": 1.972432319523938e-05, + "loss": 1.1811, + "step": 746 + }, + { + "epoch": 0.1028075970272502, + "grad_norm": 2.0230995540669796, + "learning_rate": 1.9723282823405748e-05, + "loss": 1.2663, + "step": 747 + }, + { + "epoch": 0.10294522433250756, + "grad_norm": 1.730298986036247, + "learning_rate": 1.9722240519688915e-05, + "loss": 1.1682, + "step": 748 + }, + { + "epoch": 0.10308285163776493, + "grad_norm": 1.7089365539586916, + "learning_rate": 1.9721196284295973e-05, + "loss": 1.1776, + "step": 749 + }, + { + "epoch": 0.10322047894302229, + "grad_norm": 1.7837131574908531, + "learning_rate": 1.9720150117434397e-05, + "loss": 1.0927, + "step": 750 + }, + { + "epoch": 0.10335810624827965, + "grad_norm": 1.8373385477618713, + "learning_rate": 1.971910201931205e-05, + "loss": 1.2086, + "step": 751 + }, + { + "epoch": 0.10349573355353703, + "grad_norm": 1.7111699441748365, + "learning_rate": 1.971805199013717e-05, + "loss": 1.2304, + "step": 752 + }, + { + "epoch": 0.10363336085879439, + "grad_norm": 1.9201801896746795, + "learning_rate": 1.971700003011839e-05, + "loss": 1.1513, + "step": 
753 + }, + { + "epoch": 0.10377098816405175, + "grad_norm": 1.7856567369268712, + "learning_rate": 1.9715946139464708e-05, + "loss": 1.2368, + "step": 754 + }, + { + "epoch": 0.10390861546930912, + "grad_norm": 1.7045737803004442, + "learning_rate": 1.971489031838553e-05, + "loss": 1.2169, + "step": 755 + }, + { + "epoch": 0.10404624277456648, + "grad_norm": 1.8956621696738971, + "learning_rate": 1.9713832567090623e-05, + "loss": 1.2508, + "step": 756 + }, + { + "epoch": 0.10418387007982384, + "grad_norm": 1.8207264687685447, + "learning_rate": 1.9712772885790158e-05, + "loss": 1.1835, + "step": 757 + }, + { + "epoch": 0.1043214973850812, + "grad_norm": 1.711579234121396, + "learning_rate": 1.9711711274694672e-05, + "loss": 1.1762, + "step": 758 + }, + { + "epoch": 0.10445912469033856, + "grad_norm": 1.6961355602286854, + "learning_rate": 1.9710647734015095e-05, + "loss": 1.2785, + "step": 759 + }, + { + "epoch": 0.10459675199559593, + "grad_norm": 2.005026216754533, + "learning_rate": 1.9709582263962733e-05, + "loss": 1.223, + "step": 760 + }, + { + "epoch": 0.10473437930085329, + "grad_norm": 1.6812742381859451, + "learning_rate": 1.970851486474929e-05, + "loss": 1.2093, + "step": 761 + }, + { + "epoch": 0.10487200660611065, + "grad_norm": 1.6338131791444162, + "learning_rate": 1.9707445536586838e-05, + "loss": 1.1787, + "step": 762 + }, + { + "epoch": 0.10500963391136801, + "grad_norm": 1.9276818871701158, + "learning_rate": 1.970637427968784e-05, + "loss": 1.1688, + "step": 763 + }, + { + "epoch": 0.10514726121662538, + "grad_norm": 1.7096468041785715, + "learning_rate": 1.9705301094265133e-05, + "loss": 1.1396, + "step": 764 + }, + { + "epoch": 0.10528488852188274, + "grad_norm": 1.67731357024276, + "learning_rate": 1.9704225980531955e-05, + "loss": 1.1899, + "step": 765 + }, + { + "epoch": 0.1054225158271401, + "grad_norm": 1.7965973099396828, + "learning_rate": 1.9703148938701908e-05, + "loss": 1.2839, + "step": 766 + }, + { + "epoch": 0.10556014313239746, + "grad_norm": 1.8123593135723155, + "learning_rate": 1.970206996898899e-05, + "loss": 1.2097, + "step": 767 + }, + { + "epoch": 0.10569777043765483, + "grad_norm": 1.649070274547035, + "learning_rate": 1.9700989071607576e-05, + "loss": 1.1442, + "step": 768 + }, + { + "epoch": 0.10583539774291219, + "grad_norm": 1.6642024215214961, + "learning_rate": 1.969990624677242e-05, + "loss": 1.1348, + "step": 769 + }, + { + "epoch": 0.10597302504816955, + "grad_norm": 1.7995298999157636, + "learning_rate": 1.969882149469868e-05, + "loss": 1.2297, + "step": 770 + }, + { + "epoch": 0.10611065235342693, + "grad_norm": 1.814648402489889, + "learning_rate": 1.9697734815601864e-05, + "loss": 1.2754, + "step": 771 + }, + { + "epoch": 0.10624827965868429, + "grad_norm": 1.7078896126295318, + "learning_rate": 1.969664620969789e-05, + "loss": 1.1533, + "step": 772 + }, + { + "epoch": 0.10638590696394165, + "grad_norm": 1.963466038072907, + "learning_rate": 1.9695555677203046e-05, + "loss": 1.1985, + "step": 773 + }, + { + "epoch": 0.10652353426919901, + "grad_norm": 1.6990478184049804, + "learning_rate": 1.969446321833401e-05, + "loss": 1.2044, + "step": 774 + }, + { + "epoch": 0.10666116157445638, + "grad_norm": 1.6851565048110029, + "learning_rate": 1.969336883330783e-05, + "loss": 1.1992, + "step": 775 + }, + { + "epoch": 0.10679878887971374, + "grad_norm": 1.8198013206758212, + "learning_rate": 1.9692272522341957e-05, + "loss": 1.215, + "step": 776 + }, + { + "epoch": 0.1069364161849711, + "grad_norm": 1.8414563705435112, + "learning_rate": 
1.96911742856542e-05, + "loss": 1.165, + "step": 777 + }, + { + "epoch": 0.10707404349022846, + "grad_norm": 1.562134767351314, + "learning_rate": 1.9690074123462772e-05, + "loss": 1.1559, + "step": 778 + }, + { + "epoch": 0.10721167079548583, + "grad_norm": 1.7005654833028276, + "learning_rate": 1.968897203598626e-05, + "loss": 1.1267, + "step": 779 + }, + { + "epoch": 0.10734929810074319, + "grad_norm": 2.0035774578012786, + "learning_rate": 1.9687868023443633e-05, + "loss": 1.3623, + "step": 780 + }, + { + "epoch": 0.10748692540600055, + "grad_norm": 1.9407029596539918, + "learning_rate": 1.968676208605424e-05, + "loss": 1.2352, + "step": 781 + }, + { + "epoch": 0.10762455271125791, + "grad_norm": 1.9187328905612822, + "learning_rate": 1.9685654224037817e-05, + "loss": 1.2461, + "step": 782 + }, + { + "epoch": 0.10776218001651527, + "grad_norm": 2.4778910930837164, + "learning_rate": 1.968454443761448e-05, + "loss": 1.2034, + "step": 783 + }, + { + "epoch": 0.10789980732177264, + "grad_norm": 1.8364560332503237, + "learning_rate": 1.9683432727004736e-05, + "loss": 1.2181, + "step": 784 + }, + { + "epoch": 0.10803743462703, + "grad_norm": 1.8310485288297873, + "learning_rate": 1.968231909242946e-05, + "loss": 1.2354, + "step": 785 + }, + { + "epoch": 0.10817506193228736, + "grad_norm": 1.7264948258887771, + "learning_rate": 1.9681203534109915e-05, + "loss": 1.171, + "step": 786 + }, + { + "epoch": 0.10831268923754472, + "grad_norm": 1.7016499010742772, + "learning_rate": 1.9680086052267747e-05, + "loss": 1.2477, + "step": 787 + }, + { + "epoch": 0.10845031654280209, + "grad_norm": 1.8246020603939477, + "learning_rate": 1.967896664712499e-05, + "loss": 1.2836, + "step": 788 + }, + { + "epoch": 0.10858794384805945, + "grad_norm": 1.884603169691201, + "learning_rate": 1.967784531890405e-05, + "loss": 1.1921, + "step": 789 + }, + { + "epoch": 0.10872557115331682, + "grad_norm": 1.7542162102528964, + "learning_rate": 1.967672206782772e-05, + "loss": 1.1712, + "step": 790 + }, + { + "epoch": 0.10886319845857419, + "grad_norm": 1.818258118080152, + "learning_rate": 1.967559689411917e-05, + "loss": 1.2835, + "step": 791 + }, + { + "epoch": 0.10900082576383155, + "grad_norm": 1.7748266843569085, + "learning_rate": 1.967446979800197e-05, + "loss": 1.2189, + "step": 792 + }, + { + "epoch": 0.10913845306908891, + "grad_norm": 2.0508140240013106, + "learning_rate": 1.9673340779700045e-05, + "loss": 1.176, + "step": 793 + }, + { + "epoch": 0.10927608037434627, + "grad_norm": 1.7440930909016372, + "learning_rate": 1.967220983943772e-05, + "loss": 1.0881, + "step": 794 + }, + { + "epoch": 0.10941370767960364, + "grad_norm": 1.8089669284261072, + "learning_rate": 1.9671076977439704e-05, + "loss": 1.2417, + "step": 795 + }, + { + "epoch": 0.109551334984861, + "grad_norm": 1.8817810959088215, + "learning_rate": 1.966994219393107e-05, + "loss": 1.271, + "step": 796 + }, + { + "epoch": 0.10968896229011836, + "grad_norm": 1.8696471343247156, + "learning_rate": 1.966880548913729e-05, + "loss": 1.2113, + "step": 797 + }, + { + "epoch": 0.10982658959537572, + "grad_norm": 1.8233629280841457, + "learning_rate": 1.966766686328421e-05, + "loss": 1.2153, + "step": 798 + }, + { + "epoch": 0.10996421690063309, + "grad_norm": 1.9364882446464375, + "learning_rate": 1.9666526316598063e-05, + "loss": 1.2581, + "step": 799 + }, + { + "epoch": 0.11010184420589045, + "grad_norm": 1.748060179594741, + "learning_rate": 1.966538384930546e-05, + "loss": 1.2059, + "step": 800 + }, + { + "epoch": 0.11023947151114781, + 
"grad_norm": 1.6224895433154127, + "learning_rate": 1.9664239461633384e-05, + "loss": 1.1624, + "step": 801 + }, + { + "epoch": 0.11037709881640517, + "grad_norm": 1.5471068768537146, + "learning_rate": 1.966309315380922e-05, + "loss": 1.1836, + "step": 802 + }, + { + "epoch": 0.11051472612166253, + "grad_norm": 1.996415501924688, + "learning_rate": 1.9661944926060714e-05, + "loss": 1.3271, + "step": 803 + }, + { + "epoch": 0.1106523534269199, + "grad_norm": 1.8500775643092564, + "learning_rate": 1.9660794778616016e-05, + "loss": 1.2281, + "step": 804 + }, + { + "epoch": 0.11078998073217726, + "grad_norm": 1.6000750212089965, + "learning_rate": 1.9659642711703634e-05, + "loss": 1.1316, + "step": 805 + }, + { + "epoch": 0.11092760803743462, + "grad_norm": 1.8759961343323412, + "learning_rate": 1.965848872555247e-05, + "loss": 1.1781, + "step": 806 + }, + { + "epoch": 0.11106523534269198, + "grad_norm": 1.670197242669125, + "learning_rate": 1.965733282039181e-05, + "loss": 1.1585, + "step": 807 + }, + { + "epoch": 0.11120286264794935, + "grad_norm": 1.9309876152607959, + "learning_rate": 1.9656174996451313e-05, + "loss": 1.1772, + "step": 808 + }, + { + "epoch": 0.11134048995320672, + "grad_norm": 1.6680067793506097, + "learning_rate": 1.9655015253961018e-05, + "loss": 1.199, + "step": 809 + }, + { + "epoch": 0.11147811725846409, + "grad_norm": 1.9119723618363742, + "learning_rate": 1.965385359315136e-05, + "loss": 1.1388, + "step": 810 + }, + { + "epoch": 0.11161574456372145, + "grad_norm": 1.8613221549664547, + "learning_rate": 1.9652690014253134e-05, + "loss": 1.1879, + "step": 811 + }, + { + "epoch": 0.11175337186897881, + "grad_norm": 1.6390392855243738, + "learning_rate": 1.965152451749754e-05, + "loss": 1.1766, + "step": 812 + }, + { + "epoch": 0.11189099917423617, + "grad_norm": 1.7462595359826287, + "learning_rate": 1.9650357103116134e-05, + "loss": 1.1712, + "step": 813 + }, + { + "epoch": 0.11202862647949353, + "grad_norm": 1.9498657499696128, + "learning_rate": 1.9649187771340873e-05, + "loss": 1.1652, + "step": 814 + }, + { + "epoch": 0.1121662537847509, + "grad_norm": 1.8671501162070752, + "learning_rate": 1.9648016522404087e-05, + "loss": 1.2681, + "step": 815 + }, + { + "epoch": 0.11230388109000826, + "grad_norm": 1.703099533861835, + "learning_rate": 1.9646843356538482e-05, + "loss": 1.2402, + "step": 816 + }, + { + "epoch": 0.11244150839526562, + "grad_norm": 1.707723108808178, + "learning_rate": 1.9645668273977154e-05, + "loss": 1.2, + "step": 817 + }, + { + "epoch": 0.11257913570052298, + "grad_norm": 1.8617622271902463, + "learning_rate": 1.9644491274953573e-05, + "loss": 1.1954, + "step": 818 + }, + { + "epoch": 0.11271676300578035, + "grad_norm": 1.9961433759016127, + "learning_rate": 1.9643312359701596e-05, + "loss": 1.2775, + "step": 819 + }, + { + "epoch": 0.11285439031103771, + "grad_norm": 1.8725230314491939, + "learning_rate": 1.9642131528455458e-05, + "loss": 1.2394, + "step": 820 + }, + { + "epoch": 0.11299201761629507, + "grad_norm": 1.8993790330750357, + "learning_rate": 1.9640948781449766e-05, + "loss": 1.2574, + "step": 821 + }, + { + "epoch": 0.11312964492155243, + "grad_norm": 1.61326478689415, + "learning_rate": 1.9639764118919525e-05, + "loss": 1.1587, + "step": 822 + }, + { + "epoch": 0.1132672722268098, + "grad_norm": 1.9097744955803853, + "learning_rate": 1.963857754110011e-05, + "loss": 1.2792, + "step": 823 + }, + { + "epoch": 0.11340489953206716, + "grad_norm": 1.8198049781596035, + "learning_rate": 1.963738904822727e-05, + "loss": 1.2117, + 
"step": 824 + }, + { + "epoch": 0.11354252683732452, + "grad_norm": 1.6698221562481035, + "learning_rate": 1.963619864053715e-05, + "loss": 1.2298, + "step": 825 + }, + { + "epoch": 0.11368015414258188, + "grad_norm": 2.22089543543673, + "learning_rate": 1.9635006318266264e-05, + "loss": 1.2443, + "step": 826 + }, + { + "epoch": 0.11381778144783924, + "grad_norm": 1.8790557310023606, + "learning_rate": 1.9633812081651514e-05, + "loss": 1.1758, + "step": 827 + }, + { + "epoch": 0.11395540875309662, + "grad_norm": 1.905489590669244, + "learning_rate": 1.9632615930930175e-05, + "loss": 1.1858, + "step": 828 + }, + { + "epoch": 0.11409303605835398, + "grad_norm": 1.8798053582950898, + "learning_rate": 1.9631417866339908e-05, + "loss": 1.2721, + "step": 829 + }, + { + "epoch": 0.11423066336361135, + "grad_norm": 1.7110812505620618, + "learning_rate": 1.9630217888118748e-05, + "loss": 1.257, + "step": 830 + }, + { + "epoch": 0.11436829066886871, + "grad_norm": 1.6684295565096017, + "learning_rate": 1.9629015996505117e-05, + "loss": 1.1245, + "step": 831 + }, + { + "epoch": 0.11450591797412607, + "grad_norm": 1.8124179377767673, + "learning_rate": 1.9627812191737815e-05, + "loss": 1.2686, + "step": 832 + }, + { + "epoch": 0.11464354527938343, + "grad_norm": 1.7700020488521024, + "learning_rate": 1.9626606474056022e-05, + "loss": 1.264, + "step": 833 + }, + { + "epoch": 0.1147811725846408, + "grad_norm": 1.6888144526293025, + "learning_rate": 1.9625398843699295e-05, + "loss": 1.2051, + "step": 834 + }, + { + "epoch": 0.11491879988989816, + "grad_norm": 1.785257425435577, + "learning_rate": 1.9624189300907574e-05, + "loss": 1.1827, + "step": 835 + }, + { + "epoch": 0.11505642719515552, + "grad_norm": 1.8808853577203786, + "learning_rate": 1.9622977845921185e-05, + "loss": 1.2914, + "step": 836 + }, + { + "epoch": 0.11519405450041288, + "grad_norm": 1.7027648780128342, + "learning_rate": 1.962176447898082e-05, + "loss": 1.1537, + "step": 837 + }, + { + "epoch": 0.11533168180567024, + "grad_norm": 1.8105159301384934, + "learning_rate": 1.9620549200327558e-05, + "loss": 1.1222, + "step": 838 + }, + { + "epoch": 0.1154693091109276, + "grad_norm": 1.8510744983305099, + "learning_rate": 1.9619332010202862e-05, + "loss": 1.1916, + "step": 839 + }, + { + "epoch": 0.11560693641618497, + "grad_norm": 1.9288719901878486, + "learning_rate": 1.961811290884857e-05, + "loss": 1.2782, + "step": 840 + }, + { + "epoch": 0.11574456372144233, + "grad_norm": 1.7036963394850868, + "learning_rate": 1.96168918965069e-05, + "loss": 1.142, + "step": 841 + }, + { + "epoch": 0.1158821910266997, + "grad_norm": 1.8908257807963516, + "learning_rate": 1.9615668973420453e-05, + "loss": 1.242, + "step": 842 + }, + { + "epoch": 0.11601981833195706, + "grad_norm": 1.6996979182194627, + "learning_rate": 1.9614444139832204e-05, + "loss": 1.1526, + "step": 843 + }, + { + "epoch": 0.11615744563721442, + "grad_norm": 1.81368215009564, + "learning_rate": 1.9613217395985508e-05, + "loss": 1.1692, + "step": 844 + }, + { + "epoch": 0.11629507294247178, + "grad_norm": 1.9160781015939727, + "learning_rate": 1.961198874212411e-05, + "loss": 1.1775, + "step": 845 + }, + { + "epoch": 0.11643270024772914, + "grad_norm": 1.9475045493186849, + "learning_rate": 1.9610758178492123e-05, + "loss": 1.2796, + "step": 846 + }, + { + "epoch": 0.11657032755298652, + "grad_norm": 1.668364768270674, + "learning_rate": 1.9609525705334047e-05, + "loss": 1.1427, + "step": 847 + }, + { + "epoch": 0.11670795485824388, + "grad_norm": 1.701056052059771, + 
"learning_rate": 1.9608291322894745e-05, + "loss": 1.1939, + "step": 848 + }, + { + "epoch": 0.11684558216350124, + "grad_norm": 1.6741979979033652, + "learning_rate": 1.9607055031419485e-05, + "loss": 1.1607, + "step": 849 + }, + { + "epoch": 0.1169832094687586, + "grad_norm": 1.8334524030645067, + "learning_rate": 1.9605816831153897e-05, + "loss": 1.2256, + "step": 850 + }, + { + "epoch": 0.11712083677401597, + "grad_norm": 1.764896912502412, + "learning_rate": 1.960457672234399e-05, + "loss": 1.2586, + "step": 851 + }, + { + "epoch": 0.11725846407927333, + "grad_norm": 1.7490870606132587, + "learning_rate": 1.9603334705236165e-05, + "loss": 1.3306, + "step": 852 + }, + { + "epoch": 0.11739609138453069, + "grad_norm": 1.7589331806171191, + "learning_rate": 1.9602090780077184e-05, + "loss": 1.2887, + "step": 853 + }, + { + "epoch": 0.11753371868978806, + "grad_norm": 1.7531140945576942, + "learning_rate": 1.960084494711421e-05, + "loss": 1.2094, + "step": 854 + }, + { + "epoch": 0.11767134599504542, + "grad_norm": 1.7381167836118683, + "learning_rate": 1.9599597206594764e-05, + "loss": 1.188, + "step": 855 + }, + { + "epoch": 0.11780897330030278, + "grad_norm": 1.662611953461017, + "learning_rate": 1.9598347558766755e-05, + "loss": 1.078, + "step": 856 + }, + { + "epoch": 0.11794660060556014, + "grad_norm": 1.7853197150648044, + "learning_rate": 1.9597096003878474e-05, + "loss": 1.1884, + "step": 857 + }, + { + "epoch": 0.1180842279108175, + "grad_norm": 1.812426785010667, + "learning_rate": 1.9595842542178588e-05, + "loss": 1.2952, + "step": 858 + }, + { + "epoch": 0.11822185521607487, + "grad_norm": 1.9771318816184522, + "learning_rate": 1.959458717391614e-05, + "loss": 1.1888, + "step": 859 + }, + { + "epoch": 0.11835948252133223, + "grad_norm": 1.9590285727958894, + "learning_rate": 1.9593329899340556e-05, + "loss": 1.1648, + "step": 860 + }, + { + "epoch": 0.11849710982658959, + "grad_norm": 1.7975882711701672, + "learning_rate": 1.9592070718701645e-05, + "loss": 1.1918, + "step": 861 + }, + { + "epoch": 0.11863473713184695, + "grad_norm": 1.7638369780117387, + "learning_rate": 1.9590809632249576e-05, + "loss": 1.199, + "step": 862 + }, + { + "epoch": 0.11877236443710432, + "grad_norm": 1.9692409783066018, + "learning_rate": 1.958954664023492e-05, + "loss": 1.2083, + "step": 863 + }, + { + "epoch": 0.11890999174236168, + "grad_norm": 1.9069685008249782, + "learning_rate": 1.9588281742908615e-05, + "loss": 1.2375, + "step": 864 + }, + { + "epoch": 0.11904761904761904, + "grad_norm": 1.7885446339451163, + "learning_rate": 1.9587014940521975e-05, + "loss": 1.1749, + "step": 865 + }, + { + "epoch": 0.11918524635287642, + "grad_norm": 2.0044570723063897, + "learning_rate": 1.9585746233326698e-05, + "loss": 1.1707, + "step": 866 + }, + { + "epoch": 0.11932287365813378, + "grad_norm": 2.055830830005792, + "learning_rate": 1.9584475621574857e-05, + "loss": 1.2536, + "step": 867 + }, + { + "epoch": 0.11946050096339114, + "grad_norm": 1.910676245488409, + "learning_rate": 1.958320310551891e-05, + "loss": 1.1936, + "step": 868 + }, + { + "epoch": 0.1195981282686485, + "grad_norm": 1.748345875820702, + "learning_rate": 1.9581928685411685e-05, + "loss": 1.2188, + "step": 869 + }, + { + "epoch": 0.11973575557390587, + "grad_norm": 1.701262009569421, + "learning_rate": 1.958065236150639e-05, + "loss": 1.2505, + "step": 870 + }, + { + "epoch": 0.11987338287916323, + "grad_norm": 1.720350519101937, + "learning_rate": 1.957937413405662e-05, + "loss": 1.1504, + "step": 871 + }, + { + "epoch": 
0.12001101018442059, + "grad_norm": 1.8301356647355744, + "learning_rate": 1.9578094003316336e-05, + "loss": 1.3008, + "step": 872 + }, + { + "epoch": 0.12014863748967795, + "grad_norm": 1.74662350534866, + "learning_rate": 1.9576811969539884e-05, + "loss": 1.1875, + "step": 873 + }, + { + "epoch": 0.12028626479493532, + "grad_norm": 1.8750771111823648, + "learning_rate": 1.957552803298198e-05, + "loss": 1.1388, + "step": 874 + }, + { + "epoch": 0.12042389210019268, + "grad_norm": 1.7952243704730921, + "learning_rate": 1.9574242193897735e-05, + "loss": 1.1582, + "step": 875 + }, + { + "epoch": 0.12056151940545004, + "grad_norm": 1.5682299961618378, + "learning_rate": 1.9572954452542627e-05, + "loss": 1.1697, + "step": 876 + }, + { + "epoch": 0.1206991467107074, + "grad_norm": 1.7516962879332203, + "learning_rate": 1.9571664809172505e-05, + "loss": 1.1978, + "step": 877 + }, + { + "epoch": 0.12083677401596477, + "grad_norm": 1.852355924708167, + "learning_rate": 1.9570373264043606e-05, + "loss": 1.2418, + "step": 878 + }, + { + "epoch": 0.12097440132122213, + "grad_norm": 1.7535538721339785, + "learning_rate": 1.9569079817412545e-05, + "loss": 1.2343, + "step": 879 + }, + { + "epoch": 0.12111202862647949, + "grad_norm": 1.7169762275226343, + "learning_rate": 1.956778446953631e-05, + "loss": 1.2458, + "step": 880 + }, + { + "epoch": 0.12124965593173685, + "grad_norm": 1.786448507139269, + "learning_rate": 1.956648722067227e-05, + "loss": 1.2379, + "step": 881 + }, + { + "epoch": 0.12138728323699421, + "grad_norm": 1.8872721138716886, + "learning_rate": 1.956518807107817e-05, + "loss": 1.2703, + "step": 882 + }, + { + "epoch": 0.12152491054225158, + "grad_norm": 1.8933606860245056, + "learning_rate": 1.9563887021012135e-05, + "loss": 1.2002, + "step": 883 + }, + { + "epoch": 0.12166253784750894, + "grad_norm": 1.8799858929558066, + "learning_rate": 1.9562584070732663e-05, + "loss": 1.2304, + "step": 884 + }, + { + "epoch": 0.12180016515276632, + "grad_norm": 1.9863110537050936, + "learning_rate": 1.9561279220498632e-05, + "loss": 1.273, + "step": 885 + }, + { + "epoch": 0.12193779245802368, + "grad_norm": 1.4976881244914186, + "learning_rate": 1.9559972470569302e-05, + "loss": 1.1797, + "step": 886 + }, + { + "epoch": 0.12207541976328104, + "grad_norm": 1.6519086895173287, + "learning_rate": 1.9558663821204302e-05, + "loss": 1.1606, + "step": 887 + }, + { + "epoch": 0.1222130470685384, + "grad_norm": 1.6979986986458808, + "learning_rate": 1.9557353272663648e-05, + "loss": 1.2686, + "step": 888 + }, + { + "epoch": 0.12235067437379576, + "grad_norm": 1.8682733886495013, + "learning_rate": 1.955604082520772e-05, + "loss": 1.2198, + "step": 889 + }, + { + "epoch": 0.12248830167905313, + "grad_norm": 1.5445401032171855, + "learning_rate": 1.955472647909729e-05, + "loss": 1.133, + "step": 890 + }, + { + "epoch": 0.12262592898431049, + "grad_norm": 1.7580662185546172, + "learning_rate": 1.95534102345935e-05, + "loss": 1.1769, + "step": 891 + }, + { + "epoch": 0.12276355628956785, + "grad_norm": 1.7562581012535048, + "learning_rate": 1.9552092091957864e-05, + "loss": 1.1413, + "step": 892 + }, + { + "epoch": 0.12290118359482521, + "grad_norm": 1.7279215983630576, + "learning_rate": 1.9550772051452287e-05, + "loss": 1.3172, + "step": 893 + }, + { + "epoch": 0.12303881090008258, + "grad_norm": 1.7076100369758198, + "learning_rate": 1.954945011333904e-05, + "loss": 1.1432, + "step": 894 + }, + { + "epoch": 0.12317643820533994, + "grad_norm": 1.673233751341388, + "learning_rate": 1.954812627788077e-05, + 
"loss": 1.1622, + "step": 895 + }, + { + "epoch": 0.1233140655105973, + "grad_norm": 1.7419266562126345, + "learning_rate": 1.954680054534051e-05, + "loss": 1.1963, + "step": 896 + }, + { + "epoch": 0.12345169281585466, + "grad_norm": 1.8171513358468807, + "learning_rate": 1.9545472915981664e-05, + "loss": 1.2056, + "step": 897 + }, + { + "epoch": 0.12358932012111203, + "grad_norm": 1.6679337355766228, + "learning_rate": 1.9544143390068012e-05, + "loss": 1.1497, + "step": 898 + }, + { + "epoch": 0.12372694742636939, + "grad_norm": 1.94580127851496, + "learning_rate": 1.9542811967863714e-05, + "loss": 1.2018, + "step": 899 + }, + { + "epoch": 0.12386457473162675, + "grad_norm": 1.8241949534980415, + "learning_rate": 1.9541478649633304e-05, + "loss": 1.2464, + "step": 900 + }, + { + "epoch": 0.12400220203688411, + "grad_norm": 1.936540773230824, + "learning_rate": 1.9540143435641693e-05, + "loss": 1.2172, + "step": 901 + }, + { + "epoch": 0.12413982934214148, + "grad_norm": 1.876067854726912, + "learning_rate": 1.9538806326154173e-05, + "loss": 1.1921, + "step": 902 + }, + { + "epoch": 0.12427745664739884, + "grad_norm": 1.7665301315509787, + "learning_rate": 1.9537467321436413e-05, + "loss": 1.1737, + "step": 903 + }, + { + "epoch": 0.12441508395265621, + "grad_norm": 1.8162055564067796, + "learning_rate": 1.9536126421754444e-05, + "loss": 1.0952, + "step": 904 + }, + { + "epoch": 0.12455271125791358, + "grad_norm": 1.7296780190435752, + "learning_rate": 1.9534783627374693e-05, + "loss": 1.1698, + "step": 905 + }, + { + "epoch": 0.12469033856317094, + "grad_norm": 1.671988218076766, + "learning_rate": 1.953343893856395e-05, + "loss": 1.2054, + "step": 906 + }, + { + "epoch": 0.1248279658684283, + "grad_norm": 1.9083236270954649, + "learning_rate": 1.953209235558939e-05, + "loss": 1.1741, + "step": 907 + }, + { + "epoch": 0.12496559317368566, + "grad_norm": 1.7519943695966007, + "learning_rate": 1.9530743878718556e-05, + "loss": 1.2594, + "step": 908 + }, + { + "epoch": 0.125103220478943, + "grad_norm": 1.6984133546060982, + "learning_rate": 1.952939350821938e-05, + "loss": 1.2143, + "step": 909 + }, + { + "epoch": 0.1252408477842004, + "grad_norm": 1.634057158730061, + "learning_rate": 1.9528041244360154e-05, + "loss": 1.1355, + "step": 910 + }, + { + "epoch": 0.12537847508945774, + "grad_norm": 1.9614450274222952, + "learning_rate": 1.952668708740956e-05, + "loss": 1.2045, + "step": 911 + }, + { + "epoch": 0.1255161023947151, + "grad_norm": 1.6451820967591815, + "learning_rate": 1.9525331037636644e-05, + "loss": 1.226, + "step": 912 + }, + { + "epoch": 0.12565372969997246, + "grad_norm": 1.7079880857151484, + "learning_rate": 1.952397309531084e-05, + "loss": 1.2498, + "step": 913 + }, + { + "epoch": 0.12579135700522984, + "grad_norm": 1.898090558307993, + "learning_rate": 1.9522613260701953e-05, + "loss": 1.2303, + "step": 914 + }, + { + "epoch": 0.1259289843104872, + "grad_norm": 1.9464490987711276, + "learning_rate": 1.952125153408016e-05, + "loss": 1.2242, + "step": 915 + }, + { + "epoch": 0.12606661161574456, + "grad_norm": 1.7066171121219222, + "learning_rate": 1.9519887915716017e-05, + "loss": 1.1619, + "step": 916 + }, + { + "epoch": 0.12620423892100194, + "grad_norm": 1.867252243384439, + "learning_rate": 1.951852240588046e-05, + "loss": 1.2599, + "step": 917 + }, + { + "epoch": 0.1263418662262593, + "grad_norm": 1.6361455203299031, + "learning_rate": 1.9517155004844795e-05, + "loss": 1.155, + "step": 918 + }, + { + "epoch": 0.12647949353151666, + "grad_norm": 1.795034711430814, + 
"learning_rate": 1.951578571288071e-05, + "loss": 1.1376, + "step": 919 + }, + { + "epoch": 0.126617120836774, + "grad_norm": 1.7023381840888188, + "learning_rate": 1.9514414530260256e-05, + "loss": 1.2517, + "step": 920 + }, + { + "epoch": 0.1267547481420314, + "grad_norm": 2.0873406467605684, + "learning_rate": 1.9513041457255873e-05, + "loss": 1.2014, + "step": 921 + }, + { + "epoch": 0.12689237544728874, + "grad_norm": 1.7184294618811753, + "learning_rate": 1.9511666494140375e-05, + "loss": 1.1628, + "step": 922 + }, + { + "epoch": 0.1270300027525461, + "grad_norm": 1.7379198521780002, + "learning_rate": 1.9510289641186945e-05, + "loss": 1.1927, + "step": 923 + }, + { + "epoch": 0.12716763005780346, + "grad_norm": 1.6348611550642247, + "learning_rate": 1.950891089866914e-05, + "loss": 1.126, + "step": 924 + }, + { + "epoch": 0.12730525736306084, + "grad_norm": 2.001623064603782, + "learning_rate": 1.9507530266860906e-05, + "loss": 1.2412, + "step": 925 + }, + { + "epoch": 0.12744288466831818, + "grad_norm": 1.7738383139429934, + "learning_rate": 1.950614774603655e-05, + "loss": 1.2028, + "step": 926 + }, + { + "epoch": 0.12758051197357556, + "grad_norm": 1.933072542887908, + "learning_rate": 1.9504763336470766e-05, + "loss": 1.2251, + "step": 927 + }, + { + "epoch": 0.1277181392788329, + "grad_norm": 1.7999483387018915, + "learning_rate": 1.950337703843861e-05, + "loss": 1.2151, + "step": 928 + }, + { + "epoch": 0.12785576658409029, + "grad_norm": 1.6487682772734311, + "learning_rate": 1.950198885221552e-05, + "loss": 1.1147, + "step": 929 + }, + { + "epoch": 0.12799339388934763, + "grad_norm": 1.8730587695500056, + "learning_rate": 1.950059877807732e-05, + "loss": 1.1684, + "step": 930 + }, + { + "epoch": 0.128131021194605, + "grad_norm": 1.8528040632627034, + "learning_rate": 1.949920681630019e-05, + "loss": 1.2139, + "step": 931 + }, + { + "epoch": 0.12826864849986236, + "grad_norm": 1.6732805206115529, + "learning_rate": 1.9497812967160692e-05, + "loss": 1.2013, + "step": 932 + }, + { + "epoch": 0.12840627580511974, + "grad_norm": 1.8608366539603611, + "learning_rate": 1.949641723093577e-05, + "loss": 1.268, + "step": 933 + }, + { + "epoch": 0.1285439031103771, + "grad_norm": 1.8669876562808079, + "learning_rate": 1.949501960790274e-05, + "loss": 1.1791, + "step": 934 + }, + { + "epoch": 0.12868153041563446, + "grad_norm": 1.7427210829656057, + "learning_rate": 1.949362009833928e-05, + "loss": 1.2205, + "step": 935 + }, + { + "epoch": 0.12881915772089184, + "grad_norm": 1.7347219291429874, + "learning_rate": 1.9492218702523464e-05, + "loss": 1.1405, + "step": 936 + }, + { + "epoch": 0.12895678502614918, + "grad_norm": 1.8575892328881445, + "learning_rate": 1.9490815420733724e-05, + "loss": 1.2235, + "step": 937 + }, + { + "epoch": 0.12909441233140656, + "grad_norm": 1.8638912828402099, + "learning_rate": 1.9489410253248876e-05, + "loss": 1.0953, + "step": 938 + }, + { + "epoch": 0.1292320396366639, + "grad_norm": 1.775823391274239, + "learning_rate": 1.9488003200348105e-05, + "loss": 1.1953, + "step": 939 + }, + { + "epoch": 0.12936966694192129, + "grad_norm": 1.6915162545378302, + "learning_rate": 1.9486594262310977e-05, + "loss": 1.1773, + "step": 940 + }, + { + "epoch": 0.12950729424717863, + "grad_norm": 1.6747112639953017, + "learning_rate": 1.9485183439417428e-05, + "loss": 1.2225, + "step": 941 + }, + { + "epoch": 0.129644921552436, + "grad_norm": 2.0704571179317313, + "learning_rate": 1.9483770731947766e-05, + "loss": 1.2667, + "step": 942 + }, + { + "epoch": 
0.12978254885769336, + "grad_norm": 1.7495707875070545, + "learning_rate": 1.948235614018268e-05, + "loss": 1.1347, + "step": 943 + }, + { + "epoch": 0.12992017616295073, + "grad_norm": 1.5889053769632995, + "learning_rate": 1.9480939664403223e-05, + "loss": 1.14, + "step": 944 + }, + { + "epoch": 0.13005780346820808, + "grad_norm": 1.999426728163562, + "learning_rate": 1.9479521304890837e-05, + "loss": 1.2003, + "step": 945 + }, + { + "epoch": 0.13019543077346546, + "grad_norm": 1.7824427710009412, + "learning_rate": 1.947810106192733e-05, + "loss": 1.1148, + "step": 946 + }, + { + "epoch": 0.1303330580787228, + "grad_norm": 2.0298392691296203, + "learning_rate": 1.947667893579488e-05, + "loss": 1.2918, + "step": 947 + }, + { + "epoch": 0.13047068538398018, + "grad_norm": 1.7003766456052392, + "learning_rate": 1.947525492677605e-05, + "loss": 1.1613, + "step": 948 + }, + { + "epoch": 0.13060831268923753, + "grad_norm": 1.7999058410871331, + "learning_rate": 1.9473829035153767e-05, + "loss": 1.1968, + "step": 949 + }, + { + "epoch": 0.1307459399944949, + "grad_norm": 2.076816770750908, + "learning_rate": 1.9472401261211337e-05, + "loss": 1.2655, + "step": 950 + }, + { + "epoch": 0.13088356729975226, + "grad_norm": 1.8597615323640324, + "learning_rate": 1.9470971605232442e-05, + "loss": 1.2028, + "step": 951 + }, + { + "epoch": 0.13102119460500963, + "grad_norm": 1.7375982906603942, + "learning_rate": 1.946954006750113e-05, + "loss": 1.1663, + "step": 952 + }, + { + "epoch": 0.131158821910267, + "grad_norm": 1.8234967342291553, + "learning_rate": 1.946810664830183e-05, + "loss": 1.2284, + "step": 953 + }, + { + "epoch": 0.13129644921552436, + "grad_norm": 1.9554474505837507, + "learning_rate": 1.9466671347919344e-05, + "loss": 1.2314, + "step": 954 + }, + { + "epoch": 0.13143407652078173, + "grad_norm": 1.9156508013180402, + "learning_rate": 1.9465234166638846e-05, + "loss": 1.177, + "step": 955 + }, + { + "epoch": 0.13157170382603908, + "grad_norm": 1.7085134496264502, + "learning_rate": 1.9463795104745884e-05, + "loss": 1.152, + "step": 956 + }, + { + "epoch": 0.13170933113129646, + "grad_norm": 1.6434138197626693, + "learning_rate": 1.946235416252638e-05, + "loss": 1.1768, + "step": 957 + }, + { + "epoch": 0.1318469584365538, + "grad_norm": 1.8287900367511738, + "learning_rate": 1.946091134026663e-05, + "loss": 1.0793, + "step": 958 + }, + { + "epoch": 0.13198458574181118, + "grad_norm": 1.7780833774465106, + "learning_rate": 1.94594666382533e-05, + "loss": 1.1682, + "step": 959 + }, + { + "epoch": 0.13212221304706853, + "grad_norm": 1.7610874079193026, + "learning_rate": 1.9458020056773437e-05, + "loss": 1.1199, + "step": 960 + }, + { + "epoch": 0.1322598403523259, + "grad_norm": 1.8662459632903046, + "learning_rate": 1.9456571596114453e-05, + "loss": 1.1771, + "step": 961 + }, + { + "epoch": 0.13239746765758326, + "grad_norm": 1.5986172070234737, + "learning_rate": 1.9455121256564142e-05, + "loss": 1.1181, + "step": 962 + }, + { + "epoch": 0.13253509496284063, + "grad_norm": 1.5864292859221902, + "learning_rate": 1.9453669038410662e-05, + "loss": 1.1469, + "step": 963 + }, + { + "epoch": 0.13267272226809798, + "grad_norm": 1.6643144323686463, + "learning_rate": 1.945221494194255e-05, + "loss": 1.1307, + "step": 964 + }, + { + "epoch": 0.13281034957335536, + "grad_norm": 1.710399941688845, + "learning_rate": 1.945075896744872e-05, + "loss": 1.2325, + "step": 965 + }, + { + "epoch": 0.1329479768786127, + "grad_norm": 1.8138100319122905, + "learning_rate": 1.9449301115218446e-05, + 
"loss": 1.17, + "step": 966 + }, + { + "epoch": 0.13308560418387008, + "grad_norm": 1.6987073161560877, + "learning_rate": 1.9447841385541387e-05, + "loss": 1.199, + "step": 967 + }, + { + "epoch": 0.13322323148912743, + "grad_norm": 1.8201087419553204, + "learning_rate": 1.9446379778707573e-05, + "loss": 1.1858, + "step": 968 + }, + { + "epoch": 0.1333608587943848, + "grad_norm": 1.9144000429448347, + "learning_rate": 1.9444916295007404e-05, + "loss": 1.2228, + "step": 969 + }, + { + "epoch": 0.13349848609964216, + "grad_norm": 1.7404283608545148, + "learning_rate": 1.9443450934731653e-05, + "loss": 1.1596, + "step": 970 + }, + { + "epoch": 0.13363611340489953, + "grad_norm": 1.7346457983269885, + "learning_rate": 1.944198369817147e-05, + "loss": 1.1319, + "step": 971 + }, + { + "epoch": 0.1337737407101569, + "grad_norm": 1.831135867258422, + "learning_rate": 1.944051458561837e-05, + "loss": 1.1445, + "step": 972 + }, + { + "epoch": 0.13391136801541426, + "grad_norm": 1.5902069722950272, + "learning_rate": 1.9439043597364252e-05, + "loss": 1.1545, + "step": 973 + }, + { + "epoch": 0.13404899532067163, + "grad_norm": 1.744437434324077, + "learning_rate": 1.9437570733701375e-05, + "loss": 1.2082, + "step": 974 + }, + { + "epoch": 0.13418662262592898, + "grad_norm": 1.9540738678049723, + "learning_rate": 1.943609599492238e-05, + "loss": 1.2003, + "step": 975 + }, + { + "epoch": 0.13432424993118636, + "grad_norm": 1.7992368627666153, + "learning_rate": 1.9434619381320278e-05, + "loss": 1.1986, + "step": 976 + }, + { + "epoch": 0.1344618772364437, + "grad_norm": 1.8787671340662577, + "learning_rate": 1.9433140893188448e-05, + "loss": 1.1974, + "step": 977 + }, + { + "epoch": 0.13459950454170108, + "grad_norm": 1.942763566283457, + "learning_rate": 1.943166053082065e-05, + "loss": 1.1802, + "step": 978 + }, + { + "epoch": 0.13473713184695843, + "grad_norm": 1.77450404026138, + "learning_rate": 1.943017829451101e-05, + "loss": 1.2281, + "step": 979 + }, + { + "epoch": 0.1348747591522158, + "grad_norm": 1.984280537582193, + "learning_rate": 1.9428694184554028e-05, + "loss": 1.1355, + "step": 980 + }, + { + "epoch": 0.13501238645747315, + "grad_norm": 1.7601278338710733, + "learning_rate": 1.9427208201244574e-05, + "loss": 1.1676, + "step": 981 + }, + { + "epoch": 0.13515001376273053, + "grad_norm": 1.797922221760212, + "learning_rate": 1.9425720344877898e-05, + "loss": 1.1582, + "step": 982 + }, + { + "epoch": 0.13528764106798788, + "grad_norm": 1.7285323922114277, + "learning_rate": 1.942423061574961e-05, + "loss": 1.1811, + "step": 983 + }, + { + "epoch": 0.13542526837324526, + "grad_norm": 2.08796500588785, + "learning_rate": 1.9422739014155706e-05, + "loss": 1.1933, + "step": 984 + }, + { + "epoch": 0.1355628956785026, + "grad_norm": 1.9537189972531754, + "learning_rate": 1.9421245540392542e-05, + "loss": 1.2037, + "step": 985 + }, + { + "epoch": 0.13570052298375998, + "grad_norm": 1.596405297410078, + "learning_rate": 1.9419750194756852e-05, + "loss": 1.21, + "step": 986 + }, + { + "epoch": 0.13583815028901733, + "grad_norm": 1.8410900344787045, + "learning_rate": 1.941825297754574e-05, + "loss": 1.256, + "step": 987 + }, + { + "epoch": 0.1359757775942747, + "grad_norm": 1.7058783634179229, + "learning_rate": 1.9416753889056683e-05, + "loss": 1.1779, + "step": 988 + }, + { + "epoch": 0.13611340489953205, + "grad_norm": 1.5744881145235696, + "learning_rate": 1.941525292958753e-05, + "loss": 1.08, + "step": 989 + }, + { + "epoch": 0.13625103220478943, + "grad_norm": 1.8437728403828717, + 
"learning_rate": 1.94137500994365e-05, + "loss": 1.2129, + "step": 990 + }, + { + "epoch": 0.1363886595100468, + "grad_norm": 1.7826378706870099, + "learning_rate": 1.9412245398902187e-05, + "loss": 1.1938, + "step": 991 + }, + { + "epoch": 0.13652628681530415, + "grad_norm": 1.6310487316330167, + "learning_rate": 1.9410738828283552e-05, + "loss": 1.137, + "step": 992 + }, + { + "epoch": 0.13666391412056153, + "grad_norm": 1.8128725023210575, + "learning_rate": 1.940923038787993e-05, + "loss": 1.084, + "step": 993 + }, + { + "epoch": 0.13680154142581888, + "grad_norm": 1.7791035332214813, + "learning_rate": 1.940772007799103e-05, + "loss": 1.1933, + "step": 994 + }, + { + "epoch": 0.13693916873107626, + "grad_norm": 1.6534117925347271, + "learning_rate": 1.9406207898916927e-05, + "loss": 1.1888, + "step": 995 + }, + { + "epoch": 0.1370767960363336, + "grad_norm": 1.6779203161696332, + "learning_rate": 1.9404693850958072e-05, + "loss": 1.155, + "step": 996 + }, + { + "epoch": 0.13721442334159098, + "grad_norm": 1.8769608977013257, + "learning_rate": 1.9403177934415286e-05, + "loss": 1.287, + "step": 997 + }, + { + "epoch": 0.13735205064684833, + "grad_norm": 2.083083583318729, + "learning_rate": 1.9401660149589757e-05, + "loss": 1.1051, + "step": 998 + }, + { + "epoch": 0.1374896779521057, + "grad_norm": 1.7496090381720109, + "learning_rate": 1.9400140496783052e-05, + "loss": 1.2172, + "step": 999 + }, + { + "epoch": 0.13762730525736305, + "grad_norm": 1.8305695694032829, + "learning_rate": 1.9398618976297104e-05, + "loss": 1.2164, + "step": 1000 + }, + { + "epoch": 0.13776493256262043, + "grad_norm": 1.9222872600593532, + "learning_rate": 1.939709558843422e-05, + "loss": 1.1451, + "step": 1001 + }, + { + "epoch": 0.13790255986787778, + "grad_norm": 1.7281968622028878, + "learning_rate": 1.9395570333497074e-05, + "loss": 1.2143, + "step": 1002 + }, + { + "epoch": 0.13804018717313515, + "grad_norm": 1.6635202909868703, + "learning_rate": 1.9394043211788715e-05, + "loss": 1.1511, + "step": 1003 + }, + { + "epoch": 0.1381778144783925, + "grad_norm": 1.7759548107742285, + "learning_rate": 1.939251422361256e-05, + "loss": 1.214, + "step": 1004 + }, + { + "epoch": 0.13831544178364988, + "grad_norm": 1.7051114919664374, + "learning_rate": 1.9390983369272397e-05, + "loss": 1.2002, + "step": 1005 + }, + { + "epoch": 0.13845306908890723, + "grad_norm": 1.9916929530575822, + "learning_rate": 1.9389450649072387e-05, + "loss": 1.1544, + "step": 1006 + }, + { + "epoch": 0.1385906963941646, + "grad_norm": 1.7018306325073047, + "learning_rate": 1.938791606331706e-05, + "loss": 1.128, + "step": 1007 + }, + { + "epoch": 0.13872832369942195, + "grad_norm": 1.7620122935853855, + "learning_rate": 1.9386379612311322e-05, + "loss": 1.2104, + "step": 1008 + }, + { + "epoch": 0.13886595100467933, + "grad_norm": 1.758463166097322, + "learning_rate": 1.9384841296360436e-05, + "loss": 1.198, + "step": 1009 + }, + { + "epoch": 0.1390035783099367, + "grad_norm": 1.7035755842904703, + "learning_rate": 1.938330111577005e-05, + "loss": 1.137, + "step": 1010 + }, + { + "epoch": 0.13914120561519405, + "grad_norm": 1.6180985550871507, + "learning_rate": 1.9381759070846177e-05, + "loss": 1.1109, + "step": 1011 + }, + { + "epoch": 0.13927883292045143, + "grad_norm": 1.8239650085082386, + "learning_rate": 1.93802151618952e-05, + "loss": 1.1805, + "step": 1012 + }, + { + "epoch": 0.13941646022570878, + "grad_norm": 1.717971985154755, + "learning_rate": 1.9378669389223867e-05, + "loss": 1.1397, + "step": 1013 + }, + { + "epoch": 
0.13955408753096615, + "grad_norm": 1.8742652820598187, + "learning_rate": 1.937712175313931e-05, + "loss": 1.0707, + "step": 1014 + }, + { + "epoch": 0.1396917148362235, + "grad_norm": 1.716158338179538, + "learning_rate": 1.937557225394902e-05, + "loss": 1.1402, + "step": 1015 + }, + { + "epoch": 0.13982934214148088, + "grad_norm": 1.8125418435452227, + "learning_rate": 1.9374020891960858e-05, + "loss": 1.1629, + "step": 1016 + }, + { + "epoch": 0.13996696944673823, + "grad_norm": 1.830710050842697, + "learning_rate": 1.9372467667483065e-05, + "loss": 1.2176, + "step": 1017 + }, + { + "epoch": 0.1401045967519956, + "grad_norm": 1.782669261878356, + "learning_rate": 1.9370912580824238e-05, + "loss": 1.2282, + "step": 1018 + }, + { + "epoch": 0.14024222405725295, + "grad_norm": 1.7579993267498053, + "learning_rate": 1.9369355632293352e-05, + "loss": 1.1382, + "step": 1019 + }, + { + "epoch": 0.14037985136251033, + "grad_norm": 1.9767420983376895, + "learning_rate": 1.936779682219976e-05, + "loss": 1.217, + "step": 1020 + }, + { + "epoch": 0.14051747866776768, + "grad_norm": 1.7926710583251622, + "learning_rate": 1.936623615085317e-05, + "loss": 1.1541, + "step": 1021 + }, + { + "epoch": 0.14065510597302505, + "grad_norm": 2.0166884806822205, + "learning_rate": 1.9364673618563666e-05, + "loss": 1.3269, + "step": 1022 + }, + { + "epoch": 0.1407927332782824, + "grad_norm": 2.251048067645687, + "learning_rate": 1.9363109225641702e-05, + "loss": 1.2945, + "step": 1023 + }, + { + "epoch": 0.14093036058353978, + "grad_norm": 1.7266424840449794, + "learning_rate": 1.93615429723981e-05, + "loss": 1.2156, + "step": 1024 + }, + { + "epoch": 0.14106798788879713, + "grad_norm": 1.7653596668641565, + "learning_rate": 1.9359974859144058e-05, + "loss": 1.2482, + "step": 1025 + }, + { + "epoch": 0.1412056151940545, + "grad_norm": 1.6493715042919326, + "learning_rate": 1.9358404886191133e-05, + "loss": 1.1414, + "step": 1026 + }, + { + "epoch": 0.14134324249931185, + "grad_norm": 1.63504392731604, + "learning_rate": 1.9356833053851262e-05, + "loss": 1.225, + "step": 1027 + }, + { + "epoch": 0.14148086980456923, + "grad_norm": 1.7665428026053909, + "learning_rate": 1.9355259362436743e-05, + "loss": 1.1353, + "step": 1028 + }, + { + "epoch": 0.1416184971098266, + "grad_norm": 1.6125876395540197, + "learning_rate": 1.935368381226025e-05, + "loss": 1.2844, + "step": 1029 + }, + { + "epoch": 0.14175612441508395, + "grad_norm": 1.788981634479404, + "learning_rate": 1.935210640363482e-05, + "loss": 1.2099, + "step": 1030 + }, + { + "epoch": 0.14189375172034133, + "grad_norm": 1.726604615163737, + "learning_rate": 1.935052713687386e-05, + "loss": 1.1622, + "step": 1031 + }, + { + "epoch": 0.14203137902559868, + "grad_norm": 1.743455890007309, + "learning_rate": 1.934894601229116e-05, + "loss": 1.1364, + "step": 1032 + }, + { + "epoch": 0.14216900633085605, + "grad_norm": 1.9621735073421336, + "learning_rate": 1.9347363030200857e-05, + "loss": 1.1423, + "step": 1033 + }, + { + "epoch": 0.1423066336361134, + "grad_norm": 1.758988591245746, + "learning_rate": 1.934577819091747e-05, + "loss": 1.1739, + "step": 1034 + }, + { + "epoch": 0.14244426094137078, + "grad_norm": 1.751718249895641, + "learning_rate": 1.9344191494755887e-05, + "loss": 1.2405, + "step": 1035 + }, + { + "epoch": 0.14258188824662812, + "grad_norm": 1.8790472774464573, + "learning_rate": 1.9342602942031364e-05, + "loss": 1.1736, + "step": 1036 + }, + { + "epoch": 0.1427195155518855, + "grad_norm": 1.8532339534859714, + "learning_rate": 
1.934101253305952e-05, + "loss": 1.2269, + "step": 1037 + }, + { + "epoch": 0.14285714285714285, + "grad_norm": 1.732238789058539, + "learning_rate": 1.933942026815635e-05, + "loss": 1.1781, + "step": 1038 + }, + { + "epoch": 0.14299477016240023, + "grad_norm": 1.9106705025407693, + "learning_rate": 1.933782614763822e-05, + "loss": 1.3124, + "step": 1039 + }, + { + "epoch": 0.14313239746765757, + "grad_norm": 1.7809880613524174, + "learning_rate": 1.933623017182185e-05, + "loss": 1.2129, + "step": 1040 + }, + { + "epoch": 0.14327002477291495, + "grad_norm": 2.293840668653045, + "learning_rate": 1.9334632341024352e-05, + "loss": 1.2869, + "step": 1041 + }, + { + "epoch": 0.1434076520781723, + "grad_norm": 1.8962266658647235, + "learning_rate": 1.9333032655563176e-05, + "loss": 1.1879, + "step": 1042 + }, + { + "epoch": 0.14354527938342967, + "grad_norm": 1.7333168283422837, + "learning_rate": 1.933143111575617e-05, + "loss": 1.2069, + "step": 1043 + }, + { + "epoch": 0.14368290668868702, + "grad_norm": 1.7512778750804503, + "learning_rate": 1.9329827721921535e-05, + "loss": 1.0966, + "step": 1044 + }, + { + "epoch": 0.1438205339939444, + "grad_norm": 1.7731853258870127, + "learning_rate": 1.9328222474377843e-05, + "loss": 1.1012, + "step": 1045 + }, + { + "epoch": 0.14395816129920175, + "grad_norm": 1.6706797510131859, + "learning_rate": 1.9326615373444036e-05, + "loss": 1.178, + "step": 1046 + }, + { + "epoch": 0.14409578860445912, + "grad_norm": 1.7886250745714387, + "learning_rate": 1.9325006419439418e-05, + "loss": 1.1862, + "step": 1047 + }, + { + "epoch": 0.1442334159097165, + "grad_norm": 1.7842012031483538, + "learning_rate": 1.9323395612683673e-05, + "loss": 1.2039, + "step": 1048 + }, + { + "epoch": 0.14437104321497385, + "grad_norm": 1.766246892959719, + "learning_rate": 1.9321782953496843e-05, + "loss": 1.166, + "step": 1049 + }, + { + "epoch": 0.14450867052023122, + "grad_norm": 1.5490139118158635, + "learning_rate": 1.932016844219934e-05, + "loss": 1.1899, + "step": 1050 + }, + { + "epoch": 0.14464629782548857, + "grad_norm": 1.8267749256918715, + "learning_rate": 1.9318552079111946e-05, + "loss": 1.2621, + "step": 1051 + }, + { + "epoch": 0.14478392513074595, + "grad_norm": 1.8740076347849273, + "learning_rate": 1.9316933864555814e-05, + "loss": 1.149, + "step": 1052 + }, + { + "epoch": 0.1449215524360033, + "grad_norm": 1.6240384842519076, + "learning_rate": 1.9315313798852456e-05, + "loss": 1.1747, + "step": 1053 + }, + { + "epoch": 0.14505917974126067, + "grad_norm": 1.9158754137865142, + "learning_rate": 1.931369188232376e-05, + "loss": 1.181, + "step": 1054 + }, + { + "epoch": 0.14519680704651802, + "grad_norm": 1.6661373349137558, + "learning_rate": 1.931206811529198e-05, + "loss": 1.214, + "step": 1055 + }, + { + "epoch": 0.1453344343517754, + "grad_norm": 1.6873691015904142, + "learning_rate": 1.9310442498079732e-05, + "loss": 1.0654, + "step": 1056 + }, + { + "epoch": 0.14547206165703275, + "grad_norm": 1.878988417018608, + "learning_rate": 1.9308815031010003e-05, + "loss": 1.1782, + "step": 1057 + }, + { + "epoch": 0.14560968896229012, + "grad_norm": 1.7727817859829433, + "learning_rate": 1.930718571440615e-05, + "loss": 1.1697, + "step": 1058 + }, + { + "epoch": 0.14574731626754747, + "grad_norm": 1.8044338444917591, + "learning_rate": 1.93055545485919e-05, + "loss": 1.2463, + "step": 1059 + }, + { + "epoch": 0.14588494357280485, + "grad_norm": 1.8348774430262578, + "learning_rate": 1.9303921533891342e-05, + "loss": 1.1561, + "step": 1060 + }, + { + "epoch": 
0.1460225708780622, + "grad_norm": 1.8579746835310416, + "learning_rate": 1.9302286670628932e-05, + "loss": 1.1746, + "step": 1061 + }, + { + "epoch": 0.14616019818331957, + "grad_norm": 1.7584891340215778, + "learning_rate": 1.930064995912949e-05, + "loss": 1.2167, + "step": 1062 + }, + { + "epoch": 0.14629782548857692, + "grad_norm": 1.9271754680079283, + "learning_rate": 1.929901139971822e-05, + "loss": 1.2323, + "step": 1063 + }, + { + "epoch": 0.1464354527938343, + "grad_norm": 1.6735516321392738, + "learning_rate": 1.9297370992720667e-05, + "loss": 1.1562, + "step": 1064 + }, + { + "epoch": 0.14657308009909165, + "grad_norm": 1.660688131026209, + "learning_rate": 1.929572873846277e-05, + "loss": 1.1151, + "step": 1065 + }, + { + "epoch": 0.14671070740434902, + "grad_norm": 1.6417273457225148, + "learning_rate": 1.9294084637270816e-05, + "loss": 1.1849, + "step": 1066 + }, + { + "epoch": 0.1468483347096064, + "grad_norm": 1.7055508407109583, + "learning_rate": 1.9292438689471466e-05, + "loss": 1.1513, + "step": 1067 + }, + { + "epoch": 0.14698596201486375, + "grad_norm": 1.680377650619904, + "learning_rate": 1.929079089539175e-05, + "loss": 1.1348, + "step": 1068 + }, + { + "epoch": 0.14712358932012112, + "grad_norm": 2.0644459502440644, + "learning_rate": 1.9289141255359055e-05, + "loss": 1.1607, + "step": 1069 + }, + { + "epoch": 0.14726121662537847, + "grad_norm": 1.9181268286509217, + "learning_rate": 1.9287489769701154e-05, + "loss": 1.2297, + "step": 1070 + }, + { + "epoch": 0.14739884393063585, + "grad_norm": 1.961396147867366, + "learning_rate": 1.9285836438746163e-05, + "loss": 1.168, + "step": 1071 + }, + { + "epoch": 0.1475364712358932, + "grad_norm": 1.6400062968449955, + "learning_rate": 1.9284181262822582e-05, + "loss": 1.1445, + "step": 1072 + }, + { + "epoch": 0.14767409854115057, + "grad_norm": 1.802438324944088, + "learning_rate": 1.928252424225927e-05, + "loss": 1.0839, + "step": 1073 + }, + { + "epoch": 0.14781172584640792, + "grad_norm": 1.884699344357473, + "learning_rate": 1.9280865377385458e-05, + "loss": 1.1297, + "step": 1074 + }, + { + "epoch": 0.1479493531516653, + "grad_norm": 1.5926213909405698, + "learning_rate": 1.9279204668530736e-05, + "loss": 1.1156, + "step": 1075 + }, + { + "epoch": 0.14808698045692265, + "grad_norm": 1.7060801070475513, + "learning_rate": 1.9277542116025063e-05, + "loss": 1.1051, + "step": 1076 + }, + { + "epoch": 0.14822460776218002, + "grad_norm": 1.693883986145678, + "learning_rate": 1.927587772019877e-05, + "loss": 1.1408, + "step": 1077 + }, + { + "epoch": 0.14836223506743737, + "grad_norm": 1.821795209016418, + "learning_rate": 1.9274211481382542e-05, + "loss": 1.1327, + "step": 1078 + }, + { + "epoch": 0.14849986237269475, + "grad_norm": 1.710208067331486, + "learning_rate": 1.9272543399907446e-05, + "loss": 1.1074, + "step": 1079 + }, + { + "epoch": 0.1486374896779521, + "grad_norm": 1.781492477275213, + "learning_rate": 1.9270873476104902e-05, + "loss": 1.1232, + "step": 1080 + }, + { + "epoch": 0.14877511698320947, + "grad_norm": 1.814575671002813, + "learning_rate": 1.9269201710306706e-05, + "loss": 1.2064, + "step": 1081 + }, + { + "epoch": 0.14891274428846682, + "grad_norm": 1.8733754758388939, + "learning_rate": 1.926752810284501e-05, + "loss": 1.1649, + "step": 1082 + }, + { + "epoch": 0.1490503715937242, + "grad_norm": 1.7540829513207346, + "learning_rate": 1.9265852654052337e-05, + "loss": 1.1768, + "step": 1083 + }, + { + "epoch": 0.14918799889898154, + "grad_norm": 1.6666321966271487, + "learning_rate": 
1.926417536426158e-05, + "loss": 1.1761, + "step": 1084 + }, + { + "epoch": 0.14932562620423892, + "grad_norm": 2.1235814795968357, + "learning_rate": 1.9262496233805986e-05, + "loss": 1.192, + "step": 1085 + }, + { + "epoch": 0.1494632535094963, + "grad_norm": 1.627920618863355, + "learning_rate": 1.9260815263019185e-05, + "loss": 1.1553, + "step": 1086 + }, + { + "epoch": 0.14960088081475365, + "grad_norm": 1.6342504526960437, + "learning_rate": 1.9259132452235157e-05, + "loss": 1.2189, + "step": 1087 + }, + { + "epoch": 0.14973850812001102, + "grad_norm": 1.8156515782588236, + "learning_rate": 1.9257447801788254e-05, + "loss": 1.1686, + "step": 1088 + }, + { + "epoch": 0.14987613542526837, + "grad_norm": 1.8845872414845555, + "learning_rate": 1.925576131201319e-05, + "loss": 1.1894, + "step": 1089 + }, + { + "epoch": 0.15001376273052575, + "grad_norm": 1.7534399727616279, + "learning_rate": 1.9254072983245057e-05, + "loss": 1.1432, + "step": 1090 + }, + { + "epoch": 0.1501513900357831, + "grad_norm": 1.8862758263807577, + "learning_rate": 1.9252382815819295e-05, + "loss": 1.1742, + "step": 1091 + }, + { + "epoch": 0.15028901734104047, + "grad_norm": 1.6962041993965917, + "learning_rate": 1.925069081007172e-05, + "loss": 1.1694, + "step": 1092 + }, + { + "epoch": 0.15042664464629782, + "grad_norm": 1.7126127380678224, + "learning_rate": 1.9248996966338504e-05, + "loss": 1.1106, + "step": 1093 + }, + { + "epoch": 0.1505642719515552, + "grad_norm": 1.7778475151056303, + "learning_rate": 1.92473012849562e-05, + "loss": 1.1778, + "step": 1094 + }, + { + "epoch": 0.15070189925681254, + "grad_norm": 1.6788842778221431, + "learning_rate": 1.924560376626171e-05, + "loss": 1.1781, + "step": 1095 + }, + { + "epoch": 0.15083952656206992, + "grad_norm": 1.8919662578352594, + "learning_rate": 1.9243904410592314e-05, + "loss": 1.1802, + "step": 1096 + }, + { + "epoch": 0.15097715386732727, + "grad_norm": 1.7223160681474352, + "learning_rate": 1.9242203218285644e-05, + "loss": 1.1919, + "step": 1097 + }, + { + "epoch": 0.15111478117258464, + "grad_norm": 1.8451220900734726, + "learning_rate": 1.9240500189679706e-05, + "loss": 1.1771, + "step": 1098 + }, + { + "epoch": 0.151252408477842, + "grad_norm": 1.7973510421571919, + "learning_rate": 1.9238795325112867e-05, + "loss": 1.2209, + "step": 1099 + }, + { + "epoch": 0.15139003578309937, + "grad_norm": 1.673102273571798, + "learning_rate": 1.9237088624923866e-05, + "loss": 1.1634, + "step": 1100 + }, + { + "epoch": 0.15152766308835672, + "grad_norm": 1.662903547827202, + "learning_rate": 1.9235380089451796e-05, + "loss": 1.1413, + "step": 1101 + }, + { + "epoch": 0.1516652903936141, + "grad_norm": 1.8351188693541205, + "learning_rate": 1.9233669719036122e-05, + "loss": 1.181, + "step": 1102 + }, + { + "epoch": 0.15180291769887144, + "grad_norm": 1.7578699356040393, + "learning_rate": 1.923195751401667e-05, + "loss": 1.183, + "step": 1103 + }, + { + "epoch": 0.15194054500412882, + "grad_norm": 1.877239388456024, + "learning_rate": 1.923024347473363e-05, + "loss": 1.2347, + "step": 1104 + }, + { + "epoch": 0.1520781723093862, + "grad_norm": 1.6931098624796037, + "learning_rate": 1.922852760152756e-05, + "loss": 1.2622, + "step": 1105 + }, + { + "epoch": 0.15221579961464354, + "grad_norm": 1.6173988489502167, + "learning_rate": 1.9226809894739383e-05, + "loss": 1.1361, + "step": 1106 + }, + { + "epoch": 0.15235342691990092, + "grad_norm": 1.597422562520179, + "learning_rate": 1.9225090354710377e-05, + "loss": 1.1046, + "step": 1107 + }, + { + "epoch": 
0.15249105422515827, + "grad_norm": 1.7597677458259218, + "learning_rate": 1.9223368981782196e-05, + "loss": 1.0444, + "step": 1108 + }, + { + "epoch": 0.15262868153041564, + "grad_norm": 1.6349361096354922, + "learning_rate": 1.9221645776296855e-05, + "loss": 1.1541, + "step": 1109 + }, + { + "epoch": 0.152766308835673, + "grad_norm": 2.0712384877647545, + "learning_rate": 1.9219920738596728e-05, + "loss": 1.2074, + "step": 1110 + }, + { + "epoch": 0.15290393614093037, + "grad_norm": 1.7190146324682027, + "learning_rate": 1.9218193869024558e-05, + "loss": 1.1441, + "step": 1111 + }, + { + "epoch": 0.15304156344618772, + "grad_norm": 1.8584502305855388, + "learning_rate": 1.9216465167923447e-05, + "loss": 1.0596, + "step": 1112 + }, + { + "epoch": 0.1531791907514451, + "grad_norm": 1.70788262946192, + "learning_rate": 1.921473463563687e-05, + "loss": 1.1082, + "step": 1113 + }, + { + "epoch": 0.15331681805670244, + "grad_norm": 1.7450804091735537, + "learning_rate": 1.9213002272508655e-05, + "loss": 1.1796, + "step": 1114 + }, + { + "epoch": 0.15345444536195982, + "grad_norm": 1.8436541996446891, + "learning_rate": 1.9211268078883e-05, + "loss": 1.1605, + "step": 1115 + }, + { + "epoch": 0.15359207266721717, + "grad_norm": 1.7762221323965188, + "learning_rate": 1.9209532055104464e-05, + "loss": 1.1714, + "step": 1116 + }, + { + "epoch": 0.15372969997247454, + "grad_norm": 1.7697579876112075, + "learning_rate": 1.920779420151798e-05, + "loss": 1.1769, + "step": 1117 + }, + { + "epoch": 0.1538673272777319, + "grad_norm": 1.9362753815275267, + "learning_rate": 1.9206054518468823e-05, + "loss": 1.145, + "step": 1118 + }, + { + "epoch": 0.15400495458298927, + "grad_norm": 1.8211496788316504, + "learning_rate": 1.9204313006302654e-05, + "loss": 1.2133, + "step": 1119 + }, + { + "epoch": 0.15414258188824662, + "grad_norm": 1.7131108977526066, + "learning_rate": 1.920256966536548e-05, + "loss": 1.2251, + "step": 1120 + }, + { + "epoch": 0.154280209193504, + "grad_norm": 1.6369555605884978, + "learning_rate": 1.920082449600369e-05, + "loss": 1.1679, + "step": 1121 + }, + { + "epoch": 0.15441783649876134, + "grad_norm": 1.568505566958367, + "learning_rate": 1.919907749856401e-05, + "loss": 1.1307, + "step": 1122 + }, + { + "epoch": 0.15455546380401872, + "grad_norm": 1.7271908182126796, + "learning_rate": 1.9197328673393558e-05, + "loss": 1.1814, + "step": 1123 + }, + { + "epoch": 0.1546930911092761, + "grad_norm": 1.8720518552940209, + "learning_rate": 1.9195578020839795e-05, + "loss": 1.2299, + "step": 1124 + }, + { + "epoch": 0.15483071841453344, + "grad_norm": 1.800848239215077, + "learning_rate": 1.9193825541250554e-05, + "loss": 1.1586, + "step": 1125 + }, + { + "epoch": 0.15496834571979082, + "grad_norm": 1.7856939922639687, + "learning_rate": 1.9192071234974027e-05, + "loss": 1.2087, + "step": 1126 + }, + { + "epoch": 0.15510597302504817, + "grad_norm": 1.8142270394472655, + "learning_rate": 1.919031510235877e-05, + "loss": 1.1881, + "step": 1127 + }, + { + "epoch": 0.15524360033030554, + "grad_norm": 2.0015206557273095, + "learning_rate": 1.9188557143753706e-05, + "loss": 1.1957, + "step": 1128 + }, + { + "epoch": 0.1553812276355629, + "grad_norm": 1.6866156058746307, + "learning_rate": 1.918679735950811e-05, + "loss": 1.1477, + "step": 1129 + }, + { + "epoch": 0.15551885494082027, + "grad_norm": 1.8906385222782192, + "learning_rate": 1.9185035749971638e-05, + "loss": 1.1886, + "step": 1130 + }, + { + "epoch": 0.15565648224607762, + "grad_norm": 1.5737492122406012, + "learning_rate": 
1.9183272315494294e-05, + "loss": 1.0834, + "step": 1131 + }, + { + "epoch": 0.155794109551335, + "grad_norm": 1.72114040270019, + "learning_rate": 1.918150705642644e-05, + "loss": 1.1165, + "step": 1132 + }, + { + "epoch": 0.15593173685659234, + "grad_norm": 1.9928451339861477, + "learning_rate": 1.9179739973118815e-05, + "loss": 1.2278, + "step": 1133 + }, + { + "epoch": 0.15606936416184972, + "grad_norm": 1.9067862677065102, + "learning_rate": 1.9177971065922517e-05, + "loss": 1.1819, + "step": 1134 + }, + { + "epoch": 0.15620699146710706, + "grad_norm": 1.6415217453617363, + "learning_rate": 1.9176200335189e-05, + "loss": 1.1651, + "step": 1135 + }, + { + "epoch": 0.15634461877236444, + "grad_norm": 1.7038795309168628, + "learning_rate": 1.917442778127009e-05, + "loss": 1.1599, + "step": 1136 + }, + { + "epoch": 0.1564822460776218, + "grad_norm": 1.6945769298594398, + "learning_rate": 1.9172653404517957e-05, + "loss": 1.181, + "step": 1137 + }, + { + "epoch": 0.15661987338287917, + "grad_norm": 1.7415109485560851, + "learning_rate": 1.9170877205285157e-05, + "loss": 1.2343, + "step": 1138 + }, + { + "epoch": 0.15675750068813651, + "grad_norm": 1.6511722458961884, + "learning_rate": 1.9169099183924588e-05, + "loss": 1.1697, + "step": 1139 + }, + { + "epoch": 0.1568951279933939, + "grad_norm": 1.8991494689279091, + "learning_rate": 1.9167319340789524e-05, + "loss": 1.2807, + "step": 1140 + }, + { + "epoch": 0.15703275529865124, + "grad_norm": 1.8301084928792084, + "learning_rate": 1.9165537676233598e-05, + "loss": 1.2145, + "step": 1141 + }, + { + "epoch": 0.15717038260390861, + "grad_norm": 1.5765563019301203, + "learning_rate": 1.9163754190610794e-05, + "loss": 1.1565, + "step": 1142 + }, + { + "epoch": 0.157308009909166, + "grad_norm": 1.780969213194246, + "learning_rate": 1.916196888427547e-05, + "loss": 1.2189, + "step": 1143 + }, + { + "epoch": 0.15744563721442334, + "grad_norm": 1.8029724059973156, + "learning_rate": 1.9160181757582343e-05, + "loss": 1.1583, + "step": 1144 + }, + { + "epoch": 0.15758326451968072, + "grad_norm": 1.6385068951646138, + "learning_rate": 1.915839281088649e-05, + "loss": 1.1617, + "step": 1145 + }, + { + "epoch": 0.15772089182493806, + "grad_norm": 1.8547153541130317, + "learning_rate": 1.915660204454335e-05, + "loss": 1.13, + "step": 1146 + }, + { + "epoch": 0.15785851913019544, + "grad_norm": 1.8340859392542355, + "learning_rate": 1.9154809458908725e-05, + "loss": 1.1341, + "step": 1147 + }, + { + "epoch": 0.1579961464354528, + "grad_norm": 1.9289024952068476, + "learning_rate": 1.9153015054338775e-05, + "loss": 1.0964, + "step": 1148 + }, + { + "epoch": 0.15813377374071017, + "grad_norm": 2.3353063408216173, + "learning_rate": 1.9151218831190026e-05, + "loss": 1.2917, + "step": 1149 + }, + { + "epoch": 0.1582714010459675, + "grad_norm": 1.9065187723715638, + "learning_rate": 1.9149420789819363e-05, + "loss": 1.1229, + "step": 1150 + }, + { + "epoch": 0.1584090283512249, + "grad_norm": 1.8838328948951106, + "learning_rate": 1.914762093058403e-05, + "loss": 1.21, + "step": 1151 + }, + { + "epoch": 0.15854665565648224, + "grad_norm": 1.9340852966292466, + "learning_rate": 1.9145819253841633e-05, + "loss": 1.1442, + "step": 1152 + }, + { + "epoch": 0.15868428296173961, + "grad_norm": 1.7331393305817628, + "learning_rate": 1.9144015759950147e-05, + "loss": 1.2471, + "step": 1153 + }, + { + "epoch": 0.15882191026699696, + "grad_norm": 1.5973441891568578, + "learning_rate": 1.9142210449267897e-05, + "loss": 1.1188, + "step": 1154 + }, + { + "epoch": 
0.15895953757225434, + "grad_norm": 1.549708320140468, + "learning_rate": 1.9140403322153568e-05, + "loss": 1.1523, + "step": 1155 + }, + { + "epoch": 0.1590971648775117, + "grad_norm": 1.6848260781347864, + "learning_rate": 1.913859437896622e-05, + "loss": 1.1453, + "step": 1156 + }, + { + "epoch": 0.15923479218276906, + "grad_norm": 1.839655842737148, + "learning_rate": 1.9136783620065268e-05, + "loss": 1.1423, + "step": 1157 + }, + { + "epoch": 0.1593724194880264, + "grad_norm": 1.760337683218225, + "learning_rate": 1.9134971045810475e-05, + "loss": 1.2196, + "step": 1158 + }, + { + "epoch": 0.1595100467932838, + "grad_norm": 1.7876502708734725, + "learning_rate": 1.9133156656561978e-05, + "loss": 1.1391, + "step": 1159 + }, + { + "epoch": 0.15964767409854114, + "grad_norm": 1.6716825317924568, + "learning_rate": 1.9131340452680276e-05, + "loss": 1.1313, + "step": 1160 + }, + { + "epoch": 0.1597853014037985, + "grad_norm": 1.8481667076136052, + "learning_rate": 1.912952243452622e-05, + "loss": 1.1971, + "step": 1161 + }, + { + "epoch": 0.1599229287090559, + "grad_norm": 1.7821914493313937, + "learning_rate": 1.9127702602461025e-05, + "loss": 1.1545, + "step": 1162 + }, + { + "epoch": 0.16006055601431324, + "grad_norm": 1.6759202493310554, + "learning_rate": 1.9125880956846264e-05, + "loss": 1.238, + "step": 1163 + }, + { + "epoch": 0.16019818331957061, + "grad_norm": 1.8706385103342449, + "learning_rate": 1.912405749804388e-05, + "loss": 1.2643, + "step": 1164 + }, + { + "epoch": 0.16033581062482796, + "grad_norm": 1.764663560639773, + "learning_rate": 1.9122232226416168e-05, + "loss": 1.1404, + "step": 1165 + }, + { + "epoch": 0.16047343793008534, + "grad_norm": 1.7051372452431504, + "learning_rate": 1.9120405142325782e-05, + "loss": 1.1924, + "step": 1166 + }, + { + "epoch": 0.1606110652353427, + "grad_norm": 1.8277927801193208, + "learning_rate": 1.9118576246135738e-05, + "loss": 1.2084, + "step": 1167 + }, + { + "epoch": 0.16074869254060006, + "grad_norm": 1.731419163435174, + "learning_rate": 1.9116745538209413e-05, + "loss": 1.2567, + "step": 1168 + }, + { + "epoch": 0.1608863198458574, + "grad_norm": 1.7366494842037918, + "learning_rate": 1.9114913018910544e-05, + "loss": 1.1153, + "step": 1169 + }, + { + "epoch": 0.1610239471511148, + "grad_norm": 1.7096956288944254, + "learning_rate": 1.9113078688603225e-05, + "loss": 1.1912, + "step": 1170 + }, + { + "epoch": 0.16116157445637214, + "grad_norm": 1.8688828483884679, + "learning_rate": 1.911124254765192e-05, + "loss": 1.1745, + "step": 1171 + }, + { + "epoch": 0.1612992017616295, + "grad_norm": 1.7694918672894087, + "learning_rate": 1.9109404596421436e-05, + "loss": 1.1868, + "step": 1172 + }, + { + "epoch": 0.16143682906688686, + "grad_norm": 1.7162555917332398, + "learning_rate": 1.9107564835276955e-05, + "loss": 1.1354, + "step": 1173 + }, + { + "epoch": 0.16157445637214424, + "grad_norm": 1.796469216251665, + "learning_rate": 1.910572326458401e-05, + "loss": 1.1491, + "step": 1174 + }, + { + "epoch": 0.16171208367740159, + "grad_norm": 1.8153470943521934, + "learning_rate": 1.9103879884708496e-05, + "loss": 1.2759, + "step": 1175 + }, + { + "epoch": 0.16184971098265896, + "grad_norm": 1.6584917976749105, + "learning_rate": 1.9102034696016667e-05, + "loss": 1.1787, + "step": 1176 + }, + { + "epoch": 0.1619873382879163, + "grad_norm": 1.720254603321273, + "learning_rate": 1.910018769887514e-05, + "loss": 1.1175, + "step": 1177 + }, + { + "epoch": 0.1621249655931737, + "grad_norm": 1.8927521261492168, + "learning_rate": 
1.9098338893650883e-05, + "loss": 1.1794, + "step": 1178 + }, + { + "epoch": 0.16226259289843104, + "grad_norm": 1.815195981529941, + "learning_rate": 1.9096488280711228e-05, + "loss": 1.2181, + "step": 1179 + }, + { + "epoch": 0.1624002202036884, + "grad_norm": 1.9401431716349582, + "learning_rate": 1.9094635860423875e-05, + "loss": 1.2432, + "step": 1180 + }, + { + "epoch": 0.1625378475089458, + "grad_norm": 1.6216445585990626, + "learning_rate": 1.9092781633156863e-05, + "loss": 1.144, + "step": 1181 + }, + { + "epoch": 0.16267547481420314, + "grad_norm": 1.8310199994715557, + "learning_rate": 1.9090925599278608e-05, + "loss": 1.1435, + "step": 1182 + }, + { + "epoch": 0.1628131021194605, + "grad_norm": 1.8154804524673684, + "learning_rate": 1.9089067759157876e-05, + "loss": 1.201, + "step": 1183 + }, + { + "epoch": 0.16295072942471786, + "grad_norm": 1.5943740126400703, + "learning_rate": 1.9087208113163803e-05, + "loss": 1.2086, + "step": 1184 + }, + { + "epoch": 0.16308835672997524, + "grad_norm": 1.7887876493343156, + "learning_rate": 1.9085346661665866e-05, + "loss": 1.1996, + "step": 1185 + }, + { + "epoch": 0.16322598403523259, + "grad_norm": 1.674001168610742, + "learning_rate": 1.9083483405033908e-05, + "loss": 1.1983, + "step": 1186 + }, + { + "epoch": 0.16336361134048996, + "grad_norm": 1.8094104887070412, + "learning_rate": 1.908161834363814e-05, + "loss": 1.1624, + "step": 1187 + }, + { + "epoch": 0.1635012386457473, + "grad_norm": 1.6646968399994766, + "learning_rate": 1.9079751477849117e-05, + "loss": 1.1684, + "step": 1188 + }, + { + "epoch": 0.1636388659510047, + "grad_norm": 1.585954761073073, + "learning_rate": 1.9077882808037766e-05, + "loss": 1.1459, + "step": 1189 + }, + { + "epoch": 0.16377649325626203, + "grad_norm": 1.5928228775041737, + "learning_rate": 1.9076012334575366e-05, + "loss": 1.1387, + "step": 1190 + }, + { + "epoch": 0.1639141205615194, + "grad_norm": 1.6036192388573898, + "learning_rate": 1.907414005783355e-05, + "loss": 1.1259, + "step": 1191 + }, + { + "epoch": 0.16405174786677676, + "grad_norm": 1.8016017870511996, + "learning_rate": 1.9072265978184315e-05, + "loss": 1.1368, + "step": 1192 + }, + { + "epoch": 0.16418937517203414, + "grad_norm": 1.8314631312719303, + "learning_rate": 1.9070390096000016e-05, + "loss": 1.1657, + "step": 1193 + }, + { + "epoch": 0.16432700247729148, + "grad_norm": 1.665241550837254, + "learning_rate": 1.906851241165337e-05, + "loss": 1.158, + "step": 1194 + }, + { + "epoch": 0.16446462978254886, + "grad_norm": 1.6646890439441453, + "learning_rate": 1.9066632925517437e-05, + "loss": 1.071, + "step": 1195 + }, + { + "epoch": 0.1646022570878062, + "grad_norm": 1.9083295487952538, + "learning_rate": 1.9064751637965653e-05, + "loss": 1.1955, + "step": 1196 + }, + { + "epoch": 0.16473988439306358, + "grad_norm": 1.7149647264657328, + "learning_rate": 1.90628685493718e-05, + "loss": 1.0902, + "step": 1197 + }, + { + "epoch": 0.16487751169832093, + "grad_norm": 1.9228771282746275, + "learning_rate": 1.906098366011002e-05, + "loss": 1.2401, + "step": 1198 + }, + { + "epoch": 0.1650151390035783, + "grad_norm": 1.769875710395189, + "learning_rate": 1.9059096970554824e-05, + "loss": 1.2453, + "step": 1199 + }, + { + "epoch": 0.16515276630883569, + "grad_norm": 1.663309308617753, + "learning_rate": 1.9057208481081063e-05, + "loss": 1.1216, + "step": 1200 + }, + { + "epoch": 0.16529039361409303, + "grad_norm": 1.7652471117275161, + "learning_rate": 1.9055318192063955e-05, + "loss": 1.1684, + "step": 1201 + }, + { + "epoch": 
0.1654280209193504, + "grad_norm": 1.9152250010725427, + "learning_rate": 1.905342610387908e-05, + "loss": 1.2469, + "step": 1202 + }, + { + "epoch": 0.16556564822460776, + "grad_norm": 1.9016327773445862, + "learning_rate": 1.9051532216902366e-05, + "loss": 1.1416, + "step": 1203 + }, + { + "epoch": 0.16570327552986513, + "grad_norm": 1.7598960794938714, + "learning_rate": 1.9049636531510098e-05, + "loss": 1.185, + "step": 1204 + }, + { + "epoch": 0.16584090283512248, + "grad_norm": 1.6619027849908046, + "learning_rate": 1.904773904807893e-05, + "loss": 1.1055, + "step": 1205 + }, + { + "epoch": 0.16597853014037986, + "grad_norm": 1.6524499458290003, + "learning_rate": 1.9045839766985864e-05, + "loss": 1.2159, + "step": 1206 + }, + { + "epoch": 0.1661161574456372, + "grad_norm": 1.7770658011682856, + "learning_rate": 1.9043938688608258e-05, + "loss": 1.1731, + "step": 1207 + }, + { + "epoch": 0.16625378475089458, + "grad_norm": 1.8758344754586038, + "learning_rate": 1.9042035813323835e-05, + "loss": 1.227, + "step": 1208 + }, + { + "epoch": 0.16639141205615193, + "grad_norm": 1.6260721375736185, + "learning_rate": 1.9040131141510666e-05, + "loss": 1.1785, + "step": 1209 + }, + { + "epoch": 0.1665290393614093, + "grad_norm": 1.6559796751256615, + "learning_rate": 1.9038224673547185e-05, + "loss": 1.1827, + "step": 1210 + }, + { + "epoch": 0.16666666666666666, + "grad_norm": 1.90419068138968, + "learning_rate": 1.903631640981218e-05, + "loss": 1.1749, + "step": 1211 + }, + { + "epoch": 0.16680429397192403, + "grad_norm": 1.6017989655001967, + "learning_rate": 1.90344063506848e-05, + "loss": 1.0195, + "step": 1212 + }, + { + "epoch": 0.16694192127718138, + "grad_norm": 1.69109864820019, + "learning_rate": 1.9032494496544545e-05, + "loss": 1.1208, + "step": 1213 + }, + { + "epoch": 0.16707954858243876, + "grad_norm": 1.8735570256230418, + "learning_rate": 1.9030580847771278e-05, + "loss": 1.1963, + "step": 1214 + }, + { + "epoch": 0.1672171758876961, + "grad_norm": 1.8926080826017415, + "learning_rate": 1.9028665404745208e-05, + "loss": 1.1991, + "step": 1215 + }, + { + "epoch": 0.16735480319295348, + "grad_norm": 1.8016349622727303, + "learning_rate": 1.9026748167846914e-05, + "loss": 1.2143, + "step": 1216 + }, + { + "epoch": 0.16749243049821083, + "grad_norm": 1.793061437836532, + "learning_rate": 1.9024829137457317e-05, + "loss": 1.087, + "step": 1217 + }, + { + "epoch": 0.1676300578034682, + "grad_norm": 1.7054045851447461, + "learning_rate": 1.902290831395771e-05, + "loss": 1.1403, + "step": 1218 + }, + { + "epoch": 0.16776768510872558, + "grad_norm": 1.5680307244180376, + "learning_rate": 1.902098569772973e-05, + "loss": 1.1575, + "step": 1219 + }, + { + "epoch": 0.16790531241398293, + "grad_norm": 1.8117692514637878, + "learning_rate": 1.901906128915538e-05, + "loss": 1.1163, + "step": 1220 + }, + { + "epoch": 0.1680429397192403, + "grad_norm": 1.9217914895114794, + "learning_rate": 1.9017135088617004e-05, + "loss": 1.158, + "step": 1221 + }, + { + "epoch": 0.16818056702449766, + "grad_norm": 1.6999795797673225, + "learning_rate": 1.901520709649732e-05, + "loss": 1.1332, + "step": 1222 + }, + { + "epoch": 0.16831819432975503, + "grad_norm": 1.7156560391626765, + "learning_rate": 1.901327731317939e-05, + "loss": 1.2244, + "step": 1223 + }, + { + "epoch": 0.16845582163501238, + "grad_norm": 1.6647902896309577, + "learning_rate": 1.9011345739046636e-05, + "loss": 1.1432, + "step": 1224 + }, + { + "epoch": 0.16859344894026976, + "grad_norm": 1.7821199514561332, + "learning_rate": 
1.9009412374482835e-05, + "loss": 1.2725, + "step": 1225 + }, + { + "epoch": 0.1687310762455271, + "grad_norm": 1.8497540614531893, + "learning_rate": 1.9007477219872122e-05, + "loss": 1.2248, + "step": 1226 + }, + { + "epoch": 0.16886870355078448, + "grad_norm": 1.7965090156589574, + "learning_rate": 1.900554027559898e-05, + "loss": 1.1234, + "step": 1227 + }, + { + "epoch": 0.16900633085604183, + "grad_norm": 1.6874676730461604, + "learning_rate": 1.9003601542048263e-05, + "loss": 1.1601, + "step": 1228 + }, + { + "epoch": 0.1691439581612992, + "grad_norm": 1.7693791881956678, + "learning_rate": 1.900166101960516e-05, + "loss": 1.1662, + "step": 1229 + }, + { + "epoch": 0.16928158546655656, + "grad_norm": 1.6998545420256448, + "learning_rate": 1.8999718708655238e-05, + "loss": 1.2309, + "step": 1230 + }, + { + "epoch": 0.16941921277181393, + "grad_norm": 1.62366962173597, + "learning_rate": 1.89977746095844e-05, + "loss": 1.0779, + "step": 1231 + }, + { + "epoch": 0.16955684007707128, + "grad_norm": 1.8222242228271763, + "learning_rate": 1.899582872277891e-05, + "loss": 1.1856, + "step": 1232 + }, + { + "epoch": 0.16969446738232866, + "grad_norm": 1.9442680573982873, + "learning_rate": 1.8993881048625396e-05, + "loss": 1.013, + "step": 1233 + }, + { + "epoch": 0.169832094687586, + "grad_norm": 1.555422513678515, + "learning_rate": 1.899193158751083e-05, + "loss": 1.1317, + "step": 1234 + }, + { + "epoch": 0.16996972199284338, + "grad_norm": 1.8836160756597673, + "learning_rate": 1.898998033982254e-05, + "loss": 1.1097, + "step": 1235 + }, + { + "epoch": 0.17010734929810073, + "grad_norm": 1.8048326379285833, + "learning_rate": 1.8988027305948218e-05, + "loss": 1.159, + "step": 1236 + }, + { + "epoch": 0.1702449766033581, + "grad_norm": 1.7627950397515593, + "learning_rate": 1.8986072486275904e-05, + "loss": 1.1384, + "step": 1237 + }, + { + "epoch": 0.17038260390861548, + "grad_norm": 1.8043614168999946, + "learning_rate": 1.898411588119399e-05, + "loss": 1.2142, + "step": 1238 + }, + { + "epoch": 0.17052023121387283, + "grad_norm": 1.7052618399596813, + "learning_rate": 1.8982157491091232e-05, + "loss": 1.1984, + "step": 1239 + }, + { + "epoch": 0.1706578585191302, + "grad_norm": 1.6310255014342303, + "learning_rate": 1.8980197316356733e-05, + "loss": 1.1473, + "step": 1240 + }, + { + "epoch": 0.17079548582438756, + "grad_norm": 1.563935091402885, + "learning_rate": 1.897823535737995e-05, + "loss": 1.0674, + "step": 1241 + }, + { + "epoch": 0.17093311312964493, + "grad_norm": 1.5520983951857958, + "learning_rate": 1.89762716145507e-05, + "loss": 1.0586, + "step": 1242 + }, + { + "epoch": 0.17107074043490228, + "grad_norm": 1.7490032067166692, + "learning_rate": 1.8974306088259158e-05, + "loss": 1.0755, + "step": 1243 + }, + { + "epoch": 0.17120836774015966, + "grad_norm": 1.8694016472313402, + "learning_rate": 1.8972338778895837e-05, + "loss": 1.1823, + "step": 1244 + }, + { + "epoch": 0.171345995045417, + "grad_norm": 1.8057959975355817, + "learning_rate": 1.897036968685162e-05, + "loss": 1.1623, + "step": 1245 + }, + { + "epoch": 0.17148362235067438, + "grad_norm": 1.8170912877968763, + "learning_rate": 1.8968398812517734e-05, + "loss": 1.0887, + "step": 1246 + }, + { + "epoch": 0.17162124965593173, + "grad_norm": 1.7243067133256853, + "learning_rate": 1.896642615628577e-05, + "loss": 1.1716, + "step": 1247 + }, + { + "epoch": 0.1717588769611891, + "grad_norm": 1.8691466518813193, + "learning_rate": 1.896445171854767e-05, + "loss": 1.2415, + "step": 1248 + }, + { + "epoch": 
0.17189650426644645, + "grad_norm": 1.9905927401860763, + "learning_rate": 1.8962475499695715e-05, + "loss": 1.2241, + "step": 1249 + }, + { + "epoch": 0.17203413157170383, + "grad_norm": 1.6072388676311713, + "learning_rate": 1.8960497500122565e-05, + "loss": 1.1763, + "step": 1250 + }, + { + "epoch": 0.17217175887696118, + "grad_norm": 1.7307767900030802, + "learning_rate": 1.895851772022122e-05, + "loss": 1.251, + "step": 1251 + }, + { + "epoch": 0.17230938618221855, + "grad_norm": 1.8075546907865503, + "learning_rate": 1.895653616038503e-05, + "loss": 1.244, + "step": 1252 + }, + { + "epoch": 0.1724470134874759, + "grad_norm": 1.7073247573453674, + "learning_rate": 1.895455282100771e-05, + "loss": 1.212, + "step": 1253 + }, + { + "epoch": 0.17258464079273328, + "grad_norm": 1.6861808448896867, + "learning_rate": 1.8952567702483314e-05, + "loss": 1.1134, + "step": 1254 + }, + { + "epoch": 0.17272226809799063, + "grad_norm": 1.559343840245276, + "learning_rate": 1.8950580805206264e-05, + "loss": 1.1116, + "step": 1255 + }, + { + "epoch": 0.172859895403248, + "grad_norm": 1.5853992306502935, + "learning_rate": 1.894859212957133e-05, + "loss": 1.2251, + "step": 1256 + }, + { + "epoch": 0.17299752270850538, + "grad_norm": 1.7629676423451743, + "learning_rate": 1.8946601675973632e-05, + "loss": 1.0989, + "step": 1257 + }, + { + "epoch": 0.17313515001376273, + "grad_norm": 1.7091134488396544, + "learning_rate": 1.894460944480865e-05, + "loss": 1.138, + "step": 1258 + }, + { + "epoch": 0.1732727773190201, + "grad_norm": 1.7438958072158701, + "learning_rate": 1.8942615436472203e-05, + "loss": 1.2311, + "step": 1259 + }, + { + "epoch": 0.17341040462427745, + "grad_norm": 1.6785772050025798, + "learning_rate": 1.8940619651360484e-05, + "loss": 1.089, + "step": 1260 + }, + { + "epoch": 0.17354803192953483, + "grad_norm": 1.7805768293703217, + "learning_rate": 1.893862208987002e-05, + "loss": 1.2063, + "step": 1261 + }, + { + "epoch": 0.17368565923479218, + "grad_norm": 1.767612901092861, + "learning_rate": 1.8936622752397708e-05, + "loss": 1.1608, + "step": 1262 + }, + { + "epoch": 0.17382328654004955, + "grad_norm": 1.7606099723303654, + "learning_rate": 1.893462163934078e-05, + "loss": 1.1485, + "step": 1263 + }, + { + "epoch": 0.1739609138453069, + "grad_norm": 1.9642517108874387, + "learning_rate": 1.893261875109684e-05, + "loss": 1.2192, + "step": 1264 + }, + { + "epoch": 0.17409854115056428, + "grad_norm": 1.7487943259888767, + "learning_rate": 1.8930614088063823e-05, + "loss": 1.12, + "step": 1265 + }, + { + "epoch": 0.17423616845582163, + "grad_norm": 1.6893141759300843, + "learning_rate": 1.8928607650640037e-05, + "loss": 1.1507, + "step": 1266 + }, + { + "epoch": 0.174373795761079, + "grad_norm": 1.6147309819890632, + "learning_rate": 1.8926599439224124e-05, + "loss": 1.2136, + "step": 1267 + }, + { + "epoch": 0.17451142306633635, + "grad_norm": 1.9625141971687832, + "learning_rate": 1.89245894542151e-05, + "loss": 1.1445, + "step": 1268 + }, + { + "epoch": 0.17464905037159373, + "grad_norm": 1.8082180482141077, + "learning_rate": 1.8922577696012317e-05, + "loss": 1.2045, + "step": 1269 + }, + { + "epoch": 0.17478667767685108, + "grad_norm": 1.6485283003357651, + "learning_rate": 1.8920564165015477e-05, + "loss": 1.1654, + "step": 1270 + }, + { + "epoch": 0.17492430498210845, + "grad_norm": 1.7884947317470825, + "learning_rate": 1.891854886162465e-05, + "loss": 1.1848, + "step": 1271 + }, + { + "epoch": 0.1750619322873658, + "grad_norm": 1.7535664229004793, + "learning_rate": 
1.8916531786240243e-05, + "loss": 1.1656, + "step": 1272 + }, + { + "epoch": 0.17519955959262318, + "grad_norm": 1.5008181517724697, + "learning_rate": 1.891451293926303e-05, + "loss": 1.1512, + "step": 1273 + }, + { + "epoch": 0.17533718689788053, + "grad_norm": 1.7017101035828872, + "learning_rate": 1.8912492321094117e-05, + "loss": 1.1512, + "step": 1274 + }, + { + "epoch": 0.1754748142031379, + "grad_norm": 1.7661096321611374, + "learning_rate": 1.891046993213498e-05, + "loss": 1.2335, + "step": 1275 + }, + { + "epoch": 0.17561244150839528, + "grad_norm": 1.9033019707546557, + "learning_rate": 1.890844577278744e-05, + "loss": 1.156, + "step": 1276 + }, + { + "epoch": 0.17575006881365263, + "grad_norm": 1.6646980906776103, + "learning_rate": 1.890641984345367e-05, + "loss": 1.149, + "step": 1277 + }, + { + "epoch": 0.17588769611891, + "grad_norm": 1.9127021313646553, + "learning_rate": 1.8904392144536187e-05, + "loss": 1.2671, + "step": 1278 + }, + { + "epoch": 0.17602532342416735, + "grad_norm": 1.786222995126419, + "learning_rate": 1.8902362676437876e-05, + "loss": 1.1817, + "step": 1279 + }, + { + "epoch": 0.17616295072942473, + "grad_norm": 1.8407894657074446, + "learning_rate": 1.890033143956196e-05, + "loss": 1.1658, + "step": 1280 + }, + { + "epoch": 0.17630057803468208, + "grad_norm": 1.7872220499616576, + "learning_rate": 1.889829843431202e-05, + "loss": 1.1884, + "step": 1281 + }, + { + "epoch": 0.17643820533993945, + "grad_norm": 1.8052835353599588, + "learning_rate": 1.8896263661091986e-05, + "loss": 1.1714, + "step": 1282 + }, + { + "epoch": 0.1765758326451968, + "grad_norm": 1.7854605477143324, + "learning_rate": 1.889422712030614e-05, + "loss": 1.1467, + "step": 1283 + }, + { + "epoch": 0.17671345995045418, + "grad_norm": 1.8278469103832629, + "learning_rate": 1.889218881235911e-05, + "loss": 1.2026, + "step": 1284 + }, + { + "epoch": 0.17685108725571153, + "grad_norm": 1.6175291682475705, + "learning_rate": 1.8890148737655883e-05, + "loss": 1.0917, + "step": 1285 + }, + { + "epoch": 0.1769887145609689, + "grad_norm": 1.6946899292545088, + "learning_rate": 1.8888106896601803e-05, + "loss": 1.1542, + "step": 1286 + }, + { + "epoch": 0.17712634186622625, + "grad_norm": 1.8590474940446629, + "learning_rate": 1.888606328960254e-05, + "loss": 1.1743, + "step": 1287 + }, + { + "epoch": 0.17726396917148363, + "grad_norm": 1.8468986505771823, + "learning_rate": 1.888401791706414e-05, + "loss": 1.1482, + "step": 1288 + }, + { + "epoch": 0.17740159647674097, + "grad_norm": 1.695164655444383, + "learning_rate": 1.888197077939299e-05, + "loss": 1.1621, + "step": 1289 + }, + { + "epoch": 0.17753922378199835, + "grad_norm": 1.6479109288906586, + "learning_rate": 1.887992187699583e-05, + "loss": 1.1036, + "step": 1290 + }, + { + "epoch": 0.1776768510872557, + "grad_norm": 1.828556680449922, + "learning_rate": 1.887787121027974e-05, + "loss": 1.1818, + "step": 1291 + }, + { + "epoch": 0.17781447839251308, + "grad_norm": 1.8302543225443302, + "learning_rate": 1.887581877965217e-05, + "loss": 1.1916, + "step": 1292 + }, + { + "epoch": 0.17795210569777042, + "grad_norm": 1.8513660691283713, + "learning_rate": 1.88737645855209e-05, + "loss": 1.1142, + "step": 1293 + }, + { + "epoch": 0.1780897330030278, + "grad_norm": 1.6905249442918073, + "learning_rate": 1.887170862829408e-05, + "loss": 1.1957, + "step": 1294 + }, + { + "epoch": 0.17822736030828518, + "grad_norm": 1.7357174245832458, + "learning_rate": 1.8869650908380196e-05, + "loss": 1.1342, + "step": 1295 + }, + { + "epoch": 
0.17836498761354252, + "grad_norm": 1.718182362722972, + "learning_rate": 1.8867591426188084e-05, + "loss": 1.14, + "step": 1296 + }, + { + "epoch": 0.1785026149187999, + "grad_norm": 1.6606057961257188, + "learning_rate": 1.8865530182126947e-05, + "loss": 1.2008, + "step": 1297 + }, + { + "epoch": 0.17864024222405725, + "grad_norm": 1.8648644334662043, + "learning_rate": 1.8863467176606315e-05, + "loss": 1.3018, + "step": 1298 + }, + { + "epoch": 0.17877786952931463, + "grad_norm": 1.7374865475563974, + "learning_rate": 1.886140241003608e-05, + "loss": 1.1765, + "step": 1299 + }, + { + "epoch": 0.17891549683457197, + "grad_norm": 1.763254211971766, + "learning_rate": 1.885933588282649e-05, + "loss": 1.2579, + "step": 1300 + }, + { + "epoch": 0.17905312413982935, + "grad_norm": 1.883305768809243, + "learning_rate": 1.885726759538813e-05, + "loss": 1.1837, + "step": 1301 + }, + { + "epoch": 0.1791907514450867, + "grad_norm": 1.533172977654244, + "learning_rate": 1.8855197548131938e-05, + "loss": 1.0219, + "step": 1302 + }, + { + "epoch": 0.17932837875034408, + "grad_norm": 1.7348673710435476, + "learning_rate": 1.8853125741469212e-05, + "loss": 1.209, + "step": 1303 + }, + { + "epoch": 0.17946600605560142, + "grad_norm": 1.7165100110994085, + "learning_rate": 1.8851052175811587e-05, + "loss": 1.1811, + "step": 1304 + }, + { + "epoch": 0.1796036333608588, + "grad_norm": 1.7687152793989553, + "learning_rate": 1.884897685157105e-05, + "loss": 1.1187, + "step": 1305 + }, + { + "epoch": 0.17974126066611615, + "grad_norm": 1.5178890000689444, + "learning_rate": 1.8846899769159945e-05, + "loss": 1.0949, + "step": 1306 + }, + { + "epoch": 0.17987888797137352, + "grad_norm": 1.8314478797136182, + "learning_rate": 1.884482092899095e-05, + "loss": 1.135, + "step": 1307 + }, + { + "epoch": 0.18001651527663087, + "grad_norm": 1.8669879250925665, + "learning_rate": 1.8842740331477116e-05, + "loss": 1.1846, + "step": 1308 + }, + { + "epoch": 0.18015414258188825, + "grad_norm": 1.6245078516656457, + "learning_rate": 1.884065797703182e-05, + "loss": 1.133, + "step": 1309 + }, + { + "epoch": 0.1802917698871456, + "grad_norm": 2.1141938380423846, + "learning_rate": 1.88385738660688e-05, + "loss": 1.3486, + "step": 1310 + }, + { + "epoch": 0.18042939719240297, + "grad_norm": 1.7572692345075003, + "learning_rate": 1.8836487999002136e-05, + "loss": 1.1952, + "step": 1311 + }, + { + "epoch": 0.18056702449766032, + "grad_norm": 1.7089898825007488, + "learning_rate": 1.883440037624627e-05, + "loss": 1.1283, + "step": 1312 + }, + { + "epoch": 0.1807046518029177, + "grad_norm": 1.7969319275632885, + "learning_rate": 1.8832310998215975e-05, + "loss": 1.2225, + "step": 1313 + }, + { + "epoch": 0.18084227910817507, + "grad_norm": 1.837975467830707, + "learning_rate": 1.8830219865326386e-05, + "loss": 1.2251, + "step": 1314 + }, + { + "epoch": 0.18097990641343242, + "grad_norm": 1.7986163843209684, + "learning_rate": 1.8828126977992984e-05, + "loss": 1.1381, + "step": 1315 + }, + { + "epoch": 0.1811175337186898, + "grad_norm": 1.6292621695023655, + "learning_rate": 1.8826032336631598e-05, + "loss": 1.2114, + "step": 1316 + }, + { + "epoch": 0.18125516102394715, + "grad_norm": 1.7507397304605983, + "learning_rate": 1.8823935941658396e-05, + "loss": 1.1796, + "step": 1317 + }, + { + "epoch": 0.18139278832920452, + "grad_norm": 1.7041657609668732, + "learning_rate": 1.8821837793489912e-05, + "loss": 1.1318, + "step": 1318 + }, + { + "epoch": 0.18153041563446187, + "grad_norm": 1.7979476039567885, + "learning_rate": 
1.8819737892543018e-05, + "loss": 1.1972, + "step": 1319 + }, + { + "epoch": 0.18166804293971925, + "grad_norm": 1.804581707685713, + "learning_rate": 1.881763623923493e-05, + "loss": 1.165, + "step": 1320 + }, + { + "epoch": 0.1818056702449766, + "grad_norm": 1.5937108838679828, + "learning_rate": 1.8815532833983226e-05, + "loss": 1.2164, + "step": 1321 + }, + { + "epoch": 0.18194329755023397, + "grad_norm": 1.9142066558573876, + "learning_rate": 1.8813427677205815e-05, + "loss": 1.1715, + "step": 1322 + }, + { + "epoch": 0.18208092485549132, + "grad_norm": 1.8834601707699743, + "learning_rate": 1.881132076932097e-05, + "loss": 1.2342, + "step": 1323 + }, + { + "epoch": 0.1822185521607487, + "grad_norm": 1.8048255607689512, + "learning_rate": 1.88092121107473e-05, + "loss": 1.1372, + "step": 1324 + }, + { + "epoch": 0.18235617946600605, + "grad_norm": 1.613665127291824, + "learning_rate": 1.8807101701903773e-05, + "loss": 1.1692, + "step": 1325 + }, + { + "epoch": 0.18249380677126342, + "grad_norm": 1.9695684202270132, + "learning_rate": 1.880498954320969e-05, + "loss": 1.184, + "step": 1326 + }, + { + "epoch": 0.18263143407652077, + "grad_norm": 1.767869229084148, + "learning_rate": 1.8802875635084708e-05, + "loss": 1.1809, + "step": 1327 + }, + { + "epoch": 0.18276906138177815, + "grad_norm": 1.8020965822905677, + "learning_rate": 1.8800759977948844e-05, + "loss": 1.0834, + "step": 1328 + }, + { + "epoch": 0.1829066886870355, + "grad_norm": 1.6091152412115173, + "learning_rate": 1.8798642572222437e-05, + "loss": 1.0841, + "step": 1329 + }, + { + "epoch": 0.18304431599229287, + "grad_norm": 1.6864269655680584, + "learning_rate": 1.8796523418326193e-05, + "loss": 1.0901, + "step": 1330 + }, + { + "epoch": 0.18318194329755022, + "grad_norm": 1.9344097860333702, + "learning_rate": 1.8794402516681153e-05, + "loss": 1.1277, + "step": 1331 + }, + { + "epoch": 0.1833195706028076, + "grad_norm": 1.6634023661180872, + "learning_rate": 1.879227986770872e-05, + "loss": 1.1867, + "step": 1332 + }, + { + "epoch": 0.18345719790806497, + "grad_norm": 1.8378425885234546, + "learning_rate": 1.879015547183063e-05, + "loss": 1.2014, + "step": 1333 + }, + { + "epoch": 0.18359482521332232, + "grad_norm": 1.7514022464196024, + "learning_rate": 1.878802932946897e-05, + "loss": 1.1555, + "step": 1334 + }, + { + "epoch": 0.1837324525185797, + "grad_norm": 1.8422191997613435, + "learning_rate": 1.8785901441046178e-05, + "loss": 1.213, + "step": 1335 + }, + { + "epoch": 0.18387007982383705, + "grad_norm": 1.5228323238414818, + "learning_rate": 1.8783771806985035e-05, + "loss": 1.1858, + "step": 1336 + }, + { + "epoch": 0.18400770712909442, + "grad_norm": 1.5988258305602032, + "learning_rate": 1.878164042770867e-05, + "loss": 1.045, + "step": 1337 + }, + { + "epoch": 0.18414533443435177, + "grad_norm": 1.919091080769259, + "learning_rate": 1.8779507303640557e-05, + "loss": 1.132, + "step": 1338 + }, + { + "epoch": 0.18428296173960915, + "grad_norm": 2.432965915591615, + "learning_rate": 1.8777372435204526e-05, + "loss": 1.0891, + "step": 1339 + }, + { + "epoch": 0.1844205890448665, + "grad_norm": 1.7950952441408612, + "learning_rate": 1.877523582282474e-05, + "loss": 1.1232, + "step": 1340 + }, + { + "epoch": 0.18455821635012387, + "grad_norm": 1.8445019801893467, + "learning_rate": 1.877309746692572e-05, + "loss": 1.1971, + "step": 1341 + }, + { + "epoch": 0.18469584365538122, + "grad_norm": 1.6882410744905951, + "learning_rate": 1.8770957367932318e-05, + "loss": 1.1237, + "step": 1342 + }, + { + "epoch": 
0.1848334709606386, + "grad_norm": 1.9528681955209033, + "learning_rate": 1.876881552626975e-05, + "loss": 1.2132, + "step": 1343 + }, + { + "epoch": 0.18497109826589594, + "grad_norm": 1.7398127168625208, + "learning_rate": 1.8766671942363566e-05, + "loss": 1.1674, + "step": 1344 + }, + { + "epoch": 0.18510872557115332, + "grad_norm": 1.7402533363358288, + "learning_rate": 1.8764526616639675e-05, + "loss": 1.1232, + "step": 1345 + }, + { + "epoch": 0.18524635287641067, + "grad_norm": 1.71242725300125, + "learning_rate": 1.8762379549524318e-05, + "loss": 1.1123, + "step": 1346 + }, + { + "epoch": 0.18538398018166805, + "grad_norm": 1.6092102332589888, + "learning_rate": 1.8760230741444086e-05, + "loss": 1.0973, + "step": 1347 + }, + { + "epoch": 0.1855216074869254, + "grad_norm": 1.667671721285342, + "learning_rate": 1.8758080192825917e-05, + "loss": 1.16, + "step": 1348 + }, + { + "epoch": 0.18565923479218277, + "grad_norm": 1.7004956906942499, + "learning_rate": 1.8755927904097105e-05, + "loss": 1.1257, + "step": 1349 + }, + { + "epoch": 0.18579686209744012, + "grad_norm": 1.6074914674499095, + "learning_rate": 1.875377387568527e-05, + "loss": 1.0837, + "step": 1350 + }, + { + "epoch": 0.1859344894026975, + "grad_norm": 1.9916854166724056, + "learning_rate": 1.8751618108018396e-05, + "loss": 1.1069, + "step": 1351 + }, + { + "epoch": 0.18607211670795487, + "grad_norm": 1.6729894212690504, + "learning_rate": 1.8749460601524796e-05, + "loss": 1.1603, + "step": 1352 + }, + { + "epoch": 0.18620974401321222, + "grad_norm": 1.8747355316824585, + "learning_rate": 1.8747301356633145e-05, + "loss": 1.1904, + "step": 1353 + }, + { + "epoch": 0.1863473713184696, + "grad_norm": 1.9292232760193697, + "learning_rate": 1.8745140373772452e-05, + "loss": 1.1354, + "step": 1354 + }, + { + "epoch": 0.18648499862372694, + "grad_norm": 1.9161753248927433, + "learning_rate": 1.874297765337207e-05, + "loss": 1.0891, + "step": 1355 + }, + { + "epoch": 0.18662262592898432, + "grad_norm": 1.7380786286486736, + "learning_rate": 1.8740813195861713e-05, + "loss": 1.0881, + "step": 1356 + }, + { + "epoch": 0.18676025323424167, + "grad_norm": 1.8106954572929312, + "learning_rate": 1.873864700167142e-05, + "loss": 1.1598, + "step": 1357 + }, + { + "epoch": 0.18689788053949905, + "grad_norm": 1.7807308826423203, + "learning_rate": 1.8736479071231587e-05, + "loss": 1.2506, + "step": 1358 + }, + { + "epoch": 0.1870355078447564, + "grad_norm": 1.6481367892060674, + "learning_rate": 1.8734309404972946e-05, + "loss": 1.1045, + "step": 1359 + }, + { + "epoch": 0.18717313515001377, + "grad_norm": 1.699384823592173, + "learning_rate": 1.8732138003326597e-05, + "loss": 1.0641, + "step": 1360 + }, + { + "epoch": 0.18731076245527112, + "grad_norm": 1.7372377477945322, + "learning_rate": 1.872996486672395e-05, + "loss": 1.1346, + "step": 1361 + }, + { + "epoch": 0.1874483897605285, + "grad_norm": 1.7395479090438313, + "learning_rate": 1.8727789995596784e-05, + "loss": 1.1675, + "step": 1362 + }, + { + "epoch": 0.18758601706578584, + "grad_norm": 1.8600412255048144, + "learning_rate": 1.872561339037722e-05, + "loss": 1.1278, + "step": 1363 + }, + { + "epoch": 0.18772364437104322, + "grad_norm": 1.6258840477338212, + "learning_rate": 1.872343505149771e-05, + "loss": 1.0879, + "step": 1364 + }, + { + "epoch": 0.18786127167630057, + "grad_norm": 1.721394025625272, + "learning_rate": 1.8721254979391072e-05, + "loss": 1.2177, + "step": 1365 + }, + { + "epoch": 0.18799889898155794, + "grad_norm": 1.7347207094336736, + "learning_rate": 
1.871907317449045e-05, + "loss": 1.2197, + "step": 1366 + }, + { + "epoch": 0.1881365262868153, + "grad_norm": 1.836533971747441, + "learning_rate": 1.8716889637229334e-05, + "loss": 1.1957, + "step": 1367 + }, + { + "epoch": 0.18827415359207267, + "grad_norm": 1.7688364946797064, + "learning_rate": 1.871470436804157e-05, + "loss": 1.1222, + "step": 1368 + }, + { + "epoch": 0.18841178089733002, + "grad_norm": 1.5318304831719753, + "learning_rate": 1.871251736736134e-05, + "loss": 1.1265, + "step": 1369 + }, + { + "epoch": 0.1885494082025874, + "grad_norm": 1.63723809086378, + "learning_rate": 1.8710328635623172e-05, + "loss": 1.1687, + "step": 1370 + }, + { + "epoch": 0.18868703550784477, + "grad_norm": 1.7732303396194524, + "learning_rate": 1.8708138173261936e-05, + "loss": 1.2927, + "step": 1371 + }, + { + "epoch": 0.18882466281310212, + "grad_norm": 1.5929481110051298, + "learning_rate": 1.8705945980712845e-05, + "loss": 1.1643, + "step": 1372 + }, + { + "epoch": 0.1889622901183595, + "grad_norm": 1.6634775350897766, + "learning_rate": 1.870375205841146e-05, + "loss": 1.1021, + "step": 1373 + }, + { + "epoch": 0.18909991742361684, + "grad_norm": 1.7255356183089035, + "learning_rate": 1.8701556406793682e-05, + "loss": 1.1641, + "step": 1374 + }, + { + "epoch": 0.18923754472887422, + "grad_norm": 1.6461185944999166, + "learning_rate": 1.8699359026295755e-05, + "loss": 1.1327, + "step": 1375 + }, + { + "epoch": 0.18937517203413157, + "grad_norm": 1.5037595609581154, + "learning_rate": 1.8697159917354275e-05, + "loss": 1.1332, + "step": 1376 + }, + { + "epoch": 0.18951279933938894, + "grad_norm": 1.7487701950460637, + "learning_rate": 1.869495908040617e-05, + "loss": 1.1696, + "step": 1377 + }, + { + "epoch": 0.1896504266446463, + "grad_norm": 1.8554712375099518, + "learning_rate": 1.8692756515888714e-05, + "loss": 1.1761, + "step": 1378 + }, + { + "epoch": 0.18978805394990367, + "grad_norm": 1.73223124422111, + "learning_rate": 1.869055222423953e-05, + "loss": 1.1074, + "step": 1379 + }, + { + "epoch": 0.18992568125516102, + "grad_norm": 1.728830299557569, + "learning_rate": 1.868834620589658e-05, + "loss": 1.1494, + "step": 1380 + }, + { + "epoch": 0.1900633085604184, + "grad_norm": 1.5698559674756527, + "learning_rate": 1.868613846129817e-05, + "loss": 1.1132, + "step": 1381 + }, + { + "epoch": 0.19020093586567574, + "grad_norm": 1.866817330061516, + "learning_rate": 1.8683928990882946e-05, + "loss": 1.2559, + "step": 1382 + }, + { + "epoch": 0.19033856317093312, + "grad_norm": 1.7623575350668947, + "learning_rate": 1.8681717795089903e-05, + "loss": 1.1889, + "step": 1383 + }, + { + "epoch": 0.19047619047619047, + "grad_norm": 1.474895816521599, + "learning_rate": 1.8679504874358376e-05, + "loss": 1.2057, + "step": 1384 + }, + { + "epoch": 0.19061381778144784, + "grad_norm": 1.576435309019052, + "learning_rate": 1.8677290229128034e-05, + "loss": 1.1238, + "step": 1385 + }, + { + "epoch": 0.1907514450867052, + "grad_norm": 1.9662905088005784, + "learning_rate": 1.867507385983891e-05, + "loss": 1.2325, + "step": 1386 + }, + { + "epoch": 0.19088907239196257, + "grad_norm": 1.6019653318570228, + "learning_rate": 1.8672855766931354e-05, + "loss": 1.1174, + "step": 1387 + }, + { + "epoch": 0.19102669969721991, + "grad_norm": 1.5541417147207683, + "learning_rate": 1.8670635950846077e-05, + "loss": 1.0783, + "step": 1388 + }, + { + "epoch": 0.1911643270024773, + "grad_norm": 1.9478059186171508, + "learning_rate": 1.8668414412024124e-05, + "loss": 1.1729, + "step": 1389 + }, + { + "epoch": 
0.19130195430773467, + "grad_norm": 1.9248375436355005, + "learning_rate": 1.8666191150906888e-05, + "loss": 1.2866, + "step": 1390 + }, + { + "epoch": 0.19143958161299202, + "grad_norm": 1.6685985209711145, + "learning_rate": 1.8663966167936093e-05, + "loss": 1.2177, + "step": 1391 + }, + { + "epoch": 0.1915772089182494, + "grad_norm": 1.7826729917476112, + "learning_rate": 1.866173946355382e-05, + "loss": 1.1118, + "step": 1392 + }, + { + "epoch": 0.19171483622350674, + "grad_norm": 1.8903543004752952, + "learning_rate": 1.865951103820248e-05, + "loss": 1.2123, + "step": 1393 + }, + { + "epoch": 0.19185246352876412, + "grad_norm": 1.7949355284410375, + "learning_rate": 1.8657280892324833e-05, + "loss": 1.165, + "step": 1394 + }, + { + "epoch": 0.19199009083402147, + "grad_norm": 1.790311976743421, + "learning_rate": 1.865504902636398e-05, + "loss": 1.2449, + "step": 1395 + }, + { + "epoch": 0.19212771813927884, + "grad_norm": 1.8683394734432548, + "learning_rate": 1.865281544076336e-05, + "loss": 1.1502, + "step": 1396 + }, + { + "epoch": 0.1922653454445362, + "grad_norm": 1.6800404273773077, + "learning_rate": 1.8650580135966755e-05, + "loss": 1.1591, + "step": 1397 + }, + { + "epoch": 0.19240297274979357, + "grad_norm": 1.87458100075439, + "learning_rate": 1.8648343112418287e-05, + "loss": 1.1836, + "step": 1398 + }, + { + "epoch": 0.19254060005505091, + "grad_norm": 1.7034136822126607, + "learning_rate": 1.864610437056243e-05, + "loss": 1.1262, + "step": 1399 + }, + { + "epoch": 0.1926782273603083, + "grad_norm": 1.6799033596699697, + "learning_rate": 1.8643863910843986e-05, + "loss": 1.1333, + "step": 1400 + }, + { + "epoch": 0.19281585466556564, + "grad_norm": 1.8758567176964704, + "learning_rate": 1.8641621733708103e-05, + "loss": 1.1017, + "step": 1401 + }, + { + "epoch": 0.19295348197082302, + "grad_norm": 1.750418138836148, + "learning_rate": 1.8639377839600274e-05, + "loss": 1.1213, + "step": 1402 + }, + { + "epoch": 0.19309110927608036, + "grad_norm": 1.9846878188428023, + "learning_rate": 1.863713222896633e-05, + "loss": 1.2122, + "step": 1403 + }, + { + "epoch": 0.19322873658133774, + "grad_norm": 2.0712533831888607, + "learning_rate": 1.8634884902252438e-05, + "loss": 1.1835, + "step": 1404 + }, + { + "epoch": 0.1933663638865951, + "grad_norm": 1.7025072088794955, + "learning_rate": 1.8632635859905118e-05, + "loss": 1.1099, + "step": 1405 + }, + { + "epoch": 0.19350399119185246, + "grad_norm": 1.8184803734768709, + "learning_rate": 1.863038510237122e-05, + "loss": 1.0742, + "step": 1406 + }, + { + "epoch": 0.1936416184971098, + "grad_norm": 1.7729785930947264, + "learning_rate": 1.8628132630097936e-05, + "loss": 1.1315, + "step": 1407 + }, + { + "epoch": 0.1937792458023672, + "grad_norm": 1.9028281421901685, + "learning_rate": 1.8625878443532808e-05, + "loss": 1.261, + "step": 1408 + }, + { + "epoch": 0.19391687310762457, + "grad_norm": 1.729115802343711, + "learning_rate": 1.8623622543123705e-05, + "loss": 1.2271, + "step": 1409 + }, + { + "epoch": 0.19405450041288191, + "grad_norm": 1.8999807364485768, + "learning_rate": 1.862136492931885e-05, + "loss": 1.2311, + "step": 1410 + }, + { + "epoch": 0.1941921277181393, + "grad_norm": 1.7067465313617611, + "learning_rate": 1.8619105602566797e-05, + "loss": 1.1575, + "step": 1411 + }, + { + "epoch": 0.19432975502339664, + "grad_norm": 1.727526623999419, + "learning_rate": 1.8616844563316444e-05, + "loss": 1.0584, + "step": 1412 + }, + { + "epoch": 0.19446738232865401, + "grad_norm": 1.9034677298256828, + "learning_rate": 
1.8614581812017026e-05, + "loss": 1.1266, + "step": 1413 + }, + { + "epoch": 0.19460500963391136, + "grad_norm": 1.6851083411792012, + "learning_rate": 1.8612317349118125e-05, + "loss": 1.1844, + "step": 1414 + }, + { + "epoch": 0.19474263693916874, + "grad_norm": 1.8460375479801978, + "learning_rate": 1.8610051175069658e-05, + "loss": 1.2395, + "step": 1415 + }, + { + "epoch": 0.1948802642444261, + "grad_norm": 1.8675280164641854, + "learning_rate": 1.8607783290321876e-05, + "loss": 1.1573, + "step": 1416 + }, + { + "epoch": 0.19501789154968346, + "grad_norm": 1.887737228426763, + "learning_rate": 1.8605513695325385e-05, + "loss": 1.1635, + "step": 1417 + }, + { + "epoch": 0.1951555188549408, + "grad_norm": 1.5812824623528716, + "learning_rate": 1.860324239053112e-05, + "loss": 1.0703, + "step": 1418 + }, + { + "epoch": 0.1952931461601982, + "grad_norm": 1.9122706301212489, + "learning_rate": 1.8600969376390354e-05, + "loss": 1.2402, + "step": 1419 + }, + { + "epoch": 0.19543077346545554, + "grad_norm": 1.8098456965620218, + "learning_rate": 1.8598694653354712e-05, + "loss": 1.1187, + "step": 1420 + }, + { + "epoch": 0.1955684007707129, + "grad_norm": 1.6370595709752755, + "learning_rate": 1.8596418221876143e-05, + "loss": 1.1025, + "step": 1421 + }, + { + "epoch": 0.19570602807597026, + "grad_norm": 1.6850778271488844, + "learning_rate": 1.8594140082406946e-05, + "loss": 1.1944, + "step": 1422 + }, + { + "epoch": 0.19584365538122764, + "grad_norm": 1.630424868183961, + "learning_rate": 1.8591860235399758e-05, + "loss": 1.1156, + "step": 1423 + }, + { + "epoch": 0.195981282686485, + "grad_norm": 1.6978019669830673, + "learning_rate": 1.8589578681307548e-05, + "loss": 1.168, + "step": 1424 + }, + { + "epoch": 0.19611890999174236, + "grad_norm": 2.0712098179296796, + "learning_rate": 1.858729542058363e-05, + "loss": 1.1526, + "step": 1425 + }, + { + "epoch": 0.1962565372969997, + "grad_norm": 1.702740143532348, + "learning_rate": 1.8585010453681665e-05, + "loss": 1.1472, + "step": 1426 + }, + { + "epoch": 0.1963941646022571, + "grad_norm": 1.7197165767658424, + "learning_rate": 1.8582723781055637e-05, + "loss": 1.1622, + "step": 1427 + }, + { + "epoch": 0.19653179190751446, + "grad_norm": 1.760041143670627, + "learning_rate": 1.8580435403159875e-05, + "loss": 1.0441, + "step": 1428 + }, + { + "epoch": 0.1966694192127718, + "grad_norm": 1.553361656885028, + "learning_rate": 1.8578145320449055e-05, + "loss": 1.1338, + "step": 1429 + }, + { + "epoch": 0.1968070465180292, + "grad_norm": 1.7111594284557685, + "learning_rate": 1.857585353337818e-05, + "loss": 1.1329, + "step": 1430 + }, + { + "epoch": 0.19694467382328654, + "grad_norm": 1.679618901409045, + "learning_rate": 1.85735600424026e-05, + "loss": 1.123, + "step": 1431 + }, + { + "epoch": 0.1970823011285439, + "grad_norm": 1.4946698972002228, + "learning_rate": 1.8571264847978e-05, + "loss": 1.057, + "step": 1432 + }, + { + "epoch": 0.19721992843380126, + "grad_norm": 1.5939227998254473, + "learning_rate": 1.85689679505604e-05, + "loss": 1.1829, + "step": 1433 + }, + { + "epoch": 0.19735755573905864, + "grad_norm": 2.048369781363598, + "learning_rate": 1.856666935060617e-05, + "loss": 1.2109, + "step": 1434 + }, + { + "epoch": 0.197495183044316, + "grad_norm": 1.5599801101418271, + "learning_rate": 1.8564369048572e-05, + "loss": 1.0624, + "step": 1435 + }, + { + "epoch": 0.19763281034957336, + "grad_norm": 1.7181288027230486, + "learning_rate": 1.8562067044914935e-05, + "loss": 1.1065, + "step": 1436 + }, + { + "epoch": 
0.1977704376548307, + "grad_norm": 1.6160246748302163, + "learning_rate": 1.8559763340092348e-05, + "loss": 1.1735, + "step": 1437 + }, + { + "epoch": 0.1979080649600881, + "grad_norm": 1.6668393092741962, + "learning_rate": 1.8557457934561956e-05, + "loss": 1.1315, + "step": 1438 + }, + { + "epoch": 0.19804569226534544, + "grad_norm": 1.7296865550108709, + "learning_rate": 1.8555150828781815e-05, + "loss": 1.1274, + "step": 1439 + }, + { + "epoch": 0.1981833195706028, + "grad_norm": 1.653796195649246, + "learning_rate": 1.8552842023210303e-05, + "loss": 1.1277, + "step": 1440 + }, + { + "epoch": 0.19832094687586016, + "grad_norm": 1.776718986461215, + "learning_rate": 1.8550531518306165e-05, + "loss": 1.1705, + "step": 1441 + }, + { + "epoch": 0.19845857418111754, + "grad_norm": 1.5619998802309825, + "learning_rate": 1.8548219314528455e-05, + "loss": 1.0974, + "step": 1442 + }, + { + "epoch": 0.19859620148637488, + "grad_norm": 1.74124357188006, + "learning_rate": 1.8545905412336582e-05, + "loss": 1.1785, + "step": 1443 + }, + { + "epoch": 0.19873382879163226, + "grad_norm": 1.6661727338002352, + "learning_rate": 1.8543589812190284e-05, + "loss": 1.1966, + "step": 1444 + }, + { + "epoch": 0.1988714560968896, + "grad_norm": 1.5680752755518732, + "learning_rate": 1.8541272514549637e-05, + "loss": 1.096, + "step": 1445 + }, + { + "epoch": 0.19900908340214699, + "grad_norm": 1.6334510961202302, + "learning_rate": 1.853895351987506e-05, + "loss": 1.1594, + "step": 1446 + }, + { + "epoch": 0.19914671070740436, + "grad_norm": 1.7849088492686243, + "learning_rate": 1.8536632828627303e-05, + "loss": 1.1868, + "step": 1447 + }, + { + "epoch": 0.1992843380126617, + "grad_norm": 1.6494135324323504, + "learning_rate": 1.853431044126746e-05, + "loss": 1.0938, + "step": 1448 + }, + { + "epoch": 0.1994219653179191, + "grad_norm": 1.7806091447157413, + "learning_rate": 1.853198635825695e-05, + "loss": 1.1449, + "step": 1449 + }, + { + "epoch": 0.19955959262317643, + "grad_norm": 1.635779114478088, + "learning_rate": 1.852966058005754e-05, + "loss": 1.1249, + "step": 1450 + }, + { + "epoch": 0.1996972199284338, + "grad_norm": 1.6197146437437697, + "learning_rate": 1.8527333107131336e-05, + "loss": 1.2314, + "step": 1451 + }, + { + "epoch": 0.19983484723369116, + "grad_norm": 1.7534321757255151, + "learning_rate": 1.8525003939940767e-05, + "loss": 1.1674, + "step": 1452 + }, + { + "epoch": 0.19997247453894854, + "grad_norm": 1.7818009599403286, + "learning_rate": 1.852267307894861e-05, + "loss": 1.0936, + "step": 1453 + }, + { + "epoch": 0.20011010184420588, + "grad_norm": 1.814397905873886, + "learning_rate": 1.8520340524617976e-05, + "loss": 1.1068, + "step": 1454 + }, + { + "epoch": 0.20024772914946326, + "grad_norm": 1.818530314058793, + "learning_rate": 1.851800627741231e-05, + "loss": 1.2086, + "step": 1455 + }, + { + "epoch": 0.2003853564547206, + "grad_norm": 1.7067755781520677, + "learning_rate": 1.8515670337795395e-05, + "loss": 1.1804, + "step": 1456 + }, + { + "epoch": 0.20052298375997799, + "grad_norm": 1.7644386314039746, + "learning_rate": 1.851333270623135e-05, + "loss": 1.1891, + "step": 1457 + }, + { + "epoch": 0.20066061106523533, + "grad_norm": 1.659899609931917, + "learning_rate": 1.851099338318463e-05, + "loss": 1.1942, + "step": 1458 + }, + { + "epoch": 0.2007982383704927, + "grad_norm": 1.8032983252222388, + "learning_rate": 1.850865236912003e-05, + "loss": 1.1726, + "step": 1459 + }, + { + "epoch": 0.20093586567575006, + "grad_norm": 1.8819256036773617, + "learning_rate": 
1.8506309664502677e-05, + "loss": 1.1104, + "step": 1460 + }, + { + "epoch": 0.20107349298100743, + "grad_norm": 1.5615530173714074, + "learning_rate": 1.850396526979803e-05, + "loss": 1.0727, + "step": 1461 + }, + { + "epoch": 0.20121112028626478, + "grad_norm": 1.6889936496309117, + "learning_rate": 1.8501619185471892e-05, + "loss": 1.2714, + "step": 1462 + }, + { + "epoch": 0.20134874759152216, + "grad_norm": 1.6447418620446248, + "learning_rate": 1.8499271411990393e-05, + "loss": 1.1642, + "step": 1463 + }, + { + "epoch": 0.2014863748967795, + "grad_norm": 1.8079970925014657, + "learning_rate": 1.849692194982001e-05, + "loss": 1.1666, + "step": 1464 + }, + { + "epoch": 0.20162400220203688, + "grad_norm": 1.9881441777965214, + "learning_rate": 1.8494570799427546e-05, + "loss": 1.2401, + "step": 1465 + }, + { + "epoch": 0.20176162950729426, + "grad_norm": 1.7507568876818547, + "learning_rate": 1.849221796128014e-05, + "loss": 1.0601, + "step": 1466 + }, + { + "epoch": 0.2018992568125516, + "grad_norm": 1.666751208017914, + "learning_rate": 1.848986343584527e-05, + "loss": 1.1561, + "step": 1467 + }, + { + "epoch": 0.20203688411780898, + "grad_norm": 1.7431047779107722, + "learning_rate": 1.848750722359075e-05, + "loss": 1.1831, + "step": 1468 + }, + { + "epoch": 0.20217451142306633, + "grad_norm": 1.7704530634393503, + "learning_rate": 1.8485149324984727e-05, + "loss": 1.1704, + "step": 1469 + }, + { + "epoch": 0.2023121387283237, + "grad_norm": 1.6244326440711412, + "learning_rate": 1.8482789740495683e-05, + "loss": 1.0498, + "step": 1470 + }, + { + "epoch": 0.20244976603358106, + "grad_norm": 1.6526028101202077, + "learning_rate": 1.8480428470592434e-05, + "loss": 1.0975, + "step": 1471 + }, + { + "epoch": 0.20258739333883843, + "grad_norm": 1.5858887814994425, + "learning_rate": 1.8478065515744127e-05, + "loss": 1.092, + "step": 1472 + }, + { + "epoch": 0.20272502064409578, + "grad_norm": 1.6222728951109036, + "learning_rate": 1.847570087642026e-05, + "loss": 1.1763, + "step": 1473 + }, + { + "epoch": 0.20286264794935316, + "grad_norm": 1.7067187705398859, + "learning_rate": 1.847333455309065e-05, + "loss": 1.0824, + "step": 1474 + }, + { + "epoch": 0.2030002752546105, + "grad_norm": 1.8514270786862388, + "learning_rate": 1.8470966546225442e-05, + "loss": 1.1826, + "step": 1475 + }, + { + "epoch": 0.20313790255986788, + "grad_norm": 1.6495514950651438, + "learning_rate": 1.8468596856295145e-05, + "loss": 1.1695, + "step": 1476 + }, + { + "epoch": 0.20327552986512523, + "grad_norm": 1.7043857334280428, + "learning_rate": 1.8466225483770574e-05, + "loss": 1.1265, + "step": 1477 + }, + { + "epoch": 0.2034131571703826, + "grad_norm": 1.7505422894365834, + "learning_rate": 1.846385242912289e-05, + "loss": 1.2209, + "step": 1478 + }, + { + "epoch": 0.20355078447563996, + "grad_norm": 1.8372738039420844, + "learning_rate": 1.8461477692823582e-05, + "loss": 1.1598, + "step": 1479 + }, + { + "epoch": 0.20368841178089733, + "grad_norm": 1.747337738666948, + "learning_rate": 1.8459101275344482e-05, + "loss": 1.1163, + "step": 1480 + }, + { + "epoch": 0.20382603908615468, + "grad_norm": 1.68488409175198, + "learning_rate": 1.8456723177157753e-05, + "loss": 1.1136, + "step": 1481 + }, + { + "epoch": 0.20396366639141206, + "grad_norm": 1.7814469235023478, + "learning_rate": 1.845434339873589e-05, + "loss": 1.1094, + "step": 1482 + }, + { + "epoch": 0.2041012936966694, + "grad_norm": 1.531909168755628, + "learning_rate": 1.8451961940551716e-05, + "loss": 1.1319, + "step": 1483 + }, + { + "epoch": 
0.20423892100192678, + "grad_norm": 1.7078744515602595, + "learning_rate": 1.8449578803078402e-05, + "loss": 1.1861, + "step": 1484 + }, + { + "epoch": 0.20437654830718416, + "grad_norm": 1.7890446991180813, + "learning_rate": 1.8447193986789442e-05, + "loss": 1.1799, + "step": 1485 + }, + { + "epoch": 0.2045141756124415, + "grad_norm": 1.6853028501671425, + "learning_rate": 1.8444807492158668e-05, + "loss": 1.1074, + "step": 1486 + }, + { + "epoch": 0.20465180291769888, + "grad_norm": 1.7548640045548987, + "learning_rate": 1.844241931966024e-05, + "loss": 1.1582, + "step": 1487 + }, + { + "epoch": 0.20478943022295623, + "grad_norm": 1.5582333318690997, + "learning_rate": 1.8440029469768656e-05, + "loss": 1.1635, + "step": 1488 + }, + { + "epoch": 0.2049270575282136, + "grad_norm": 1.7127795758252853, + "learning_rate": 1.843763794295875e-05, + "loss": 1.1038, + "step": 1489 + }, + { + "epoch": 0.20506468483347096, + "grad_norm": 1.7348784455099946, + "learning_rate": 1.8435244739705683e-05, + "loss": 1.0809, + "step": 1490 + }, + { + "epoch": 0.20520231213872833, + "grad_norm": 1.7950534366288966, + "learning_rate": 1.8432849860484953e-05, + "loss": 1.1106, + "step": 1491 + }, + { + "epoch": 0.20533993944398568, + "grad_norm": 1.828597656454292, + "learning_rate": 1.843045330577239e-05, + "loss": 1.1708, + "step": 1492 + }, + { + "epoch": 0.20547756674924306, + "grad_norm": 1.6432822710817476, + "learning_rate": 1.8428055076044152e-05, + "loss": 1.2025, + "step": 1493 + }, + { + "epoch": 0.2056151940545004, + "grad_norm": 1.7223027931756794, + "learning_rate": 1.842565517177674e-05, + "loss": 1.1548, + "step": 1494 + }, + { + "epoch": 0.20575282135975778, + "grad_norm": 1.7781453526983706, + "learning_rate": 1.842325359344698e-05, + "loss": 1.2065, + "step": 1495 + }, + { + "epoch": 0.20589044866501513, + "grad_norm": 1.7876647041451774, + "learning_rate": 1.842085034153203e-05, + "loss": 1.0787, + "step": 1496 + }, + { + "epoch": 0.2060280759702725, + "grad_norm": 1.6396241831269793, + "learning_rate": 1.841844541650939e-05, + "loss": 1.1253, + "step": 1497 + }, + { + "epoch": 0.20616570327552985, + "grad_norm": 1.8184726392000505, + "learning_rate": 1.841603881885688e-05, + "loss": 1.1897, + "step": 1498 + }, + { + "epoch": 0.20630333058078723, + "grad_norm": 1.7051772473613152, + "learning_rate": 1.841363054905266e-05, + "loss": 1.0871, + "step": 1499 + }, + { + "epoch": 0.20644095788604458, + "grad_norm": 1.5922567486806138, + "learning_rate": 1.8411220607575217e-05, + "loss": 1.1079, + "step": 1500 + }, + { + "epoch": 0.20657858519130196, + "grad_norm": 1.7135429844538697, + "learning_rate": 1.8408808994903383e-05, + "loss": 1.0997, + "step": 1501 + }, + { + "epoch": 0.2067162124965593, + "grad_norm": 1.6265002061853238, + "learning_rate": 1.8406395711516302e-05, + "loss": 1.1642, + "step": 1502 + }, + { + "epoch": 0.20685383980181668, + "grad_norm": 1.723867090123497, + "learning_rate": 1.840398075789346e-05, + "loss": 1.1241, + "step": 1503 + }, + { + "epoch": 0.20699146710707406, + "grad_norm": 1.8314962145741862, + "learning_rate": 1.8401564134514685e-05, + "loss": 1.2169, + "step": 1504 + }, + { + "epoch": 0.2071290944123314, + "grad_norm": 1.6989073528314984, + "learning_rate": 1.8399145841860118e-05, + "loss": 1.1078, + "step": 1505 + }, + { + "epoch": 0.20726672171758878, + "grad_norm": 1.4408169308052605, + "learning_rate": 1.8396725880410248e-05, + "loss": 1.0885, + "step": 1506 + }, + { + "epoch": 0.20740434902284613, + "grad_norm": 1.672325123770154, + "learning_rate": 
1.839430425064588e-05, + "loss": 1.1501, + "step": 1507 + }, + { + "epoch": 0.2075419763281035, + "grad_norm": 1.706428530051366, + "learning_rate": 1.8391880953048165e-05, + "loss": 1.0881, + "step": 1508 + }, + { + "epoch": 0.20767960363336085, + "grad_norm": 1.9072069516622783, + "learning_rate": 1.8389455988098578e-05, + "loss": 1.1422, + "step": 1509 + }, + { + "epoch": 0.20781723093861823, + "grad_norm": 1.5962664859905822, + "learning_rate": 1.838702935627892e-05, + "loss": 1.0953, + "step": 1510 + }, + { + "epoch": 0.20795485824387558, + "grad_norm": 1.9522962180375956, + "learning_rate": 1.8384601058071342e-05, + "loss": 1.1023, + "step": 1511 + }, + { + "epoch": 0.20809248554913296, + "grad_norm": 1.8589532820394925, + "learning_rate": 1.83821710939583e-05, + "loss": 1.2127, + "step": 1512 + }, + { + "epoch": 0.2082301128543903, + "grad_norm": 1.6217516367365552, + "learning_rate": 1.8379739464422605e-05, + "loss": 1.136, + "step": 1513 + }, + { + "epoch": 0.20836774015964768, + "grad_norm": 1.8127800191192904, + "learning_rate": 1.8377306169947387e-05, + "loss": 1.1016, + "step": 1514 + }, + { + "epoch": 0.20850536746490503, + "grad_norm": 1.8585550749728161, + "learning_rate": 1.83748712110161e-05, + "loss": 1.1603, + "step": 1515 + }, + { + "epoch": 0.2086429947701624, + "grad_norm": 1.565547222205009, + "learning_rate": 1.8372434588112546e-05, + "loss": 1.099, + "step": 1516 + }, + { + "epoch": 0.20878062207541975, + "grad_norm": 1.7742271598531, + "learning_rate": 1.8369996301720845e-05, + "loss": 1.1852, + "step": 1517 + }, + { + "epoch": 0.20891824938067713, + "grad_norm": 1.6813180716757246, + "learning_rate": 1.836755635232545e-05, + "loss": 1.1303, + "step": 1518 + }, + { + "epoch": 0.20905587668593448, + "grad_norm": 1.6220405153113173, + "learning_rate": 1.8365114740411153e-05, + "loss": 1.1468, + "step": 1519 + }, + { + "epoch": 0.20919350399119185, + "grad_norm": 1.947273421725539, + "learning_rate": 1.8362671466463057e-05, + "loss": 1.1162, + "step": 1520 + }, + { + "epoch": 0.2093311312964492, + "grad_norm": 1.927623344348797, + "learning_rate": 1.8360226530966617e-05, + "loss": 1.213, + "step": 1521 + }, + { + "epoch": 0.20946875860170658, + "grad_norm": 1.6617161482620493, + "learning_rate": 1.83577799344076e-05, + "loss": 1.1008, + "step": 1522 + }, + { + "epoch": 0.20960638590696395, + "grad_norm": 1.5286888332597184, + "learning_rate": 1.8355331677272117e-05, + "loss": 1.1545, + "step": 1523 + }, + { + "epoch": 0.2097440132122213, + "grad_norm": 1.7621532694185635, + "learning_rate": 1.8352881760046604e-05, + "loss": 1.1773, + "step": 1524 + }, + { + "epoch": 0.20988164051747868, + "grad_norm": 1.5593393942279592, + "learning_rate": 1.8350430183217823e-05, + "loss": 1.1041, + "step": 1525 + }, + { + "epoch": 0.21001926782273603, + "grad_norm": 1.8885665356344805, + "learning_rate": 1.8347976947272868e-05, + "loss": 1.1647, + "step": 1526 + }, + { + "epoch": 0.2101568951279934, + "grad_norm": 1.7268926701560279, + "learning_rate": 1.8345522052699164e-05, + "loss": 1.1733, + "step": 1527 + }, + { + "epoch": 0.21029452243325075, + "grad_norm": 1.888184466498517, + "learning_rate": 1.834306549998447e-05, + "loss": 1.2119, + "step": 1528 + }, + { + "epoch": 0.21043214973850813, + "grad_norm": 1.5873141171628622, + "learning_rate": 1.8340607289616863e-05, + "loss": 1.0735, + "step": 1529 + }, + { + "epoch": 0.21056977704376548, + "grad_norm": 1.6578126571316665, + "learning_rate": 1.8338147422084756e-05, + "loss": 1.1526, + "step": 1530 + }, + { + "epoch": 
0.21070740434902285, + "grad_norm": 2.0205953466974895, + "learning_rate": 1.8335685897876896e-05, + "loss": 1.2828, + "step": 1531 + }, + { + "epoch": 0.2108450316542802, + "grad_norm": 1.7549753092008, + "learning_rate": 1.833322271748235e-05, + "loss": 1.0157, + "step": 1532 + }, + { + "epoch": 0.21098265895953758, + "grad_norm": 1.6156389796454735, + "learning_rate": 1.833075788139052e-05, + "loss": 1.0719, + "step": 1533 + }, + { + "epoch": 0.21112028626479493, + "grad_norm": 1.690397089079198, + "learning_rate": 1.8328291390091135e-05, + "loss": 1.1228, + "step": 1534 + }, + { + "epoch": 0.2112579135700523, + "grad_norm": 1.7700883055225065, + "learning_rate": 1.832582324407425e-05, + "loss": 1.1462, + "step": 1535 + }, + { + "epoch": 0.21139554087530965, + "grad_norm": 1.9287872263338504, + "learning_rate": 1.8323353443830258e-05, + "loss": 1.1415, + "step": 1536 + }, + { + "epoch": 0.21153316818056703, + "grad_norm": 1.8012065321892141, + "learning_rate": 1.832088198984987e-05, + "loss": 1.151, + "step": 1537 + }, + { + "epoch": 0.21167079548582438, + "grad_norm": 1.7326192395221385, + "learning_rate": 1.831840888262413e-05, + "loss": 1.1847, + "step": 1538 + }, + { + "epoch": 0.21180842279108175, + "grad_norm": 1.757066715589881, + "learning_rate": 1.8315934122644413e-05, + "loss": 1.1218, + "step": 1539 + }, + { + "epoch": 0.2119460500963391, + "grad_norm": 1.6913213061764916, + "learning_rate": 1.8313457710402415e-05, + "loss": 1.1215, + "step": 1540 + }, + { + "epoch": 0.21208367740159648, + "grad_norm": 1.7245041209060892, + "learning_rate": 1.8310979646390172e-05, + "loss": 1.2049, + "step": 1541 + }, + { + "epoch": 0.21222130470685385, + "grad_norm": 1.959165607462594, + "learning_rate": 1.830849993110004e-05, + "loss": 1.243, + "step": 1542 + }, + { + "epoch": 0.2123589320121112, + "grad_norm": 1.8056087058705172, + "learning_rate": 1.8306018565024698e-05, + "loss": 1.2385, + "step": 1543 + }, + { + "epoch": 0.21249655931736858, + "grad_norm": 1.9096284034025444, + "learning_rate": 1.8303535548657166e-05, + "loss": 1.212, + "step": 1544 + }, + { + "epoch": 0.21263418662262593, + "grad_norm": 1.8045486437431584, + "learning_rate": 1.8301050882490786e-05, + "loss": 1.1449, + "step": 1545 + }, + { + "epoch": 0.2127718139278833, + "grad_norm": 1.7002331488190596, + "learning_rate": 1.8298564567019223e-05, + "loss": 1.143, + "step": 1546 + }, + { + "epoch": 0.21290944123314065, + "grad_norm": 1.7084133374422172, + "learning_rate": 1.8296076602736472e-05, + "loss": 1.149, + "step": 1547 + }, + { + "epoch": 0.21304706853839803, + "grad_norm": 1.771061620507088, + "learning_rate": 1.8293586990136865e-05, + "loss": 1.1654, + "step": 1548 + }, + { + "epoch": 0.21318469584365538, + "grad_norm": 1.7036337900487009, + "learning_rate": 1.829109572971505e-05, + "loss": 1.0914, + "step": 1549 + }, + { + "epoch": 0.21332232314891275, + "grad_norm": 1.6639024702304208, + "learning_rate": 1.8288602821966005e-05, + "loss": 1.1988, + "step": 1550 + }, + { + "epoch": 0.2134599504541701, + "grad_norm": 1.7420115550828694, + "learning_rate": 1.8286108267385038e-05, + "loss": 1.0555, + "step": 1551 + }, + { + "epoch": 0.21359757775942748, + "grad_norm": 1.8470119120613782, + "learning_rate": 1.828361206646778e-05, + "loss": 1.231, + "step": 1552 + }, + { + "epoch": 0.21373520506468482, + "grad_norm": 1.6290196984186365, + "learning_rate": 1.8281114219710198e-05, + "loss": 1.1714, + "step": 1553 + }, + { + "epoch": 0.2138728323699422, + "grad_norm": 1.8226191688186457, + "learning_rate": 
1.8278614727608576e-05, + "loss": 1.0612, + "step": 1554 + }, + { + "epoch": 0.21401045967519955, + "grad_norm": 1.7457363196785947, + "learning_rate": 1.8276113590659532e-05, + "loss": 1.1341, + "step": 1555 + }, + { + "epoch": 0.21414808698045693, + "grad_norm": 1.7397466820341942, + "learning_rate": 1.8273610809360003e-05, + "loss": 1.1432, + "step": 1556 + }, + { + "epoch": 0.21428571428571427, + "grad_norm": 1.4539936444210937, + "learning_rate": 1.827110638420726e-05, + "loss": 1.1304, + "step": 1557 + }, + { + "epoch": 0.21442334159097165, + "grad_norm": 1.860034165797952, + "learning_rate": 1.8268600315698893e-05, + "loss": 1.2328, + "step": 1558 + }, + { + "epoch": 0.214560968896229, + "grad_norm": 1.8535194553246104, + "learning_rate": 1.8266092604332833e-05, + "loss": 1.1377, + "step": 1559 + }, + { + "epoch": 0.21469859620148637, + "grad_norm": 1.9104603102347049, + "learning_rate": 1.8263583250607324e-05, + "loss": 1.1389, + "step": 1560 + }, + { + "epoch": 0.21483622350674375, + "grad_norm": 1.5901665682693031, + "learning_rate": 1.8261072255020936e-05, + "loss": 1.075, + "step": 1561 + }, + { + "epoch": 0.2149738508120011, + "grad_norm": 1.763983087281282, + "learning_rate": 1.8258559618072575e-05, + "loss": 1.1511, + "step": 1562 + }, + { + "epoch": 0.21511147811725848, + "grad_norm": 1.9360440778496673, + "learning_rate": 1.8256045340261462e-05, + "loss": 1.1467, + "step": 1563 + }, + { + "epoch": 0.21524910542251582, + "grad_norm": 1.5805731408101602, + "learning_rate": 1.8253529422087156e-05, + "loss": 1.1088, + "step": 1564 + }, + { + "epoch": 0.2153867327277732, + "grad_norm": 1.6558575141928982, + "learning_rate": 1.8251011864049533e-05, + "loss": 1.1368, + "step": 1565 + }, + { + "epoch": 0.21552436003303055, + "grad_norm": 1.6922532002590815, + "learning_rate": 1.8248492666648795e-05, + "loss": 1.2594, + "step": 1566 + }, + { + "epoch": 0.21566198733828792, + "grad_norm": 1.7302516661250806, + "learning_rate": 1.8245971830385475e-05, + "loss": 1.1611, + "step": 1567 + }, + { + "epoch": 0.21579961464354527, + "grad_norm": 1.631442286253137, + "learning_rate": 1.8243449355760424e-05, + "loss": 1.1594, + "step": 1568 + }, + { + "epoch": 0.21593724194880265, + "grad_norm": 1.7023564513198883, + "learning_rate": 1.8240925243274827e-05, + "loss": 1.0893, + "step": 1569 + }, + { + "epoch": 0.21607486925406, + "grad_norm": 1.917024172190429, + "learning_rate": 1.8238399493430196e-05, + "loss": 1.2098, + "step": 1570 + }, + { + "epoch": 0.21621249655931737, + "grad_norm": 1.7051953655629273, + "learning_rate": 1.8235872106728353e-05, + "loss": 1.1684, + "step": 1571 + }, + { + "epoch": 0.21635012386457472, + "grad_norm": 1.7879858543743854, + "learning_rate": 1.8233343083671458e-05, + "loss": 1.115, + "step": 1572 + }, + { + "epoch": 0.2164877511698321, + "grad_norm": 1.5461768752435598, + "learning_rate": 1.8230812424761996e-05, + "loss": 1.1345, + "step": 1573 + }, + { + "epoch": 0.21662537847508945, + "grad_norm": 1.9390451026137865, + "learning_rate": 1.8228280130502768e-05, + "loss": 1.0945, + "step": 1574 + }, + { + "epoch": 0.21676300578034682, + "grad_norm": 1.7756756796425552, + "learning_rate": 1.8225746201396916e-05, + "loss": 1.0549, + "step": 1575 + }, + { + "epoch": 0.21690063308560417, + "grad_norm": 1.92496170616975, + "learning_rate": 1.822321063794789e-05, + "loss": 1.1697, + "step": 1576 + }, + { + "epoch": 0.21703826039086155, + "grad_norm": 2.044080448664165, + "learning_rate": 1.8220673440659478e-05, + "loss": 1.1937, + "step": 1577 + }, + { + "epoch": 
0.2171758876961189, + "grad_norm": 1.8467012622420291, + "learning_rate": 1.821813461003578e-05, + "loss": 1.0525, + "step": 1578 + }, + { + "epoch": 0.21731351500137627, + "grad_norm": 1.8675649167212505, + "learning_rate": 1.821559414658123e-05, + "loss": 1.168, + "step": 1579 + }, + { + "epoch": 0.21745114230663365, + "grad_norm": 1.5867764837479763, + "learning_rate": 1.8213052050800576e-05, + "loss": 1.1193, + "step": 1580 + }, + { + "epoch": 0.217588769611891, + "grad_norm": 1.713416189112341, + "learning_rate": 1.821050832319891e-05, + "loss": 1.1247, + "step": 1581 + }, + { + "epoch": 0.21772639691714837, + "grad_norm": 1.8171916110356148, + "learning_rate": 1.8207962964281628e-05, + "loss": 1.1131, + "step": 1582 + }, + { + "epoch": 0.21786402422240572, + "grad_norm": 1.8508707904053736, + "learning_rate": 1.8205415974554457e-05, + "loss": 1.2271, + "step": 1583 + }, + { + "epoch": 0.2180016515276631, + "grad_norm": 1.7052287655903968, + "learning_rate": 1.8202867354523456e-05, + "loss": 1.0835, + "step": 1584 + }, + { + "epoch": 0.21813927883292045, + "grad_norm": 1.7970039997551248, + "learning_rate": 1.8200317104694996e-05, + "loss": 1.1495, + "step": 1585 + }, + { + "epoch": 0.21827690613817782, + "grad_norm": 1.6290904310607954, + "learning_rate": 1.8197765225575775e-05, + "loss": 1.1988, + "step": 1586 + }, + { + "epoch": 0.21841453344343517, + "grad_norm": 1.7329535725632388, + "learning_rate": 1.8195211717672824e-05, + "loss": 1.084, + "step": 1587 + }, + { + "epoch": 0.21855216074869255, + "grad_norm": 1.7904691428629729, + "learning_rate": 1.8192656581493477e-05, + "loss": 1.1504, + "step": 1588 + }, + { + "epoch": 0.2186897880539499, + "grad_norm": 1.6602481514724015, + "learning_rate": 1.8190099817545417e-05, + "loss": 1.0842, + "step": 1589 + }, + { + "epoch": 0.21882741535920727, + "grad_norm": 1.812665397402033, + "learning_rate": 1.8187541426336633e-05, + "loss": 1.1046, + "step": 1590 + }, + { + "epoch": 0.21896504266446462, + "grad_norm": 1.6644078046092825, + "learning_rate": 1.8184981408375446e-05, + "loss": 1.1364, + "step": 1591 + }, + { + "epoch": 0.219102669969722, + "grad_norm": 1.5819879253636904, + "learning_rate": 1.8182419764170488e-05, + "loss": 1.101, + "step": 1592 + }, + { + "epoch": 0.21924029727497935, + "grad_norm": 1.7027993648543982, + "learning_rate": 1.8179856494230733e-05, + "loss": 1.1888, + "step": 1593 + }, + { + "epoch": 0.21937792458023672, + "grad_norm": 1.5536096620982303, + "learning_rate": 1.817729159906546e-05, + "loss": 1.0872, + "step": 1594 + }, + { + "epoch": 0.21951555188549407, + "grad_norm": 1.6273583506645348, + "learning_rate": 1.817472507918428e-05, + "loss": 1.1041, + "step": 1595 + }, + { + "epoch": 0.21965317919075145, + "grad_norm": 2.0190241769108725, + "learning_rate": 1.8172156935097128e-05, + "loss": 1.1486, + "step": 1596 + }, + { + "epoch": 0.2197908064960088, + "grad_norm": 1.8070844815637965, + "learning_rate": 1.8169587167314256e-05, + "loss": 1.1795, + "step": 1597 + }, + { + "epoch": 0.21992843380126617, + "grad_norm": 1.6757726525636913, + "learning_rate": 1.8167015776346247e-05, + "loss": 1.0699, + "step": 1598 + }, + { + "epoch": 0.22006606110652355, + "grad_norm": 1.7205978885405127, + "learning_rate": 1.8164442762703997e-05, + "loss": 1.0641, + "step": 1599 + }, + { + "epoch": 0.2202036884117809, + "grad_norm": 1.7197692892889012, + "learning_rate": 1.8161868126898724e-05, + "loss": 1.0994, + "step": 1600 + }, + { + "epoch": 0.22034131571703827, + "grad_norm": 1.7451825827952954, + "learning_rate": 
1.8159291869441983e-05, + "loss": 1.1776, + "step": 1601 + }, + { + "epoch": 0.22047894302229562, + "grad_norm": 1.5855995861965546, + "learning_rate": 1.8156713990845634e-05, + "loss": 1.1708, + "step": 1602 + }, + { + "epoch": 0.220616570327553, + "grad_norm": 1.7286592742332065, + "learning_rate": 1.8154134491621872e-05, + "loss": 1.0867, + "step": 1603 + }, + { + "epoch": 0.22075419763281035, + "grad_norm": 1.5632218989109004, + "learning_rate": 1.81515533722832e-05, + "loss": 1.0661, + "step": 1604 + }, + { + "epoch": 0.22089182493806772, + "grad_norm": 1.7540732699619548, + "learning_rate": 1.8148970633342455e-05, + "loss": 1.2064, + "step": 1605 + }, + { + "epoch": 0.22102945224332507, + "grad_norm": 1.735277303926088, + "learning_rate": 1.8146386275312794e-05, + "loss": 1.239, + "step": 1606 + }, + { + "epoch": 0.22116707954858245, + "grad_norm": 1.8149977517130742, + "learning_rate": 1.814380029870769e-05, + "loss": 1.1415, + "step": 1607 + }, + { + "epoch": 0.2213047068538398, + "grad_norm": 1.7857315295419747, + "learning_rate": 1.814121270404095e-05, + "loss": 1.0724, + "step": 1608 + }, + { + "epoch": 0.22144233415909717, + "grad_norm": 1.8341942323440683, + "learning_rate": 1.813862349182668e-05, + "loss": 1.1581, + "step": 1609 + }, + { + "epoch": 0.22157996146435452, + "grad_norm": 1.6069606129956517, + "learning_rate": 1.8136032662579327e-05, + "loss": 1.0713, + "step": 1610 + }, + { + "epoch": 0.2217175887696119, + "grad_norm": 1.6935991289726544, + "learning_rate": 1.813344021681366e-05, + "loss": 1.1341, + "step": 1611 + }, + { + "epoch": 0.22185521607486924, + "grad_norm": 1.6958093513761305, + "learning_rate": 1.813084615504475e-05, + "loss": 1.1456, + "step": 1612 + }, + { + "epoch": 0.22199284338012662, + "grad_norm": 1.6761222278281145, + "learning_rate": 1.8128250477788013e-05, + "loss": 1.138, + "step": 1613 + }, + { + "epoch": 0.22213047068538397, + "grad_norm": 1.8352346147759513, + "learning_rate": 1.8125653185559172e-05, + "loss": 1.1889, + "step": 1614 + }, + { + "epoch": 0.22226809799064134, + "grad_norm": 1.7498444094730516, + "learning_rate": 1.8123054278874265e-05, + "loss": 1.1317, + "step": 1615 + }, + { + "epoch": 0.2224057252958987, + "grad_norm": 1.8331573379026291, + "learning_rate": 1.8120453758249666e-05, + "loss": 1.1407, + "step": 1616 + }, + { + "epoch": 0.22254335260115607, + "grad_norm": 1.5965125766434676, + "learning_rate": 1.8117851624202066e-05, + "loss": 1.1146, + "step": 1617 + }, + { + "epoch": 0.22268097990641345, + "grad_norm": 1.7178446365597235, + "learning_rate": 1.8115247877248467e-05, + "loss": 1.1153, + "step": 1618 + }, + { + "epoch": 0.2228186072116708, + "grad_norm": 1.6264172791093963, + "learning_rate": 1.81126425179062e-05, + "loss": 1.0713, + "step": 1619 + }, + { + "epoch": 0.22295623451692817, + "grad_norm": 1.7403875441102825, + "learning_rate": 1.8110035546692916e-05, + "loss": 1.1302, + "step": 1620 + }, + { + "epoch": 0.22309386182218552, + "grad_norm": 1.7085348143703882, + "learning_rate": 1.8107426964126582e-05, + "loss": 1.1751, + "step": 1621 + }, + { + "epoch": 0.2232314891274429, + "grad_norm": 1.7313479055296241, + "learning_rate": 1.810481677072549e-05, + "loss": 1.1809, + "step": 1622 + }, + { + "epoch": 0.22336911643270024, + "grad_norm": 1.6083332549467435, + "learning_rate": 1.8102204967008248e-05, + "loss": 1.1015, + "step": 1623 + }, + { + "epoch": 0.22350674373795762, + "grad_norm": 1.6946653305301262, + "learning_rate": 1.809959155349379e-05, + "loss": 1.174, + "step": 1624 + }, + { + "epoch": 
0.22364437104321497, + "grad_norm": 1.7378314664176662, + "learning_rate": 1.8096976530701355e-05, + "loss": 1.0629, + "step": 1625 + }, + { + "epoch": 0.22378199834847234, + "grad_norm": 1.9925117096343832, + "learning_rate": 1.809435989915052e-05, + "loss": 1.1711, + "step": 1626 + }, + { + "epoch": 0.2239196256537297, + "grad_norm": 1.7181537764630712, + "learning_rate": 1.8091741659361174e-05, + "loss": 1.1892, + "step": 1627 + }, + { + "epoch": 0.22405725295898707, + "grad_norm": 1.8915709308265245, + "learning_rate": 1.8089121811853524e-05, + "loss": 1.1496, + "step": 1628 + }, + { + "epoch": 0.22419488026424442, + "grad_norm": 1.6467886002259564, + "learning_rate": 1.80865003571481e-05, + "loss": 1.1147, + "step": 1629 + }, + { + "epoch": 0.2243325075695018, + "grad_norm": 1.9053031378795866, + "learning_rate": 1.8083877295765747e-05, + "loss": 1.1964, + "step": 1630 + }, + { + "epoch": 0.22447013487475914, + "grad_norm": 1.6584585277036383, + "learning_rate": 1.808125262822763e-05, + "loss": 1.0619, + "step": 1631 + }, + { + "epoch": 0.22460776218001652, + "grad_norm": 1.6652663449547, + "learning_rate": 1.8078626355055237e-05, + "loss": 1.1513, + "step": 1632 + }, + { + "epoch": 0.22474538948527387, + "grad_norm": 1.6850856893787831, + "learning_rate": 1.807599847677037e-05, + "loss": 1.1037, + "step": 1633 + }, + { + "epoch": 0.22488301679053124, + "grad_norm": 1.553141328096441, + "learning_rate": 1.8073368993895156e-05, + "loss": 1.1213, + "step": 1634 + }, + { + "epoch": 0.2250206440957886, + "grad_norm": 1.5988938473188312, + "learning_rate": 1.8070737906952032e-05, + "loss": 1.1238, + "step": 1635 + }, + { + "epoch": 0.22515827140104597, + "grad_norm": 1.675872074662097, + "learning_rate": 1.8068105216463763e-05, + "loss": 1.1291, + "step": 1636 + }, + { + "epoch": 0.22529589870630334, + "grad_norm": 1.690113613026726, + "learning_rate": 1.806547092295343e-05, + "loss": 1.1218, + "step": 1637 + }, + { + "epoch": 0.2254335260115607, + "grad_norm": 1.6988441918926727, + "learning_rate": 1.806283502694443e-05, + "loss": 1.1164, + "step": 1638 + }, + { + "epoch": 0.22557115331681807, + "grad_norm": 1.6505951186490517, + "learning_rate": 1.8060197528960477e-05, + "loss": 1.1791, + "step": 1639 + }, + { + "epoch": 0.22570878062207542, + "grad_norm": 1.530847327358255, + "learning_rate": 1.8057558429525606e-05, + "loss": 1.083, + "step": 1640 + }, + { + "epoch": 0.2258464079273328, + "grad_norm": 1.6136463362922857, + "learning_rate": 1.805491772916417e-05, + "loss": 1.0571, + "step": 1641 + }, + { + "epoch": 0.22598403523259014, + "grad_norm": 1.7586532372893589, + "learning_rate": 1.8052275428400847e-05, + "loss": 1.1361, + "step": 1642 + }, + { + "epoch": 0.22612166253784752, + "grad_norm": 1.798567796317372, + "learning_rate": 1.804963152776061e-05, + "loss": 1.1165, + "step": 1643 + }, + { + "epoch": 0.22625928984310487, + "grad_norm": 1.6552751794382337, + "learning_rate": 1.8046986027768786e-05, + "loss": 1.2078, + "step": 1644 + }, + { + "epoch": 0.22639691714836224, + "grad_norm": 1.684940520040137, + "learning_rate": 1.8044338928950984e-05, + "loss": 1.1351, + "step": 1645 + }, + { + "epoch": 0.2265345444536196, + "grad_norm": 1.7231810168868344, + "learning_rate": 1.8041690231833155e-05, + "loss": 1.1588, + "step": 1646 + }, + { + "epoch": 0.22667217175887697, + "grad_norm": 1.7913198400351107, + "learning_rate": 1.8039039936941552e-05, + "loss": 1.1893, + "step": 1647 + }, + { + "epoch": 0.22680979906413432, + "grad_norm": 1.710869214797614, + "learning_rate": 
1.8036388044802757e-05, + "loss": 1.0214, + "step": 1648 + }, + { + "epoch": 0.2269474263693917, + "grad_norm": 1.7303803714431538, + "learning_rate": 1.8033734555943664e-05, + "loss": 1.1621, + "step": 1649 + }, + { + "epoch": 0.22708505367464904, + "grad_norm": 1.576122750144607, + "learning_rate": 1.8031079470891484e-05, + "loss": 1.0444, + "step": 1650 + }, + { + "epoch": 0.22722268097990642, + "grad_norm": 1.6440388581442575, + "learning_rate": 1.802842279017375e-05, + "loss": 1.1104, + "step": 1651 + }, + { + "epoch": 0.22736030828516376, + "grad_norm": 1.9321884922241224, + "learning_rate": 1.80257645143183e-05, + "loss": 1.1893, + "step": 1652 + }, + { + "epoch": 0.22749793559042114, + "grad_norm": 1.9124544975213824, + "learning_rate": 1.8023104643853306e-05, + "loss": 1.21, + "step": 1653 + }, + { + "epoch": 0.2276355628956785, + "grad_norm": 1.7601561234349525, + "learning_rate": 1.802044317930724e-05, + "loss": 1.2091, + "step": 1654 + }, + { + "epoch": 0.22777319020093587, + "grad_norm": 1.8840219619968523, + "learning_rate": 1.8017780121208904e-05, + "loss": 1.1552, + "step": 1655 + }, + { + "epoch": 0.22791081750619324, + "grad_norm": 1.9125127967624584, + "learning_rate": 1.8015115470087406e-05, + "loss": 1.1246, + "step": 1656 + }, + { + "epoch": 0.2280484448114506, + "grad_norm": 1.744455341669251, + "learning_rate": 1.8012449226472183e-05, + "loss": 1.0935, + "step": 1657 + }, + { + "epoch": 0.22818607211670797, + "grad_norm": 1.8047687061914486, + "learning_rate": 1.800978139089298e-05, + "loss": 1.1582, + "step": 1658 + }, + { + "epoch": 0.22832369942196531, + "grad_norm": 1.5547045382261586, + "learning_rate": 1.800711196387985e-05, + "loss": 1.0452, + "step": 1659 + }, + { + "epoch": 0.2284613267272227, + "grad_norm": 1.7317420896338862, + "learning_rate": 1.8004440945963183e-05, + "loss": 1.1062, + "step": 1660 + }, + { + "epoch": 0.22859895403248004, + "grad_norm": 1.882230688934405, + "learning_rate": 1.8001768337673666e-05, + "loss": 1.1374, + "step": 1661 + }, + { + "epoch": 0.22873658133773742, + "grad_norm": 1.7430189425194837, + "learning_rate": 1.7999094139542314e-05, + "loss": 1.0869, + "step": 1662 + }, + { + "epoch": 0.22887420864299476, + "grad_norm": 1.9399043495698964, + "learning_rate": 1.799641835210045e-05, + "loss": 1.2778, + "step": 1663 + }, + { + "epoch": 0.22901183594825214, + "grad_norm": 1.739356226106059, + "learning_rate": 1.7993740975879723e-05, + "loss": 1.1216, + "step": 1664 + }, + { + "epoch": 0.2291494632535095, + "grad_norm": 1.7647917368559702, + "learning_rate": 1.7991062011412082e-05, + "loss": 1.1627, + "step": 1665 + }, + { + "epoch": 0.22928709055876687, + "grad_norm": 1.788992704743257, + "learning_rate": 1.7988381459229805e-05, + "loss": 1.1253, + "step": 1666 + }, + { + "epoch": 0.2294247178640242, + "grad_norm": 1.696551967846157, + "learning_rate": 1.7985699319865482e-05, + "loss": 1.204, + "step": 1667 + }, + { + "epoch": 0.2295623451692816, + "grad_norm": 1.675371556258455, + "learning_rate": 1.798301559385202e-05, + "loss": 1.1638, + "step": 1668 + }, + { + "epoch": 0.22969997247453894, + "grad_norm": 1.7986542760406026, + "learning_rate": 1.798033028172263e-05, + "loss": 1.2111, + "step": 1669 + }, + { + "epoch": 0.22983759977979631, + "grad_norm": 1.6549489513564422, + "learning_rate": 1.7977643384010857e-05, + "loss": 1.1311, + "step": 1670 + }, + { + "epoch": 0.22997522708505366, + "grad_norm": 1.7051886617491292, + "learning_rate": 1.797495490125054e-05, + "loss": 1.1077, + "step": 1671 + }, + { + "epoch": 
0.23011285439031104, + "grad_norm": 1.775160498838214, + "learning_rate": 1.7972264833975855e-05, + "loss": 1.11, + "step": 1672 + }, + { + "epoch": 0.2302504816955684, + "grad_norm": 1.631387923703715, + "learning_rate": 1.7969573182721274e-05, + "loss": 1.0707, + "step": 1673 + }, + { + "epoch": 0.23038810900082576, + "grad_norm": 1.8948070801858465, + "learning_rate": 1.796687994802159e-05, + "loss": 1.0727, + "step": 1674 + }, + { + "epoch": 0.23052573630608314, + "grad_norm": 1.7578574325444796, + "learning_rate": 1.796418513041192e-05, + "loss": 1.127, + "step": 1675 + }, + { + "epoch": 0.2306633636113405, + "grad_norm": 1.8512279075885227, + "learning_rate": 1.7961488730427676e-05, + "loss": 1.1991, + "step": 1676 + }, + { + "epoch": 0.23080099091659786, + "grad_norm": 1.6378387664600542, + "learning_rate": 1.7958790748604606e-05, + "loss": 1.1975, + "step": 1677 + }, + { + "epoch": 0.2309386182218552, + "grad_norm": 1.9852581709771042, + "learning_rate": 1.795609118547876e-05, + "loss": 1.1893, + "step": 1678 + }, + { + "epoch": 0.2310762455271126, + "grad_norm": 1.9269688084361234, + "learning_rate": 1.7953390041586498e-05, + "loss": 1.1835, + "step": 1679 + }, + { + "epoch": 0.23121387283236994, + "grad_norm": 1.5918493841210304, + "learning_rate": 1.7950687317464508e-05, + "loss": 1.0727, + "step": 1680 + }, + { + "epoch": 0.23135150013762731, + "grad_norm": 1.5215432325396774, + "learning_rate": 1.7947983013649782e-05, + "loss": 1.0635, + "step": 1681 + }, + { + "epoch": 0.23148912744288466, + "grad_norm": 1.6579038189657276, + "learning_rate": 1.7945277130679626e-05, + "loss": 1.1081, + "step": 1682 + }, + { + "epoch": 0.23162675474814204, + "grad_norm": 1.718435770413427, + "learning_rate": 1.794256966909166e-05, + "loss": 1.1586, + "step": 1683 + }, + { + "epoch": 0.2317643820533994, + "grad_norm": 1.7016249037951952, + "learning_rate": 1.7939860629423825e-05, + "loss": 1.1402, + "step": 1684 + }, + { + "epoch": 0.23190200935865676, + "grad_norm": 1.8262633292243078, + "learning_rate": 1.793715001221437e-05, + "loss": 1.1017, + "step": 1685 + }, + { + "epoch": 0.2320396366639141, + "grad_norm": 1.618967024926263, + "learning_rate": 1.7934437818001852e-05, + "loss": 1.0977, + "step": 1686 + }, + { + "epoch": 0.2321772639691715, + "grad_norm": 1.7713821763891748, + "learning_rate": 1.7931724047325153e-05, + "loss": 1.1092, + "step": 1687 + }, + { + "epoch": 0.23231489127442884, + "grad_norm": 2.0180152414963892, + "learning_rate": 1.7929008700723458e-05, + "loss": 1.1163, + "step": 1688 + }, + { + "epoch": 0.2324525185796862, + "grad_norm": 1.6999766299919146, + "learning_rate": 1.7926291778736268e-05, + "loss": 1.1064, + "step": 1689 + }, + { + "epoch": 0.23259014588494356, + "grad_norm": 1.581692299559519, + "learning_rate": 1.7923573281903405e-05, + "loss": 1.0942, + "step": 1690 + }, + { + "epoch": 0.23272777319020094, + "grad_norm": 1.8377777004402367, + "learning_rate": 1.792085321076499e-05, + "loss": 1.1935, + "step": 1691 + }, + { + "epoch": 0.23286540049545829, + "grad_norm": 1.8731560145907848, + "learning_rate": 1.7918131565861467e-05, + "loss": 1.1627, + "step": 1692 + }, + { + "epoch": 0.23300302780071566, + "grad_norm": 2.1207514325989307, + "learning_rate": 1.791540834773359e-05, + "loss": 1.1886, + "step": 1693 + }, + { + "epoch": 0.23314065510597304, + "grad_norm": 1.793611054440342, + "learning_rate": 1.7912683556922427e-05, + "loss": 1.167, + "step": 1694 + }, + { + "epoch": 0.2332782824112304, + "grad_norm": 1.5967269888859532, + "learning_rate": 
1.790995719396935e-05, + "loss": 1.0345, + "step": 1695 + }, + { + "epoch": 0.23341590971648776, + "grad_norm": 1.7055682388892068, + "learning_rate": 1.7907229259416054e-05, + "loss": 1.1263, + "step": 1696 + }, + { + "epoch": 0.2335535370217451, + "grad_norm": 1.7982275495397122, + "learning_rate": 1.7904499753804545e-05, + "loss": 1.1289, + "step": 1697 + }, + { + "epoch": 0.2336911643270025, + "grad_norm": 1.6748987152843873, + "learning_rate": 1.7901768677677133e-05, + "loss": 1.1822, + "step": 1698 + }, + { + "epoch": 0.23382879163225984, + "grad_norm": 1.9468172953078995, + "learning_rate": 1.789903603157645e-05, + "loss": 1.0993, + "step": 1699 + }, + { + "epoch": 0.2339664189375172, + "grad_norm": 1.5614646546908204, + "learning_rate": 1.7896301816045432e-05, + "loss": 1.1602, + "step": 1700 + }, + { + "epoch": 0.23410404624277456, + "grad_norm": 1.8238678230838912, + "learning_rate": 1.7893566031627334e-05, + "loss": 1.0884, + "step": 1701 + }, + { + "epoch": 0.23424167354803194, + "grad_norm": 1.6917074885540846, + "learning_rate": 1.7890828678865718e-05, + "loss": 1.1268, + "step": 1702 + }, + { + "epoch": 0.23437930085328929, + "grad_norm": 1.7522222890678387, + "learning_rate": 1.7888089758304456e-05, + "loss": 1.0702, + "step": 1703 + }, + { + "epoch": 0.23451692815854666, + "grad_norm": 1.730583005433053, + "learning_rate": 1.7885349270487734e-05, + "loss": 1.1509, + "step": 1704 + }, + { + "epoch": 0.234654555463804, + "grad_norm": 1.539723969935983, + "learning_rate": 1.7882607215960045e-05, + "loss": 1.05, + "step": 1705 + }, + { + "epoch": 0.23479218276906139, + "grad_norm": 1.9019974211243587, + "learning_rate": 1.787986359526621e-05, + "loss": 1.2123, + "step": 1706 + }, + { + "epoch": 0.23492981007431873, + "grad_norm": 1.8340668640848363, + "learning_rate": 1.7877118408951344e-05, + "loss": 1.1598, + "step": 1707 + }, + { + "epoch": 0.2350674373795761, + "grad_norm": 1.9155636592600638, + "learning_rate": 1.7874371657560873e-05, + "loss": 1.2227, + "step": 1708 + }, + { + "epoch": 0.23520506468483346, + "grad_norm": 2.1013823915162315, + "learning_rate": 1.7871623341640544e-05, + "loss": 1.0773, + "step": 1709 + }, + { + "epoch": 0.23534269199009084, + "grad_norm": 1.6051504099784308, + "learning_rate": 1.7868873461736403e-05, + "loss": 1.121, + "step": 1710 + }, + { + "epoch": 0.23548031929534818, + "grad_norm": 1.7025121600518087, + "learning_rate": 1.786612201839482e-05, + "loss": 1.0359, + "step": 1711 + }, + { + "epoch": 0.23561794660060556, + "grad_norm": 1.621082942340268, + "learning_rate": 1.786336901216247e-05, + "loss": 1.0986, + "step": 1712 + }, + { + "epoch": 0.23575557390586294, + "grad_norm": 1.6948684009112633, + "learning_rate": 1.7860614443586335e-05, + "loss": 1.1539, + "step": 1713 + }, + { + "epoch": 0.23589320121112028, + "grad_norm": 1.7480132482375463, + "learning_rate": 1.7857858313213708e-05, + "loss": 1.2192, + "step": 1714 + }, + { + "epoch": 0.23603082851637766, + "grad_norm": 1.7424339918228735, + "learning_rate": 1.78551006215922e-05, + "loss": 1.037, + "step": 1715 + }, + { + "epoch": 0.236168455821635, + "grad_norm": 1.8265710045217458, + "learning_rate": 1.785234136926972e-05, + "loss": 1.1737, + "step": 1716 + }, + { + "epoch": 0.23630608312689239, + "grad_norm": 2.047749495646647, + "learning_rate": 1.7849580556794497e-05, + "loss": 1.1365, + "step": 1717 + }, + { + "epoch": 0.23644371043214973, + "grad_norm": 1.9504541302155423, + "learning_rate": 1.7846818184715066e-05, + "loss": 1.2045, + "step": 1718 + }, + { + "epoch": 
0.2365813377374071, + "grad_norm": 1.5822998818423637, + "learning_rate": 1.7844054253580273e-05, + "loss": 1.2185, + "step": 1719 + }, + { + "epoch": 0.23671896504266446, + "grad_norm": 1.6801087724145576, + "learning_rate": 1.7841288763939275e-05, + "loss": 1.0221, + "step": 1720 + }, + { + "epoch": 0.23685659234792183, + "grad_norm": 2.01985975743288, + "learning_rate": 1.7838521716341533e-05, + "loss": 1.1546, + "step": 1721 + }, + { + "epoch": 0.23699421965317918, + "grad_norm": 1.710092564866613, + "learning_rate": 1.7835753111336824e-05, + "loss": 1.0679, + "step": 1722 + }, + { + "epoch": 0.23713184695843656, + "grad_norm": 1.661332263650292, + "learning_rate": 1.7832982949475234e-05, + "loss": 1.156, + "step": 1723 + }, + { + "epoch": 0.2372694742636939, + "grad_norm": 1.6201194721499923, + "learning_rate": 1.783021123130715e-05, + "loss": 1.0921, + "step": 1724 + }, + { + "epoch": 0.23740710156895128, + "grad_norm": 1.7033396008297428, + "learning_rate": 1.782743795738328e-05, + "loss": 1.1713, + "step": 1725 + }, + { + "epoch": 0.23754472887420863, + "grad_norm": 1.8041058199837072, + "learning_rate": 1.7824663128254637e-05, + "loss": 1.108, + "step": 1726 + }, + { + "epoch": 0.237682356179466, + "grad_norm": 1.808408225356363, + "learning_rate": 1.7821886744472536e-05, + "loss": 1.2065, + "step": 1727 + }, + { + "epoch": 0.23781998348472336, + "grad_norm": 1.9414012651841512, + "learning_rate": 1.7819108806588607e-05, + "loss": 1.0435, + "step": 1728 + }, + { + "epoch": 0.23795761078998073, + "grad_norm": 1.751495399278274, + "learning_rate": 1.7816329315154793e-05, + "loss": 1.1618, + "step": 1729 + }, + { + "epoch": 0.23809523809523808, + "grad_norm": 1.617623808380777, + "learning_rate": 1.7813548270723334e-05, + "loss": 1.1935, + "step": 1730 + }, + { + "epoch": 0.23823286540049546, + "grad_norm": 1.8355865927617843, + "learning_rate": 1.7810765673846795e-05, + "loss": 1.145, + "step": 1731 + }, + { + "epoch": 0.23837049270575283, + "grad_norm": 1.6919422102902557, + "learning_rate": 1.780798152507803e-05, + "loss": 1.0804, + "step": 1732 + }, + { + "epoch": 0.23850812001101018, + "grad_norm": 1.812005149233321, + "learning_rate": 1.7805195824970216e-05, + "loss": 1.0597, + "step": 1733 + }, + { + "epoch": 0.23864574731626756, + "grad_norm": 1.6626148917342687, + "learning_rate": 1.7802408574076833e-05, + "loss": 0.9991, + "step": 1734 + }, + { + "epoch": 0.2387833746215249, + "grad_norm": 1.5978212011692006, + "learning_rate": 1.7799619772951673e-05, + "loss": 1.1143, + "step": 1735 + }, + { + "epoch": 0.23892100192678228, + "grad_norm": 2.077605111959503, + "learning_rate": 1.7796829422148825e-05, + "loss": 1.1434, + "step": 1736 + }, + { + "epoch": 0.23905862923203963, + "grad_norm": 1.7663264100475409, + "learning_rate": 1.77940375222227e-05, + "loss": 1.1655, + "step": 1737 + }, + { + "epoch": 0.239196256537297, + "grad_norm": 1.7357272956589496, + "learning_rate": 1.7791244073728008e-05, + "loss": 1.1665, + "step": 1738 + }, + { + "epoch": 0.23933388384255436, + "grad_norm": 1.5790612588969983, + "learning_rate": 1.7788449077219767e-05, + "loss": 1.088, + "step": 1739 + }, + { + "epoch": 0.23947151114781173, + "grad_norm": 1.7792981498748501, + "learning_rate": 1.778565253325331e-05, + "loss": 1.1267, + "step": 1740 + }, + { + "epoch": 0.23960913845306908, + "grad_norm": 1.579683568444218, + "learning_rate": 1.7782854442384264e-05, + "loss": 1.118, + "step": 1741 + }, + { + "epoch": 0.23974676575832646, + "grad_norm": 1.8751291707523239, + "learning_rate": 
1.778005480516858e-05, + "loss": 1.1884, + "step": 1742 + }, + { + "epoch": 0.2398843930635838, + "grad_norm": 1.9604220287781875, + "learning_rate": 1.7777253622162503e-05, + "loss": 1.1912, + "step": 1743 + }, + { + "epoch": 0.24002202036884118, + "grad_norm": 1.6530576950514995, + "learning_rate": 1.777445089392259e-05, + "loss": 1.1238, + "step": 1744 + }, + { + "epoch": 0.24015964767409853, + "grad_norm": 1.567419009084569, + "learning_rate": 1.7771646621005705e-05, + "loss": 1.1204, + "step": 1745 + }, + { + "epoch": 0.2402972749793559, + "grad_norm": 1.6986231329416253, + "learning_rate": 1.7768840803969017e-05, + "loss": 1.056, + "step": 1746 + }, + { + "epoch": 0.24043490228461326, + "grad_norm": 1.786000280692303, + "learning_rate": 1.776603344337001e-05, + "loss": 1.1674, + "step": 1747 + }, + { + "epoch": 0.24057252958987063, + "grad_norm": 1.641755517898788, + "learning_rate": 1.776322453976646e-05, + "loss": 1.1018, + "step": 1748 + }, + { + "epoch": 0.24071015689512798, + "grad_norm": 1.6380778310943016, + "learning_rate": 1.776041409371646e-05, + "loss": 1.1236, + "step": 1749 + }, + { + "epoch": 0.24084778420038536, + "grad_norm": 1.739723979288783, + "learning_rate": 1.7757602105778406e-05, + "loss": 1.1955, + "step": 1750 + }, + { + "epoch": 0.24098541150564273, + "grad_norm": 1.76387877774985, + "learning_rate": 1.7754788576511006e-05, + "loss": 1.106, + "step": 1751 + }, + { + "epoch": 0.24112303881090008, + "grad_norm": 1.6283618773579944, + "learning_rate": 1.775197350647327e-05, + "loss": 1.1125, + "step": 1752 + }, + { + "epoch": 0.24126066611615746, + "grad_norm": 1.679213175860945, + "learning_rate": 1.774915689622451e-05, + "loss": 1.0208, + "step": 1753 + }, + { + "epoch": 0.2413982934214148, + "grad_norm": 1.947309066825703, + "learning_rate": 1.774633874632435e-05, + "loss": 1.1273, + "step": 1754 + }, + { + "epoch": 0.24153592072667218, + "grad_norm": 1.7200878643105664, + "learning_rate": 1.7743519057332708e-05, + "loss": 1.0674, + "step": 1755 + }, + { + "epoch": 0.24167354803192953, + "grad_norm": 1.8521793056897395, + "learning_rate": 1.7740697829809836e-05, + "loss": 1.0499, + "step": 1756 + }, + { + "epoch": 0.2418111753371869, + "grad_norm": 1.5229558547256976, + "learning_rate": 1.7737875064316262e-05, + "loss": 1.1348, + "step": 1757 + }, + { + "epoch": 0.24194880264244426, + "grad_norm": 1.7463338829142465, + "learning_rate": 1.773505076141283e-05, + "loss": 1.1131, + "step": 1758 + }, + { + "epoch": 0.24208642994770163, + "grad_norm": 1.7529322996695358, + "learning_rate": 1.7732224921660695e-05, + "loss": 1.1788, + "step": 1759 + }, + { + "epoch": 0.24222405725295898, + "grad_norm": 1.9078328549853052, + "learning_rate": 1.7729397545621313e-05, + "loss": 1.0732, + "step": 1760 + }, + { + "epoch": 0.24236168455821636, + "grad_norm": 1.583998590871363, + "learning_rate": 1.772656863385644e-05, + "loss": 1.0956, + "step": 1761 + }, + { + "epoch": 0.2424993118634737, + "grad_norm": 1.6085521340961173, + "learning_rate": 1.7723738186928145e-05, + "loss": 1.0702, + "step": 1762 + }, + { + "epoch": 0.24263693916873108, + "grad_norm": 1.660338393967526, + "learning_rate": 1.7720906205398796e-05, + "loss": 1.0874, + "step": 1763 + }, + { + "epoch": 0.24277456647398843, + "grad_norm": 1.7522154900552314, + "learning_rate": 1.7718072689831075e-05, + "loss": 1.1249, + "step": 1764 + }, + { + "epoch": 0.2429121937792458, + "grad_norm": 1.7146455173963568, + "learning_rate": 1.771523764078796e-05, + "loss": 1.2018, + "step": 1765 + }, + { + "epoch": 
0.24304982108450315, + "grad_norm": 1.6725748171596602, + "learning_rate": 1.7712401058832733e-05, + "loss": 1.1392, + "step": 1766 + }, + { + "epoch": 0.24318744838976053, + "grad_norm": 2.016352780039768, + "learning_rate": 1.770956294452899e-05, + "loss": 1.1621, + "step": 1767 + }, + { + "epoch": 0.24332507569501788, + "grad_norm": 1.9127677565112473, + "learning_rate": 1.770672329844062e-05, + "loss": 1.1615, + "step": 1768 + }, + { + "epoch": 0.24346270300027525, + "grad_norm": 1.5673178992976797, + "learning_rate": 1.7703882121131823e-05, + "loss": 1.0958, + "step": 1769 + }, + { + "epoch": 0.24360033030553263, + "grad_norm": 1.7136909594958947, + "learning_rate": 1.770103941316711e-05, + "loss": 1.1469, + "step": 1770 + }, + { + "epoch": 0.24373795761078998, + "grad_norm": 1.8443646656748942, + "learning_rate": 1.7698195175111274e-05, + "loss": 1.1596, + "step": 1771 + }, + { + "epoch": 0.24387558491604736, + "grad_norm": 1.796621081323455, + "learning_rate": 1.7695349407529435e-05, + "loss": 1.0918, + "step": 1772 + }, + { + "epoch": 0.2440132122213047, + "grad_norm": 1.842177623984622, + "learning_rate": 1.7692502110987008e-05, + "loss": 1.1804, + "step": 1773 + }, + { + "epoch": 0.24415083952656208, + "grad_norm": 1.6055570479918668, + "learning_rate": 1.768965328604971e-05, + "loss": 1.0056, + "step": 1774 + }, + { + "epoch": 0.24428846683181943, + "grad_norm": 1.8405870371494335, + "learning_rate": 1.7686802933283565e-05, + "loss": 1.1986, + "step": 1775 + }, + { + "epoch": 0.2444260941370768, + "grad_norm": 1.8188710479664265, + "learning_rate": 1.7683951053254897e-05, + "loss": 1.1542, + "step": 1776 + }, + { + "epoch": 0.24456372144233415, + "grad_norm": 1.7447280017502376, + "learning_rate": 1.7681097646530333e-05, + "loss": 1.1149, + "step": 1777 + }, + { + "epoch": 0.24470134874759153, + "grad_norm": 1.7455038033445627, + "learning_rate": 1.7678242713676813e-05, + "loss": 1.099, + "step": 1778 + }, + { + "epoch": 0.24483897605284888, + "grad_norm": 1.8439559013551687, + "learning_rate": 1.7675386255261573e-05, + "loss": 1.1831, + "step": 1779 + }, + { + "epoch": 0.24497660335810625, + "grad_norm": 1.9287421775691638, + "learning_rate": 1.7672528271852142e-05, + "loss": 1.1994, + "step": 1780 + }, + { + "epoch": 0.2451142306633636, + "grad_norm": 1.8005419955025663, + "learning_rate": 1.7669668764016373e-05, + "loss": 1.1393, + "step": 1781 + }, + { + "epoch": 0.24525185796862098, + "grad_norm": 1.7344359222205779, + "learning_rate": 1.7666807732322407e-05, + "loss": 1.1027, + "step": 1782 + }, + { + "epoch": 0.24538948527387833, + "grad_norm": 1.7840168421975116, + "learning_rate": 1.766394517733869e-05, + "loss": 1.1139, + "step": 1783 + }, + { + "epoch": 0.2455271125791357, + "grad_norm": 1.6058689359369729, + "learning_rate": 1.7661081099633976e-05, + "loss": 1.0813, + "step": 1784 + }, + { + "epoch": 0.24566473988439305, + "grad_norm": 1.8362292539707685, + "learning_rate": 1.7658215499777314e-05, + "loss": 1.1848, + "step": 1785 + }, + { + "epoch": 0.24580236718965043, + "grad_norm": 1.7537342790161152, + "learning_rate": 1.7655348378338062e-05, + "loss": 1.136, + "step": 1786 + }, + { + "epoch": 0.24593999449490778, + "grad_norm": 2.1129762107038954, + "learning_rate": 1.765247973588588e-05, + "loss": 1.1678, + "step": 1787 + }, + { + "epoch": 0.24607762180016515, + "grad_norm": 1.5753264715939579, + "learning_rate": 1.7649609572990724e-05, + "loss": 1.0935, + "step": 1788 + }, + { + "epoch": 0.24621524910542253, + "grad_norm": 2.049864212593271, + "learning_rate": 
1.7646737890222855e-05, + "loss": 1.174, + "step": 1789 + }, + { + "epoch": 0.24635287641067988, + "grad_norm": 1.788181790436539, + "learning_rate": 1.7643864688152843e-05, + "loss": 1.2012, + "step": 1790 + }, + { + "epoch": 0.24649050371593725, + "grad_norm": 1.77933594059868, + "learning_rate": 1.764098996735155e-05, + "loss": 1.191, + "step": 1791 + }, + { + "epoch": 0.2466281310211946, + "grad_norm": 1.7041495631736205, + "learning_rate": 1.7638113728390145e-05, + "loss": 1.169, + "step": 1792 + }, + { + "epoch": 0.24676575832645198, + "grad_norm": 1.816556475933002, + "learning_rate": 1.7635235971840094e-05, + "loss": 1.2465, + "step": 1793 + }, + { + "epoch": 0.24690338563170933, + "grad_norm": 1.7270719146788693, + "learning_rate": 1.7632356698273173e-05, + "loss": 1.0703, + "step": 1794 + }, + { + "epoch": 0.2470410129369667, + "grad_norm": 1.603438942487467, + "learning_rate": 1.7629475908261452e-05, + "loss": 1.137, + "step": 1795 + }, + { + "epoch": 0.24717864024222405, + "grad_norm": 1.8229029454550567, + "learning_rate": 1.76265936023773e-05, + "loss": 1.1492, + "step": 1796 + }, + { + "epoch": 0.24731626754748143, + "grad_norm": 1.6678589107159985, + "learning_rate": 1.7623709781193398e-05, + "loss": 1.0467, + "step": 1797 + }, + { + "epoch": 0.24745389485273878, + "grad_norm": 1.778150510217301, + "learning_rate": 1.762082444528272e-05, + "loss": 1.1571, + "step": 1798 + }, + { + "epoch": 0.24759152215799615, + "grad_norm": 1.7514895290860197, + "learning_rate": 1.7617937595218543e-05, + "loss": 1.1012, + "step": 1799 + }, + { + "epoch": 0.2477291494632535, + "grad_norm": 1.7639957804035473, + "learning_rate": 1.7615049231574444e-05, + "loss": 1.1498, + "step": 1800 + }, + { + "epoch": 0.24786677676851088, + "grad_norm": 1.7612904039185773, + "learning_rate": 1.7612159354924303e-05, + "loss": 1.1481, + "step": 1801 + }, + { + "epoch": 0.24800440407376823, + "grad_norm": 1.6596626519137907, + "learning_rate": 1.7609267965842295e-05, + "loss": 1.1745, + "step": 1802 + }, + { + "epoch": 0.2481420313790256, + "grad_norm": 1.7333992480054492, + "learning_rate": 1.76063750649029e-05, + "loss": 1.1346, + "step": 1803 + }, + { + "epoch": 0.24827965868428295, + "grad_norm": 1.6346919657368744, + "learning_rate": 1.7603480652680902e-05, + "loss": 1.0676, + "step": 1804 + }, + { + "epoch": 0.24841728598954033, + "grad_norm": 1.7488661076830252, + "learning_rate": 1.7600584729751378e-05, + "loss": 1.1304, + "step": 1805 + }, + { + "epoch": 0.24855491329479767, + "grad_norm": 1.786724520974546, + "learning_rate": 1.759768729668971e-05, + "loss": 1.1333, + "step": 1806 + }, + { + "epoch": 0.24869254060005505, + "grad_norm": 1.6234807324161031, + "learning_rate": 1.7594788354071576e-05, + "loss": 1.1911, + "step": 1807 + }, + { + "epoch": 0.24883016790531243, + "grad_norm": 1.6948443820262118, + "learning_rate": 1.7591887902472956e-05, + "loss": 1.0733, + "step": 1808 + }, + { + "epoch": 0.24896779521056978, + "grad_norm": 1.8254423990604678, + "learning_rate": 1.7588985942470134e-05, + "loss": 1.129, + "step": 1809 + }, + { + "epoch": 0.24910542251582715, + "grad_norm": 1.7044848393490044, + "learning_rate": 1.7586082474639688e-05, + "loss": 1.0491, + "step": 1810 + }, + { + "epoch": 0.2492430498210845, + "grad_norm": 1.759765225806777, + "learning_rate": 1.7583177499558492e-05, + "loss": 1.0945, + "step": 1811 + }, + { + "epoch": 0.24938067712634188, + "grad_norm": 1.795122659422111, + "learning_rate": 1.7580271017803734e-05, + "loss": 1.0906, + "step": 1812 + }, + { + "epoch": 
0.24951830443159922, + "grad_norm": 1.6385124363587005, + "learning_rate": 1.7577363029952885e-05, + "loss": 1.1158, + "step": 1813 + }, + { + "epoch": 0.2496559317368566, + "grad_norm": 1.7672365190278323, + "learning_rate": 1.7574453536583724e-05, + "loss": 1.137, + "step": 1814 + }, + { + "epoch": 0.24979355904211395, + "grad_norm": 1.7832313084753144, + "learning_rate": 1.7571542538274328e-05, + "loss": 1.0508, + "step": 1815 + }, + { + "epoch": 0.24993118634737133, + "grad_norm": 1.8922905398070986, + "learning_rate": 1.7568630035603077e-05, + "loss": 1.1407, + "step": 1816 + }, + { + "epoch": 0.2500688136526287, + "grad_norm": 1.632291884046593, + "learning_rate": 1.7565716029148637e-05, + "loss": 1.0914, + "step": 1817 + }, + { + "epoch": 0.250206440957886, + "grad_norm": 1.6580055410971883, + "learning_rate": 1.7562800519489986e-05, + "loss": 1.0831, + "step": 1818 + }, + { + "epoch": 0.2503440682631434, + "grad_norm": 1.6553374689797562, + "learning_rate": 1.7559883507206396e-05, + "loss": 1.0976, + "step": 1819 + }, + { + "epoch": 0.2504816955684008, + "grad_norm": 1.558621702138325, + "learning_rate": 1.7556964992877438e-05, + "loss": 1.0899, + "step": 1820 + }, + { + "epoch": 0.2506193228736581, + "grad_norm": 1.6722964692257043, + "learning_rate": 1.7554044977082976e-05, + "loss": 1.0875, + "step": 1821 + }, + { + "epoch": 0.25075695017891547, + "grad_norm": 1.9679945908210024, + "learning_rate": 1.7551123460403185e-05, + "loss": 1.189, + "step": 1822 + }, + { + "epoch": 0.2508945774841729, + "grad_norm": 1.6997082578064715, + "learning_rate": 1.754820044341852e-05, + "loss": 1.0609, + "step": 1823 + }, + { + "epoch": 0.2510322047894302, + "grad_norm": 1.6420027629874525, + "learning_rate": 1.7545275926709757e-05, + "loss": 1.0684, + "step": 1824 + }, + { + "epoch": 0.2511698320946876, + "grad_norm": 1.8609431318755765, + "learning_rate": 1.7542349910857948e-05, + "loss": 1.1587, + "step": 1825 + }, + { + "epoch": 0.2513074593999449, + "grad_norm": 1.8096304252143798, + "learning_rate": 1.753942239644445e-05, + "loss": 1.1493, + "step": 1826 + }, + { + "epoch": 0.2514450867052023, + "grad_norm": 2.0046784926813617, + "learning_rate": 1.7536493384050933e-05, + "loss": 1.102, + "step": 1827 + }, + { + "epoch": 0.2515827140104597, + "grad_norm": 1.851203881678385, + "learning_rate": 1.753356287425934e-05, + "loss": 1.1114, + "step": 1828 + }, + { + "epoch": 0.251720341315717, + "grad_norm": 1.7255736883436192, + "learning_rate": 1.753063086765193e-05, + "loss": 1.0925, + "step": 1829 + }, + { + "epoch": 0.2518579686209744, + "grad_norm": 1.656822468561523, + "learning_rate": 1.752769736481125e-05, + "loss": 1.1612, + "step": 1830 + }, + { + "epoch": 0.2519955959262318, + "grad_norm": 1.8980298591118834, + "learning_rate": 1.752476236632014e-05, + "loss": 1.0598, + "step": 1831 + }, + { + "epoch": 0.2521332232314891, + "grad_norm": 1.5760309613380692, + "learning_rate": 1.752182587276176e-05, + "loss": 1.1381, + "step": 1832 + }, + { + "epoch": 0.25227085053674647, + "grad_norm": 1.9498514646245573, + "learning_rate": 1.7518887884719537e-05, + "loss": 1.233, + "step": 1833 + }, + { + "epoch": 0.2524084778420039, + "grad_norm": 1.8737963372956097, + "learning_rate": 1.751594840277721e-05, + "loss": 1.1083, + "step": 1834 + }, + { + "epoch": 0.2525461051472612, + "grad_norm": 1.7944138909615754, + "learning_rate": 1.7513007427518825e-05, + "loss": 1.0518, + "step": 1835 + }, + { + "epoch": 0.2526837324525186, + "grad_norm": 1.6536725094690443, + "learning_rate": 
1.75100649595287e-05, + "loss": 1.0873, + "step": 1836 + }, + { + "epoch": 0.2528213597577759, + "grad_norm": 1.5604150150403506, + "learning_rate": 1.7507120999391472e-05, + "loss": 1.0738, + "step": 1837 + }, + { + "epoch": 0.2529589870630333, + "grad_norm": 1.8311572579801987, + "learning_rate": 1.7504175547692062e-05, + "loss": 1.0598, + "step": 1838 + }, + { + "epoch": 0.2530966143682907, + "grad_norm": 1.766369818127065, + "learning_rate": 1.7501228605015693e-05, + "loss": 1.1233, + "step": 1839 + }, + { + "epoch": 0.253234241673548, + "grad_norm": 1.9157399350091002, + "learning_rate": 1.7498280171947878e-05, + "loss": 1.0431, + "step": 1840 + }, + { + "epoch": 0.25337186897880537, + "grad_norm": 1.7488639694119437, + "learning_rate": 1.7495330249074436e-05, + "loss": 1.154, + "step": 1841 + }, + { + "epoch": 0.2535094962840628, + "grad_norm": 1.6645540395612066, + "learning_rate": 1.749237883698147e-05, + "loss": 1.079, + "step": 1842 + }, + { + "epoch": 0.2536471235893201, + "grad_norm": 1.9519412373818978, + "learning_rate": 1.748942593625539e-05, + "loss": 1.0737, + "step": 1843 + }, + { + "epoch": 0.25378475089457747, + "grad_norm": 1.5931762747784026, + "learning_rate": 1.74864715474829e-05, + "loss": 1.0757, + "step": 1844 + }, + { + "epoch": 0.2539223781998348, + "grad_norm": 1.7338443337525924, + "learning_rate": 1.7483515671250985e-05, + "loss": 1.0541, + "step": 1845 + }, + { + "epoch": 0.2540600055050922, + "grad_norm": 1.7749954920533604, + "learning_rate": 1.748055830814695e-05, + "loss": 1.0932, + "step": 1846 + }, + { + "epoch": 0.25419763281034957, + "grad_norm": 1.8638148363545926, + "learning_rate": 1.747759945875837e-05, + "loss": 1.1064, + "step": 1847 + }, + { + "epoch": 0.2543352601156069, + "grad_norm": 1.701750955316755, + "learning_rate": 1.7474639123673143e-05, + "loss": 1.0945, + "step": 1848 + }, + { + "epoch": 0.2544728874208643, + "grad_norm": 1.668473108038409, + "learning_rate": 1.7471677303479434e-05, + "loss": 1.1402, + "step": 1849 + }, + { + "epoch": 0.2546105147261217, + "grad_norm": 2.0111304303497874, + "learning_rate": 1.746871399876572e-05, + "loss": 1.1714, + "step": 1850 + }, + { + "epoch": 0.254748142031379, + "grad_norm": 1.7446170195849178, + "learning_rate": 1.746574921012077e-05, + "loss": 1.1849, + "step": 1851 + }, + { + "epoch": 0.25488576933663637, + "grad_norm": 1.8354686515332905, + "learning_rate": 1.7462782938133654e-05, + "loss": 1.1637, + "step": 1852 + }, + { + "epoch": 0.2550233966418938, + "grad_norm": 1.7283065251896477, + "learning_rate": 1.7459815183393715e-05, + "loss": 1.0342, + "step": 1853 + }, + { + "epoch": 0.2551610239471511, + "grad_norm": 1.7929327603234388, + "learning_rate": 1.7456845946490616e-05, + "loss": 1.1544, + "step": 1854 + }, + { + "epoch": 0.25529865125240847, + "grad_norm": 1.6563598725343802, + "learning_rate": 1.7453875228014297e-05, + "loss": 1.0887, + "step": 1855 + }, + { + "epoch": 0.2554362785576658, + "grad_norm": 1.5431662853314518, + "learning_rate": 1.745090302855501e-05, + "loss": 1.0594, + "step": 1856 + }, + { + "epoch": 0.2555739058629232, + "grad_norm": 1.7604415629468957, + "learning_rate": 1.7447929348703277e-05, + "loss": 1.1918, + "step": 1857 + }, + { + "epoch": 0.25571153316818057, + "grad_norm": 1.8504348177930372, + "learning_rate": 1.7444954189049935e-05, + "loss": 1.2017, + "step": 1858 + }, + { + "epoch": 0.2558491604734379, + "grad_norm": 1.7495303682377772, + "learning_rate": 1.7441977550186108e-05, + "loss": 1.0816, + "step": 1859 + }, + { + "epoch": 
0.25598678777869527, + "grad_norm": 1.7496712194558393, + "learning_rate": 1.7438999432703204e-05, + "loss": 1.1264, + "step": 1860 + }, + { + "epoch": 0.2561244150839527, + "grad_norm": 1.9967389602641927, + "learning_rate": 1.743601983719295e-05, + "loss": 1.1408, + "step": 1861 + }, + { + "epoch": 0.25626204238921, + "grad_norm": 1.946589071615279, + "learning_rate": 1.743303876424734e-05, + "loss": 1.0132, + "step": 1862 + }, + { + "epoch": 0.25639966969446737, + "grad_norm": 1.7169386728851386, + "learning_rate": 1.743005621445867e-05, + "loss": 1.0923, + "step": 1863 + }, + { + "epoch": 0.2565372969997247, + "grad_norm": 1.9418484609179523, + "learning_rate": 1.7427072188419543e-05, + "loss": 1.2262, + "step": 1864 + }, + { + "epoch": 0.2566749243049821, + "grad_norm": 1.8391477174173823, + "learning_rate": 1.742408668672283e-05, + "loss": 1.148, + "step": 1865 + }, + { + "epoch": 0.25681255161023947, + "grad_norm": 1.6740232730302724, + "learning_rate": 1.7421099709961728e-05, + "loss": 1.1178, + "step": 1866 + }, + { + "epoch": 0.2569501789154968, + "grad_norm": 1.7754516552959174, + "learning_rate": 1.741811125872969e-05, + "loss": 1.1375, + "step": 1867 + }, + { + "epoch": 0.2570878062207542, + "grad_norm": 1.689119959599771, + "learning_rate": 1.7415121333620487e-05, + "loss": 1.1193, + "step": 1868 + }, + { + "epoch": 0.25722543352601157, + "grad_norm": 1.7456331122525384, + "learning_rate": 1.7412129935228178e-05, + "loss": 1.1404, + "step": 1869 + }, + { + "epoch": 0.2573630608312689, + "grad_norm": 1.650225735021971, + "learning_rate": 1.7409137064147113e-05, + "loss": 1.0337, + "step": 1870 + }, + { + "epoch": 0.25750068813652627, + "grad_norm": 1.7479327820885169, + "learning_rate": 1.7406142720971935e-05, + "loss": 1.0464, + "step": 1871 + }, + { + "epoch": 0.25763831544178367, + "grad_norm": 1.6226918681074909, + "learning_rate": 1.7403146906297578e-05, + "loss": 1.084, + "step": 1872 + }, + { + "epoch": 0.257775942747041, + "grad_norm": 1.6634160068776511, + "learning_rate": 1.7400149620719263e-05, + "loss": 1.1075, + "step": 1873 + }, + { + "epoch": 0.25791357005229837, + "grad_norm": 1.8103226752623403, + "learning_rate": 1.739715086483252e-05, + "loss": 1.1166, + "step": 1874 + }, + { + "epoch": 0.2580511973575557, + "grad_norm": 1.5218059986267214, + "learning_rate": 1.7394150639233156e-05, + "loss": 1.0973, + "step": 1875 + }, + { + "epoch": 0.2581888246628131, + "grad_norm": 1.8027747164134855, + "learning_rate": 1.7391148944517273e-05, + "loss": 1.0843, + "step": 1876 + }, + { + "epoch": 0.25832645196807047, + "grad_norm": 1.7543031268527447, + "learning_rate": 1.738814578128127e-05, + "loss": 1.1548, + "step": 1877 + }, + { + "epoch": 0.2584640792733278, + "grad_norm": 1.853976847466797, + "learning_rate": 1.738514115012183e-05, + "loss": 1.0953, + "step": 1878 + }, + { + "epoch": 0.25860170657858517, + "grad_norm": 1.8301304204783642, + "learning_rate": 1.7382135051635938e-05, + "loss": 1.1785, + "step": 1879 + }, + { + "epoch": 0.25873933388384257, + "grad_norm": 1.6382201352979313, + "learning_rate": 1.7379127486420862e-05, + "loss": 1.1409, + "step": 1880 + }, + { + "epoch": 0.2588769611890999, + "grad_norm": 1.6000603028954439, + "learning_rate": 1.7376118455074164e-05, + "loss": 1.02, + "step": 1881 + }, + { + "epoch": 0.25901458849435727, + "grad_norm": 1.6173426039588876, + "learning_rate": 1.7373107958193696e-05, + "loss": 1.072, + "step": 1882 + }, + { + "epoch": 0.2591522157996146, + "grad_norm": 1.6268575255357909, + "learning_rate": 
1.7370095996377603e-05, + "loss": 1.1221, + "step": 1883 + }, + { + "epoch": 0.259289843104872, + "grad_norm": 1.9562584502291203, + "learning_rate": 1.736708257022432e-05, + "loss": 1.2108, + "step": 1884 + }, + { + "epoch": 0.25942747041012937, + "grad_norm": 1.7357960430112962, + "learning_rate": 1.7364067680332576e-05, + "loss": 1.1334, + "step": 1885 + }, + { + "epoch": 0.2595650977153867, + "grad_norm": 1.5534080212017654, + "learning_rate": 1.736105132730139e-05, + "loss": 1.0894, + "step": 1886 + }, + { + "epoch": 0.2597027250206441, + "grad_norm": 1.7266812984526405, + "learning_rate": 1.7358033511730062e-05, + "loss": 1.1247, + "step": 1887 + }, + { + "epoch": 0.25984035232590147, + "grad_norm": 2.0424559838437606, + "learning_rate": 1.7355014234218197e-05, + "loss": 1.0143, + "step": 1888 + }, + { + "epoch": 0.2599779796311588, + "grad_norm": 1.6648603786118623, + "learning_rate": 1.7351993495365684e-05, + "loss": 1.153, + "step": 1889 + }, + { + "epoch": 0.26011560693641617, + "grad_norm": 1.7203423342490476, + "learning_rate": 1.7348971295772704e-05, + "loss": 1.0809, + "step": 1890 + }, + { + "epoch": 0.26025323424167357, + "grad_norm": 1.7348369944966082, + "learning_rate": 1.7345947636039718e-05, + "loss": 1.0997, + "step": 1891 + }, + { + "epoch": 0.2603908615469309, + "grad_norm": 1.6912364737994603, + "learning_rate": 1.7342922516767496e-05, + "loss": 1.0849, + "step": 1892 + }, + { + "epoch": 0.26052848885218827, + "grad_norm": 1.5411227531889038, + "learning_rate": 1.7339895938557083e-05, + "loss": 0.9714, + "step": 1893 + }, + { + "epoch": 0.2606661161574456, + "grad_norm": 1.6607115962083139, + "learning_rate": 1.7336867902009817e-05, + "loss": 1.0174, + "step": 1894 + }, + { + "epoch": 0.260803743462703, + "grad_norm": 1.7198247937211633, + "learning_rate": 1.7333838407727333e-05, + "loss": 1.063, + "step": 1895 + }, + { + "epoch": 0.26094137076796037, + "grad_norm": 1.7832959376742208, + "learning_rate": 1.7330807456311546e-05, + "loss": 1.0814, + "step": 1896 + }, + { + "epoch": 0.2610789980732177, + "grad_norm": 1.7676483045730174, + "learning_rate": 1.7327775048364662e-05, + "loss": 1.102, + "step": 1897 + }, + { + "epoch": 0.26121662537847506, + "grad_norm": 1.892594996999069, + "learning_rate": 1.7324741184489186e-05, + "loss": 1.068, + "step": 1898 + }, + { + "epoch": 0.26135425268373247, + "grad_norm": 1.80627369462942, + "learning_rate": 1.73217058652879e-05, + "loss": 1.0426, + "step": 1899 + }, + { + "epoch": 0.2614918799889898, + "grad_norm": 1.7115252664649632, + "learning_rate": 1.7318669091363883e-05, + "loss": 1.0984, + "step": 1900 + }, + { + "epoch": 0.26162950729424717, + "grad_norm": 2.0929938760296367, + "learning_rate": 1.7315630863320496e-05, + "loss": 1.1519, + "step": 1901 + }, + { + "epoch": 0.2617671345995045, + "grad_norm": 1.819354116769841, + "learning_rate": 1.7312591181761398e-05, + "loss": 1.1023, + "step": 1902 + }, + { + "epoch": 0.2619047619047619, + "grad_norm": 1.7070935400563674, + "learning_rate": 1.730955004729053e-05, + "loss": 1.0986, + "step": 1903 + }, + { + "epoch": 0.26204238921001927, + "grad_norm": 1.741643697964033, + "learning_rate": 1.730650746051213e-05, + "loss": 1.0495, + "step": 1904 + }, + { + "epoch": 0.2621800165152766, + "grad_norm": 1.6526578815370194, + "learning_rate": 1.7303463422030707e-05, + "loss": 1.0742, + "step": 1905 + }, + { + "epoch": 0.262317643820534, + "grad_norm": 1.7750252735521697, + "learning_rate": 1.7300417932451075e-05, + "loss": 1.1221, + "step": 1906 + }, + { + "epoch": 
0.26245527112579137, + "grad_norm": 1.8543814840421027, + "learning_rate": 1.7297370992378332e-05, + "loss": 1.0815, + "step": 1907 + }, + { + "epoch": 0.2625928984310487, + "grad_norm": 1.6647377985052554, + "learning_rate": 1.7294322602417867e-05, + "loss": 1.1205, + "step": 1908 + }, + { + "epoch": 0.26273052573630606, + "grad_norm": 1.6049803651717396, + "learning_rate": 1.7291272763175345e-05, + "loss": 1.0985, + "step": 1909 + }, + { + "epoch": 0.26286815304156347, + "grad_norm": 1.5902819523848442, + "learning_rate": 1.7288221475256735e-05, + "loss": 1.0641, + "step": 1910 + }, + { + "epoch": 0.2630057803468208, + "grad_norm": 1.6597202670430593, + "learning_rate": 1.728516873926828e-05, + "loss": 1.0808, + "step": 1911 + }, + { + "epoch": 0.26314340765207817, + "grad_norm": 1.8568308356965246, + "learning_rate": 1.728211455581652e-05, + "loss": 1.0345, + "step": 1912 + }, + { + "epoch": 0.2632810349573355, + "grad_norm": 1.707350611412565, + "learning_rate": 1.7279058925508284e-05, + "loss": 1.0661, + "step": 1913 + }, + { + "epoch": 0.2634186622625929, + "grad_norm": 1.8123911454567971, + "learning_rate": 1.7276001848950672e-05, + "loss": 1.0625, + "step": 1914 + }, + { + "epoch": 0.26355628956785027, + "grad_norm": 1.642921826885694, + "learning_rate": 1.7272943326751094e-05, + "loss": 1.1234, + "step": 1915 + }, + { + "epoch": 0.2636939168731076, + "grad_norm": 1.7727727077282347, + "learning_rate": 1.726988335951723e-05, + "loss": 1.1071, + "step": 1916 + }, + { + "epoch": 0.26383154417836496, + "grad_norm": 1.5965378555642655, + "learning_rate": 1.7266821947857057e-05, + "loss": 1.0341, + "step": 1917 + }, + { + "epoch": 0.26396917148362237, + "grad_norm": 1.600991731819956, + "learning_rate": 1.7263759092378836e-05, + "loss": 1.1008, + "step": 1918 + }, + { + "epoch": 0.2641067987888797, + "grad_norm": 1.7579453216645406, + "learning_rate": 1.7260694793691114e-05, + "loss": 1.1069, + "step": 1919 + }, + { + "epoch": 0.26424442609413706, + "grad_norm": 1.8507168254143103, + "learning_rate": 1.7257629052402725e-05, + "loss": 1.1686, + "step": 1920 + }, + { + "epoch": 0.2643820533993944, + "grad_norm": 1.7954328413029912, + "learning_rate": 1.7254561869122786e-05, + "loss": 1.1135, + "step": 1921 + }, + { + "epoch": 0.2645196807046518, + "grad_norm": 1.7276186632125112, + "learning_rate": 1.7251493244460712e-05, + "loss": 1.099, + "step": 1922 + }, + { + "epoch": 0.26465730800990916, + "grad_norm": 1.8639922658622365, + "learning_rate": 1.724842317902619e-05, + "loss": 1.0965, + "step": 1923 + }, + { + "epoch": 0.2647949353151665, + "grad_norm": 1.7063193990359053, + "learning_rate": 1.7245351673429203e-05, + "loss": 1.1179, + "step": 1924 + }, + { + "epoch": 0.2649325626204239, + "grad_norm": 1.7454398811103677, + "learning_rate": 1.7242278728280015e-05, + "loss": 1.13, + "step": 1925 + }, + { + "epoch": 0.26507018992568127, + "grad_norm": 1.7712126868575098, + "learning_rate": 1.7239204344189182e-05, + "loss": 1.1748, + "step": 1926 + }, + { + "epoch": 0.2652078172309386, + "grad_norm": 1.6891472900224795, + "learning_rate": 1.7236128521767538e-05, + "loss": 1.0694, + "step": 1927 + }, + { + "epoch": 0.26534544453619596, + "grad_norm": 1.8551599125563996, + "learning_rate": 1.7233051261626213e-05, + "loss": 1.1639, + "step": 1928 + }, + { + "epoch": 0.26548307184145337, + "grad_norm": 1.8096725132910423, + "learning_rate": 1.7229972564376605e-05, + "loss": 1.2175, + "step": 1929 + }, + { + "epoch": 0.2656206991467107, + "grad_norm": 1.7783909314566977, + "learning_rate": 
1.7226892430630423e-05, + "loss": 1.103, + "step": 1930 + }, + { + "epoch": 0.26575832645196806, + "grad_norm": 1.6251083925783627, + "learning_rate": 1.7223810860999636e-05, + "loss": 1.0898, + "step": 1931 + }, + { + "epoch": 0.2658959537572254, + "grad_norm": 1.6207558005310294, + "learning_rate": 1.722072785609651e-05, + "loss": 1.13, + "step": 1932 + }, + { + "epoch": 0.2660335810624828, + "grad_norm": 1.7116284866065754, + "learning_rate": 1.7217643416533605e-05, + "loss": 1.0881, + "step": 1933 + }, + { + "epoch": 0.26617120836774016, + "grad_norm": 1.9827725626372417, + "learning_rate": 1.7214557542923757e-05, + "loss": 1.1696, + "step": 1934 + }, + { + "epoch": 0.2663088356729975, + "grad_norm": 2.108617487605124, + "learning_rate": 1.7211470235880076e-05, + "loss": 1.2439, + "step": 1935 + }, + { + "epoch": 0.26644646297825486, + "grad_norm": 1.5124398602229299, + "learning_rate": 1.7208381496015973e-05, + "loss": 1.0986, + "step": 1936 + }, + { + "epoch": 0.26658409028351226, + "grad_norm": 1.795482766766421, + "learning_rate": 1.720529132394514e-05, + "loss": 1.1961, + "step": 1937 + }, + { + "epoch": 0.2667217175887696, + "grad_norm": 1.970789241045633, + "learning_rate": 1.7202199720281548e-05, + "loss": 1.2327, + "step": 1938 + }, + { + "epoch": 0.26685934489402696, + "grad_norm": 1.659935049798508, + "learning_rate": 1.719910668563946e-05, + "loss": 1.0677, + "step": 1939 + }, + { + "epoch": 0.2669969721992843, + "grad_norm": 1.7713271888489008, + "learning_rate": 1.7196012220633423e-05, + "loss": 1.157, + "step": 1940 + }, + { + "epoch": 0.2671345995045417, + "grad_norm": 1.5892225390764534, + "learning_rate": 1.7192916325878254e-05, + "loss": 1.0779, + "step": 1941 + }, + { + "epoch": 0.26727222680979906, + "grad_norm": 1.7702760603479895, + "learning_rate": 1.7189819001989075e-05, + "loss": 1.1171, + "step": 1942 + }, + { + "epoch": 0.2674098541150564, + "grad_norm": 1.6059021618970897, + "learning_rate": 1.7186720249581277e-05, + "loss": 1.113, + "step": 1943 + }, + { + "epoch": 0.2675474814203138, + "grad_norm": 2.005383153966915, + "learning_rate": 1.7183620069270545e-05, + "loss": 1.059, + "step": 1944 + }, + { + "epoch": 0.26768510872557116, + "grad_norm": 1.746512417994671, + "learning_rate": 1.7180518461672836e-05, + "loss": 1.0577, + "step": 1945 + }, + { + "epoch": 0.2678227360308285, + "grad_norm": 1.6044823191779194, + "learning_rate": 1.7177415427404396e-05, + "loss": 1.1623, + "step": 1946 + }, + { + "epoch": 0.26796036333608586, + "grad_norm": 1.7603424617598378, + "learning_rate": 1.717431096708176e-05, + "loss": 0.9971, + "step": 1947 + }, + { + "epoch": 0.26809799064134326, + "grad_norm": 1.5225608378102733, + "learning_rate": 1.7171205081321742e-05, + "loss": 1.039, + "step": 1948 + }, + { + "epoch": 0.2682356179466006, + "grad_norm": 1.8551322689402063, + "learning_rate": 1.716809777074144e-05, + "loss": 1.1384, + "step": 1949 + }, + { + "epoch": 0.26837324525185796, + "grad_norm": 1.7717513227515933, + "learning_rate": 1.7164989035958225e-05, + "loss": 1.0865, + "step": 1950 + }, + { + "epoch": 0.2685108725571153, + "grad_norm": 1.6133262163783593, + "learning_rate": 1.716187887758977e-05, + "loss": 1.1506, + "step": 1951 + }, + { + "epoch": 0.2686484998623727, + "grad_norm": 1.7626524384731777, + "learning_rate": 1.7158767296254017e-05, + "loss": 1.0407, + "step": 1952 + }, + { + "epoch": 0.26878612716763006, + "grad_norm": 1.8101465847649512, + "learning_rate": 1.7155654292569198e-05, + "loss": 1.0953, + "step": 1953 + }, + { + "epoch": 
0.2689237544728874, + "grad_norm": 1.7122761437446572, + "learning_rate": 1.7152539867153817e-05, + "loss": 0.9581, + "step": 1954 + }, + { + "epoch": 0.26906138177814476, + "grad_norm": 1.7473059609186494, + "learning_rate": 1.7149424020626676e-05, + "loss": 1.1008, + "step": 1955 + }, + { + "epoch": 0.26919900908340216, + "grad_norm": 2.047662105510371, + "learning_rate": 1.714630675360684e-05, + "loss": 1.139, + "step": 1956 + }, + { + "epoch": 0.2693366363886595, + "grad_norm": 1.8116354563293793, + "learning_rate": 1.714318806671368e-05, + "loss": 1.1498, + "step": 1957 + }, + { + "epoch": 0.26947426369391686, + "grad_norm": 1.8657353341936453, + "learning_rate": 1.7140067960566833e-05, + "loss": 1.2056, + "step": 1958 + }, + { + "epoch": 0.2696118909991742, + "grad_norm": 1.993858766280044, + "learning_rate": 1.7136946435786214e-05, + "loss": 1.1834, + "step": 1959 + }, + { + "epoch": 0.2697495183044316, + "grad_norm": 1.6516881142660074, + "learning_rate": 1.7133823492992035e-05, + "loss": 0.9887, + "step": 1960 + }, + { + "epoch": 0.26988714560968896, + "grad_norm": 1.6051890108291405, + "learning_rate": 1.713069913280478e-05, + "loss": 1.1352, + "step": 1961 + }, + { + "epoch": 0.2700247729149463, + "grad_norm": 1.811981189197741, + "learning_rate": 1.7127573355845214e-05, + "loss": 1.0849, + "step": 1962 + }, + { + "epoch": 0.2701624002202037, + "grad_norm": 1.9537132824913355, + "learning_rate": 1.7124446162734386e-05, + "loss": 1.1399, + "step": 1963 + }, + { + "epoch": 0.27030002752546106, + "grad_norm": 1.5747066062988588, + "learning_rate": 1.7121317554093634e-05, + "loss": 1.0836, + "step": 1964 + }, + { + "epoch": 0.2704376548307184, + "grad_norm": 1.7193288564484561, + "learning_rate": 1.711818753054456e-05, + "loss": 1.1238, + "step": 1965 + }, + { + "epoch": 0.27057528213597576, + "grad_norm": 1.681442305379545, + "learning_rate": 1.711505609270906e-05, + "loss": 1.0868, + "step": 1966 + }, + { + "epoch": 0.27071290944123316, + "grad_norm": 1.71362229032511, + "learning_rate": 1.7111923241209312e-05, + "loss": 1.1301, + "step": 1967 + }, + { + "epoch": 0.2708505367464905, + "grad_norm": 1.772805656584986, + "learning_rate": 1.710878897666777e-05, + "loss": 1.1952, + "step": 1968 + }, + { + "epoch": 0.27098816405174786, + "grad_norm": 1.780145633181192, + "learning_rate": 1.7105653299707165e-05, + "loss": 1.0662, + "step": 1969 + }, + { + "epoch": 0.2711257913570052, + "grad_norm": 1.8325127966552426, + "learning_rate": 1.7102516210950514e-05, + "loss": 1.1697, + "step": 1970 + }, + { + "epoch": 0.2712634186622626, + "grad_norm": 1.9325591083614, + "learning_rate": 1.709937771102111e-05, + "loss": 1.1291, + "step": 1971 + }, + { + "epoch": 0.27140104596751996, + "grad_norm": 1.798915549228028, + "learning_rate": 1.7096237800542543e-05, + "loss": 1.1337, + "step": 1972 + }, + { + "epoch": 0.2715386732727773, + "grad_norm": 1.7728295432391228, + "learning_rate": 1.7093096480138657e-05, + "loss": 1.1486, + "step": 1973 + }, + { + "epoch": 0.27167630057803466, + "grad_norm": 1.5303237245410317, + "learning_rate": 1.7089953750433595e-05, + "loss": 1.0136, + "step": 1974 + }, + { + "epoch": 0.27181392788329206, + "grad_norm": 2.044857472263054, + "learning_rate": 1.7086809612051776e-05, + "loss": 1.2353, + "step": 1975 + }, + { + "epoch": 0.2719515551885494, + "grad_norm": 1.8576738785882372, + "learning_rate": 1.7083664065617896e-05, + "loss": 1.1416, + "step": 1976 + }, + { + "epoch": 0.27208918249380676, + "grad_norm": 1.5763762699423334, + "learning_rate": 
1.7080517111756927e-05, + "loss": 1.0329, + "step": 1977 + }, + { + "epoch": 0.2722268097990641, + "grad_norm": 1.8272214769110426, + "learning_rate": 1.707736875109413e-05, + "loss": 1.1151, + "step": 1978 + }, + { + "epoch": 0.2723644371043215, + "grad_norm": 1.6683754218281484, + "learning_rate": 1.7074218984255042e-05, + "loss": 1.1596, + "step": 1979 + }, + { + "epoch": 0.27250206440957886, + "grad_norm": 1.8800368708411355, + "learning_rate": 1.7071067811865477e-05, + "loss": 1.1762, + "step": 1980 + }, + { + "epoch": 0.2726396917148362, + "grad_norm": 1.6353189669205435, + "learning_rate": 1.706791523455153e-05, + "loss": 1.0275, + "step": 1981 + }, + { + "epoch": 0.2727773190200936, + "grad_norm": 1.5882897033146293, + "learning_rate": 1.7064761252939576e-05, + "loss": 1.029, + "step": 1982 + }, + { + "epoch": 0.27291494632535096, + "grad_norm": 1.6746140654096426, + "learning_rate": 1.7061605867656265e-05, + "loss": 1.0675, + "step": 1983 + }, + { + "epoch": 0.2730525736306083, + "grad_norm": 2.770223903014143, + "learning_rate": 1.7058449079328535e-05, + "loss": 1.108, + "step": 1984 + }, + { + "epoch": 0.27319020093586566, + "grad_norm": 1.6837115242135507, + "learning_rate": 1.7055290888583588e-05, + "loss": 1.0289, + "step": 1985 + }, + { + "epoch": 0.27332782824112306, + "grad_norm": 1.8272372111576105, + "learning_rate": 1.705213129604892e-05, + "loss": 1.1496, + "step": 1986 + }, + { + "epoch": 0.2734654555463804, + "grad_norm": 1.990414918090637, + "learning_rate": 1.7048970302352293e-05, + "loss": 1.1556, + "step": 1987 + }, + { + "epoch": 0.27360308285163776, + "grad_norm": 1.9616818140560504, + "learning_rate": 1.7045807908121764e-05, + "loss": 1.1601, + "step": 1988 + }, + { + "epoch": 0.2737407101568951, + "grad_norm": 1.8061629320836823, + "learning_rate": 1.7042644113985646e-05, + "loss": 1.0673, + "step": 1989 + }, + { + "epoch": 0.2738783374621525, + "grad_norm": 1.9462847703720496, + "learning_rate": 1.7039478920572546e-05, + "loss": 1.1882, + "step": 1990 + }, + { + "epoch": 0.27401596476740986, + "grad_norm": 1.561489046841805, + "learning_rate": 1.7036312328511345e-05, + "loss": 1.088, + "step": 1991 + }, + { + "epoch": 0.2741535920726672, + "grad_norm": 1.7037076428665592, + "learning_rate": 1.70331443384312e-05, + "loss": 1.099, + "step": 1992 + }, + { + "epoch": 0.27429121937792456, + "grad_norm": 1.700474901653103, + "learning_rate": 1.7029974950961552e-05, + "loss": 1.0643, + "step": 1993 + }, + { + "epoch": 0.27442884668318196, + "grad_norm": 1.7313578169278945, + "learning_rate": 1.702680416673211e-05, + "loss": 1.0204, + "step": 1994 + }, + { + "epoch": 0.2745664739884393, + "grad_norm": 1.8549789928794156, + "learning_rate": 1.702363198637286e-05, + "loss": 1.1249, + "step": 1995 + }, + { + "epoch": 0.27470410129369666, + "grad_norm": 1.7640766559780332, + "learning_rate": 1.7020458410514083e-05, + "loss": 1.0, + "step": 1996 + }, + { + "epoch": 0.274841728598954, + "grad_norm": 1.603608528593956, + "learning_rate": 1.7017283439786317e-05, + "loss": 1.0562, + "step": 1997 + }, + { + "epoch": 0.2749793559042114, + "grad_norm": 2.0089059619933503, + "learning_rate": 1.7014107074820388e-05, + "loss": 1.0738, + "step": 1998 + }, + { + "epoch": 0.27511698320946876, + "grad_norm": 1.7113491987640808, + "learning_rate": 1.7010929316247397e-05, + "loss": 1.1221, + "step": 1999 + }, + { + "epoch": 0.2752546105147261, + "grad_norm": 1.6520336935761395, + "learning_rate": 1.7007750164698723e-05, + "loss": 1.0767, + "step": 2000 + }, + { + "epoch": 
0.2753922378199835, + "grad_norm": 1.7867215322466121, + "learning_rate": 1.7004569620806014e-05, + "loss": 1.1555, + "step": 2001 + }, + { + "epoch": 0.27552986512524086, + "grad_norm": 1.7537277899181478, + "learning_rate": 1.70013876852012e-05, + "loss": 1.1466, + "step": 2002 + }, + { + "epoch": 0.2756674924304982, + "grad_norm": 1.5679141104315972, + "learning_rate": 1.6998204358516497e-05, + "loss": 1.0114, + "step": 2003 + }, + { + "epoch": 0.27580511973575556, + "grad_norm": 1.9277787439763607, + "learning_rate": 1.699501964138438e-05, + "loss": 1.2311, + "step": 2004 + }, + { + "epoch": 0.27594274704101296, + "grad_norm": 1.6790631794101432, + "learning_rate": 1.6991833534437612e-05, + "loss": 1.1673, + "step": 2005 + }, + { + "epoch": 0.2760803743462703, + "grad_norm": 1.76143559420636, + "learning_rate": 1.698864603830923e-05, + "loss": 1.0883, + "step": 2006 + }, + { + "epoch": 0.27621800165152766, + "grad_norm": 2.0215568945373263, + "learning_rate": 1.6985457153632543e-05, + "loss": 1.0502, + "step": 2007 + }, + { + "epoch": 0.276355628956785, + "grad_norm": 1.6028884654841207, + "learning_rate": 1.6982266881041143e-05, + "loss": 1.1212, + "step": 2008 + }, + { + "epoch": 0.2764932562620424, + "grad_norm": 1.7434382995298872, + "learning_rate": 1.6979075221168884e-05, + "loss": 1.1368, + "step": 2009 + }, + { + "epoch": 0.27663088356729976, + "grad_norm": 1.5856556318634336, + "learning_rate": 1.6975882174649915e-05, + "loss": 0.9787, + "step": 2010 + }, + { + "epoch": 0.2767685108725571, + "grad_norm": 1.6314355809519026, + "learning_rate": 1.697268774211865e-05, + "loss": 1.0885, + "step": 2011 + }, + { + "epoch": 0.27690613817781445, + "grad_norm": 1.8896032766926456, + "learning_rate": 1.6969491924209774e-05, + "loss": 1.1325, + "step": 2012 + }, + { + "epoch": 0.27704376548307186, + "grad_norm": 1.7404251647478077, + "learning_rate": 1.6966294721558254e-05, + "loss": 1.1548, + "step": 2013 + }, + { + "epoch": 0.2771813927883292, + "grad_norm": 2.398255128669592, + "learning_rate": 1.6963096134799333e-05, + "loss": 1.2102, + "step": 2014 + }, + { + "epoch": 0.27731902009358655, + "grad_norm": 1.6973110640252802, + "learning_rate": 1.6959896164568523e-05, + "loss": 1.1966, + "step": 2015 + }, + { + "epoch": 0.2774566473988439, + "grad_norm": 1.9945316440579002, + "learning_rate": 1.6956694811501616e-05, + "loss": 1.2151, + "step": 2016 + }, + { + "epoch": 0.2775942747041013, + "grad_norm": 1.523829225233868, + "learning_rate": 1.6953492076234677e-05, + "loss": 0.9693, + "step": 2017 + }, + { + "epoch": 0.27773190200935866, + "grad_norm": 1.6497026518211393, + "learning_rate": 1.6950287959404045e-05, + "loss": 1.1018, + "step": 2018 + }, + { + "epoch": 0.277869529314616, + "grad_norm": 2.1331492739121183, + "learning_rate": 1.6947082461646332e-05, + "loss": 1.1367, + "step": 2019 + }, + { + "epoch": 0.2780071566198734, + "grad_norm": 1.7313574663210336, + "learning_rate": 1.6943875583598434e-05, + "loss": 1.0869, + "step": 2020 + }, + { + "epoch": 0.27814478392513076, + "grad_norm": 1.6446969823582942, + "learning_rate": 1.6940667325897507e-05, + "loss": 1.0628, + "step": 2021 + }, + { + "epoch": 0.2782824112303881, + "grad_norm": 1.771172781755217, + "learning_rate": 1.693745768918099e-05, + "loss": 1.1357, + "step": 2022 + }, + { + "epoch": 0.27842003853564545, + "grad_norm": 1.717084998984212, + "learning_rate": 1.693424667408659e-05, + "loss": 1.1028, + "step": 2023 + }, + { + "epoch": 0.27855766584090286, + "grad_norm": 1.5905010474897845, + "learning_rate": 
1.69310342812523e-05, + "loss": 1.0663, + "step": 2024 + }, + { + "epoch": 0.2786952931461602, + "grad_norm": 1.6039250181365219, + "learning_rate": 1.692782051131637e-05, + "loss": 1.0653, + "step": 2025 + }, + { + "epoch": 0.27883292045141755, + "grad_norm": 1.7738622249834513, + "learning_rate": 1.692460536491734e-05, + "loss": 1.1121, + "step": 2026 + }, + { + "epoch": 0.2789705477566749, + "grad_norm": 1.667844574797634, + "learning_rate": 1.6921388842694008e-05, + "loss": 1.0626, + "step": 2027 + }, + { + "epoch": 0.2791081750619323, + "grad_norm": 1.565639110720571, + "learning_rate": 1.691817094528546e-05, + "loss": 1.0454, + "step": 2028 + }, + { + "epoch": 0.27924580236718965, + "grad_norm": 1.6397196513696386, + "learning_rate": 1.6914951673331043e-05, + "loss": 1.0442, + "step": 2029 + }, + { + "epoch": 0.279383429672447, + "grad_norm": 1.8331260842538097, + "learning_rate": 1.691173102747038e-05, + "loss": 1.0762, + "step": 2030 + }, + { + "epoch": 0.27952105697770435, + "grad_norm": 1.6156043947666379, + "learning_rate": 1.6908509008343377e-05, + "loss": 1.0451, + "step": 2031 + }, + { + "epoch": 0.27965868428296176, + "grad_norm": 1.8113405389011512, + "learning_rate": 1.69052856165902e-05, + "loss": 1.1375, + "step": 2032 + }, + { + "epoch": 0.2797963115882191, + "grad_norm": 1.7015895075657994, + "learning_rate": 1.6902060852851297e-05, + "loss": 1.121, + "step": 2033 + }, + { + "epoch": 0.27993393889347645, + "grad_norm": 1.6314739761852879, + "learning_rate": 1.689883471776738e-05, + "loss": 1.0972, + "step": 2034 + }, + { + "epoch": 0.2800715661987338, + "grad_norm": 1.6141357725194707, + "learning_rate": 1.6895607211979438e-05, + "loss": 1.1153, + "step": 2035 + }, + { + "epoch": 0.2802091935039912, + "grad_norm": 1.8784245596494695, + "learning_rate": 1.6892378336128733e-05, + "loss": 1.0922, + "step": 2036 + }, + { + "epoch": 0.28034682080924855, + "grad_norm": 1.8360065183053507, + "learning_rate": 1.6889148090856798e-05, + "loss": 1.022, + "step": 2037 + }, + { + "epoch": 0.2804844481145059, + "grad_norm": 1.8498739749930087, + "learning_rate": 1.6885916476805445e-05, + "loss": 1.1159, + "step": 2038 + }, + { + "epoch": 0.2806220754197633, + "grad_norm": 1.7397370249812885, + "learning_rate": 1.688268349461674e-05, + "loss": 1.1116, + "step": 2039 + }, + { + "epoch": 0.28075970272502065, + "grad_norm": 1.7478691208907988, + "learning_rate": 1.6879449144933043e-05, + "loss": 0.9518, + "step": 2040 + }, + { + "epoch": 0.280897330030278, + "grad_norm": 2.026744780936143, + "learning_rate": 1.6876213428396967e-05, + "loss": 1.1071, + "step": 2041 + }, + { + "epoch": 0.28103495733553535, + "grad_norm": 1.8082190117072885, + "learning_rate": 1.687297634565141e-05, + "loss": 1.0825, + "step": 2042 + }, + { + "epoch": 0.28117258464079276, + "grad_norm": 1.8343920831483869, + "learning_rate": 1.686973789733953e-05, + "loss": 1.1894, + "step": 2043 + }, + { + "epoch": 0.2813102119460501, + "grad_norm": 1.6593857308532538, + "learning_rate": 1.6866498084104774e-05, + "loss": 1.0536, + "step": 2044 + }, + { + "epoch": 0.28144783925130745, + "grad_norm": 2.3510775768472927, + "learning_rate": 1.6863256906590834e-05, + "loss": 1.13, + "step": 2045 + }, + { + "epoch": 0.2815854665565648, + "grad_norm": 1.908625469192015, + "learning_rate": 1.6860014365441695e-05, + "loss": 1.1122, + "step": 2046 + }, + { + "epoch": 0.2817230938618222, + "grad_norm": 1.7133134367241183, + "learning_rate": 1.685677046130161e-05, + "loss": 1.0172, + "step": 2047 + }, + { + "epoch": 
0.28186072116707955, + "grad_norm": 1.768728430140071, + "learning_rate": 1.6853525194815086e-05, + "loss": 1.1265, + "step": 2048 + }, + { + "epoch": 0.2819983484723369, + "grad_norm": 1.6859237156226738, + "learning_rate": 1.6850278566626925e-05, + "loss": 1.1225, + "step": 2049 + }, + { + "epoch": 0.28213597577759425, + "grad_norm": 1.889295770954119, + "learning_rate": 1.6847030577382182e-05, + "loss": 1.2004, + "step": 2050 + }, + { + "epoch": 0.28227360308285165, + "grad_norm": 1.8893801408118909, + "learning_rate": 1.684378122772619e-05, + "loss": 1.0503, + "step": 2051 + }, + { + "epoch": 0.282411230388109, + "grad_norm": 1.660420819266096, + "learning_rate": 1.6840530518304547e-05, + "loss": 0.9928, + "step": 2052 + }, + { + "epoch": 0.28254885769336635, + "grad_norm": 1.927375167710676, + "learning_rate": 1.6837278449763126e-05, + "loss": 1.0799, + "step": 2053 + }, + { + "epoch": 0.2826864849986237, + "grad_norm": 1.6052413256281568, + "learning_rate": 1.6834025022748073e-05, + "loss": 1.1049, + "step": 2054 + }, + { + "epoch": 0.2828241123038811, + "grad_norm": 1.7558864199973336, + "learning_rate": 1.6830770237905794e-05, + "loss": 1.0905, + "step": 2055 + }, + { + "epoch": 0.28296173960913845, + "grad_norm": 1.9935096667523264, + "learning_rate": 1.6827514095882974e-05, + "loss": 1.1119, + "step": 2056 + }, + { + "epoch": 0.2830993669143958, + "grad_norm": 1.6813325183346206, + "learning_rate": 1.6824256597326557e-05, + "loss": 1.0358, + "step": 2057 + }, + { + "epoch": 0.2832369942196532, + "grad_norm": 1.7103888177110769, + "learning_rate": 1.682099774288377e-05, + "loss": 1.0622, + "step": 2058 + }, + { + "epoch": 0.28337462152491055, + "grad_norm": 1.676036343947673, + "learning_rate": 1.68177375332021e-05, + "loss": 1.0495, + "step": 2059 + }, + { + "epoch": 0.2835122488301679, + "grad_norm": 1.9400509676925761, + "learning_rate": 1.6814475968929308e-05, + "loss": 1.1378, + "step": 2060 + }, + { + "epoch": 0.28364987613542525, + "grad_norm": 1.9817733584747985, + "learning_rate": 1.681121305071342e-05, + "loss": 1.1447, + "step": 2061 + }, + { + "epoch": 0.28378750344068265, + "grad_norm": 1.5481459745731871, + "learning_rate": 1.6807948779202735e-05, + "loss": 1.0325, + "step": 2062 + }, + { + "epoch": 0.28392513074594, + "grad_norm": 1.7678235241275253, + "learning_rate": 1.680468315504582e-05, + "loss": 1.067, + "step": 2063 + }, + { + "epoch": 0.28406275805119735, + "grad_norm": 1.486088699376935, + "learning_rate": 1.6801416178891504e-05, + "loss": 1.165, + "step": 2064 + }, + { + "epoch": 0.2842003853564547, + "grad_norm": 1.9417658293501943, + "learning_rate": 1.6798147851388895e-05, + "loss": 1.1373, + "step": 2065 + }, + { + "epoch": 0.2843380126617121, + "grad_norm": 2.018353271229725, + "learning_rate": 1.679487817318737e-05, + "loss": 1.1376, + "step": 2066 + }, + { + "epoch": 0.28447563996696945, + "grad_norm": 1.5940569546743422, + "learning_rate": 1.6791607144936556e-05, + "loss": 1.1078, + "step": 2067 + }, + { + "epoch": 0.2846132672722268, + "grad_norm": 1.726469643801644, + "learning_rate": 1.678833476728637e-05, + "loss": 1.1575, + "step": 2068 + }, + { + "epoch": 0.28475089457748415, + "grad_norm": 1.7467337120359443, + "learning_rate": 1.6785061040886995e-05, + "loss": 1.0492, + "step": 2069 + }, + { + "epoch": 0.28488852188274155, + "grad_norm": 1.6427540995852143, + "learning_rate": 1.6781785966388863e-05, + "loss": 1.1052, + "step": 2070 + }, + { + "epoch": 0.2850261491879989, + "grad_norm": 1.712182790593229, + "learning_rate": 
1.6778509544442695e-05, + "loss": 1.0611, + "step": 2071 + }, + { + "epoch": 0.28516377649325625, + "grad_norm": 1.7508265125928628, + "learning_rate": 1.6775231775699464e-05, + "loss": 1.1772, + "step": 2072 + }, + { + "epoch": 0.2853014037985136, + "grad_norm": 1.5271174131666998, + "learning_rate": 1.6771952660810426e-05, + "loss": 1.0645, + "step": 2073 + }, + { + "epoch": 0.285439031103771, + "grad_norm": 1.6501540475049745, + "learning_rate": 1.6768672200427092e-05, + "loss": 1.0302, + "step": 2074 + }, + { + "epoch": 0.28557665840902835, + "grad_norm": 1.7511606011008105, + "learning_rate": 1.6765390395201243e-05, + "loss": 1.1335, + "step": 2075 + }, + { + "epoch": 0.2857142857142857, + "grad_norm": 1.76017050570742, + "learning_rate": 1.6762107245784932e-05, + "loss": 1.148, + "step": 2076 + }, + { + "epoch": 0.2858519130195431, + "grad_norm": 1.7510642322553143, + "learning_rate": 1.6758822752830473e-05, + "loss": 1.1483, + "step": 2077 + }, + { + "epoch": 0.28598954032480045, + "grad_norm": 1.783043410788411, + "learning_rate": 1.6755536916990453e-05, + "loss": 1.0983, + "step": 2078 + }, + { + "epoch": 0.2861271676300578, + "grad_norm": 1.7960061030180332, + "learning_rate": 1.675224973891772e-05, + "loss": 1.1555, + "step": 2079 + }, + { + "epoch": 0.28626479493531515, + "grad_norm": 1.6003936852187608, + "learning_rate": 1.6748961219265387e-05, + "loss": 1.1036, + "step": 2080 + }, + { + "epoch": 0.28640242224057255, + "grad_norm": 1.605752343915571, + "learning_rate": 1.674567135868685e-05, + "loss": 1.1056, + "step": 2081 + }, + { + "epoch": 0.2865400495458299, + "grad_norm": 1.7049018525831086, + "learning_rate": 1.674238015783575e-05, + "loss": 1.0842, + "step": 2082 + }, + { + "epoch": 0.28667767685108725, + "grad_norm": 1.7292928903693676, + "learning_rate": 1.6739087617366006e-05, + "loss": 1.0618, + "step": 2083 + }, + { + "epoch": 0.2868153041563446, + "grad_norm": 1.9123822792078355, + "learning_rate": 1.6735793737931798e-05, + "loss": 1.1823, + "step": 2084 + }, + { + "epoch": 0.286952931461602, + "grad_norm": 1.8897127886437164, + "learning_rate": 1.6732498520187582e-05, + "loss": 1.0423, + "step": 2085 + }, + { + "epoch": 0.28709055876685935, + "grad_norm": 1.6758081769982054, + "learning_rate": 1.6729201964788066e-05, + "loss": 1.0977, + "step": 2086 + }, + { + "epoch": 0.2872281860721167, + "grad_norm": 1.6206252119432412, + "learning_rate": 1.6725904072388228e-05, + "loss": 1.0741, + "step": 2087 + }, + { + "epoch": 0.28736581337737405, + "grad_norm": 1.8853614573090982, + "learning_rate": 1.6722604843643324e-05, + "loss": 1.1017, + "step": 2088 + }, + { + "epoch": 0.28750344068263145, + "grad_norm": 1.5972405785196395, + "learning_rate": 1.6719304279208858e-05, + "loss": 1.0596, + "step": 2089 + }, + { + "epoch": 0.2876410679878888, + "grad_norm": 1.657682105825771, + "learning_rate": 1.6716002379740607e-05, + "loss": 1.1142, + "step": 2090 + }, + { + "epoch": 0.28777869529314615, + "grad_norm": 1.7818383910131304, + "learning_rate": 1.671269914589462e-05, + "loss": 1.1027, + "step": 2091 + }, + { + "epoch": 0.2879163225984035, + "grad_norm": 1.7278744896149576, + "learning_rate": 1.6709394578327195e-05, + "loss": 1.0935, + "step": 2092 + }, + { + "epoch": 0.2880539499036609, + "grad_norm": 2.1172185681020412, + "learning_rate": 1.6706088677694913e-05, + "loss": 1.2641, + "step": 2093 + }, + { + "epoch": 0.28819157720891825, + "grad_norm": 1.6578770955831497, + "learning_rate": 1.6702781444654604e-05, + "loss": 1.1019, + "step": 2094 + }, + { + "epoch": 
0.2883292045141756, + "grad_norm": 1.678905003613266, + "learning_rate": 1.6699472879863373e-05, + "loss": 1.1776, + "step": 2095 + }, + { + "epoch": 0.288466831819433, + "grad_norm": 1.7249513838734574, + "learning_rate": 1.6696162983978585e-05, + "loss": 1.099, + "step": 2096 + }, + { + "epoch": 0.28860445912469035, + "grad_norm": 1.7157457790469366, + "learning_rate": 1.6692851757657876e-05, + "loss": 1.0304, + "step": 2097 + }, + { + "epoch": 0.2887420864299477, + "grad_norm": 1.751356486322032, + "learning_rate": 1.6689539201559137e-05, + "loss": 1.0388, + "step": 2098 + }, + { + "epoch": 0.28887971373520505, + "grad_norm": 1.6372315101728687, + "learning_rate": 1.668622531634053e-05, + "loss": 1.1452, + "step": 2099 + }, + { + "epoch": 0.28901734104046245, + "grad_norm": 2.1969869335082857, + "learning_rate": 1.668291010266047e-05, + "loss": 1.0927, + "step": 2100 + }, + { + "epoch": 0.2891549683457198, + "grad_norm": 1.5655375643090481, + "learning_rate": 1.6679593561177664e-05, + "loss": 1.0737, + "step": 2101 + }, + { + "epoch": 0.28929259565097715, + "grad_norm": 1.6806373416439504, + "learning_rate": 1.667627569255104e-05, + "loss": 1.131, + "step": 2102 + }, + { + "epoch": 0.2894302229562345, + "grad_norm": 1.693805495353178, + "learning_rate": 1.6672956497439828e-05, + "loss": 1.1175, + "step": 2103 + }, + { + "epoch": 0.2895678502614919, + "grad_norm": 1.7734786165575591, + "learning_rate": 1.6669635976503505e-05, + "loss": 1.0966, + "step": 2104 + }, + { + "epoch": 0.28970547756674925, + "grad_norm": 1.7306698069346045, + "learning_rate": 1.6666314130401808e-05, + "loss": 1.0929, + "step": 2105 + }, + { + "epoch": 0.2898431048720066, + "grad_norm": 1.7871381038841916, + "learning_rate": 1.6662990959794746e-05, + "loss": 1.0415, + "step": 2106 + }, + { + "epoch": 0.28998073217726394, + "grad_norm": 1.6974647137783825, + "learning_rate": 1.6659666465342587e-05, + "loss": 1.0801, + "step": 2107 + }, + { + "epoch": 0.29011835948252135, + "grad_norm": 1.6806131179176573, + "learning_rate": 1.6656340647705862e-05, + "loss": 1.0692, + "step": 2108 + }, + { + "epoch": 0.2902559867877787, + "grad_norm": 1.7812812653559127, + "learning_rate": 1.6653013507545364e-05, + "loss": 1.1454, + "step": 2109 + }, + { + "epoch": 0.29039361409303605, + "grad_norm": 1.7511479413227669, + "learning_rate": 1.6649685045522156e-05, + "loss": 1.04, + "step": 2110 + }, + { + "epoch": 0.2905312413982934, + "grad_norm": 1.7277497700745206, + "learning_rate": 1.664635526229755e-05, + "loss": 1.1445, + "step": 2111 + }, + { + "epoch": 0.2906688687035508, + "grad_norm": 1.7180912328927473, + "learning_rate": 1.6643024158533135e-05, + "loss": 1.0931, + "step": 2112 + }, + { + "epoch": 0.29080649600880815, + "grad_norm": 1.5963932616255387, + "learning_rate": 1.663969173489075e-05, + "loss": 1.0567, + "step": 2113 + }, + { + "epoch": 0.2909441233140655, + "grad_norm": 1.56385242089709, + "learning_rate": 1.6636357992032507e-05, + "loss": 1.1286, + "step": 2114 + }, + { + "epoch": 0.2910817506193229, + "grad_norm": 1.793848555515424, + "learning_rate": 1.663302293062077e-05, + "loss": 1.0721, + "step": 2115 + }, + { + "epoch": 0.29121937792458025, + "grad_norm": 1.79159613010735, + "learning_rate": 1.6629686551318173e-05, + "loss": 1.0812, + "step": 2116 + }, + { + "epoch": 0.2913570052298376, + "grad_norm": 1.8977632754286275, + "learning_rate": 1.6626348854787614e-05, + "loss": 1.1531, + "step": 2117 + }, + { + "epoch": 0.29149463253509494, + "grad_norm": 1.6853100707599935, + "learning_rate": 
1.6623009841692236e-05, + "loss": 1.1202, + "step": 2118 + }, + { + "epoch": 0.29163225984035235, + "grad_norm": 1.8166463334233767, + "learning_rate": 1.6619669512695465e-05, + "loss": 1.1091, + "step": 2119 + }, + { + "epoch": 0.2917698871456097, + "grad_norm": 1.7284986176296433, + "learning_rate": 1.6616327868460973e-05, + "loss": 1.1024, + "step": 2120 + }, + { + "epoch": 0.29190751445086704, + "grad_norm": 1.9886611448999614, + "learning_rate": 1.6612984909652702e-05, + "loss": 1.1393, + "step": 2121 + }, + { + "epoch": 0.2920451417561244, + "grad_norm": 1.6460826258669519, + "learning_rate": 1.6609640636934855e-05, + "loss": 1.0939, + "step": 2122 + }, + { + "epoch": 0.2921827690613818, + "grad_norm": 1.8468487323550347, + "learning_rate": 1.6606295050971885e-05, + "loss": 1.1459, + "step": 2123 + }, + { + "epoch": 0.29232039636663915, + "grad_norm": 1.8380348752082594, + "learning_rate": 1.660294815242852e-05, + "loss": 1.175, + "step": 2124 + }, + { + "epoch": 0.2924580236718965, + "grad_norm": 1.733619427261147, + "learning_rate": 1.6599599941969745e-05, + "loss": 1.1573, + "step": 2125 + }, + { + "epoch": 0.29259565097715384, + "grad_norm": 1.7694586840593325, + "learning_rate": 1.6596250420260796e-05, + "loss": 1.0829, + "step": 2126 + }, + { + "epoch": 0.29273327828241125, + "grad_norm": 1.6709911061553078, + "learning_rate": 1.659289958796718e-05, + "loss": 1.0377, + "step": 2127 + }, + { + "epoch": 0.2928709055876686, + "grad_norm": 1.6445240434433102, + "learning_rate": 1.6589547445754666e-05, + "loss": 1.0932, + "step": 2128 + }, + { + "epoch": 0.29300853289292594, + "grad_norm": 1.531996212617955, + "learning_rate": 1.6586193994289278e-05, + "loss": 1.0377, + "step": 2129 + }, + { + "epoch": 0.2931461601981833, + "grad_norm": 1.6920612129847745, + "learning_rate": 1.6582839234237292e-05, + "loss": 1.1508, + "step": 2130 + }, + { + "epoch": 0.2932837875034407, + "grad_norm": 1.7545497398416776, + "learning_rate": 1.6579483166265262e-05, + "loss": 1.1011, + "step": 2131 + }, + { + "epoch": 0.29342141480869804, + "grad_norm": 1.6543622278050902, + "learning_rate": 1.657612579103999e-05, + "loss": 1.0769, + "step": 2132 + }, + { + "epoch": 0.2935590421139554, + "grad_norm": 1.8200202780595547, + "learning_rate": 1.6572767109228546e-05, + "loss": 1.1694, + "step": 2133 + }, + { + "epoch": 0.2936966694192128, + "grad_norm": 2.006285325817001, + "learning_rate": 1.6569407121498243e-05, + "loss": 1.2456, + "step": 2134 + }, + { + "epoch": 0.29383429672447015, + "grad_norm": 1.7388708192443536, + "learning_rate": 1.656604582851667e-05, + "loss": 1.1126, + "step": 2135 + }, + { + "epoch": 0.2939719240297275, + "grad_norm": 1.9920297585358244, + "learning_rate": 1.6562683230951675e-05, + "loss": 1.1244, + "step": 2136 + }, + { + "epoch": 0.29410955133498484, + "grad_norm": 1.6311193082702358, + "learning_rate": 1.6559319329471352e-05, + "loss": 1.1316, + "step": 2137 + }, + { + "epoch": 0.29424717864024225, + "grad_norm": 1.8577968575926889, + "learning_rate": 1.6555954124744066e-05, + "loss": 1.1373, + "step": 2138 + }, + { + "epoch": 0.2943848059454996, + "grad_norm": 1.6870503392496519, + "learning_rate": 1.6552587617438435e-05, + "loss": 1.0251, + "step": 2139 + }, + { + "epoch": 0.29452243325075694, + "grad_norm": 1.715884648282807, + "learning_rate": 1.654921980822334e-05, + "loss": 1.0618, + "step": 2140 + }, + { + "epoch": 0.2946600605560143, + "grad_norm": 1.8122712789499047, + "learning_rate": 1.654585069776792e-05, + "loss": 1.1109, + "step": 2141 + }, + { + "epoch": 
0.2947976878612717, + "grad_norm": 1.7953016507353854, + "learning_rate": 1.654248028674157e-05, + "loss": 1.1484, + "step": 2142 + }, + { + "epoch": 0.29493531516652904, + "grad_norm": 1.5006542931792108, + "learning_rate": 1.6539108575813944e-05, + "loss": 1.0793, + "step": 2143 + }, + { + "epoch": 0.2950729424717864, + "grad_norm": 1.7215379876342598, + "learning_rate": 1.653573556565495e-05, + "loss": 1.049, + "step": 2144 + }, + { + "epoch": 0.29521056977704374, + "grad_norm": 1.7149317964778832, + "learning_rate": 1.6532361256934767e-05, + "loss": 1.0026, + "step": 2145 + }, + { + "epoch": 0.29534819708230114, + "grad_norm": 1.9238708956149693, + "learning_rate": 1.652898565032382e-05, + "loss": 1.1993, + "step": 2146 + }, + { + "epoch": 0.2954858243875585, + "grad_norm": 1.7780401240763801, + "learning_rate": 1.6525608746492794e-05, + "loss": 1.1176, + "step": 2147 + }, + { + "epoch": 0.29562345169281584, + "grad_norm": 1.8108252207537217, + "learning_rate": 1.6522230546112642e-05, + "loss": 1.0875, + "step": 2148 + }, + { + "epoch": 0.2957610789980732, + "grad_norm": 1.6437813213185566, + "learning_rate": 1.6518851049854555e-05, + "loss": 1.1163, + "step": 2149 + }, + { + "epoch": 0.2958987063033306, + "grad_norm": 1.8870554238769743, + "learning_rate": 1.6515470258390002e-05, + "loss": 1.0098, + "step": 2150 + }, + { + "epoch": 0.29603633360858794, + "grad_norm": 1.6117151758740642, + "learning_rate": 1.651208817239069e-05, + "loss": 1.1571, + "step": 2151 + }, + { + "epoch": 0.2961739609138453, + "grad_norm": 2.095468072153884, + "learning_rate": 1.6508704792528604e-05, + "loss": 1.0987, + "step": 2152 + }, + { + "epoch": 0.2963115882191027, + "grad_norm": 1.9865657748429184, + "learning_rate": 1.6505320119475966e-05, + "loss": 1.172, + "step": 2153 + }, + { + "epoch": 0.29644921552436004, + "grad_norm": 1.764098261595099, + "learning_rate": 1.650193415390527e-05, + "loss": 1.0615, + "step": 2154 + }, + { + "epoch": 0.2965868428296174, + "grad_norm": 1.6973956180479857, + "learning_rate": 1.649854689648926e-05, + "loss": 1.2213, + "step": 2155 + }, + { + "epoch": 0.29672447013487474, + "grad_norm": 1.8127625711383928, + "learning_rate": 1.6495158347900937e-05, + "loss": 1.1535, + "step": 2156 + }, + { + "epoch": 0.29686209744013214, + "grad_norm": 1.7039888646601242, + "learning_rate": 1.6491768508813558e-05, + "loss": 1.0719, + "step": 2157 + }, + { + "epoch": 0.2969997247453895, + "grad_norm": 1.6593005465021127, + "learning_rate": 1.6488377379900643e-05, + "loss": 1.1093, + "step": 2158 + }, + { + "epoch": 0.29713735205064684, + "grad_norm": 1.7216106082146327, + "learning_rate": 1.6484984961835956e-05, + "loss": 1.0818, + "step": 2159 + }, + { + "epoch": 0.2972749793559042, + "grad_norm": 1.632503223181387, + "learning_rate": 1.6481591255293525e-05, + "loss": 1.12, + "step": 2160 + }, + { + "epoch": 0.2974126066611616, + "grad_norm": 1.884893814508176, + "learning_rate": 1.647819626094764e-05, + "loss": 1.1713, + "step": 2161 + }, + { + "epoch": 0.29755023396641894, + "grad_norm": 1.8644278591125731, + "learning_rate": 1.6474799979472828e-05, + "loss": 1.1693, + "step": 2162 + }, + { + "epoch": 0.2976878612716763, + "grad_norm": 1.6593990825486917, + "learning_rate": 1.647140241154389e-05, + "loss": 1.0791, + "step": 2163 + }, + { + "epoch": 0.29782548857693364, + "grad_norm": 1.8568199320582006, + "learning_rate": 1.6468003557835886e-05, + "loss": 1.114, + "step": 2164 + }, + { + "epoch": 0.29796311588219104, + "grad_norm": 1.6454675420236906, + "learning_rate": 
1.646460341902411e-05, + "loss": 1.1515, + "step": 2165 + }, + { + "epoch": 0.2981007431874484, + "grad_norm": 1.5830823243476428, + "learning_rate": 1.646120199578412e-05, + "loss": 1.1014, + "step": 2166 + }, + { + "epoch": 0.29823837049270574, + "grad_norm": 1.9572069521366735, + "learning_rate": 1.645779928879174e-05, + "loss": 1.1042, + "step": 2167 + }, + { + "epoch": 0.2983759977979631, + "grad_norm": 1.7596426280324715, + "learning_rate": 1.645439529872304e-05, + "loss": 1.1576, + "step": 2168 + }, + { + "epoch": 0.2985136251032205, + "grad_norm": 1.7289258948218793, + "learning_rate": 1.6450990026254343e-05, + "loss": 1.0765, + "step": 2169 + }, + { + "epoch": 0.29865125240847784, + "grad_norm": 1.7355700279786292, + "learning_rate": 1.6447583472062234e-05, + "loss": 1.1058, + "step": 2170 + }, + { + "epoch": 0.2987888797137352, + "grad_norm": 1.716961753595389, + "learning_rate": 1.6444175636823547e-05, + "loss": 1.0671, + "step": 2171 + }, + { + "epoch": 0.2989265070189926, + "grad_norm": 1.7436806458474103, + "learning_rate": 1.644076652121537e-05, + "loss": 1.0839, + "step": 2172 + }, + { + "epoch": 0.29906413432424994, + "grad_norm": 1.5676958407958548, + "learning_rate": 1.6437356125915052e-05, + "loss": 1.0675, + "step": 2173 + }, + { + "epoch": 0.2992017616295073, + "grad_norm": 1.6711782723401658, + "learning_rate": 1.643394445160019e-05, + "loss": 1.1298, + "step": 2174 + }, + { + "epoch": 0.29933938893476464, + "grad_norm": 1.6439680415014082, + "learning_rate": 1.6430531498948633e-05, + "loss": 1.0751, + "step": 2175 + }, + { + "epoch": 0.29947701624002204, + "grad_norm": 1.6635855031244835, + "learning_rate": 1.6427117268638495e-05, + "loss": 1.095, + "step": 2176 + }, + { + "epoch": 0.2996146435452794, + "grad_norm": 2.042829635238149, + "learning_rate": 1.642370176134813e-05, + "loss": 1.081, + "step": 2177 + }, + { + "epoch": 0.29975227085053674, + "grad_norm": 1.745096513996818, + "learning_rate": 1.6420284977756158e-05, + "loss": 0.9773, + "step": 2178 + }, + { + "epoch": 0.2998898981557941, + "grad_norm": 1.8572861864472607, + "learning_rate": 1.6416866918541446e-05, + "loss": 1.0224, + "step": 2179 + }, + { + "epoch": 0.3000275254610515, + "grad_norm": 1.711667156535208, + "learning_rate": 1.6413447584383114e-05, + "loss": 1.0908, + "step": 2180 + }, + { + "epoch": 0.30016515276630884, + "grad_norm": 1.6025759139010358, + "learning_rate": 1.641002697596054e-05, + "loss": 1.0985, + "step": 2181 + }, + { + "epoch": 0.3003027800715662, + "grad_norm": 1.7769781874005735, + "learning_rate": 1.6406605093953345e-05, + "loss": 1.1335, + "step": 2182 + }, + { + "epoch": 0.30044040737682354, + "grad_norm": 1.6739004727427294, + "learning_rate": 1.6403181939041417e-05, + "loss": 1.0949, + "step": 2183 + }, + { + "epoch": 0.30057803468208094, + "grad_norm": 1.8531095080188655, + "learning_rate": 1.6399757511904888e-05, + "loss": 1.1202, + "step": 2184 + }, + { + "epoch": 0.3007156619873383, + "grad_norm": 1.746308885201986, + "learning_rate": 1.639633181322414e-05, + "loss": 1.0776, + "step": 2185 + }, + { + "epoch": 0.30085328929259564, + "grad_norm": 1.698615720350092, + "learning_rate": 1.6392904843679823e-05, + "loss": 1.1175, + "step": 2186 + }, + { + "epoch": 0.300990916597853, + "grad_norm": 1.8932228350098081, + "learning_rate": 1.6389476603952822e-05, + "loss": 1.0837, + "step": 2187 + }, + { + "epoch": 0.3011285439031104, + "grad_norm": 1.8700853274993954, + "learning_rate": 1.638604709472428e-05, + "loss": 1.141, + "step": 2188 + }, + { + "epoch": 
0.30126617120836774, + "grad_norm": 1.6316886887307018, + "learning_rate": 1.63826163166756e-05, + "loss": 1.0856, + "step": 2189 + }, + { + "epoch": 0.3014037985136251, + "grad_norm": 1.7691010349200074, + "learning_rate": 1.637918427048842e-05, + "loss": 1.1018, + "step": 2190 + }, + { + "epoch": 0.3015414258188825, + "grad_norm": 1.5749834042975361, + "learning_rate": 1.637575095684465e-05, + "loss": 1.165, + "step": 2191 + }, + { + "epoch": 0.30167905312413984, + "grad_norm": 1.7723958149247103, + "learning_rate": 1.637231637642644e-05, + "loss": 1.0807, + "step": 2192 + }, + { + "epoch": 0.3018166804293972, + "grad_norm": 1.670026288605514, + "learning_rate": 1.6368880529916192e-05, + "loss": 1.1037, + "step": 2193 + }, + { + "epoch": 0.30195430773465454, + "grad_norm": 1.6994838348732557, + "learning_rate": 1.636544341799656e-05, + "loss": 1.0256, + "step": 2194 + }, + { + "epoch": 0.30209193503991194, + "grad_norm": 1.9471310342303108, + "learning_rate": 1.6362005041350462e-05, + "loss": 1.1392, + "step": 2195 + }, + { + "epoch": 0.3022295623451693, + "grad_norm": 1.7249503502425678, + "learning_rate": 1.6358565400661045e-05, + "loss": 1.0925, + "step": 2196 + }, + { + "epoch": 0.30236718965042664, + "grad_norm": 1.6631911375870054, + "learning_rate": 1.635512449661172e-05, + "loss": 1.0531, + "step": 2197 + }, + { + "epoch": 0.302504816955684, + "grad_norm": 1.74940569141512, + "learning_rate": 1.6351682329886155e-05, + "loss": 1.0969, + "step": 2198 + }, + { + "epoch": 0.3026424442609414, + "grad_norm": 1.61721086385767, + "learning_rate": 1.634823890116825e-05, + "loss": 1.07, + "step": 2199 + }, + { + "epoch": 0.30278007156619874, + "grad_norm": 1.694969940640828, + "learning_rate": 1.6344794211142176e-05, + "loss": 1.0358, + "step": 2200 + }, + { + "epoch": 0.3029176988714561, + "grad_norm": 1.800868565113983, + "learning_rate": 1.6341348260492345e-05, + "loss": 1.1247, + "step": 2201 + }, + { + "epoch": 0.30305532617671344, + "grad_norm": 1.6831847817808754, + "learning_rate": 1.6337901049903418e-05, + "loss": 1.0509, + "step": 2202 + }, + { + "epoch": 0.30319295348197084, + "grad_norm": 1.7824390073896086, + "learning_rate": 1.6334452580060305e-05, + "loss": 1.1158, + "step": 2203 + }, + { + "epoch": 0.3033305807872282, + "grad_norm": 1.7308634254944903, + "learning_rate": 1.6331002851648178e-05, + "loss": 1.1346, + "step": 2204 + }, + { + "epoch": 0.30346820809248554, + "grad_norm": 1.6582720993973892, + "learning_rate": 1.6327551865352442e-05, + "loss": 1.0143, + "step": 2205 + }, + { + "epoch": 0.3036058353977429, + "grad_norm": 1.8849399992819307, + "learning_rate": 1.6324099621858768e-05, + "loss": 1.1521, + "step": 2206 + }, + { + "epoch": 0.3037434627030003, + "grad_norm": 1.6056172104066964, + "learning_rate": 1.6320646121853065e-05, + "loss": 1.1414, + "step": 2207 + }, + { + "epoch": 0.30388109000825764, + "grad_norm": 1.7702731689466813, + "learning_rate": 1.6317191366021502e-05, + "loss": 1.1071, + "step": 2208 + }, + { + "epoch": 0.304018717313515, + "grad_norm": 1.9420385981894535, + "learning_rate": 1.631373535505048e-05, + "loss": 1.1815, + "step": 2209 + }, + { + "epoch": 0.3041563446187724, + "grad_norm": 1.785972796997425, + "learning_rate": 1.6310278089626672e-05, + "loss": 1.0727, + "step": 2210 + }, + { + "epoch": 0.30429397192402974, + "grad_norm": 1.8586894968951926, + "learning_rate": 1.6306819570436988e-05, + "loss": 1.1304, + "step": 2211 + }, + { + "epoch": 0.3044315992292871, + "grad_norm": 1.661374049824449, + "learning_rate": 
1.6303359798168584e-05, + "loss": 1.1286, + "step": 2212 + }, + { + "epoch": 0.30456922653454443, + "grad_norm": 1.5390374764803356, + "learning_rate": 1.6299898773508875e-05, + "loss": 1.0984, + "step": 2213 + }, + { + "epoch": 0.30470685383980184, + "grad_norm": 1.8998446491509091, + "learning_rate": 1.6296436497145514e-05, + "loss": 1.1048, + "step": 2214 + }, + { + "epoch": 0.3048444811450592, + "grad_norm": 1.717422029097654, + "learning_rate": 1.629297296976641e-05, + "loss": 1.0921, + "step": 2215 + }, + { + "epoch": 0.30498210845031654, + "grad_norm": 1.9147297403392152, + "learning_rate": 1.628950819205972e-05, + "loss": 1.0979, + "step": 2216 + }, + { + "epoch": 0.3051197357555739, + "grad_norm": 2.1365262779059946, + "learning_rate": 1.628604216471384e-05, + "loss": 1.0939, + "step": 2217 + }, + { + "epoch": 0.3052573630608313, + "grad_norm": 1.6266334310651613, + "learning_rate": 1.6282574888417433e-05, + "loss": 1.0722, + "step": 2218 + }, + { + "epoch": 0.30539499036608864, + "grad_norm": 1.8071929402245814, + "learning_rate": 1.6279106363859395e-05, + "loss": 1.1031, + "step": 2219 + }, + { + "epoch": 0.305532617671346, + "grad_norm": 1.6130987316196415, + "learning_rate": 1.627563659172887e-05, + "loss": 1.0878, + "step": 2220 + }, + { + "epoch": 0.30567024497660333, + "grad_norm": 1.84737530188381, + "learning_rate": 1.6272165572715263e-05, + "loss": 1.0674, + "step": 2221 + }, + { + "epoch": 0.30580787228186074, + "grad_norm": 1.7736845027614323, + "learning_rate": 1.6268693307508213e-05, + "loss": 1.1688, + "step": 2222 + }, + { + "epoch": 0.3059454995871181, + "grad_norm": 1.803235048461757, + "learning_rate": 1.626521979679761e-05, + "loss": 1.1824, + "step": 2223 + }, + { + "epoch": 0.30608312689237543, + "grad_norm": 1.5839709871366263, + "learning_rate": 1.6261745041273595e-05, + "loss": 1.1278, + "step": 2224 + }, + { + "epoch": 0.3062207541976328, + "grad_norm": 1.7732457074217556, + "learning_rate": 1.6258269041626555e-05, + "loss": 1.1044, + "step": 2225 + }, + { + "epoch": 0.3063583815028902, + "grad_norm": 1.7835034933126916, + "learning_rate": 1.6254791798547122e-05, + "loss": 1.0824, + "step": 2226 + }, + { + "epoch": 0.30649600880814754, + "grad_norm": 1.5554097238362798, + "learning_rate": 1.6251313312726177e-05, + "loss": 1.1209, + "step": 2227 + }, + { + "epoch": 0.3066336361134049, + "grad_norm": 1.6990032978058978, + "learning_rate": 1.624783358485485e-05, + "loss": 1.1452, + "step": 2228 + }, + { + "epoch": 0.3067712634186623, + "grad_norm": 1.738735025735243, + "learning_rate": 1.6244352615624514e-05, + "loss": 1.1642, + "step": 2229 + }, + { + "epoch": 0.30690889072391964, + "grad_norm": 1.7024162924238175, + "learning_rate": 1.6240870405726786e-05, + "loss": 1.059, + "step": 2230 + }, + { + "epoch": 0.307046518029177, + "grad_norm": 1.8289147813991757, + "learning_rate": 1.6237386955853536e-05, + "loss": 1.0577, + "step": 2231 + }, + { + "epoch": 0.30718414533443433, + "grad_norm": 1.9145491609028986, + "learning_rate": 1.623390226669688e-05, + "loss": 1.0868, + "step": 2232 + }, + { + "epoch": 0.30732177263969174, + "grad_norm": 1.6728680762237125, + "learning_rate": 1.623041633894918e-05, + "loss": 1.0151, + "step": 2233 + }, + { + "epoch": 0.3074593999449491, + "grad_norm": 1.8793449209981985, + "learning_rate": 1.6226929173303034e-05, + "loss": 1.064, + "step": 2234 + }, + { + "epoch": 0.30759702725020643, + "grad_norm": 1.735271770532169, + "learning_rate": 1.62234407704513e-05, + "loss": 1.0983, + "step": 2235 + }, + { + "epoch": 
0.3077346545554638, + "grad_norm": 1.8815878041058784, + "learning_rate": 1.6219951131087076e-05, + "loss": 1.0307, + "step": 2236 + }, + { + "epoch": 0.3078722818607212, + "grad_norm": 1.8211030615560844, + "learning_rate": 1.62164602559037e-05, + "loss": 1.1275, + "step": 2237 + }, + { + "epoch": 0.30800990916597853, + "grad_norm": 1.5460532431063154, + "learning_rate": 1.621296814559477e-05, + "loss": 1.0331, + "step": 2238 + }, + { + "epoch": 0.3081475364712359, + "grad_norm": 1.5546189096400589, + "learning_rate": 1.6209474800854114e-05, + "loss": 1.0785, + "step": 2239 + }, + { + "epoch": 0.30828516377649323, + "grad_norm": 1.7309889418156763, + "learning_rate": 1.620598022237581e-05, + "loss": 1.1228, + "step": 2240 + }, + { + "epoch": 0.30842279108175064, + "grad_norm": 1.8284616743870008, + "learning_rate": 1.620248441085419e-05, + "loss": 1.2583, + "step": 2241 + }, + { + "epoch": 0.308560418387008, + "grad_norm": 1.8788026617379256, + "learning_rate": 1.619898736698382e-05, + "loss": 1.1232, + "step": 2242 + }, + { + "epoch": 0.30869804569226533, + "grad_norm": 1.9290747067652312, + "learning_rate": 1.6195489091459515e-05, + "loss": 1.2494, + "step": 2243 + }, + { + "epoch": 0.3088356729975227, + "grad_norm": 1.6694916878253072, + "learning_rate": 1.6191989584976332e-05, + "loss": 1.0606, + "step": 2244 + }, + { + "epoch": 0.3089733003027801, + "grad_norm": 1.915135087540746, + "learning_rate": 1.6188488848229584e-05, + "loss": 1.0291, + "step": 2245 + }, + { + "epoch": 0.30911092760803743, + "grad_norm": 1.7434048860870541, + "learning_rate": 1.6184986881914808e-05, + "loss": 1.0229, + "step": 2246 + }, + { + "epoch": 0.3092485549132948, + "grad_norm": 2.0144486003700126, + "learning_rate": 1.6181483686727802e-05, + "loss": 1.1263, + "step": 2247 + }, + { + "epoch": 0.3093861822185522, + "grad_norm": 1.9089431424538068, + "learning_rate": 1.6177979263364606e-05, + "loss": 1.0712, + "step": 2248 + }, + { + "epoch": 0.30952380952380953, + "grad_norm": 2.0708949144574684, + "learning_rate": 1.6174473612521496e-05, + "loss": 1.1152, + "step": 2249 + }, + { + "epoch": 0.3096614368290669, + "grad_norm": 1.68696118189307, + "learning_rate": 1.6170966734894996e-05, + "loss": 1.1885, + "step": 2250 + }, + { + "epoch": 0.30979906413432423, + "grad_norm": 1.863931555526344, + "learning_rate": 1.6167458631181877e-05, + "loss": 1.1366, + "step": 2251 + }, + { + "epoch": 0.30993669143958164, + "grad_norm": 1.8456116696089355, + "learning_rate": 1.6163949302079153e-05, + "loss": 1.1017, + "step": 2252 + }, + { + "epoch": 0.310074318744839, + "grad_norm": 1.5578508265517148, + "learning_rate": 1.616043874828408e-05, + "loss": 1.0415, + "step": 2253 + }, + { + "epoch": 0.31021194605009633, + "grad_norm": 1.882952307713398, + "learning_rate": 1.6156926970494154e-05, + "loss": 1.1674, + "step": 2254 + }, + { + "epoch": 0.3103495733553537, + "grad_norm": 1.9021672831497831, + "learning_rate": 1.6153413969407113e-05, + "loss": 1.0952, + "step": 2255 + }, + { + "epoch": 0.3104872006606111, + "grad_norm": 1.8568328774281189, + "learning_rate": 1.6149899745720952e-05, + "loss": 1.1495, + "step": 2256 + }, + { + "epoch": 0.31062482796586843, + "grad_norm": 1.7894773883954638, + "learning_rate": 1.6146384300133895e-05, + "loss": 1.0494, + "step": 2257 + }, + { + "epoch": 0.3107624552711258, + "grad_norm": 1.690909862036309, + "learning_rate": 1.614286763334441e-05, + "loss": 1.1476, + "step": 2258 + }, + { + "epoch": 0.31090008257638313, + "grad_norm": 1.6403130211689063, + "learning_rate": 
1.6139349746051215e-05, + "loss": 1.1188, + "step": 2259 + }, + { + "epoch": 0.31103770988164053, + "grad_norm": 1.6755032293719765, + "learning_rate": 1.6135830638953264e-05, + "loss": 1.0986, + "step": 2260 + }, + { + "epoch": 0.3111753371868979, + "grad_norm": 1.7511703818391924, + "learning_rate": 1.6132310312749754e-05, + "loss": 1.0851, + "step": 2261 + }, + { + "epoch": 0.31131296449215523, + "grad_norm": 1.6767572025242836, + "learning_rate": 1.612878876814013e-05, + "loss": 1.0885, + "step": 2262 + }, + { + "epoch": 0.3114505917974126, + "grad_norm": 1.6584814649289747, + "learning_rate": 1.612526600582407e-05, + "loss": 1.1916, + "step": 2263 + }, + { + "epoch": 0.31158821910267, + "grad_norm": 1.8397010163800933, + "learning_rate": 1.6121742026501496e-05, + "loss": 1.0342, + "step": 2264 + }, + { + "epoch": 0.31172584640792733, + "grad_norm": 1.8445151982548667, + "learning_rate": 1.6118216830872583e-05, + "loss": 1.0583, + "step": 2265 + }, + { + "epoch": 0.3118634737131847, + "grad_norm": 1.7132185187576148, + "learning_rate": 1.6114690419637736e-05, + "loss": 1.0047, + "step": 2266 + }, + { + "epoch": 0.3120011010184421, + "grad_norm": 1.6089838698094958, + "learning_rate": 1.6111162793497604e-05, + "loss": 1.0095, + "step": 2267 + }, + { + "epoch": 0.31213872832369943, + "grad_norm": 1.5652485444464184, + "learning_rate": 1.6107633953153075e-05, + "loss": 1.0992, + "step": 2268 + }, + { + "epoch": 0.3122763556289568, + "grad_norm": 1.7125184318109963, + "learning_rate": 1.6104103899305284e-05, + "loss": 1.1326, + "step": 2269 + }, + { + "epoch": 0.31241398293421413, + "grad_norm": 1.8968762591900499, + "learning_rate": 1.6100572632655603e-05, + "loss": 1.0976, + "step": 2270 + }, + { + "epoch": 0.31255161023947153, + "grad_norm": 1.8648520467517655, + "learning_rate": 1.609704015390565e-05, + "loss": 1.0975, + "step": 2271 + }, + { + "epoch": 0.3126892375447289, + "grad_norm": 1.6195633596302104, + "learning_rate": 1.609350646375728e-05, + "loss": 1.0805, + "step": 2272 + }, + { + "epoch": 0.31282686484998623, + "grad_norm": 1.8936009498165187, + "learning_rate": 1.608997156291258e-05, + "loss": 1.1756, + "step": 2273 + }, + { + "epoch": 0.3129644921552436, + "grad_norm": 1.9339836397657018, + "learning_rate": 1.60864354520739e-05, + "loss": 1.0214, + "step": 2274 + }, + { + "epoch": 0.313102119460501, + "grad_norm": 1.764479598542925, + "learning_rate": 1.608289813194381e-05, + "loss": 1.0112, + "step": 2275 + }, + { + "epoch": 0.31323974676575833, + "grad_norm": 1.6743033308276218, + "learning_rate": 1.6079359603225123e-05, + "loss": 1.1987, + "step": 2276 + }, + { + "epoch": 0.3133773740710157, + "grad_norm": 1.7292733751922802, + "learning_rate": 1.60758198666209e-05, + "loss": 1.0511, + "step": 2277 + }, + { + "epoch": 0.31351500137627303, + "grad_norm": 1.763973526400724, + "learning_rate": 1.6072278922834442e-05, + "loss": 1.0588, + "step": 2278 + }, + { + "epoch": 0.31365262868153043, + "grad_norm": 1.7162165761275257, + "learning_rate": 1.606873677256928e-05, + "loss": 1.0871, + "step": 2279 + }, + { + "epoch": 0.3137902559867878, + "grad_norm": 1.6769339809803583, + "learning_rate": 1.6065193416529195e-05, + "loss": 0.9978, + "step": 2280 + }, + { + "epoch": 0.31392788329204513, + "grad_norm": 1.9054773302687735, + "learning_rate": 1.6061648855418197e-05, + "loss": 1.1643, + "step": 2281 + }, + { + "epoch": 0.3140655105973025, + "grad_norm": 1.6416762172112254, + "learning_rate": 1.605810308994055e-05, + "loss": 1.0335, + "step": 2282 + }, + { + "epoch": 
0.3142031379025599, + "grad_norm": 1.803399031523385, + "learning_rate": 1.6054556120800742e-05, + "loss": 1.148, + "step": 2283 + }, + { + "epoch": 0.31434076520781723, + "grad_norm": 1.6220559659085125, + "learning_rate": 1.6051007948703517e-05, + "loss": 1.0901, + "step": 2284 + }, + { + "epoch": 0.3144783925130746, + "grad_norm": 2.0672646693819425, + "learning_rate": 1.6047458574353838e-05, + "loss": 1.0439, + "step": 2285 + }, + { + "epoch": 0.314616019818332, + "grad_norm": 1.6291604438891474, + "learning_rate": 1.604390799845692e-05, + "loss": 0.9976, + "step": 2286 + }, + { + "epoch": 0.31475364712358933, + "grad_norm": 1.8960126114828377, + "learning_rate": 1.604035622171822e-05, + "loss": 1.1045, + "step": 2287 + }, + { + "epoch": 0.3148912744288467, + "grad_norm": 1.731972933116561, + "learning_rate": 1.6036803244843415e-05, + "loss": 1.0936, + "step": 2288 + }, + { + "epoch": 0.315028901734104, + "grad_norm": 1.6255213666812534, + "learning_rate": 1.6033249068538445e-05, + "loss": 1.0364, + "step": 2289 + }, + { + "epoch": 0.31516652903936143, + "grad_norm": 1.6970566255347188, + "learning_rate": 1.602969369350947e-05, + "loss": 1.0918, + "step": 2290 + }, + { + "epoch": 0.3153041563446188, + "grad_norm": 1.8604537566046033, + "learning_rate": 1.6026137120462895e-05, + "loss": 1.1146, + "step": 2291 + }, + { + "epoch": 0.31544178364987613, + "grad_norm": 1.6135813274240594, + "learning_rate": 1.6022579350105362e-05, + "loss": 1.038, + "step": 2292 + }, + { + "epoch": 0.3155794109551335, + "grad_norm": 1.6749503165053843, + "learning_rate": 1.6019020383143754e-05, + "loss": 1.1293, + "step": 2293 + }, + { + "epoch": 0.3157170382603909, + "grad_norm": 1.6840539808506523, + "learning_rate": 1.6015460220285185e-05, + "loss": 1.1406, + "step": 2294 + }, + { + "epoch": 0.31585466556564823, + "grad_norm": 1.7136770477155474, + "learning_rate": 1.601189886223702e-05, + "loss": 1.1485, + "step": 2295 + }, + { + "epoch": 0.3159922928709056, + "grad_norm": 1.5846586128283668, + "learning_rate": 1.6008336309706835e-05, + "loss": 1.0802, + "step": 2296 + }, + { + "epoch": 0.3161299201761629, + "grad_norm": 1.7663962216175761, + "learning_rate": 1.6004772563402477e-05, + "loss": 1.0915, + "step": 2297 + }, + { + "epoch": 0.31626754748142033, + "grad_norm": 1.6904754200515562, + "learning_rate": 1.6001207624032006e-05, + "loss": 1.0453, + "step": 2298 + }, + { + "epoch": 0.3164051747866777, + "grad_norm": 1.9891265184616433, + "learning_rate": 1.5997641492303727e-05, + "loss": 1.1462, + "step": 2299 + }, + { + "epoch": 0.316542802091935, + "grad_norm": 1.772704527602838, + "learning_rate": 1.599407416892618e-05, + "loss": 1.1113, + "step": 2300 + }, + { + "epoch": 0.3166804293971924, + "grad_norm": 1.7766902012776271, + "learning_rate": 1.599050565460815e-05, + "loss": 1.0587, + "step": 2301 + }, + { + "epoch": 0.3168180567024498, + "grad_norm": 1.933262242545844, + "learning_rate": 1.5986935950058642e-05, + "loss": 1.1241, + "step": 2302 + }, + { + "epoch": 0.31695568400770713, + "grad_norm": 1.800019927433424, + "learning_rate": 1.5983365055986917e-05, + "loss": 1.0814, + "step": 2303 + }, + { + "epoch": 0.3170933113129645, + "grad_norm": 1.5983183734035724, + "learning_rate": 1.597979297310246e-05, + "loss": 1.0935, + "step": 2304 + }, + { + "epoch": 0.3172309386182219, + "grad_norm": 1.9125475956060354, + "learning_rate": 1.597621970211499e-05, + "loss": 1.0988, + "step": 2305 + }, + { + "epoch": 0.31736856592347923, + "grad_norm": 1.9020757124520562, + "learning_rate": 
1.5972645243734473e-05, + "loss": 1.1055, + "step": 2306 + }, + { + "epoch": 0.3175061932287366, + "grad_norm": 1.7643985733187741, + "learning_rate": 1.59690695986711e-05, + "loss": 1.1126, + "step": 2307 + }, + { + "epoch": 0.3176438205339939, + "grad_norm": 1.5091678716845236, + "learning_rate": 1.5965492767635307e-05, + "loss": 1.0118, + "step": 2308 + }, + { + "epoch": 0.31778144783925133, + "grad_norm": 1.860448777853755, + "learning_rate": 1.5961914751337762e-05, + "loss": 0.9792, + "step": 2309 + }, + { + "epoch": 0.3179190751445087, + "grad_norm": 1.7426759550871298, + "learning_rate": 1.5958335550489367e-05, + "loss": 1.0956, + "step": 2310 + }, + { + "epoch": 0.318056702449766, + "grad_norm": 1.6914039313364324, + "learning_rate": 1.5954755165801253e-05, + "loss": 1.0355, + "step": 2311 + }, + { + "epoch": 0.3181943297550234, + "grad_norm": 1.7801941735940796, + "learning_rate": 1.59511735979848e-05, + "loss": 1.1175, + "step": 2312 + }, + { + "epoch": 0.3183319570602808, + "grad_norm": 1.6082071287109962, + "learning_rate": 1.594759084775162e-05, + "loss": 1.1161, + "step": 2313 + }, + { + "epoch": 0.3184695843655381, + "grad_norm": 1.9456554729076283, + "learning_rate": 1.594400691581355e-05, + "loss": 1.1416, + "step": 2314 + }, + { + "epoch": 0.3186072116707955, + "grad_norm": 1.7272363550957333, + "learning_rate": 1.594042180288267e-05, + "loss": 1.1477, + "step": 2315 + }, + { + "epoch": 0.3187448389760528, + "grad_norm": 3.5664810739271253, + "learning_rate": 1.5936835509671294e-05, + "loss": 1.0083, + "step": 2316 + }, + { + "epoch": 0.31888246628131023, + "grad_norm": 1.8179687995631697, + "learning_rate": 1.5933248036891968e-05, + "loss": 1.1404, + "step": 2317 + }, + { + "epoch": 0.3190200935865676, + "grad_norm": 1.7027646526491853, + "learning_rate": 1.5929659385257475e-05, + "loss": 1.08, + "step": 2318 + }, + { + "epoch": 0.3191577208918249, + "grad_norm": 1.6086468336865665, + "learning_rate": 1.5926069555480827e-05, + "loss": 1.1195, + "step": 2319 + }, + { + "epoch": 0.3192953481970823, + "grad_norm": 1.5863347472287999, + "learning_rate": 1.592247854827528e-05, + "loss": 1.0912, + "step": 2320 + }, + { + "epoch": 0.3194329755023397, + "grad_norm": 1.7912733824826224, + "learning_rate": 1.5918886364354315e-05, + "loss": 1.1714, + "step": 2321 + }, + { + "epoch": 0.319570602807597, + "grad_norm": 1.8109376049888988, + "learning_rate": 1.591529300443165e-05, + "loss": 1.1253, + "step": 2322 + }, + { + "epoch": 0.3197082301128544, + "grad_norm": 2.10375077606398, + "learning_rate": 1.591169846922124e-05, + "loss": 1.1378, + "step": 2323 + }, + { + "epoch": 0.3198458574181118, + "grad_norm": 1.6289270735227497, + "learning_rate": 1.5908102759437265e-05, + "loss": 1.0542, + "step": 2324 + }, + { + "epoch": 0.3199834847233691, + "grad_norm": 2.13222515842592, + "learning_rate": 1.5904505875794144e-05, + "loss": 1.1582, + "step": 2325 + }, + { + "epoch": 0.3201211120286265, + "grad_norm": 1.6649241454636934, + "learning_rate": 1.590090781900653e-05, + "loss": 0.9699, + "step": 2326 + }, + { + "epoch": 0.3202587393338838, + "grad_norm": 1.9519304229349628, + "learning_rate": 1.5897308589789305e-05, + "loss": 1.1089, + "step": 2327 + }, + { + "epoch": 0.32039636663914123, + "grad_norm": 1.6650331588854406, + "learning_rate": 1.589370818885759e-05, + "loss": 1.0378, + "step": 2328 + }, + { + "epoch": 0.3205339939443986, + "grad_norm": 1.9348901998083594, + "learning_rate": 1.5890106616926732e-05, + "loss": 1.0754, + "step": 2329 + }, + { + "epoch": 0.3206716212496559, + 
"grad_norm": 1.5012382844947767, + "learning_rate": 1.5886503874712316e-05, + "loss": 0.995, + "step": 2330 + }, + { + "epoch": 0.3208092485549133, + "grad_norm": 1.661694356030018, + "learning_rate": 1.5882899962930162e-05, + "loss": 0.9706, + "step": 2331 + }, + { + "epoch": 0.3209468758601707, + "grad_norm": 1.6725339026758324, + "learning_rate": 1.587929488229631e-05, + "loss": 1.0437, + "step": 2332 + }, + { + "epoch": 0.321084503165428, + "grad_norm": 1.754075685346653, + "learning_rate": 1.5875688633527043e-05, + "loss": 1.143, + "step": 2333 + }, + { + "epoch": 0.3212221304706854, + "grad_norm": 1.9898848614835465, + "learning_rate": 1.5872081217338874e-05, + "loss": 1.1093, + "step": 2334 + }, + { + "epoch": 0.3213597577759427, + "grad_norm": 1.5104178094667415, + "learning_rate": 1.586847263444855e-05, + "loss": 1.0481, + "step": 2335 + }, + { + "epoch": 0.3214973850812001, + "grad_norm": 2.018293111903957, + "learning_rate": 1.5864862885573042e-05, + "loss": 1.1189, + "step": 2336 + }, + { + "epoch": 0.3216350123864575, + "grad_norm": 1.7661490130223527, + "learning_rate": 1.5861251971429557e-05, + "loss": 1.1409, + "step": 2337 + }, + { + "epoch": 0.3217726396917148, + "grad_norm": 1.7062105848401936, + "learning_rate": 1.5857639892735544e-05, + "loss": 1.0966, + "step": 2338 + }, + { + "epoch": 0.32191026699697217, + "grad_norm": 1.76628155603058, + "learning_rate": 1.5854026650208662e-05, + "loss": 1.0969, + "step": 2339 + }, + { + "epoch": 0.3220478943022296, + "grad_norm": 1.6356783012090528, + "learning_rate": 1.5850412244566817e-05, + "loss": 1.077, + "step": 2340 + }, + { + "epoch": 0.3221855216074869, + "grad_norm": 1.6955593658789085, + "learning_rate": 1.5846796676528147e-05, + "loss": 1.0417, + "step": 2341 + }, + { + "epoch": 0.3223231489127443, + "grad_norm": 1.8493747638151574, + "learning_rate": 1.5843179946811006e-05, + "loss": 1.1204, + "step": 2342 + }, + { + "epoch": 0.3224607762180017, + "grad_norm": 1.8759195177691446, + "learning_rate": 1.5839562056133998e-05, + "loss": 1.0453, + "step": 2343 + }, + { + "epoch": 0.322598403523259, + "grad_norm": 1.7085931154027267, + "learning_rate": 1.5835943005215944e-05, + "loss": 0.993, + "step": 2344 + }, + { + "epoch": 0.3227360308285164, + "grad_norm": 1.6818874493546516, + "learning_rate": 1.5832322794775898e-05, + "loss": 1.0608, + "step": 2345 + }, + { + "epoch": 0.3228736581337737, + "grad_norm": 1.9166687559531714, + "learning_rate": 1.582870142553315e-05, + "loss": 1.1488, + "step": 2346 + }, + { + "epoch": 0.3230112854390311, + "grad_norm": 1.566279373070242, + "learning_rate": 1.5825078898207214e-05, + "loss": 1.0584, + "step": 2347 + }, + { + "epoch": 0.3231489127442885, + "grad_norm": 1.7198993460098657, + "learning_rate": 1.5821455213517837e-05, + "loss": 1.0155, + "step": 2348 + }, + { + "epoch": 0.3232865400495458, + "grad_norm": 1.8606493326206026, + "learning_rate": 1.5817830372184998e-05, + "loss": 1.1371, + "step": 2349 + }, + { + "epoch": 0.32342416735480317, + "grad_norm": 1.7388856212878667, + "learning_rate": 1.5814204374928898e-05, + "loss": 1.0515, + "step": 2350 + }, + { + "epoch": 0.3235617946600606, + "grad_norm": 1.6649017289299055, + "learning_rate": 1.581057722246998e-05, + "loss": 1.0911, + "step": 2351 + }, + { + "epoch": 0.3236994219653179, + "grad_norm": 1.7682101828216725, + "learning_rate": 1.5806948915528904e-05, + "loss": 1.1168, + "step": 2352 + }, + { + "epoch": 0.3238370492705753, + "grad_norm": 1.7690607414438981, + "learning_rate": 1.580331945482657e-05, + "loss": 1.0604, + 
"step": 2353 + }, + { + "epoch": 0.3239746765758326, + "grad_norm": 1.7270337931770436, + "learning_rate": 1.5799688841084097e-05, + "loss": 1.096, + "step": 2354 + }, + { + "epoch": 0.32411230388109, + "grad_norm": 1.7735729383154624, + "learning_rate": 1.5796057075022838e-05, + "loss": 1.1673, + "step": 2355 + }, + { + "epoch": 0.3242499311863474, + "grad_norm": 2.04313966196195, + "learning_rate": 1.579242415736438e-05, + "loss": 1.0997, + "step": 2356 + }, + { + "epoch": 0.3243875584916047, + "grad_norm": 1.7400971187375815, + "learning_rate": 1.5788790088830534e-05, + "loss": 1.0251, + "step": 2357 + }, + { + "epoch": 0.32452518579686207, + "grad_norm": 1.6832618591094404, + "learning_rate": 1.5785154870143333e-05, + "loss": 1.0825, + "step": 2358 + }, + { + "epoch": 0.3246628131021195, + "grad_norm": 1.8769334318375996, + "learning_rate": 1.5781518502025054e-05, + "loss": 1.1911, + "step": 2359 + }, + { + "epoch": 0.3248004404073768, + "grad_norm": 1.7201128259880378, + "learning_rate": 1.5777880985198185e-05, + "loss": 1.0818, + "step": 2360 + }, + { + "epoch": 0.32493806771263417, + "grad_norm": 1.927904506595857, + "learning_rate": 1.5774242320385456e-05, + "loss": 1.0753, + "step": 2361 + }, + { + "epoch": 0.3250756950178916, + "grad_norm": 1.6518787098038445, + "learning_rate": 1.5770602508309818e-05, + "loss": 1.0347, + "step": 2362 + }, + { + "epoch": 0.3252133223231489, + "grad_norm": 1.7254303827610937, + "learning_rate": 1.5766961549694452e-05, + "loss": 1.09, + "step": 2363 + }, + { + "epoch": 0.32535094962840627, + "grad_norm": 1.8468023746410842, + "learning_rate": 1.576331944526277e-05, + "loss": 1.1002, + "step": 2364 + }, + { + "epoch": 0.3254885769336636, + "grad_norm": 1.7760414595909948, + "learning_rate": 1.5759676195738403e-05, + "loss": 1.1114, + "step": 2365 + }, + { + "epoch": 0.325626204238921, + "grad_norm": 1.70808927899885, + "learning_rate": 1.5756031801845218e-05, + "loss": 1.1066, + "step": 2366 + }, + { + "epoch": 0.3257638315441784, + "grad_norm": 1.7937495633770215, + "learning_rate": 1.5752386264307307e-05, + "loss": 1.05, + "step": 2367 + }, + { + "epoch": 0.3259014588494357, + "grad_norm": 1.7795041548322201, + "learning_rate": 1.5748739583848983e-05, + "loss": 1.0677, + "step": 2368 + }, + { + "epoch": 0.32603908615469307, + "grad_norm": 1.7302835811021178, + "learning_rate": 1.57450917611948e-05, + "loss": 1.0605, + "step": 2369 + }, + { + "epoch": 0.3261767134599505, + "grad_norm": 1.6602865270467608, + "learning_rate": 1.5741442797069523e-05, + "loss": 1.0686, + "step": 2370 + }, + { + "epoch": 0.3263143407652078, + "grad_norm": 1.5257812599981984, + "learning_rate": 1.5737792692198155e-05, + "loss": 1.0961, + "step": 2371 + }, + { + "epoch": 0.32645196807046517, + "grad_norm": 1.6016563903527217, + "learning_rate": 1.573414144730592e-05, + "loss": 1.1306, + "step": 2372 + }, + { + "epoch": 0.3265895953757225, + "grad_norm": 1.7141437081719049, + "learning_rate": 1.5730489063118274e-05, + "loss": 1.0809, + "step": 2373 + }, + { + "epoch": 0.3267272226809799, + "grad_norm": 1.7166592463678925, + "learning_rate": 1.572683554036089e-05, + "loss": 1.0676, + "step": 2374 + }, + { + "epoch": 0.32686484998623727, + "grad_norm": 2.004562180474064, + "learning_rate": 1.5723180879759675e-05, + "loss": 1.0014, + "step": 2375 + }, + { + "epoch": 0.3270024772914946, + "grad_norm": 1.6586025150990276, + "learning_rate": 1.5719525082040764e-05, + "loss": 1.1033, + "step": 2376 + }, + { + "epoch": 0.32714010459675197, + "grad_norm": 1.7811987714300934, + 
"learning_rate": 1.571586814793051e-05, + "loss": 1.1586, + "step": 2377 + }, + { + "epoch": 0.3272777319020094, + "grad_norm": 1.7047001699287403, + "learning_rate": 1.5712210078155495e-05, + "loss": 1.0294, + "step": 2378 + }, + { + "epoch": 0.3274153592072667, + "grad_norm": 1.8001029425852324, + "learning_rate": 1.570855087344253e-05, + "loss": 1.0833, + "step": 2379 + }, + { + "epoch": 0.32755298651252407, + "grad_norm": 1.6810697964791235, + "learning_rate": 1.570489053451865e-05, + "loss": 1.025, + "step": 2380 + }, + { + "epoch": 0.3276906138177815, + "grad_norm": 1.541819086969105, + "learning_rate": 1.570122906211111e-05, + "loss": 1.0122, + "step": 2381 + }, + { + "epoch": 0.3278282411230388, + "grad_norm": 1.7618509939448488, + "learning_rate": 1.56975664569474e-05, + "loss": 1.0294, + "step": 2382 + }, + { + "epoch": 0.32796586842829617, + "grad_norm": 1.8598685173753144, + "learning_rate": 1.569390271975522e-05, + "loss": 1.0559, + "step": 2383 + }, + { + "epoch": 0.3281034957335535, + "grad_norm": 1.7747954467334333, + "learning_rate": 1.5690237851262514e-05, + "loss": 1.011, + "step": 2384 + }, + { + "epoch": 0.3282411230388109, + "grad_norm": 1.6487990070930296, + "learning_rate": 1.568657185219744e-05, + "loss": 1.1034, + "step": 2385 + }, + { + "epoch": 0.32837875034406827, + "grad_norm": 2.0826714417480723, + "learning_rate": 1.5682904723288375e-05, + "loss": 1.1685, + "step": 2386 + }, + { + "epoch": 0.3285163776493256, + "grad_norm": 1.468675794508899, + "learning_rate": 1.5679236465263938e-05, + "loss": 1.0781, + "step": 2387 + }, + { + "epoch": 0.32865400495458297, + "grad_norm": 1.832051478677324, + "learning_rate": 1.5675567078852953e-05, + "loss": 1.0866, + "step": 2388 + }, + { + "epoch": 0.32879163225984037, + "grad_norm": 1.5904825662952686, + "learning_rate": 1.567189656478448e-05, + "loss": 1.0368, + "step": 2389 + }, + { + "epoch": 0.3289292595650977, + "grad_norm": 1.788046188361262, + "learning_rate": 1.56682249237878e-05, + "loss": 1.1599, + "step": 2390 + }, + { + "epoch": 0.32906688687035507, + "grad_norm": 1.752651508661983, + "learning_rate": 1.5664552156592412e-05, + "loss": 1.1091, + "step": 2391 + }, + { + "epoch": 0.3292045141756124, + "grad_norm": 2.050158250314157, + "learning_rate": 1.5660878263928054e-05, + "loss": 1.2802, + "step": 2392 + }, + { + "epoch": 0.3293421414808698, + "grad_norm": 1.866939139595996, + "learning_rate": 1.565720324652467e-05, + "loss": 1.1539, + "step": 2393 + }, + { + "epoch": 0.32947976878612717, + "grad_norm": 2.016290378236832, + "learning_rate": 1.5653527105112444e-05, + "loss": 1.0084, + "step": 2394 + }, + { + "epoch": 0.3296173960913845, + "grad_norm": 1.6525591375001598, + "learning_rate": 1.564984984042177e-05, + "loss": 1.095, + "step": 2395 + }, + { + "epoch": 0.32975502339664187, + "grad_norm": 1.7085894056865552, + "learning_rate": 1.564617145318327e-05, + "loss": 1.0396, + "step": 2396 + }, + { + "epoch": 0.32989265070189927, + "grad_norm": 1.8732548474485025, + "learning_rate": 1.5642491944127792e-05, + "loss": 1.1343, + "step": 2397 + }, + { + "epoch": 0.3300302780071566, + "grad_norm": 1.6349048433226607, + "learning_rate": 1.56388113139864e-05, + "loss": 1.0825, + "step": 2398 + }, + { + "epoch": 0.33016790531241397, + "grad_norm": 1.6646857406750188, + "learning_rate": 1.5635129563490384e-05, + "loss": 0.9959, + "step": 2399 + }, + { + "epoch": 0.33030553261767137, + "grad_norm": 1.6914573105875277, + "learning_rate": 1.563144669337126e-05, + "loss": 1.0849, + "step": 2400 + }, + { + "epoch": 
0.3304431599229287, + "grad_norm": 1.629293871225552, + "learning_rate": 1.5627762704360773e-05, + "loss": 1.0536, + "step": 2401 + }, + { + "epoch": 0.33058078722818607, + "grad_norm": 1.5937219087124495, + "learning_rate": 1.5624077597190864e-05, + "loss": 0.9655, + "step": 2402 + }, + { + "epoch": 0.3307184145334434, + "grad_norm": 1.7139382459330028, + "learning_rate": 1.5620391372593723e-05, + "loss": 1.0231, + "step": 2403 + }, + { + "epoch": 0.3308560418387008, + "grad_norm": 1.5747178348505184, + "learning_rate": 1.5616704031301754e-05, + "loss": 1.1209, + "step": 2404 + }, + { + "epoch": 0.33099366914395817, + "grad_norm": 1.7413968191712683, + "learning_rate": 1.561301557404758e-05, + "loss": 1.0927, + "step": 2405 + }, + { + "epoch": 0.3311312964492155, + "grad_norm": 1.5321825619302887, + "learning_rate": 1.560932600156404e-05, + "loss": 0.9832, + "step": 2406 + }, + { + "epoch": 0.33126892375447287, + "grad_norm": 1.5011250820933424, + "learning_rate": 1.5605635314584206e-05, + "loss": 0.9461, + "step": 2407 + }, + { + "epoch": 0.33140655105973027, + "grad_norm": 1.6193644567964949, + "learning_rate": 1.560194351384137e-05, + "loss": 1.0257, + "step": 2408 + }, + { + "epoch": 0.3315441783649876, + "grad_norm": 1.9510663808578694, + "learning_rate": 1.559825060006904e-05, + "loss": 1.1339, + "step": 2409 + }, + { + "epoch": 0.33168180567024497, + "grad_norm": 1.6187957467910705, + "learning_rate": 1.559455657400095e-05, + "loss": 1.0121, + "step": 2410 + }, + { + "epoch": 0.3318194329755023, + "grad_norm": 1.7384094839684594, + "learning_rate": 1.559086143637105e-05, + "loss": 1.0791, + "step": 2411 + }, + { + "epoch": 0.3319570602807597, + "grad_norm": 1.6977530496280118, + "learning_rate": 1.5587165187913507e-05, + "loss": 1.0451, + "step": 2412 + }, + { + "epoch": 0.33209468758601707, + "grad_norm": 1.6574988208816277, + "learning_rate": 1.5583467829362726e-05, + "loss": 1.0969, + "step": 2413 + }, + { + "epoch": 0.3322323148912744, + "grad_norm": 1.729339781897529, + "learning_rate": 1.5579769361453317e-05, + "loss": 1.0843, + "step": 2414 + }, + { + "epoch": 0.33236994219653176, + "grad_norm": 1.6367312443181354, + "learning_rate": 1.557606978492011e-05, + "loss": 1.0804, + "step": 2415 + }, + { + "epoch": 0.33250756950178917, + "grad_norm": 1.9953687940267166, + "learning_rate": 1.5572369100498168e-05, + "loss": 1.1108, + "step": 2416 + }, + { + "epoch": 0.3326451968070465, + "grad_norm": 1.6533863863126634, + "learning_rate": 1.5568667308922764e-05, + "loss": 0.9502, + "step": 2417 + }, + { + "epoch": 0.33278282411230387, + "grad_norm": 1.862532215211359, + "learning_rate": 1.5564964410929394e-05, + "loss": 0.9961, + "step": 2418 + }, + { + "epoch": 0.33292045141756127, + "grad_norm": 1.6158055581339776, + "learning_rate": 1.5561260407253766e-05, + "loss": 1.1259, + "step": 2419 + }, + { + "epoch": 0.3330580787228186, + "grad_norm": 1.7673305943936002, + "learning_rate": 1.5557555298631825e-05, + "loss": 1.0545, + "step": 2420 + }, + { + "epoch": 0.33319570602807597, + "grad_norm": 1.7774490993114553, + "learning_rate": 1.555384908579972e-05, + "loss": 1.0466, + "step": 2421 + }, + { + "epoch": 0.3333333333333333, + "grad_norm": 1.7167457695563906, + "learning_rate": 1.5550141769493824e-05, + "loss": 1.0655, + "step": 2422 + }, + { + "epoch": 0.3334709606385907, + "grad_norm": 1.713492288604055, + "learning_rate": 1.554643335045073e-05, + "loss": 1.1201, + "step": 2423 + }, + { + "epoch": 0.33360858794384807, + "grad_norm": 1.687585260046263, + "learning_rate": 
1.5542723829407255e-05, + "loss": 1.0734, + "step": 2424 + }, + { + "epoch": 0.3337462152491054, + "grad_norm": 1.5930877211628152, + "learning_rate": 1.553901320710043e-05, + "loss": 1.0656, + "step": 2425 + }, + { + "epoch": 0.33388384255436276, + "grad_norm": 1.5543665765904255, + "learning_rate": 1.5535301484267496e-05, + "loss": 1.0608, + "step": 2426 + }, + { + "epoch": 0.33402146985962017, + "grad_norm": 1.7395507619351236, + "learning_rate": 1.553158866164593e-05, + "loss": 1.0794, + "step": 2427 + }, + { + "epoch": 0.3341590971648775, + "grad_norm": 1.9131707660834933, + "learning_rate": 1.5527874739973414e-05, + "loss": 1.1832, + "step": 2428 + }, + { + "epoch": 0.33429672447013487, + "grad_norm": 1.8638755076397202, + "learning_rate": 1.5524159719987855e-05, + "loss": 1.1638, + "step": 2429 + }, + { + "epoch": 0.3344343517753922, + "grad_norm": 1.8195774500045223, + "learning_rate": 1.5520443602427378e-05, + "loss": 1.0615, + "step": 2430 + }, + { + "epoch": 0.3345719790806496, + "grad_norm": 1.6895453351369925, + "learning_rate": 1.5516726388030324e-05, + "loss": 1.2294, + "step": 2431 + }, + { + "epoch": 0.33470960638590697, + "grad_norm": 1.6691357329445753, + "learning_rate": 1.5513008077535255e-05, + "loss": 1.0025, + "step": 2432 + }, + { + "epoch": 0.3348472336911643, + "grad_norm": 1.6786652353004003, + "learning_rate": 1.5509288671680943e-05, + "loss": 1.1524, + "step": 2433 + }, + { + "epoch": 0.33498486099642166, + "grad_norm": 1.8928764926416464, + "learning_rate": 1.5505568171206387e-05, + "loss": 1.1015, + "step": 2434 + }, + { + "epoch": 0.33512248830167907, + "grad_norm": 1.645657502854033, + "learning_rate": 1.5501846576850796e-05, + "loss": 0.9984, + "step": 2435 + }, + { + "epoch": 0.3352601156069364, + "grad_norm": 1.6090894767446973, + "learning_rate": 1.5498123889353603e-05, + "loss": 1.0492, + "step": 2436 + }, + { + "epoch": 0.33539774291219376, + "grad_norm": 1.7309350338962743, + "learning_rate": 1.5494400109454457e-05, + "loss": 1.0897, + "step": 2437 + }, + { + "epoch": 0.33553537021745117, + "grad_norm": 1.6156997037707437, + "learning_rate": 1.5490675237893217e-05, + "loss": 1.0903, + "step": 2438 + }, + { + "epoch": 0.3356729975227085, + "grad_norm": 1.577663058698449, + "learning_rate": 1.5486949275409967e-05, + "loss": 1.094, + "step": 2439 + }, + { + "epoch": 0.33581062482796586, + "grad_norm": 1.6178247978478184, + "learning_rate": 1.5483222222745004e-05, + "loss": 1.1846, + "step": 2440 + }, + { + "epoch": 0.3359482521332232, + "grad_norm": 1.734992083328551, + "learning_rate": 1.5479494080638844e-05, + "loss": 1.0756, + "step": 2441 + }, + { + "epoch": 0.3360858794384806, + "grad_norm": 1.6004037925793275, + "learning_rate": 1.5475764849832215e-05, + "loss": 1.1094, + "step": 2442 + }, + { + "epoch": 0.33622350674373797, + "grad_norm": 1.5854947704566433, + "learning_rate": 1.5472034531066066e-05, + "loss": 1.084, + "step": 2443 + }, + { + "epoch": 0.3363611340489953, + "grad_norm": 1.6229880296735704, + "learning_rate": 1.546830312508156e-05, + "loss": 1.0798, + "step": 2444 + }, + { + "epoch": 0.33649876135425266, + "grad_norm": 1.7011715757128885, + "learning_rate": 1.5464570632620074e-05, + "loss": 1.1194, + "step": 2445 + }, + { + "epoch": 0.33663638865951007, + "grad_norm": 1.7828829837357527, + "learning_rate": 1.5460837054423206e-05, + "loss": 1.1322, + "step": 2446 + }, + { + "epoch": 0.3367740159647674, + "grad_norm": 1.7688893173722586, + "learning_rate": 1.5457102391232765e-05, + "loss": 1.0994, + "step": 2447 + }, + { + "epoch": 
0.33691164327002476, + "grad_norm": 1.921710652768241, + "learning_rate": 1.545336664379078e-05, + "loss": 1.0077, + "step": 2448 + }, + { + "epoch": 0.3370492705752821, + "grad_norm": 1.6195241372579625, + "learning_rate": 1.5449629812839492e-05, + "loss": 1.0665, + "step": 2449 + }, + { + "epoch": 0.3371868978805395, + "grad_norm": 1.8714779759415912, + "learning_rate": 1.5445891899121356e-05, + "loss": 1.116, + "step": 2450 + }, + { + "epoch": 0.33732452518579686, + "grad_norm": 2.114463897136919, + "learning_rate": 1.5442152903379044e-05, + "loss": 1.1124, + "step": 2451 + }, + { + "epoch": 0.3374621524910542, + "grad_norm": 1.8057340321699364, + "learning_rate": 1.5438412826355447e-05, + "loss": 0.9909, + "step": 2452 + }, + { + "epoch": 0.33759977979631156, + "grad_norm": 1.630156294633219, + "learning_rate": 1.5434671668793668e-05, + "loss": 1.1287, + "step": 2453 + }, + { + "epoch": 0.33773740710156896, + "grad_norm": 1.8606501298007794, + "learning_rate": 1.5430929431437022e-05, + "loss": 1.0822, + "step": 2454 + }, + { + "epoch": 0.3378750344068263, + "grad_norm": 1.9247552770118723, + "learning_rate": 1.5427186115029035e-05, + "loss": 1.0672, + "step": 2455 + }, + { + "epoch": 0.33801266171208366, + "grad_norm": 1.619581788791508, + "learning_rate": 1.5423441720313458e-05, + "loss": 1.1313, + "step": 2456 + }, + { + "epoch": 0.33815028901734107, + "grad_norm": 1.8311744317021126, + "learning_rate": 1.5419696248034245e-05, + "loss": 1.147, + "step": 2457 + }, + { + "epoch": 0.3382879163225984, + "grad_norm": 1.7716751128694765, + "learning_rate": 1.5415949698935582e-05, + "loss": 1.1304, + "step": 2458 + }, + { + "epoch": 0.33842554362785576, + "grad_norm": 1.7367918135044305, + "learning_rate": 1.541220207376185e-05, + "loss": 1.1842, + "step": 2459 + }, + { + "epoch": 0.3385631709331131, + "grad_norm": 1.552135337234452, + "learning_rate": 1.5408453373257646e-05, + "loss": 1.1422, + "step": 2460 + }, + { + "epoch": 0.3387007982383705, + "grad_norm": 1.695730937226073, + "learning_rate": 1.5404703598167793e-05, + "loss": 1.1555, + "step": 2461 + }, + { + "epoch": 0.33883842554362786, + "grad_norm": 1.7549605747132613, + "learning_rate": 1.5400952749237317e-05, + "loss": 1.056, + "step": 2462 + }, + { + "epoch": 0.3389760528488852, + "grad_norm": 1.6848924726584689, + "learning_rate": 1.5397200827211464e-05, + "loss": 1.0401, + "step": 2463 + }, + { + "epoch": 0.33911368015414256, + "grad_norm": 1.5820613804703938, + "learning_rate": 1.539344783283568e-05, + "loss": 1.0543, + "step": 2464 + }, + { + "epoch": 0.33925130745939996, + "grad_norm": 1.657923352467557, + "learning_rate": 1.5389693766855645e-05, + "loss": 1.1277, + "step": 2465 + }, + { + "epoch": 0.3393889347646573, + "grad_norm": 1.8526755296739283, + "learning_rate": 1.5385938630017233e-05, + "loss": 1.1207, + "step": 2466 + }, + { + "epoch": 0.33952656206991466, + "grad_norm": 1.9587946569834347, + "learning_rate": 1.5382182423066544e-05, + "loss": 1.0795, + "step": 2467 + }, + { + "epoch": 0.339664189375172, + "grad_norm": 1.7563768495694851, + "learning_rate": 1.5378425146749878e-05, + "loss": 1.1257, + "step": 2468 + }, + { + "epoch": 0.3398018166804294, + "grad_norm": 1.5979272248706566, + "learning_rate": 1.537466680181376e-05, + "loss": 1.0399, + "step": 2469 + }, + { + "epoch": 0.33993944398568676, + "grad_norm": 1.725949860239849, + "learning_rate": 1.5370907389004917e-05, + "loss": 1.066, + "step": 2470 + }, + { + "epoch": 0.3400770712909441, + "grad_norm": 1.7333051148939074, + "learning_rate": 
1.5367146909070297e-05, + "loss": 1.0831, + "step": 2471 + }, + { + "epoch": 0.34021469859620146, + "grad_norm": 1.650125366692987, + "learning_rate": 1.5363385362757054e-05, + "loss": 1.0597, + "step": 2472 + }, + { + "epoch": 0.34035232590145886, + "grad_norm": 1.8601542136550744, + "learning_rate": 1.535962275081256e-05, + "loss": 1.1184, + "step": 2473 + }, + { + "epoch": 0.3404899532067162, + "grad_norm": 1.513563528141664, + "learning_rate": 1.535585907398439e-05, + "loss": 1.0694, + "step": 2474 + }, + { + "epoch": 0.34062758051197356, + "grad_norm": 1.935330741873839, + "learning_rate": 1.535209433302033e-05, + "loss": 1.1241, + "step": 2475 + }, + { + "epoch": 0.34076520781723096, + "grad_norm": 1.5305038879136965, + "learning_rate": 1.534832852866839e-05, + "loss": 1.0233, + "step": 2476 + }, + { + "epoch": 0.3409028351224883, + "grad_norm": 1.8473654832600042, + "learning_rate": 1.5344561661676784e-05, + "loss": 1.1906, + "step": 2477 + }, + { + "epoch": 0.34104046242774566, + "grad_norm": 1.7355399287021502, + "learning_rate": 1.5340793732793934e-05, + "loss": 1.0456, + "step": 2478 + }, + { + "epoch": 0.341178089733003, + "grad_norm": 1.6626922000839517, + "learning_rate": 1.5337024742768475e-05, + "loss": 1.091, + "step": 2479 + }, + { + "epoch": 0.3413157170382604, + "grad_norm": 2.249891605964178, + "learning_rate": 1.533325469234926e-05, + "loss": 1.1074, + "step": 2480 + }, + { + "epoch": 0.34145334434351776, + "grad_norm": 1.7521846163621029, + "learning_rate": 1.532948358228534e-05, + "loss": 1.0633, + "step": 2481 + }, + { + "epoch": 0.3415909716487751, + "grad_norm": 1.7730734920016111, + "learning_rate": 1.5325711413325984e-05, + "loss": 1.1702, + "step": 2482 + }, + { + "epoch": 0.34172859895403246, + "grad_norm": 1.6453841226552313, + "learning_rate": 1.532193818622067e-05, + "loss": 1.1156, + "step": 2483 + }, + { + "epoch": 0.34186622625928986, + "grad_norm": 2.106371047174693, + "learning_rate": 1.531816390171909e-05, + "loss": 1.1078, + "step": 2484 + }, + { + "epoch": 0.3420038535645472, + "grad_norm": 1.827561678162473, + "learning_rate": 1.5314388560571142e-05, + "loss": 1.1435, + "step": 2485 + }, + { + "epoch": 0.34214148086980456, + "grad_norm": 1.5199705189299015, + "learning_rate": 1.531061216352693e-05, + "loss": 1.0091, + "step": 2486 + }, + { + "epoch": 0.3422791081750619, + "grad_norm": 1.6368250744534398, + "learning_rate": 1.530683471133678e-05, + "loss": 1.1342, + "step": 2487 + }, + { + "epoch": 0.3424167354803193, + "grad_norm": 1.7421683346333066, + "learning_rate": 1.530305620475121e-05, + "loss": 1.0608, + "step": 2488 + }, + { + "epoch": 0.34255436278557666, + "grad_norm": 1.6906875509047727, + "learning_rate": 1.5299276644520965e-05, + "loss": 1.1095, + "step": 2489 + }, + { + "epoch": 0.342691990090834, + "grad_norm": 1.7278192034774242, + "learning_rate": 1.529549603139699e-05, + "loss": 1.0095, + "step": 2490 + }, + { + "epoch": 0.34282961739609136, + "grad_norm": 1.704972681801542, + "learning_rate": 1.5291714366130438e-05, + "loss": 1.0859, + "step": 2491 + }, + { + "epoch": 0.34296724470134876, + "grad_norm": 1.626092616987487, + "learning_rate": 1.528793164947268e-05, + "loss": 1.1043, + "step": 2492 + }, + { + "epoch": 0.3431048720066061, + "grad_norm": 1.6347340492101472, + "learning_rate": 1.5284147882175287e-05, + "loss": 1.0104, + "step": 2493 + }, + { + "epoch": 0.34324249931186346, + "grad_norm": 1.672062990253176, + "learning_rate": 1.5280363064990042e-05, + "loss": 1.0345, + "step": 2494 + }, + { + "epoch": 
0.34338012661712086, + "grad_norm": 1.5211368415279691, + "learning_rate": 1.5276577198668938e-05, + "loss": 1.0606, + "step": 2495 + }, + { + "epoch": 0.3435177539223782, + "grad_norm": 1.8170614012947848, + "learning_rate": 1.527279028396417e-05, + "loss": 1.1359, + "step": 2496 + }, + { + "epoch": 0.34365538122763556, + "grad_norm": 1.6213105199832738, + "learning_rate": 1.5269002321628152e-05, + "loss": 1.0982, + "step": 2497 + }, + { + "epoch": 0.3437930085328929, + "grad_norm": 1.8097405865032545, + "learning_rate": 1.5265213312413496e-05, + "loss": 1.1621, + "step": 2498 + }, + { + "epoch": 0.3439306358381503, + "grad_norm": 1.6753605631925006, + "learning_rate": 1.5261423257073028e-05, + "loss": 0.9842, + "step": 2499 + }, + { + "epoch": 0.34406826314340766, + "grad_norm": 1.5707995998681727, + "learning_rate": 1.5257632156359779e-05, + "loss": 1.0309, + "step": 2500 + }, + { + "epoch": 0.344205890448665, + "grad_norm": 1.8243804521449938, + "learning_rate": 1.5253840011026992e-05, + "loss": 1.0871, + "step": 2501 + }, + { + "epoch": 0.34434351775392236, + "grad_norm": 1.6824051943343137, + "learning_rate": 1.525004682182811e-05, + "loss": 1.0783, + "step": 2502 + }, + { + "epoch": 0.34448114505917976, + "grad_norm": 1.4785025945144012, + "learning_rate": 1.524625258951679e-05, + "loss": 0.9843, + "step": 2503 + }, + { + "epoch": 0.3446187723644371, + "grad_norm": 1.6224852362356657, + "learning_rate": 1.5242457314846894e-05, + "loss": 1.1099, + "step": 2504 + }, + { + "epoch": 0.34475639966969446, + "grad_norm": 1.777819533118021, + "learning_rate": 1.5238660998572492e-05, + "loss": 1.1213, + "step": 2505 + }, + { + "epoch": 0.3448940269749518, + "grad_norm": 1.75688063359809, + "learning_rate": 1.5234863641447857e-05, + "loss": 1.0783, + "step": 2506 + }, + { + "epoch": 0.3450316542802092, + "grad_norm": 1.7134935829124425, + "learning_rate": 1.5231065244227474e-05, + "loss": 1.0425, + "step": 2507 + }, + { + "epoch": 0.34516928158546656, + "grad_norm": 1.5224715381314857, + "learning_rate": 1.5227265807666034e-05, + "loss": 1.0335, + "step": 2508 + }, + { + "epoch": 0.3453069088907239, + "grad_norm": 1.7547787564037145, + "learning_rate": 1.522346533251843e-05, + "loss": 1.0733, + "step": 2509 + }, + { + "epoch": 0.34544453619598126, + "grad_norm": 1.549281608191395, + "learning_rate": 1.5219663819539769e-05, + "loss": 1.0559, + "step": 2510 + }, + { + "epoch": 0.34558216350123866, + "grad_norm": 1.7693444597990737, + "learning_rate": 1.5215861269485354e-05, + "loss": 1.1178, + "step": 2511 + }, + { + "epoch": 0.345719790806496, + "grad_norm": 1.755433986221727, + "learning_rate": 1.5212057683110702e-05, + "loss": 1.1059, + "step": 2512 + }, + { + "epoch": 0.34585741811175336, + "grad_norm": 1.901930132102428, + "learning_rate": 1.5208253061171535e-05, + "loss": 1.0424, + "step": 2513 + }, + { + "epoch": 0.34599504541701076, + "grad_norm": 1.8840027268699295, + "learning_rate": 1.5204447404423777e-05, + "loss": 1.0293, + "step": 2514 + }, + { + "epoch": 0.3461326727222681, + "grad_norm": 1.8006990095952689, + "learning_rate": 1.5200640713623558e-05, + "loss": 1.1076, + "step": 2515 + }, + { + "epoch": 0.34627030002752546, + "grad_norm": 1.9684085740961244, + "learning_rate": 1.5196832989527222e-05, + "loss": 1.0896, + "step": 2516 + }, + { + "epoch": 0.3464079273327828, + "grad_norm": 1.7150343354270194, + "learning_rate": 1.519302423289131e-05, + "loss": 1.0977, + "step": 2517 + }, + { + "epoch": 0.3465455546380402, + "grad_norm": 1.7833580700983938, + "learning_rate": 
1.5189214444472562e-05, + "loss": 1.0532, + "step": 2518 + }, + { + "epoch": 0.34668318194329756, + "grad_norm": 1.7419282758217778, + "learning_rate": 1.5185403625027941e-05, + "loss": 1.0419, + "step": 2519 + }, + { + "epoch": 0.3468208092485549, + "grad_norm": 1.6509194077351477, + "learning_rate": 1.5181591775314597e-05, + "loss": 1.156, + "step": 2520 + }, + { + "epoch": 0.34695843655381226, + "grad_norm": 1.820930368117441, + "learning_rate": 1.51777788960899e-05, + "loss": 1.0791, + "step": 2521 + }, + { + "epoch": 0.34709606385906966, + "grad_norm": 1.6329745895645094, + "learning_rate": 1.5173964988111406e-05, + "loss": 1.0651, + "step": 2522 + }, + { + "epoch": 0.347233691164327, + "grad_norm": 1.730348201865413, + "learning_rate": 1.5170150052136896e-05, + "loss": 1.0522, + "step": 2523 + }, + { + "epoch": 0.34737131846958436, + "grad_norm": 1.9121073863256095, + "learning_rate": 1.5166334088924343e-05, + "loss": 1.058, + "step": 2524 + }, + { + "epoch": 0.3475089457748417, + "grad_norm": 1.8026279514826422, + "learning_rate": 1.5162517099231923e-05, + "loss": 1.0271, + "step": 2525 + }, + { + "epoch": 0.3476465730800991, + "grad_norm": 1.6796648858758643, + "learning_rate": 1.5158699083818027e-05, + "loss": 1.1328, + "step": 2526 + }, + { + "epoch": 0.34778420038535646, + "grad_norm": 2.0075598928410034, + "learning_rate": 1.5154880043441233e-05, + "loss": 1.1005, + "step": 2527 + }, + { + "epoch": 0.3479218276906138, + "grad_norm": 1.7869783621036, + "learning_rate": 1.5151059978860341e-05, + "loss": 1.0783, + "step": 2528 + }, + { + "epoch": 0.34805945499587115, + "grad_norm": 2.0584947623837295, + "learning_rate": 1.514723889083434e-05, + "loss": 1.1623, + "step": 2529 + }, + { + "epoch": 0.34819708230112856, + "grad_norm": 1.670771794433263, + "learning_rate": 1.514341678012243e-05, + "loss": 1.1976, + "step": 2530 + }, + { + "epoch": 0.3483347096063859, + "grad_norm": 1.7201049901615104, + "learning_rate": 1.513959364748401e-05, + "loss": 1.088, + "step": 2531 + }, + { + "epoch": 0.34847233691164325, + "grad_norm": 2.0129916412525124, + "learning_rate": 1.5135769493678686e-05, + "loss": 1.047, + "step": 2532 + }, + { + "epoch": 0.34860996421690066, + "grad_norm": 1.7914746768355863, + "learning_rate": 1.5131944319466269e-05, + "loss": 1.1291, + "step": 2533 + }, + { + "epoch": 0.348747591522158, + "grad_norm": 1.807065567567135, + "learning_rate": 1.5128118125606759e-05, + "loss": 1.1191, + "step": 2534 + }, + { + "epoch": 0.34888521882741536, + "grad_norm": 1.5033738128006784, + "learning_rate": 1.5124290912860375e-05, + "loss": 1.14, + "step": 2535 + }, + { + "epoch": 0.3490228461326727, + "grad_norm": 1.804583645489471, + "learning_rate": 1.5120462681987534e-05, + "loss": 1.1359, + "step": 2536 + }, + { + "epoch": 0.3491604734379301, + "grad_norm": 1.7350917328773792, + "learning_rate": 1.5116633433748846e-05, + "loss": 1.0613, + "step": 2537 + }, + { + "epoch": 0.34929810074318746, + "grad_norm": 1.6981971491006336, + "learning_rate": 1.5112803168905133e-05, + "loss": 1.2286, + "step": 2538 + }, + { + "epoch": 0.3494357280484448, + "grad_norm": 1.9266151869593289, + "learning_rate": 1.5108971888217418e-05, + "loss": 1.1776, + "step": 2539 + }, + { + "epoch": 0.34957335535370215, + "grad_norm": 1.6882229497071304, + "learning_rate": 1.5105139592446923e-05, + "loss": 1.1066, + "step": 2540 + }, + { + "epoch": 0.34971098265895956, + "grad_norm": 2.073497695663884, + "learning_rate": 1.5101306282355076e-05, + "loss": 1.1714, + "step": 2541 + }, + { + "epoch": 
0.3498486099642169, + "grad_norm": 1.7834935414058426, + "learning_rate": 1.5097471958703494e-05, + "loss": 1.1002, + "step": 2542 + }, + { + "epoch": 0.34998623726947425, + "grad_norm": 1.696892440493395, + "learning_rate": 1.5093636622254012e-05, + "loss": 1.0941, + "step": 2543 + }, + { + "epoch": 0.3501238645747316, + "grad_norm": 2.011586114652374, + "learning_rate": 1.508980027376866e-05, + "loss": 1.0039, + "step": 2544 + }, + { + "epoch": 0.350261491879989, + "grad_norm": 1.7505746457800913, + "learning_rate": 1.5085962914009667e-05, + "loss": 1.1157, + "step": 2545 + }, + { + "epoch": 0.35039911918524635, + "grad_norm": 1.7380368411938478, + "learning_rate": 1.5082124543739456e-05, + "loss": 1.1612, + "step": 2546 + }, + { + "epoch": 0.3505367464905037, + "grad_norm": 1.886370876527671, + "learning_rate": 1.5078285163720669e-05, + "loss": 1.0449, + "step": 2547 + }, + { + "epoch": 0.35067437379576105, + "grad_norm": 1.6572261346667083, + "learning_rate": 1.5074444774716135e-05, + "loss": 1.0851, + "step": 2548 + }, + { + "epoch": 0.35081200110101846, + "grad_norm": 1.7824365555614288, + "learning_rate": 1.5070603377488885e-05, + "loss": 1.0881, + "step": 2549 + }, + { + "epoch": 0.3509496284062758, + "grad_norm": 1.5846972852212469, + "learning_rate": 1.5066760972802153e-05, + "loss": 1.0211, + "step": 2550 + }, + { + "epoch": 0.35108725571153315, + "grad_norm": 1.72820750993155, + "learning_rate": 1.5062917561419373e-05, + "loss": 1.0832, + "step": 2551 + }, + { + "epoch": 0.35122488301679056, + "grad_norm": 1.6371857352389667, + "learning_rate": 1.5059073144104174e-05, + "loss": 1.1442, + "step": 2552 + }, + { + "epoch": 0.3513625103220479, + "grad_norm": 1.6580463820512277, + "learning_rate": 1.5055227721620394e-05, + "loss": 1.0606, + "step": 2553 + }, + { + "epoch": 0.35150013762730525, + "grad_norm": 1.8394583108849039, + "learning_rate": 1.5051381294732064e-05, + "loss": 1.0887, + "step": 2554 + }, + { + "epoch": 0.3516377649325626, + "grad_norm": 1.8443317433163753, + "learning_rate": 1.5047533864203416e-05, + "loss": 1.093, + "step": 2555 + }, + { + "epoch": 0.35177539223782, + "grad_norm": 2.1114191504614577, + "learning_rate": 1.5043685430798884e-05, + "loss": 1.0976, + "step": 2556 + }, + { + "epoch": 0.35191301954307735, + "grad_norm": 1.6514430053152194, + "learning_rate": 1.5039835995283097e-05, + "loss": 1.0536, + "step": 2557 + }, + { + "epoch": 0.3520506468483347, + "grad_norm": 1.70573544046439, + "learning_rate": 1.5035985558420887e-05, + "loss": 0.9519, + "step": 2558 + }, + { + "epoch": 0.35218827415359205, + "grad_norm": 1.7038618020890826, + "learning_rate": 1.503213412097728e-05, + "loss": 1.0284, + "step": 2559 + }, + { + "epoch": 0.35232590145884946, + "grad_norm": 2.165752814952019, + "learning_rate": 1.5028281683717505e-05, + "loss": 1.0882, + "step": 2560 + }, + { + "epoch": 0.3524635287641068, + "grad_norm": 1.7832828119324977, + "learning_rate": 1.5024428247406992e-05, + "loss": 1.1454, + "step": 2561 + }, + { + "epoch": 0.35260115606936415, + "grad_norm": 1.6488418047142295, + "learning_rate": 1.5020573812811359e-05, + "loss": 0.9611, + "step": 2562 + }, + { + "epoch": 0.3527387833746215, + "grad_norm": 1.6937210807718899, + "learning_rate": 1.5016718380696436e-05, + "loss": 1.076, + "step": 2563 + }, + { + "epoch": 0.3528764106798789, + "grad_norm": 1.9066844014119584, + "learning_rate": 1.5012861951828241e-05, + "loss": 1.1385, + "step": 2564 + }, + { + "epoch": 0.35301403798513625, + "grad_norm": 1.9330330387088697, + "learning_rate": 
1.5009004526972999e-05, + "loss": 1.1068, + "step": 2565 + }, + { + "epoch": 0.3531516652903936, + "grad_norm": 1.7702317599563946, + "learning_rate": 1.5005146106897117e-05, + "loss": 1.1374, + "step": 2566 + }, + { + "epoch": 0.35328929259565095, + "grad_norm": 1.8792050362701709, + "learning_rate": 1.5001286692367221e-05, + "loss": 1.0887, + "step": 2567 + }, + { + "epoch": 0.35342691990090835, + "grad_norm": 1.5790249603244053, + "learning_rate": 1.4997426284150119e-05, + "loss": 1.1345, + "step": 2568 + }, + { + "epoch": 0.3535645472061657, + "grad_norm": 1.7901694071333325, + "learning_rate": 1.4993564883012816e-05, + "loss": 1.1591, + "step": 2569 + }, + { + "epoch": 0.35370217451142305, + "grad_norm": 1.6620783060249198, + "learning_rate": 1.498970248972253e-05, + "loss": 1.1259, + "step": 2570 + }, + { + "epoch": 0.35383980181668045, + "grad_norm": 1.8469942506964478, + "learning_rate": 1.4985839105046663e-05, + "loss": 1.1577, + "step": 2571 + }, + { + "epoch": 0.3539774291219378, + "grad_norm": 1.7822492089898467, + "learning_rate": 1.498197472975281e-05, + "loss": 1.0662, + "step": 2572 + }, + { + "epoch": 0.35411505642719515, + "grad_norm": 1.8669572595714818, + "learning_rate": 1.4978109364608777e-05, + "loss": 0.9972, + "step": 2573 + }, + { + "epoch": 0.3542526837324525, + "grad_norm": 2.0359710317843147, + "learning_rate": 1.4974243010382554e-05, + "loss": 1.0561, + "step": 2574 + }, + { + "epoch": 0.3543903110377099, + "grad_norm": 1.5176717488190814, + "learning_rate": 1.4970375667842336e-05, + "loss": 1.0406, + "step": 2575 + }, + { + "epoch": 0.35452793834296725, + "grad_norm": 1.6240185999513308, + "learning_rate": 1.4966507337756511e-05, + "loss": 1.0974, + "step": 2576 + }, + { + "epoch": 0.3546655656482246, + "grad_norm": 1.7107033626694317, + "learning_rate": 1.4962638020893659e-05, + "loss": 1.105, + "step": 2577 + }, + { + "epoch": 0.35480319295348195, + "grad_norm": 1.6202668149389496, + "learning_rate": 1.4958767718022565e-05, + "loss": 1.0086, + "step": 2578 + }, + { + "epoch": 0.35494082025873935, + "grad_norm": 1.8435129321440153, + "learning_rate": 1.4954896429912205e-05, + "loss": 1.1273, + "step": 2579 + }, + { + "epoch": 0.3550784475639967, + "grad_norm": 1.943671577885704, + "learning_rate": 1.4951024157331749e-05, + "loss": 1.1715, + "step": 2580 + }, + { + "epoch": 0.35521607486925405, + "grad_norm": 1.634725265708131, + "learning_rate": 1.4947150901050564e-05, + "loss": 1.0844, + "step": 2581 + }, + { + "epoch": 0.3553537021745114, + "grad_norm": 1.4985648750613396, + "learning_rate": 1.4943276661838215e-05, + "loss": 1.0894, + "step": 2582 + }, + { + "epoch": 0.3554913294797688, + "grad_norm": 1.710513708096674, + "learning_rate": 1.4939401440464458e-05, + "loss": 1.0518, + "step": 2583 + }, + { + "epoch": 0.35562895678502615, + "grad_norm": 1.5630120020357812, + "learning_rate": 1.4935525237699249e-05, + "loss": 1.0395, + "step": 2584 + }, + { + "epoch": 0.3557665840902835, + "grad_norm": 1.6414020471874435, + "learning_rate": 1.4931648054312736e-05, + "loss": 1.0952, + "step": 2585 + }, + { + "epoch": 0.35590421139554085, + "grad_norm": 1.673189722639924, + "learning_rate": 1.4927769891075259e-05, + "loss": 1.113, + "step": 2586 + }, + { + "epoch": 0.35604183870079825, + "grad_norm": 1.5703468713576496, + "learning_rate": 1.492389074875736e-05, + "loss": 1.1234, + "step": 2587 + }, + { + "epoch": 0.3561794660060556, + "grad_norm": 1.7437648144420366, + "learning_rate": 1.4920010628129769e-05, + "loss": 1.0545, + "step": 2588 + }, + { + "epoch": 
0.35631709331131295, + "grad_norm": 1.6271766876597584, + "learning_rate": 1.4916129529963415e-05, + "loss": 1.1056, + "step": 2589 + }, + { + "epoch": 0.35645472061657035, + "grad_norm": 1.4760560745621767, + "learning_rate": 1.4912247455029417e-05, + "loss": 1.0833, + "step": 2590 + }, + { + "epoch": 0.3565923479218277, + "grad_norm": 1.5875325526078738, + "learning_rate": 1.4908364404099093e-05, + "loss": 1.0419, + "step": 2591 + }, + { + "epoch": 0.35672997522708505, + "grad_norm": 1.6065080917246648, + "learning_rate": 1.490448037794395e-05, + "loss": 1.1092, + "step": 2592 + }, + { + "epoch": 0.3568676025323424, + "grad_norm": 1.5974413367525926, + "learning_rate": 1.4900595377335686e-05, + "loss": 1.0983, + "step": 2593 + }, + { + "epoch": 0.3570052298375998, + "grad_norm": 1.795231911589888, + "learning_rate": 1.4896709403046205e-05, + "loss": 1.1178, + "step": 2594 + }, + { + "epoch": 0.35714285714285715, + "grad_norm": 1.7840239803686166, + "learning_rate": 1.4892822455847595e-05, + "loss": 1.0901, + "step": 2595 + }, + { + "epoch": 0.3572804844481145, + "grad_norm": 1.6946752760523136, + "learning_rate": 1.4888934536512141e-05, + "loss": 1.0563, + "step": 2596 + }, + { + "epoch": 0.35741811175337185, + "grad_norm": 1.5866023653977352, + "learning_rate": 1.4885045645812316e-05, + "loss": 1.0079, + "step": 2597 + }, + { + "epoch": 0.35755573905862925, + "grad_norm": 1.6744673647122899, + "learning_rate": 1.4881155784520789e-05, + "loss": 1.0824, + "step": 2598 + }, + { + "epoch": 0.3576933663638866, + "grad_norm": 1.665295169616684, + "learning_rate": 1.4877264953410421e-05, + "loss": 1.0389, + "step": 2599 + }, + { + "epoch": 0.35783099366914395, + "grad_norm": 1.684963794945681, + "learning_rate": 1.4873373153254271e-05, + "loss": 1.0438, + "step": 2600 + }, + { + "epoch": 0.3579686209744013, + "grad_norm": 1.7841742658221174, + "learning_rate": 1.4869480384825585e-05, + "loss": 1.1275, + "step": 2601 + }, + { + "epoch": 0.3581062482796587, + "grad_norm": 1.9192726378346312, + "learning_rate": 1.4865586648897803e-05, + "loss": 1.2261, + "step": 2602 + }, + { + "epoch": 0.35824387558491605, + "grad_norm": 1.7642299636910759, + "learning_rate": 1.4861691946244553e-05, + "loss": 1.0499, + "step": 2603 + }, + { + "epoch": 0.3583815028901734, + "grad_norm": 1.8940935537328831, + "learning_rate": 1.4857796277639666e-05, + "loss": 1.1499, + "step": 2604 + }, + { + "epoch": 0.35851913019543075, + "grad_norm": 1.871691132191385, + "learning_rate": 1.4853899643857154e-05, + "loss": 1.039, + "step": 2605 + }, + { + "epoch": 0.35865675750068815, + "grad_norm": 1.878289580016883, + "learning_rate": 1.4850002045671228e-05, + "loss": 1.1044, + "step": 2606 + }, + { + "epoch": 0.3587943848059455, + "grad_norm": 1.724030358282651, + "learning_rate": 1.484610348385628e-05, + "loss": 1.0647, + "step": 2607 + }, + { + "epoch": 0.35893201211120285, + "grad_norm": 1.635979180951212, + "learning_rate": 1.4842203959186908e-05, + "loss": 1.0735, + "step": 2608 + }, + { + "epoch": 0.35906963941646025, + "grad_norm": 1.5928935356850182, + "learning_rate": 1.4838303472437891e-05, + "loss": 1.0673, + "step": 2609 + }, + { + "epoch": 0.3592072667217176, + "grad_norm": 1.5206395878957337, + "learning_rate": 1.4834402024384205e-05, + "loss": 0.9957, + "step": 2610 + }, + { + "epoch": 0.35934489402697495, + "grad_norm": 1.4998054938655745, + "learning_rate": 1.483049961580101e-05, + "loss": 0.9433, + "step": 2611 + }, + { + "epoch": 0.3594825213322323, + "grad_norm": 1.531581674235957, + "learning_rate": 
1.4826596247463668e-05, + "loss": 1.0538, + "step": 2612 + }, + { + "epoch": 0.3596201486374897, + "grad_norm": 1.7723770086920334, + "learning_rate": 1.4822691920147718e-05, + "loss": 1.0298, + "step": 2613 + }, + { + "epoch": 0.35975777594274705, + "grad_norm": 1.5916166160419218, + "learning_rate": 1.4818786634628898e-05, + "loss": 1.1158, + "step": 2614 + }, + { + "epoch": 0.3598954032480044, + "grad_norm": 1.887287108507014, + "learning_rate": 1.4814880391683135e-05, + "loss": 1.1768, + "step": 2615 + }, + { + "epoch": 0.36003303055326175, + "grad_norm": 1.7066316915388042, + "learning_rate": 1.481097319208655e-05, + "loss": 1.0815, + "step": 2616 + }, + { + "epoch": 0.36017065785851915, + "grad_norm": 1.957956831013734, + "learning_rate": 1.4807065036615442e-05, + "loss": 1.1028, + "step": 2617 + }, + { + "epoch": 0.3603082851637765, + "grad_norm": 1.7748079524560387, + "learning_rate": 1.4803155926046318e-05, + "loss": 1.1503, + "step": 2618 + }, + { + "epoch": 0.36044591246903385, + "grad_norm": 1.5239821242872063, + "learning_rate": 1.4799245861155855e-05, + "loss": 1.1124, + "step": 2619 + }, + { + "epoch": 0.3605835397742912, + "grad_norm": 1.7541778112931568, + "learning_rate": 1.4795334842720936e-05, + "loss": 0.9751, + "step": 2620 + }, + { + "epoch": 0.3607211670795486, + "grad_norm": 1.6525513042127815, + "learning_rate": 1.4791422871518626e-05, + "loss": 1.059, + "step": 2621 + }, + { + "epoch": 0.36085879438480595, + "grad_norm": 1.830757201850349, + "learning_rate": 1.4787509948326177e-05, + "loss": 1.1646, + "step": 2622 + }, + { + "epoch": 0.3609964216900633, + "grad_norm": 1.6778658624253295, + "learning_rate": 1.4783596073921036e-05, + "loss": 1.0811, + "step": 2623 + }, + { + "epoch": 0.36113404899532064, + "grad_norm": 1.7810316392226975, + "learning_rate": 1.4779681249080833e-05, + "loss": 1.0626, + "step": 2624 + }, + { + "epoch": 0.36127167630057805, + "grad_norm": 1.9080938471413562, + "learning_rate": 1.4775765474583396e-05, + "loss": 1.0933, + "step": 2625 + }, + { + "epoch": 0.3614093036058354, + "grad_norm": 1.6805930538822762, + "learning_rate": 1.477184875120673e-05, + "loss": 1.008, + "step": 2626 + }, + { + "epoch": 0.36154693091109275, + "grad_norm": 1.6703255710796603, + "learning_rate": 1.4767931079729039e-05, + "loss": 1.0727, + "step": 2627 + }, + { + "epoch": 0.36168455821635015, + "grad_norm": 1.6431299437256073, + "learning_rate": 1.4764012460928707e-05, + "loss": 1.0769, + "step": 2628 + }, + { + "epoch": 0.3618221855216075, + "grad_norm": 1.6090280267592296, + "learning_rate": 1.4760092895584313e-05, + "loss": 1.0503, + "step": 2629 + }, + { + "epoch": 0.36195981282686485, + "grad_norm": 1.7262636610324742, + "learning_rate": 1.4756172384474615e-05, + "loss": 1.1083, + "step": 2630 + }, + { + "epoch": 0.3620974401321222, + "grad_norm": 1.573042589490139, + "learning_rate": 1.475225092837857e-05, + "loss": 1.0369, + "step": 2631 + }, + { + "epoch": 0.3622350674373796, + "grad_norm": 2.0414580062299796, + "learning_rate": 1.4748328528075319e-05, + "loss": 1.0732, + "step": 2632 + }, + { + "epoch": 0.36237269474263695, + "grad_norm": 1.6210084710729677, + "learning_rate": 1.4744405184344186e-05, + "loss": 1.0047, + "step": 2633 + }, + { + "epoch": 0.3625103220478943, + "grad_norm": 1.5762925021621306, + "learning_rate": 1.4740480897964689e-05, + "loss": 1.0223, + "step": 2634 + }, + { + "epoch": 0.36264794935315164, + "grad_norm": 1.7067236136661312, + "learning_rate": 1.4736555669716526e-05, + "loss": 1.1422, + "step": 2635 + }, + { + "epoch": 
0.36278557665840905, + "grad_norm": 1.7583200549687568, + "learning_rate": 1.4732629500379587e-05, + "loss": 1.0644, + "step": 2636 + }, + { + "epoch": 0.3629232039636664, + "grad_norm": 1.644533056988231, + "learning_rate": 1.472870239073395e-05, + "loss": 0.9271, + "step": 2637 + }, + { + "epoch": 0.36306083126892374, + "grad_norm": 1.8224029414932235, + "learning_rate": 1.4724774341559875e-05, + "loss": 1.0446, + "step": 2638 + }, + { + "epoch": 0.3631984585741811, + "grad_norm": 1.5900859912806318, + "learning_rate": 1.4720845353637815e-05, + "loss": 1.0504, + "step": 2639 + }, + { + "epoch": 0.3633360858794385, + "grad_norm": 1.7662328517331134, + "learning_rate": 1.4716915427748402e-05, + "loss": 1.151, + "step": 2640 + }, + { + "epoch": 0.36347371318469585, + "grad_norm": 1.4890588213570892, + "learning_rate": 1.4712984564672464e-05, + "loss": 1.0398, + "step": 2641 + }, + { + "epoch": 0.3636113404899532, + "grad_norm": 1.7067636092422127, + "learning_rate": 1.4709052765191006e-05, + "loss": 1.0662, + "step": 2642 + }, + { + "epoch": 0.36374896779521054, + "grad_norm": 1.8491287827961467, + "learning_rate": 1.4705120030085223e-05, + "loss": 1.0845, + "step": 2643 + }, + { + "epoch": 0.36388659510046795, + "grad_norm": 1.5339910308990317, + "learning_rate": 1.4701186360136497e-05, + "loss": 0.9987, + "step": 2644 + }, + { + "epoch": 0.3640242224057253, + "grad_norm": 2.1504107599847564, + "learning_rate": 1.4697251756126397e-05, + "loss": 1.2341, + "step": 2645 + }, + { + "epoch": 0.36416184971098264, + "grad_norm": 1.717175606320573, + "learning_rate": 1.4693316218836663e-05, + "loss": 1.0928, + "step": 2646 + }, + { + "epoch": 0.36429947701624005, + "grad_norm": 1.8049648056289809, + "learning_rate": 1.4689379749049246e-05, + "loss": 1.0742, + "step": 2647 + }, + { + "epoch": 0.3644371043214974, + "grad_norm": 1.589659055025965, + "learning_rate": 1.4685442347546261e-05, + "loss": 1.0916, + "step": 2648 + }, + { + "epoch": 0.36457473162675474, + "grad_norm": 1.6608028096883132, + "learning_rate": 1.4681504015110022e-05, + "loss": 1.0492, + "step": 2649 + }, + { + "epoch": 0.3647123589320121, + "grad_norm": 1.7923432516085307, + "learning_rate": 1.4677564752523017e-05, + "loss": 1.009, + "step": 2650 + }, + { + "epoch": 0.3648499862372695, + "grad_norm": 1.6802524359420248, + "learning_rate": 1.4673624560567923e-05, + "loss": 1.0115, + "step": 2651 + }, + { + "epoch": 0.36498761354252685, + "grad_norm": 1.9459191122265007, + "learning_rate": 1.4669683440027605e-05, + "loss": 1.155, + "step": 2652 + }, + { + "epoch": 0.3651252408477842, + "grad_norm": 1.655905159050617, + "learning_rate": 1.4665741391685107e-05, + "loss": 1.0135, + "step": 2653 + }, + { + "epoch": 0.36526286815304154, + "grad_norm": 1.7859311097230404, + "learning_rate": 1.4661798416323663e-05, + "loss": 1.0839, + "step": 2654 + }, + { + "epoch": 0.36540049545829895, + "grad_norm": 1.8265440261483294, + "learning_rate": 1.4657854514726684e-05, + "loss": 1.0556, + "step": 2655 + }, + { + "epoch": 0.3655381227635563, + "grad_norm": 1.7259038858999736, + "learning_rate": 1.4653909687677774e-05, + "loss": 1.047, + "step": 2656 + }, + { + "epoch": 0.36567575006881364, + "grad_norm": 1.9938766398304408, + "learning_rate": 1.4649963935960715e-05, + "loss": 1.2317, + "step": 2657 + }, + { + "epoch": 0.365813377374071, + "grad_norm": 1.6548834275409612, + "learning_rate": 1.4646017260359471e-05, + "loss": 1.0858, + "step": 2658 + }, + { + "epoch": 0.3659510046793284, + "grad_norm": 2.0066448541230515, + "learning_rate": 
1.4642069661658196e-05, + "loss": 1.0508, + "step": 2659 + }, + { + "epoch": 0.36608863198458574, + "grad_norm": 1.8659325501224993, + "learning_rate": 1.4638121140641222e-05, + "loss": 1.1169, + "step": 2660 + }, + { + "epoch": 0.3662262592898431, + "grad_norm": 1.631262011542562, + "learning_rate": 1.4634171698093068e-05, + "loss": 1.1514, + "step": 2661 + }, + { + "epoch": 0.36636388659510044, + "grad_norm": 1.4092484058028352, + "learning_rate": 1.4630221334798429e-05, + "loss": 1.1173, + "step": 2662 + }, + { + "epoch": 0.36650151390035784, + "grad_norm": 1.6424629596241145, + "learning_rate": 1.4626270051542192e-05, + "loss": 1.0597, + "step": 2663 + }, + { + "epoch": 0.3666391412056152, + "grad_norm": 1.8546990843944755, + "learning_rate": 1.4622317849109424e-05, + "loss": 1.0112, + "step": 2664 + }, + { + "epoch": 0.36677676851087254, + "grad_norm": 1.6589268606594818, + "learning_rate": 1.4618364728285373e-05, + "loss": 1.0415, + "step": 2665 + }, + { + "epoch": 0.36691439581612995, + "grad_norm": 1.5620239924072246, + "learning_rate": 1.4614410689855466e-05, + "loss": 1.0426, + "step": 2666 + }, + { + "epoch": 0.3670520231213873, + "grad_norm": 1.7015526917494908, + "learning_rate": 1.4610455734605323e-05, + "loss": 1.025, + "step": 2667 + }, + { + "epoch": 0.36718965042664464, + "grad_norm": 1.7539882110488587, + "learning_rate": 1.4606499863320734e-05, + "loss": 1.0856, + "step": 2668 + }, + { + "epoch": 0.367327277731902, + "grad_norm": 1.581280048335631, + "learning_rate": 1.460254307678768e-05, + "loss": 1.0486, + "step": 2669 + }, + { + "epoch": 0.3674649050371594, + "grad_norm": 1.667483074016372, + "learning_rate": 1.4598585375792315e-05, + "loss": 1.0789, + "step": 2670 + }, + { + "epoch": 0.36760253234241674, + "grad_norm": 1.755703790455354, + "learning_rate": 1.4594626761120983e-05, + "loss": 1.1694, + "step": 2671 + }, + { + "epoch": 0.3677401596476741, + "grad_norm": 1.8226280823619685, + "learning_rate": 1.459066723356021e-05, + "loss": 1.1164, + "step": 2672 + }, + { + "epoch": 0.36787778695293144, + "grad_norm": 1.5699803951107298, + "learning_rate": 1.4586706793896697e-05, + "loss": 1.0819, + "step": 2673 + }, + { + "epoch": 0.36801541425818884, + "grad_norm": 1.736392590840252, + "learning_rate": 1.4582745442917328e-05, + "loss": 1.0896, + "step": 2674 + }, + { + "epoch": 0.3681530415634462, + "grad_norm": 1.5171324061137426, + "learning_rate": 1.457878318140917e-05, + "loss": 1.0916, + "step": 2675 + }, + { + "epoch": 0.36829066886870354, + "grad_norm": 1.536076214682531, + "learning_rate": 1.4574820010159474e-05, + "loss": 1.0559, + "step": 2676 + }, + { + "epoch": 0.3684282961739609, + "grad_norm": 1.778050361389024, + "learning_rate": 1.4570855929955663e-05, + "loss": 1.0534, + "step": 2677 + }, + { + "epoch": 0.3685659234792183, + "grad_norm": 1.6377152929715204, + "learning_rate": 1.4566890941585345e-05, + "loss": 1.0626, + "step": 2678 + }, + { + "epoch": 0.36870355078447564, + "grad_norm": 1.5399938482312343, + "learning_rate": 1.4562925045836311e-05, + "loss": 1.0261, + "step": 2679 + }, + { + "epoch": 0.368841178089733, + "grad_norm": 1.6147069572120216, + "learning_rate": 1.4558958243496533e-05, + "loss": 0.9829, + "step": 2680 + }, + { + "epoch": 0.36897880539499034, + "grad_norm": 1.7144424287514364, + "learning_rate": 1.4554990535354155e-05, + "loss": 1.0636, + "step": 2681 + }, + { + "epoch": 0.36911643270024774, + "grad_norm": 1.5566125722011692, + "learning_rate": 1.4551021922197508e-05, + "loss": 0.9682, + "step": 2682 + }, + { + "epoch": 
0.3692540600055051, + "grad_norm": 1.6788545482416568, + "learning_rate": 1.4547052404815103e-05, + "loss": 1.0782, + "step": 2683 + }, + { + "epoch": 0.36939168731076244, + "grad_norm": 1.6675383647995132, + "learning_rate": 1.4543081983995628e-05, + "loss": 0.9954, + "step": 2684 + }, + { + "epoch": 0.36952931461601984, + "grad_norm": 1.9066348937060653, + "learning_rate": 1.4539110660527951e-05, + "loss": 1.0843, + "step": 2685 + }, + { + "epoch": 0.3696669419212772, + "grad_norm": 1.6736938174409826, + "learning_rate": 1.4535138435201117e-05, + "loss": 0.9869, + "step": 2686 + }, + { + "epoch": 0.36980456922653454, + "grad_norm": 1.8308988350622064, + "learning_rate": 1.4531165308804355e-05, + "loss": 1.05, + "step": 2687 + }, + { + "epoch": 0.3699421965317919, + "grad_norm": 1.6582123061784981, + "learning_rate": 1.4527191282127074e-05, + "loss": 1.0798, + "step": 2688 + }, + { + "epoch": 0.3700798238370493, + "grad_norm": 1.7301101651851614, + "learning_rate": 1.4523216355958854e-05, + "loss": 1.0827, + "step": 2689 + }, + { + "epoch": 0.37021745114230664, + "grad_norm": 1.760665834637587, + "learning_rate": 1.451924053108946e-05, + "loss": 1.1413, + "step": 2690 + }, + { + "epoch": 0.370355078447564, + "grad_norm": 1.7619600069021069, + "learning_rate": 1.4515263808308837e-05, + "loss": 1.004, + "step": 2691 + }, + { + "epoch": 0.37049270575282134, + "grad_norm": 1.614914668217245, + "learning_rate": 1.4511286188407098e-05, + "loss": 1.114, + "step": 2692 + }, + { + "epoch": 0.37063033305807874, + "grad_norm": 1.7318183652847423, + "learning_rate": 1.450730767217455e-05, + "loss": 1.0979, + "step": 2693 + }, + { + "epoch": 0.3707679603633361, + "grad_norm": 1.7269469890493767, + "learning_rate": 1.4503328260401664e-05, + "loss": 1.1091, + "step": 2694 + }, + { + "epoch": 0.37090558766859344, + "grad_norm": 1.7036114883754234, + "learning_rate": 1.4499347953879097e-05, + "loss": 1.0276, + "step": 2695 + }, + { + "epoch": 0.3710432149738508, + "grad_norm": 1.5626689541007812, + "learning_rate": 1.4495366753397682e-05, + "loss": 1.0595, + "step": 2696 + }, + { + "epoch": 0.3711808422791082, + "grad_norm": 1.699743699958119, + "learning_rate": 1.4491384659748426e-05, + "loss": 1.075, + "step": 2697 + }, + { + "epoch": 0.37131846958436554, + "grad_norm": 1.8542358243470682, + "learning_rate": 1.4487401673722519e-05, + "loss": 1.0732, + "step": 2698 + }, + { + "epoch": 0.3714560968896229, + "grad_norm": 2.117585076097301, + "learning_rate": 1.4483417796111325e-05, + "loss": 1.0227, + "step": 2699 + }, + { + "epoch": 0.37159372419488024, + "grad_norm": 1.5997842502538744, + "learning_rate": 1.4479433027706386e-05, + "loss": 1.0447, + "step": 2700 + }, + { + "epoch": 0.37173135150013764, + "grad_norm": 1.679174527896832, + "learning_rate": 1.4475447369299422e-05, + "loss": 1.0579, + "step": 2701 + }, + { + "epoch": 0.371868978805395, + "grad_norm": 1.7639032047294902, + "learning_rate": 1.4471460821682328e-05, + "loss": 1.0975, + "step": 2702 + }, + { + "epoch": 0.37200660611065234, + "grad_norm": 1.683605772726475, + "learning_rate": 1.4467473385647175e-05, + "loss": 1.0886, + "step": 2703 + }, + { + "epoch": 0.37214423341590974, + "grad_norm": 1.6505623083120022, + "learning_rate": 1.4463485061986214e-05, + "loss": 1.001, + "step": 2704 + }, + { + "epoch": 0.3722818607211671, + "grad_norm": 1.9515320732039045, + "learning_rate": 1.4459495851491872e-05, + "loss": 1.0655, + "step": 2705 + }, + { + "epoch": 0.37241948802642444, + "grad_norm": 1.943275562019624, + "learning_rate": 
1.4455505754956745e-05, + "loss": 1.1811, + "step": 2706 + }, + { + "epoch": 0.3725571153316818, + "grad_norm": 1.7815349334314485, + "learning_rate": 1.4451514773173614e-05, + "loss": 1.0753, + "step": 2707 + }, + { + "epoch": 0.3726947426369392, + "grad_norm": 1.5888572858352863, + "learning_rate": 1.4447522906935434e-05, + "loss": 1.0838, + "step": 2708 + }, + { + "epoch": 0.37283236994219654, + "grad_norm": 1.6564166331856693, + "learning_rate": 1.4443530157035333e-05, + "loss": 1.0038, + "step": 2709 + }, + { + "epoch": 0.3729699972474539, + "grad_norm": 1.5347079640620014, + "learning_rate": 1.4439536524266612e-05, + "loss": 1.0009, + "step": 2710 + }, + { + "epoch": 0.37310762455271124, + "grad_norm": 1.7732853316036703, + "learning_rate": 1.4435542009422756e-05, + "loss": 1.1285, + "step": 2711 + }, + { + "epoch": 0.37324525185796864, + "grad_norm": 1.64511553735762, + "learning_rate": 1.4431546613297417e-05, + "loss": 1.1137, + "step": 2712 + }, + { + "epoch": 0.373382879163226, + "grad_norm": 1.6552405033050148, + "learning_rate": 1.442755033668443e-05, + "loss": 1.053, + "step": 2713 + }, + { + "epoch": 0.37352050646848334, + "grad_norm": 1.6197243745606467, + "learning_rate": 1.4423553180377796e-05, + "loss": 1.1038, + "step": 2714 + }, + { + "epoch": 0.3736581337737407, + "grad_norm": 1.7433856046695835, + "learning_rate": 1.4419555145171698e-05, + "loss": 1.1624, + "step": 2715 + }, + { + "epoch": 0.3737957610789981, + "grad_norm": 1.578169137719525, + "learning_rate": 1.4415556231860488e-05, + "loss": 1.0756, + "step": 2716 + }, + { + "epoch": 0.37393338838425544, + "grad_norm": 1.801972526690994, + "learning_rate": 1.4411556441238697e-05, + "loss": 1.106, + "step": 2717 + }, + { + "epoch": 0.3740710156895128, + "grad_norm": 1.6397145112477818, + "learning_rate": 1.440755577410103e-05, + "loss": 1.0424, + "step": 2718 + }, + { + "epoch": 0.37420864299477014, + "grad_norm": 1.815560730118826, + "learning_rate": 1.4403554231242364e-05, + "loss": 1.0831, + "step": 2719 + }, + { + "epoch": 0.37434627030002754, + "grad_norm": 1.6838961062581774, + "learning_rate": 1.4399551813457748e-05, + "loss": 1.0109, + "step": 2720 + }, + { + "epoch": 0.3744838976052849, + "grad_norm": 1.8267033249057691, + "learning_rate": 1.4395548521542413e-05, + "loss": 1.0455, + "step": 2721 + }, + { + "epoch": 0.37462152491054224, + "grad_norm": 1.6141987353417335, + "learning_rate": 1.4391544356291753e-05, + "loss": 1.1115, + "step": 2722 + }, + { + "epoch": 0.37475915221579964, + "grad_norm": 1.6274481251911723, + "learning_rate": 1.4387539318501347e-05, + "loss": 1.0239, + "step": 2723 + }, + { + "epoch": 0.374896779521057, + "grad_norm": 1.831046792432533, + "learning_rate": 1.4383533408966934e-05, + "loss": 1.1183, + "step": 2724 + }, + { + "epoch": 0.37503440682631434, + "grad_norm": 1.5506975667502614, + "learning_rate": 1.4379526628484437e-05, + "loss": 1.0708, + "step": 2725 + }, + { + "epoch": 0.3751720341315717, + "grad_norm": 1.746287815930841, + "learning_rate": 1.4375518977849946e-05, + "loss": 0.9987, + "step": 2726 + }, + { + "epoch": 0.3753096614368291, + "grad_norm": 1.829548509442392, + "learning_rate": 1.4371510457859731e-05, + "loss": 1.1004, + "step": 2727 + }, + { + "epoch": 0.37544728874208644, + "grad_norm": 1.6795355007672779, + "learning_rate": 1.4367501069310228e-05, + "loss": 1.1069, + "step": 2728 + }, + { + "epoch": 0.3755849160473438, + "grad_norm": 1.7684451442028153, + "learning_rate": 1.4363490812998047e-05, + "loss": 1.0346, + "step": 2729 + }, + { + "epoch": 
0.37572254335260113, + "grad_norm": 2.003673527078075, + "learning_rate": 1.435947968971997e-05, + "loss": 1.0333, + "step": 2730 + }, + { + "epoch": 0.37586017065785854, + "grad_norm": 1.7532826668826607, + "learning_rate": 1.4355467700272954e-05, + "loss": 1.0591, + "step": 2731 + }, + { + "epoch": 0.3759977979631159, + "grad_norm": 1.898964115619472, + "learning_rate": 1.4351454845454124e-05, + "loss": 1.1188, + "step": 2732 + }, + { + "epoch": 0.37613542526837324, + "grad_norm": 1.6873958809582708, + "learning_rate": 1.4347441126060781e-05, + "loss": 1.1205, + "step": 2733 + }, + { + "epoch": 0.3762730525736306, + "grad_norm": 1.8417897535980432, + "learning_rate": 1.4343426542890399e-05, + "loss": 1.043, + "step": 2734 + }, + { + "epoch": 0.376410679878888, + "grad_norm": 1.737020769548892, + "learning_rate": 1.4339411096740616e-05, + "loss": 1.0801, + "step": 2735 + }, + { + "epoch": 0.37654830718414534, + "grad_norm": 1.8300389670846584, + "learning_rate": 1.433539478840925e-05, + "loss": 1.1405, + "step": 2736 + }, + { + "epoch": 0.3766859344894027, + "grad_norm": 1.7352009229303242, + "learning_rate": 1.4331377618694285e-05, + "loss": 1.0375, + "step": 2737 + }, + { + "epoch": 0.37682356179466003, + "grad_norm": 1.6667345961905546, + "learning_rate": 1.4327359588393873e-05, + "loss": 1.0458, + "step": 2738 + }, + { + "epoch": 0.37696118909991744, + "grad_norm": 1.7307121135951675, + "learning_rate": 1.4323340698306352e-05, + "loss": 1.0267, + "step": 2739 + }, + { + "epoch": 0.3770988164051748, + "grad_norm": 1.7501165795674134, + "learning_rate": 1.4319320949230208e-05, + "loss": 1.0677, + "step": 2740 + }, + { + "epoch": 0.37723644371043213, + "grad_norm": 1.690952823609567, + "learning_rate": 1.4315300341964123e-05, + "loss": 1.0815, + "step": 2741 + }, + { + "epoch": 0.37737407101568954, + "grad_norm": 1.6648265684785413, + "learning_rate": 1.4311278877306924e-05, + "loss": 1.0714, + "step": 2742 + }, + { + "epoch": 0.3775116983209469, + "grad_norm": 1.6880798781078672, + "learning_rate": 1.4307256556057632e-05, + "loss": 1.0125, + "step": 2743 + }, + { + "epoch": 0.37764932562620424, + "grad_norm": 1.7954920754417787, + "learning_rate": 1.430323337901542e-05, + "loss": 0.9859, + "step": 2744 + }, + { + "epoch": 0.3777869529314616, + "grad_norm": 1.480765059646714, + "learning_rate": 1.4299209346979639e-05, + "loss": 1.0889, + "step": 2745 + }, + { + "epoch": 0.377924580236719, + "grad_norm": 1.7492060848309057, + "learning_rate": 1.4295184460749814e-05, + "loss": 1.0927, + "step": 2746 + }, + { + "epoch": 0.37806220754197634, + "grad_norm": 1.5518978766837375, + "learning_rate": 1.4291158721125626e-05, + "loss": 1.1561, + "step": 2747 + }, + { + "epoch": 0.3781998348472337, + "grad_norm": 1.7417085673571024, + "learning_rate": 1.4287132128906942e-05, + "loss": 1.0612, + "step": 2748 + }, + { + "epoch": 0.37833746215249103, + "grad_norm": 1.7318995094554988, + "learning_rate": 1.428310468489379e-05, + "loss": 1.0131, + "step": 2749 + }, + { + "epoch": 0.37847508945774844, + "grad_norm": 1.6985233594886642, + "learning_rate": 1.4279076389886367e-05, + "loss": 1.0654, + "step": 2750 + }, + { + "epoch": 0.3786127167630058, + "grad_norm": 1.8428072274348712, + "learning_rate": 1.4275047244685038e-05, + "loss": 1.0293, + "step": 2751 + }, + { + "epoch": 0.37875034406826313, + "grad_norm": 1.7001588773882064, + "learning_rate": 1.427101725009034e-05, + "loss": 1.0942, + "step": 2752 + }, + { + "epoch": 0.3788879713735205, + "grad_norm": 1.608190275162232, + "learning_rate": 
1.4266986406902979e-05, + "loss": 1.0662, + "step": 2753 + }, + { + "epoch": 0.3790255986787779, + "grad_norm": 1.8930520339351489, + "learning_rate": 1.4262954715923827e-05, + "loss": 1.1336, + "step": 2754 + }, + { + "epoch": 0.37916322598403523, + "grad_norm": 1.7215958220729648, + "learning_rate": 1.4258922177953926e-05, + "loss": 1.0686, + "step": 2755 + }, + { + "epoch": 0.3793008532892926, + "grad_norm": 1.5098061738235085, + "learning_rate": 1.425488879379449e-05, + "loss": 1.023, + "step": 2756 + }, + { + "epoch": 0.37943848059454993, + "grad_norm": 1.8727323405998264, + "learning_rate": 1.425085456424689e-05, + "loss": 1.1194, + "step": 2757 + }, + { + "epoch": 0.37957610789980734, + "grad_norm": 1.4334432737626632, + "learning_rate": 1.4246819490112678e-05, + "loss": 1.0426, + "step": 2758 + }, + { + "epoch": 0.3797137352050647, + "grad_norm": 1.8418156007421456, + "learning_rate": 1.4242783572193565e-05, + "loss": 1.083, + "step": 2759 + }, + { + "epoch": 0.37985136251032203, + "grad_norm": 1.7126767918685382, + "learning_rate": 1.4238746811291435e-05, + "loss": 1.1568, + "step": 2760 + }, + { + "epoch": 0.37998898981557944, + "grad_norm": 1.630191889150529, + "learning_rate": 1.4234709208208334e-05, + "loss": 1.058, + "step": 2761 + }, + { + "epoch": 0.3801266171208368, + "grad_norm": 1.8541846666799495, + "learning_rate": 1.4230670763746482e-05, + "loss": 1.1455, + "step": 2762 + }, + { + "epoch": 0.38026424442609413, + "grad_norm": 1.4366581174352853, + "learning_rate": 1.422663147870826e-05, + "loss": 1.1075, + "step": 2763 + }, + { + "epoch": 0.3804018717313515, + "grad_norm": 1.6530930004666824, + "learning_rate": 1.4222591353896215e-05, + "loss": 1.0199, + "step": 2764 + }, + { + "epoch": 0.3805394990366089, + "grad_norm": 1.825800254316283, + "learning_rate": 1.4218550390113075e-05, + "loss": 1.0724, + "step": 2765 + }, + { + "epoch": 0.38067712634186623, + "grad_norm": 1.5661996117201764, + "learning_rate": 1.4214508588161716e-05, + "loss": 1.0288, + "step": 2766 + }, + { + "epoch": 0.3808147536471236, + "grad_norm": 1.7386178695867043, + "learning_rate": 1.4210465948845191e-05, + "loss": 1.1111, + "step": 2767 + }, + { + "epoch": 0.38095238095238093, + "grad_norm": 1.787246169988825, + "learning_rate": 1.4206422472966716e-05, + "loss": 1.0421, + "step": 2768 + }, + { + "epoch": 0.38109000825763834, + "grad_norm": 1.8136275569864353, + "learning_rate": 1.4202378161329678e-05, + "loss": 1.0939, + "step": 2769 + }, + { + "epoch": 0.3812276355628957, + "grad_norm": 1.6482131482543099, + "learning_rate": 1.4198333014737619e-05, + "loss": 1.1085, + "step": 2770 + }, + { + "epoch": 0.38136526286815303, + "grad_norm": 1.9265813662398645, + "learning_rate": 1.4194287033994263e-05, + "loss": 1.0522, + "step": 2771 + }, + { + "epoch": 0.3815028901734104, + "grad_norm": 1.5519420199826888, + "learning_rate": 1.4190240219903486e-05, + "loss": 1.0724, + "step": 2772 + }, + { + "epoch": 0.3816405174786678, + "grad_norm": 1.5263635566223055, + "learning_rate": 1.4186192573269335e-05, + "loss": 1.0685, + "step": 2773 + }, + { + "epoch": 0.38177814478392513, + "grad_norm": 1.4515778113407662, + "learning_rate": 1.4182144094896021e-05, + "loss": 1.0317, + "step": 2774 + }, + { + "epoch": 0.3819157720891825, + "grad_norm": 1.7729726555936651, + "learning_rate": 1.4178094785587926e-05, + "loss": 1.0744, + "step": 2775 + }, + { + "epoch": 0.38205339939443983, + "grad_norm": 1.6459140430801447, + "learning_rate": 1.4174044646149587e-05, + "loss": 1.0432, + "step": 2776 + }, + { + "epoch": 
0.38219102669969723, + "grad_norm": 1.6442565067801325, + "learning_rate": 1.4169993677385713e-05, + "loss": 1.0059, + "step": 2777 + }, + { + "epoch": 0.3823286540049546, + "grad_norm": 1.5137037647737381, + "learning_rate": 1.4165941880101178e-05, + "loss": 1.0244, + "step": 2778 + }, + { + "epoch": 0.38246628131021193, + "grad_norm": 1.63695155077094, + "learning_rate": 1.4161889255101012e-05, + "loss": 1.0324, + "step": 2779 + }, + { + "epoch": 0.38260390861546933, + "grad_norm": 1.6789451655603265, + "learning_rate": 1.4157835803190425e-05, + "loss": 1.0423, + "step": 2780 + }, + { + "epoch": 0.3827415359207267, + "grad_norm": 1.858996737574047, + "learning_rate": 1.4153781525174779e-05, + "loss": 1.1771, + "step": 2781 + }, + { + "epoch": 0.38287916322598403, + "grad_norm": 1.677502836194795, + "learning_rate": 1.4149726421859601e-05, + "loss": 1.0935, + "step": 2782 + }, + { + "epoch": 0.3830167905312414, + "grad_norm": 1.663905831881494, + "learning_rate": 1.4145670494050588e-05, + "loss": 1.1535, + "step": 2783 + }, + { + "epoch": 0.3831544178364988, + "grad_norm": 1.6475914981956006, + "learning_rate": 1.4141613742553597e-05, + "loss": 1.1272, + "step": 2784 + }, + { + "epoch": 0.38329204514175613, + "grad_norm": 1.5985873426894874, + "learning_rate": 1.4137556168174646e-05, + "loss": 1.0963, + "step": 2785 + }, + { + "epoch": 0.3834296724470135, + "grad_norm": 1.5856430519814781, + "learning_rate": 1.4133497771719922e-05, + "loss": 1.02, + "step": 2786 + }, + { + "epoch": 0.38356729975227083, + "grad_norm": 1.6160498941334656, + "learning_rate": 1.4129438553995771e-05, + "loss": 1.06, + "step": 2787 + }, + { + "epoch": 0.38370492705752823, + "grad_norm": 1.659003100708784, + "learning_rate": 1.4125378515808706e-05, + "loss": 1.025, + "step": 2788 + }, + { + "epoch": 0.3838425543627856, + "grad_norm": 1.5017961570536498, + "learning_rate": 1.4121317657965401e-05, + "loss": 1.0426, + "step": 2789 + }, + { + "epoch": 0.38398018166804293, + "grad_norm": 1.684299096133013, + "learning_rate": 1.4117255981272694e-05, + "loss": 1.0491, + "step": 2790 + }, + { + "epoch": 0.3841178089733003, + "grad_norm": 1.708681892320337, + "learning_rate": 1.4113193486537578e-05, + "loss": 0.993, + "step": 2791 + }, + { + "epoch": 0.3842554362785577, + "grad_norm": 1.748382927508248, + "learning_rate": 1.4109130174567223e-05, + "loss": 1.0647, + "step": 2792 + }, + { + "epoch": 0.38439306358381503, + "grad_norm": 1.7476688161576805, + "learning_rate": 1.4105066046168947e-05, + "loss": 1.1161, + "step": 2793 + }, + { + "epoch": 0.3845306908890724, + "grad_norm": 1.6827361516617716, + "learning_rate": 1.4101001102150243e-05, + "loss": 1.091, + "step": 2794 + }, + { + "epoch": 0.38466831819432973, + "grad_norm": 1.9505138657357013, + "learning_rate": 1.4096935343318752e-05, + "loss": 0.9877, + "step": 2795 + }, + { + "epoch": 0.38480594549958713, + "grad_norm": 1.702925962477622, + "learning_rate": 1.4092868770482293e-05, + "loss": 1.0363, + "step": 2796 + }, + { + "epoch": 0.3849435728048445, + "grad_norm": 1.8165739328261106, + "learning_rate": 1.4088801384448836e-05, + "loss": 0.9556, + "step": 2797 + }, + { + "epoch": 0.38508120011010183, + "grad_norm": 1.5071955945288082, + "learning_rate": 1.4084733186026512e-05, + "loss": 1.0316, + "step": 2798 + }, + { + "epoch": 0.38521882741535923, + "grad_norm": 1.798564467955146, + "learning_rate": 1.4080664176023615e-05, + "loss": 1.0828, + "step": 2799 + }, + { + "epoch": 0.3853564547206166, + "grad_norm": 1.552309023558474, + "learning_rate": 
1.407659435524861e-05, + "loss": 1.0185, + "step": 2800 + }, + { + "epoch": 0.38549408202587393, + "grad_norm": 1.6442194645085841, + "learning_rate": 1.4072523724510103e-05, + "loss": 1.0636, + "step": 2801 + }, + { + "epoch": 0.3856317093311313, + "grad_norm": 1.7542165789738038, + "learning_rate": 1.4068452284616883e-05, + "loss": 1.0829, + "step": 2802 + }, + { + "epoch": 0.3857693366363887, + "grad_norm": 1.4896378073129424, + "learning_rate": 1.4064380036377886e-05, + "loss": 1.0564, + "step": 2803 + }, + { + "epoch": 0.38590696394164603, + "grad_norm": 2.158109244246876, + "learning_rate": 1.4060306980602206e-05, + "loss": 1.0862, + "step": 2804 + }, + { + "epoch": 0.3860445912469034, + "grad_norm": 1.673310575139339, + "learning_rate": 1.4056233118099112e-05, + "loss": 1.0747, + "step": 2805 + }, + { + "epoch": 0.3861822185521607, + "grad_norm": 1.798520536664166, + "learning_rate": 1.4052158449678022e-05, + "loss": 1.0931, + "step": 2806 + }, + { + "epoch": 0.38631984585741813, + "grad_norm": 1.6241185375328566, + "learning_rate": 1.4048082976148513e-05, + "loss": 1.066, + "step": 2807 + }, + { + "epoch": 0.3864574731626755, + "grad_norm": 1.8280721655476513, + "learning_rate": 1.4044006698320331e-05, + "loss": 1.0475, + "step": 2808 + }, + { + "epoch": 0.38659510046793283, + "grad_norm": 1.9089347048420024, + "learning_rate": 1.4039929617003373e-05, + "loss": 1.1459, + "step": 2809 + }, + { + "epoch": 0.3867327277731902, + "grad_norm": 1.7673652037219538, + "learning_rate": 1.4035851733007704e-05, + "loss": 1.0582, + "step": 2810 + }, + { + "epoch": 0.3868703550784476, + "grad_norm": 1.8197453011808504, + "learning_rate": 1.4031773047143536e-05, + "loss": 1.038, + "step": 2811 + }, + { + "epoch": 0.38700798238370493, + "grad_norm": 1.6569567074615181, + "learning_rate": 1.4027693560221254e-05, + "loss": 1.0937, + "step": 2812 + }, + { + "epoch": 0.3871456096889623, + "grad_norm": 1.5368502764739944, + "learning_rate": 1.4023613273051395e-05, + "loss": 1.0425, + "step": 2813 + }, + { + "epoch": 0.3872832369942196, + "grad_norm": 1.8066807303576407, + "learning_rate": 1.4019532186444658e-05, + "loss": 1.1182, + "step": 2814 + }, + { + "epoch": 0.38742086429947703, + "grad_norm": 1.6474487668411018, + "learning_rate": 1.4015450301211895e-05, + "loss": 0.9853, + "step": 2815 + }, + { + "epoch": 0.3875584916047344, + "grad_norm": 1.777816044740927, + "learning_rate": 1.4011367618164122e-05, + "loss": 1.0731, + "step": 2816 + }, + { + "epoch": 0.3876961189099917, + "grad_norm": 1.62206762910681, + "learning_rate": 1.4007284138112513e-05, + "loss": 1.1097, + "step": 2817 + }, + { + "epoch": 0.38783374621524913, + "grad_norm": 1.7892548416883558, + "learning_rate": 1.4003199861868401e-05, + "loss": 1.1401, + "step": 2818 + }, + { + "epoch": 0.3879713735205065, + "grad_norm": 1.7139921662755788, + "learning_rate": 1.3999114790243274e-05, + "loss": 0.9782, + "step": 2819 + }, + { + "epoch": 0.38810900082576383, + "grad_norm": 1.718055842959684, + "learning_rate": 1.3995028924048777e-05, + "loss": 1.0256, + "step": 2820 + }, + { + "epoch": 0.3882466281310212, + "grad_norm": 1.720717608258826, + "learning_rate": 1.3990942264096725e-05, + "loss": 1.0292, + "step": 2821 + }, + { + "epoch": 0.3883842554362786, + "grad_norm": 1.7284268173774238, + "learning_rate": 1.3986854811199072e-05, + "loss": 1.0631, + "step": 2822 + }, + { + "epoch": 0.38852188274153593, + "grad_norm": 1.95007539523667, + "learning_rate": 1.398276656616794e-05, + "loss": 1.0337, + "step": 2823 + }, + { + "epoch": 
0.3886595100467933, + "grad_norm": 1.9907854529267541, + "learning_rate": 1.3978677529815611e-05, + "loss": 1.1622, + "step": 2824 + }, + { + "epoch": 0.3887971373520506, + "grad_norm": 1.5345290874743542, + "learning_rate": 1.3974587702954521e-05, + "loss": 1.0629, + "step": 2825 + }, + { + "epoch": 0.38893476465730803, + "grad_norm": 1.7447338064578646, + "learning_rate": 1.3970497086397257e-05, + "loss": 1.1048, + "step": 2826 + }, + { + "epoch": 0.3890723919625654, + "grad_norm": 1.7753211043368142, + "learning_rate": 1.3966405680956574e-05, + "loss": 1.1087, + "step": 2827 + }, + { + "epoch": 0.3892100192678227, + "grad_norm": 1.9171438986714817, + "learning_rate": 1.3962313487445376e-05, + "loss": 1.0227, + "step": 2828 + }, + { + "epoch": 0.3893476465730801, + "grad_norm": 1.8582329931354122, + "learning_rate": 1.3958220506676724e-05, + "loss": 1.0738, + "step": 2829 + }, + { + "epoch": 0.3894852738783375, + "grad_norm": 1.8762924712824558, + "learning_rate": 1.3954126739463844e-05, + "loss": 1.0044, + "step": 2830 + }, + { + "epoch": 0.3896229011835948, + "grad_norm": 1.6099223896567645, + "learning_rate": 1.3950032186620104e-05, + "loss": 1.0781, + "step": 2831 + }, + { + "epoch": 0.3897605284888522, + "grad_norm": 1.7431850336065398, + "learning_rate": 1.394593684895904e-05, + "loss": 1.0416, + "step": 2832 + }, + { + "epoch": 0.3898981557941095, + "grad_norm": 1.906355352269881, + "learning_rate": 1.3941840727294337e-05, + "loss": 1.0314, + "step": 2833 + }, + { + "epoch": 0.39003578309936693, + "grad_norm": 1.8631715242906592, + "learning_rate": 1.3937743822439842e-05, + "loss": 1.0864, + "step": 2834 + }, + { + "epoch": 0.3901734104046243, + "grad_norm": 1.533966672051595, + "learning_rate": 1.3933646135209546e-05, + "loss": 1.0437, + "step": 2835 + }, + { + "epoch": 0.3903110377098816, + "grad_norm": 1.68284061308331, + "learning_rate": 1.3929547666417613e-05, + "loss": 0.9868, + "step": 2836 + }, + { + "epoch": 0.39044866501513903, + "grad_norm": 1.8823601913690837, + "learning_rate": 1.392544841687835e-05, + "loss": 1.1677, + "step": 2837 + }, + { + "epoch": 0.3905862923203964, + "grad_norm": 1.8206803882110318, + "learning_rate": 1.3921348387406215e-05, + "loss": 1.1914, + "step": 2838 + }, + { + "epoch": 0.3907239196256537, + "grad_norm": 1.7498191749786491, + "learning_rate": 1.3917247578815836e-05, + "loss": 0.9928, + "step": 2839 + }, + { + "epoch": 0.3908615469309111, + "grad_norm": 1.6427953986338186, + "learning_rate": 1.3913145991921984e-05, + "loss": 1.0799, + "step": 2840 + }, + { + "epoch": 0.3909991742361685, + "grad_norm": 1.798814535947167, + "learning_rate": 1.390904362753959e-05, + "loss": 1.0663, + "step": 2841 + }, + { + "epoch": 0.3911368015414258, + "grad_norm": 1.7844607398746606, + "learning_rate": 1.3904940486483731e-05, + "loss": 1.0661, + "step": 2842 + }, + { + "epoch": 0.3912744288466832, + "grad_norm": 1.5503262467525056, + "learning_rate": 1.3900836569569654e-05, + "loss": 1.0856, + "step": 2843 + }, + { + "epoch": 0.3914120561519405, + "grad_norm": 1.7220886086413048, + "learning_rate": 1.3896731877612746e-05, + "loss": 1.1131, + "step": 2844 + }, + { + "epoch": 0.39154968345719793, + "grad_norm": 1.7428470809137904, + "learning_rate": 1.3892626411428554e-05, + "loss": 1.0326, + "step": 2845 + }, + { + "epoch": 0.3916873107624553, + "grad_norm": 1.699844517017266, + "learning_rate": 1.388852017183278e-05, + "loss": 1.1668, + "step": 2846 + }, + { + "epoch": 0.3918249380677126, + "grad_norm": 1.7018204503426606, + "learning_rate": 
1.3884413159641276e-05, + "loss": 0.9984, + "step": 2847 + }, + { + "epoch": 0.39196256537297, + "grad_norm": 1.5879125555569387, + "learning_rate": 1.388030537567005e-05, + "loss": 1.0915, + "step": 2848 + }, + { + "epoch": 0.3921001926782274, + "grad_norm": 1.7598326042216341, + "learning_rate": 1.3876196820735258e-05, + "loss": 1.1225, + "step": 2849 + }, + { + "epoch": 0.3922378199834847, + "grad_norm": 1.8430338253447007, + "learning_rate": 1.3872087495653216e-05, + "loss": 0.9922, + "step": 2850 + }, + { + "epoch": 0.3923754472887421, + "grad_norm": 1.6795102267220028, + "learning_rate": 1.3867977401240395e-05, + "loss": 0.9937, + "step": 2851 + }, + { + "epoch": 0.3925130745939994, + "grad_norm": 1.6419010654053106, + "learning_rate": 1.386386653831341e-05, + "loss": 1.0787, + "step": 2852 + }, + { + "epoch": 0.3926507018992568, + "grad_norm": 1.8449651725340714, + "learning_rate": 1.3859754907689037e-05, + "loss": 1.1243, + "step": 2853 + }, + { + "epoch": 0.3927883292045142, + "grad_norm": 1.8227072709007053, + "learning_rate": 1.3855642510184197e-05, + "loss": 1.1647, + "step": 2854 + }, + { + "epoch": 0.3929259565097715, + "grad_norm": 1.6951511136911261, + "learning_rate": 1.3851529346615969e-05, + "loss": 1.1153, + "step": 2855 + }, + { + "epoch": 0.3930635838150289, + "grad_norm": 1.6486258556385747, + "learning_rate": 1.384741541780158e-05, + "loss": 1.0801, + "step": 2856 + }, + { + "epoch": 0.3932012111202863, + "grad_norm": 1.5608581347387593, + "learning_rate": 1.384330072455841e-05, + "loss": 1.0506, + "step": 2857 + }, + { + "epoch": 0.3933388384255436, + "grad_norm": 1.5342299328141695, + "learning_rate": 1.3839185267704e-05, + "loss": 1.186, + "step": 2858 + }, + { + "epoch": 0.393476465730801, + "grad_norm": 1.883069411284481, + "learning_rate": 1.3835069048056027e-05, + "loss": 1.092, + "step": 2859 + }, + { + "epoch": 0.3936140930360584, + "grad_norm": 1.6602338518165858, + "learning_rate": 1.383095206643233e-05, + "loss": 1.0748, + "step": 2860 + }, + { + "epoch": 0.3937517203413157, + "grad_norm": 1.6679655508122826, + "learning_rate": 1.3826834323650899e-05, + "loss": 1.0826, + "step": 2861 + }, + { + "epoch": 0.3938893476465731, + "grad_norm": 1.5221761686812818, + "learning_rate": 1.382271582052987e-05, + "loss": 1.0586, + "step": 2862 + }, + { + "epoch": 0.3940269749518304, + "grad_norm": 1.6676628833365728, + "learning_rate": 1.3818596557887538e-05, + "loss": 1.0066, + "step": 2863 + }, + { + "epoch": 0.3941646022570878, + "grad_norm": 1.7534234475718389, + "learning_rate": 1.3814476536542338e-05, + "loss": 1.0889, + "step": 2864 + }, + { + "epoch": 0.3943022295623452, + "grad_norm": 1.7149844273103154, + "learning_rate": 1.3810355757312867e-05, + "loss": 1.1446, + "step": 2865 + }, + { + "epoch": 0.3944398568676025, + "grad_norm": 1.7858387283062693, + "learning_rate": 1.3806234221017862e-05, + "loss": 1.1562, + "step": 2866 + }, + { + "epoch": 0.39457748417285987, + "grad_norm": 1.5342536894772782, + "learning_rate": 1.380211192847622e-05, + "loss": 0.9946, + "step": 2867 + }, + { + "epoch": 0.3947151114781173, + "grad_norm": 1.7411735606762555, + "learning_rate": 1.3797988880506986e-05, + "loss": 1.1148, + "step": 2868 + }, + { + "epoch": 0.3948527387833746, + "grad_norm": 1.6039530433622324, + "learning_rate": 1.3793865077929346e-05, + "loss": 1.0024, + "step": 2869 + }, + { + "epoch": 0.394990366088632, + "grad_norm": 1.6544798462390056, + "learning_rate": 1.3789740521562652e-05, + "loss": 1.0579, + "step": 2870 + }, + { + "epoch": 0.3951279933938893, 
+ "grad_norm": 1.7570593401247045, + "learning_rate": 1.3785615212226389e-05, + "loss": 1.0591, + "step": 2871 + }, + { + "epoch": 0.3952656206991467, + "grad_norm": 1.6666572847355376, + "learning_rate": 1.3781489150740204e-05, + "loss": 1.1048, + "step": 2872 + }, + { + "epoch": 0.3954032480044041, + "grad_norm": 1.7827409850274374, + "learning_rate": 1.3777362337923885e-05, + "loss": 1.165, + "step": 2873 + }, + { + "epoch": 0.3955408753096614, + "grad_norm": 1.545152690941674, + "learning_rate": 1.377323477459738e-05, + "loss": 0.9919, + "step": 2874 + }, + { + "epoch": 0.3956785026149188, + "grad_norm": 1.7048045676093664, + "learning_rate": 1.3769106461580772e-05, + "loss": 1.0002, + "step": 2875 + }, + { + "epoch": 0.3958161299201762, + "grad_norm": 1.814804636308928, + "learning_rate": 1.3764977399694304e-05, + "loss": 1.094, + "step": 2876 + }, + { + "epoch": 0.3959537572254335, + "grad_norm": 1.6752898008018287, + "learning_rate": 1.3760847589758366e-05, + "loss": 1.0587, + "step": 2877 + }, + { + "epoch": 0.39609138453069087, + "grad_norm": 1.5721318119200358, + "learning_rate": 1.3756717032593489e-05, + "loss": 1.0333, + "step": 2878 + }, + { + "epoch": 0.3962290118359483, + "grad_norm": 1.6842544581612495, + "learning_rate": 1.3752585729020365e-05, + "loss": 1.0405, + "step": 2879 + }, + { + "epoch": 0.3963666391412056, + "grad_norm": 1.5532006936194194, + "learning_rate": 1.3748453679859821e-05, + "loss": 1.0246, + "step": 2880 + }, + { + "epoch": 0.39650426644646297, + "grad_norm": 1.7696234744606163, + "learning_rate": 1.374432088593284e-05, + "loss": 0.9818, + "step": 2881 + }, + { + "epoch": 0.3966418937517203, + "grad_norm": 1.7039161233958249, + "learning_rate": 1.3740187348060557e-05, + "loss": 1.0232, + "step": 2882 + }, + { + "epoch": 0.3967795210569777, + "grad_norm": 1.9230784395840668, + "learning_rate": 1.3736053067064246e-05, + "loss": 1.0808, + "step": 2883 + }, + { + "epoch": 0.3969171483622351, + "grad_norm": 1.651788627701754, + "learning_rate": 1.3731918043765329e-05, + "loss": 1.1031, + "step": 2884 + }, + { + "epoch": 0.3970547756674924, + "grad_norm": 1.8443374497330645, + "learning_rate": 1.3727782278985382e-05, + "loss": 1.0575, + "step": 2885 + }, + { + "epoch": 0.39719240297274977, + "grad_norm": 1.9267220972916554, + "learning_rate": 1.3723645773546125e-05, + "loss": 1.0319, + "step": 2886 + }, + { + "epoch": 0.3973300302780072, + "grad_norm": 1.575605507767993, + "learning_rate": 1.3719508528269424e-05, + "loss": 1.1246, + "step": 2887 + }, + { + "epoch": 0.3974676575832645, + "grad_norm": 1.7378645749534758, + "learning_rate": 1.3715370543977292e-05, + "loss": 1.1407, + "step": 2888 + }, + { + "epoch": 0.39760528488852187, + "grad_norm": 1.4076332525841564, + "learning_rate": 1.3711231821491888e-05, + "loss": 1.0175, + "step": 2889 + }, + { + "epoch": 0.3977429121937792, + "grad_norm": 1.7092797267752737, + "learning_rate": 1.3707092361635526e-05, + "loss": 1.0677, + "step": 2890 + }, + { + "epoch": 0.3978805394990366, + "grad_norm": 1.794996705940687, + "learning_rate": 1.3702952165230654e-05, + "loss": 1.0169, + "step": 2891 + }, + { + "epoch": 0.39801816680429397, + "grad_norm": 2.1478551322446564, + "learning_rate": 1.3698811233099876e-05, + "loss": 1.0925, + "step": 2892 + }, + { + "epoch": 0.3981557941095513, + "grad_norm": 1.7449298274190403, + "learning_rate": 1.3694669566065938e-05, + "loss": 1.1363, + "step": 2893 + }, + { + "epoch": 0.3982934214148087, + "grad_norm": 1.5593449199054368, + "learning_rate": 1.369052716495173e-05, + 
"loss": 1.1202, + "step": 2894 + }, + { + "epoch": 0.39843104872006607, + "grad_norm": 1.7208057786252413, + "learning_rate": 1.3686384030580294e-05, + "loss": 1.0608, + "step": 2895 + }, + { + "epoch": 0.3985686760253234, + "grad_norm": 1.6013761813703682, + "learning_rate": 1.3682240163774812e-05, + "loss": 1.0052, + "step": 2896 + }, + { + "epoch": 0.39870630333058077, + "grad_norm": 1.7241850077159837, + "learning_rate": 1.367809556535861e-05, + "loss": 1.1633, + "step": 2897 + }, + { + "epoch": 0.3988439306358382, + "grad_norm": 1.610377276405802, + "learning_rate": 1.367395023615517e-05, + "loss": 1.0717, + "step": 2898 + }, + { + "epoch": 0.3989815579410955, + "grad_norm": 1.7386583866651428, + "learning_rate": 1.366980417698811e-05, + "loss": 1.0244, + "step": 2899 + }, + { + "epoch": 0.39911918524635287, + "grad_norm": 1.6193890920420864, + "learning_rate": 1.3665657388681193e-05, + "loss": 1.0505, + "step": 2900 + }, + { + "epoch": 0.3992568125516102, + "grad_norm": 1.7538389364710647, + "learning_rate": 1.366150987205833e-05, + "loss": 1.0679, + "step": 2901 + }, + { + "epoch": 0.3993944398568676, + "grad_norm": 1.8223811632920925, + "learning_rate": 1.3657361627943577e-05, + "loss": 1.0505, + "step": 2902 + }, + { + "epoch": 0.39953206716212497, + "grad_norm": 1.8029120655484407, + "learning_rate": 1.3653212657161132e-05, + "loss": 1.0221, + "step": 2903 + }, + { + "epoch": 0.3996696944673823, + "grad_norm": 1.6313830854432692, + "learning_rate": 1.3649062960535337e-05, + "loss": 1.0719, + "step": 2904 + }, + { + "epoch": 0.39980732177263967, + "grad_norm": 1.7618613597229262, + "learning_rate": 1.3644912538890684e-05, + "loss": 1.0888, + "step": 2905 + }, + { + "epoch": 0.39994494907789707, + "grad_norm": 1.9273894032861831, + "learning_rate": 1.3640761393051802e-05, + "loss": 1.091, + "step": 2906 + }, + { + "epoch": 0.4000825763831544, + "grad_norm": 1.7302772703895435, + "learning_rate": 1.363660952384347e-05, + "loss": 1.0516, + "step": 2907 + }, + { + "epoch": 0.40022020368841177, + "grad_norm": 1.8374161141176648, + "learning_rate": 1.3632456932090603e-05, + "loss": 1.126, + "step": 2908 + }, + { + "epoch": 0.4003578309936691, + "grad_norm": 1.7679970121187711, + "learning_rate": 1.3628303618618268e-05, + "loss": 1.0424, + "step": 2909 + }, + { + "epoch": 0.4004954582989265, + "grad_norm": 1.6725689106453228, + "learning_rate": 1.362414958425167e-05, + "loss": 1.0469, + "step": 2910 + }, + { + "epoch": 0.40063308560418387, + "grad_norm": 1.4677857318855299, + "learning_rate": 1.3619994829816156e-05, + "loss": 1.0221, + "step": 2911 + }, + { + "epoch": 0.4007707129094412, + "grad_norm": 1.74044141967409, + "learning_rate": 1.3615839356137223e-05, + "loss": 1.0353, + "step": 2912 + }, + { + "epoch": 0.4009083402146986, + "grad_norm": 1.616495983270934, + "learning_rate": 1.3611683164040508e-05, + "loss": 1.0609, + "step": 2913 + }, + { + "epoch": 0.40104596751995597, + "grad_norm": 1.849905448626749, + "learning_rate": 1.3607526254351784e-05, + "loss": 1.0425, + "step": 2914 + }, + { + "epoch": 0.4011835948252133, + "grad_norm": 1.883323270016199, + "learning_rate": 1.3603368627896976e-05, + "loss": 1.076, + "step": 2915 + }, + { + "epoch": 0.40132122213047067, + "grad_norm": 1.7738991997171931, + "learning_rate": 1.3599210285502147e-05, + "loss": 1.1664, + "step": 2916 + }, + { + "epoch": 0.40145884943572807, + "grad_norm": 1.6770990968514194, + "learning_rate": 1.3595051227993501e-05, + "loss": 0.9878, + "step": 2917 + }, + { + "epoch": 0.4015964767409854, + "grad_norm": 
1.663971931688104, + "learning_rate": 1.3590891456197388e-05, + "loss": 1.0427, + "step": 2918 + }, + { + "epoch": 0.40173410404624277, + "grad_norm": 1.6391955229400554, + "learning_rate": 1.3586730970940293e-05, + "loss": 1.0024, + "step": 2919 + }, + { + "epoch": 0.4018717313515001, + "grad_norm": 1.8829275289563066, + "learning_rate": 1.3582569773048854e-05, + "loss": 1.2202, + "step": 2920 + }, + { + "epoch": 0.4020093586567575, + "grad_norm": 1.6346589300519356, + "learning_rate": 1.3578407863349842e-05, + "loss": 1.0416, + "step": 2921 + }, + { + "epoch": 0.40214698596201487, + "grad_norm": 1.708553678187109, + "learning_rate": 1.357424524267017e-05, + "loss": 1.0308, + "step": 2922 + }, + { + "epoch": 0.4022846132672722, + "grad_norm": 1.9109937814325682, + "learning_rate": 1.3570081911836894e-05, + "loss": 1.025, + "step": 2923 + }, + { + "epoch": 0.40242224057252957, + "grad_norm": 1.5382565204248728, + "learning_rate": 1.3565917871677211e-05, + "loss": 1.0553, + "step": 2924 + }, + { + "epoch": 0.40255986787778697, + "grad_norm": 1.8484275796550542, + "learning_rate": 1.3561753123018462e-05, + "loss": 1.0685, + "step": 2925 + }, + { + "epoch": 0.4026974951830443, + "grad_norm": 1.649688172587731, + "learning_rate": 1.355758766668812e-05, + "loss": 1.0792, + "step": 2926 + }, + { + "epoch": 0.40283512248830167, + "grad_norm": 1.6912593499396404, + "learning_rate": 1.3553421503513806e-05, + "loss": 1.0631, + "step": 2927 + }, + { + "epoch": 0.402972749793559, + "grad_norm": 1.67003605456862, + "learning_rate": 1.3549254634323279e-05, + "loss": 1.0499, + "step": 2928 + }, + { + "epoch": 0.4031103770988164, + "grad_norm": 1.6654101034054862, + "learning_rate": 1.3545087059944442e-05, + "loss": 1.016, + "step": 2929 + }, + { + "epoch": 0.40324800440407377, + "grad_norm": 1.592044707580495, + "learning_rate": 1.3540918781205331e-05, + "loss": 1.0278, + "step": 2930 + }, + { + "epoch": 0.4033856317093311, + "grad_norm": 1.7772042715619683, + "learning_rate": 1.3536749798934132e-05, + "loss": 1.0066, + "step": 2931 + }, + { + "epoch": 0.4035232590145885, + "grad_norm": 1.717851486816849, + "learning_rate": 1.3532580113959159e-05, + "loss": 1.1452, + "step": 2932 + }, + { + "epoch": 0.40366088631984587, + "grad_norm": 1.5819134390681329, + "learning_rate": 1.3528409727108873e-05, + "loss": 1.0167, + "step": 2933 + }, + { + "epoch": 0.4037985136251032, + "grad_norm": 1.7277949395740602, + "learning_rate": 1.3524238639211874e-05, + "loss": 1.0547, + "step": 2934 + }, + { + "epoch": 0.40393614093036057, + "grad_norm": 1.6402884657368522, + "learning_rate": 1.3520066851096898e-05, + "loss": 1.0417, + "step": 2935 + }, + { + "epoch": 0.40407376823561797, + "grad_norm": 1.8412595152047828, + "learning_rate": 1.3515894363592824e-05, + "loss": 1.155, + "step": 2936 + }, + { + "epoch": 0.4042113955408753, + "grad_norm": 2.0261770614225227, + "learning_rate": 1.3511721177528665e-05, + "loss": 1.0873, + "step": 2937 + }, + { + "epoch": 0.40434902284613267, + "grad_norm": 1.6612690271088073, + "learning_rate": 1.350754729373358e-05, + "loss": 1.1014, + "step": 2938 + }, + { + "epoch": 0.40448665015139, + "grad_norm": 2.1248230024037045, + "learning_rate": 1.350337271303686e-05, + "loss": 1.1304, + "step": 2939 + }, + { + "epoch": 0.4046242774566474, + "grad_norm": 1.6384645908469302, + "learning_rate": 1.349919743626794e-05, + "loss": 0.9833, + "step": 2940 + }, + { + "epoch": 0.40476190476190477, + "grad_norm": 1.6490128907932469, + "learning_rate": 1.3495021464256386e-05, + "loss": 1.14, + "step": 
2941 + }, + { + "epoch": 0.4048995320671621, + "grad_norm": 1.6955289787977417, + "learning_rate": 1.3490844797831909e-05, + "loss": 1.0314, + "step": 2942 + }, + { + "epoch": 0.40503715937241946, + "grad_norm": 1.6726967460242488, + "learning_rate": 1.3486667437824355e-05, + "loss": 1.1179, + "step": 2943 + }, + { + "epoch": 0.40517478667767687, + "grad_norm": 1.61266506903315, + "learning_rate": 1.3482489385063705e-05, + "loss": 1.0058, + "step": 2944 + }, + { + "epoch": 0.4053124139829342, + "grad_norm": 1.8697777580991963, + "learning_rate": 1.347831064038009e-05, + "loss": 1.048, + "step": 2945 + }, + { + "epoch": 0.40545004128819156, + "grad_norm": 1.7893113857754643, + "learning_rate": 1.347413120460376e-05, + "loss": 1.0407, + "step": 2946 + }, + { + "epoch": 0.4055876685934489, + "grad_norm": 1.8562431868668634, + "learning_rate": 1.3469951078565117e-05, + "loss": 1.1318, + "step": 2947 + }, + { + "epoch": 0.4057252958987063, + "grad_norm": 1.660447148156247, + "learning_rate": 1.3465770263094693e-05, + "loss": 1.093, + "step": 2948 + }, + { + "epoch": 0.40586292320396367, + "grad_norm": 1.4902625436286638, + "learning_rate": 1.3461588759023162e-05, + "loss": 0.9832, + "step": 2949 + }, + { + "epoch": 0.406000550509221, + "grad_norm": 1.7912142935157125, + "learning_rate": 1.3457406567181324e-05, + "loss": 1.0651, + "step": 2950 + }, + { + "epoch": 0.4061381778144784, + "grad_norm": 1.8015793746845836, + "learning_rate": 1.345322368840013e-05, + "loss": 0.9973, + "step": 2951 + }, + { + "epoch": 0.40627580511973577, + "grad_norm": 1.5879596196966237, + "learning_rate": 1.3449040123510657e-05, + "loss": 0.9815, + "step": 2952 + }, + { + "epoch": 0.4064134324249931, + "grad_norm": 1.928342325399757, + "learning_rate": 1.3444855873344124e-05, + "loss": 1.0279, + "step": 2953 + }, + { + "epoch": 0.40655105973025046, + "grad_norm": 1.8576430868480196, + "learning_rate": 1.3440670938731885e-05, + "loss": 1.0443, + "step": 2954 + }, + { + "epoch": 0.40668868703550787, + "grad_norm": 1.7652417908105098, + "learning_rate": 1.3436485320505427e-05, + "loss": 1.0239, + "step": 2955 + }, + { + "epoch": 0.4068263143407652, + "grad_norm": 1.8326917126914215, + "learning_rate": 1.343229901949638e-05, + "loss": 0.999, + "step": 2956 + }, + { + "epoch": 0.40696394164602256, + "grad_norm": 1.8697678271772764, + "learning_rate": 1.3428112036536497e-05, + "loss": 1.0753, + "step": 2957 + }, + { + "epoch": 0.4071015689512799, + "grad_norm": 1.759726808773196, + "learning_rate": 1.342392437245768e-05, + "loss": 1.0102, + "step": 2958 + }, + { + "epoch": 0.4072391962565373, + "grad_norm": 1.821670304566112, + "learning_rate": 1.3419736028091954e-05, + "loss": 0.9508, + "step": 2959 + }, + { + "epoch": 0.40737682356179467, + "grad_norm": 1.548223703164834, + "learning_rate": 1.3415547004271495e-05, + "loss": 1.0168, + "step": 2960 + }, + { + "epoch": 0.407514450867052, + "grad_norm": 1.842970019560257, + "learning_rate": 1.3411357301828597e-05, + "loss": 1.1346, + "step": 2961 + }, + { + "epoch": 0.40765207817230936, + "grad_norm": 1.7018190367740607, + "learning_rate": 1.3407166921595702e-05, + "loss": 1.0758, + "step": 2962 + }, + { + "epoch": 0.40778970547756677, + "grad_norm": 1.8811613874178925, + "learning_rate": 1.3402975864405378e-05, + "loss": 1.0393, + "step": 2963 + }, + { + "epoch": 0.4079273327828241, + "grad_norm": 1.709992365270689, + "learning_rate": 1.3398784131090331e-05, + "loss": 1.0105, + "step": 2964 + }, + { + "epoch": 0.40806496008808146, + "grad_norm": 1.6666909188666281, + 
"learning_rate": 1.3394591722483402e-05, + "loss": 1.068, + "step": 2965 + }, + { + "epoch": 0.4082025873933388, + "grad_norm": 1.7007821737303124, + "learning_rate": 1.3390398639417565e-05, + "loss": 1.0446, + "step": 2966 + }, + { + "epoch": 0.4083402146985962, + "grad_norm": 1.6047127039838578, + "learning_rate": 1.3386204882725928e-05, + "loss": 1.1704, + "step": 2967 + }, + { + "epoch": 0.40847784200385356, + "grad_norm": 1.7739982716949938, + "learning_rate": 1.3382010453241732e-05, + "loss": 1.1318, + "step": 2968 + }, + { + "epoch": 0.4086154693091109, + "grad_norm": 1.765811288206812, + "learning_rate": 1.3377815351798353e-05, + "loss": 1.0588, + "step": 2969 + }, + { + "epoch": 0.4087530966143683, + "grad_norm": 1.9224393278730878, + "learning_rate": 1.3373619579229304e-05, + "loss": 1.0796, + "step": 2970 + }, + { + "epoch": 0.40889072391962566, + "grad_norm": 1.8729112994103922, + "learning_rate": 1.3369423136368223e-05, + "loss": 1.1182, + "step": 2971 + }, + { + "epoch": 0.409028351224883, + "grad_norm": 1.8584839263124386, + "learning_rate": 1.3365226024048885e-05, + "loss": 1.0673, + "step": 2972 + }, + { + "epoch": 0.40916597853014036, + "grad_norm": 1.6380778844088122, + "learning_rate": 1.3361028243105203e-05, + "loss": 1.0226, + "step": 2973 + }, + { + "epoch": 0.40930360583539777, + "grad_norm": 1.6704536266598462, + "learning_rate": 1.3356829794371215e-05, + "loss": 1.141, + "step": 2974 + }, + { + "epoch": 0.4094412331406551, + "grad_norm": 2.109994885415102, + "learning_rate": 1.3352630678681095e-05, + "loss": 0.9887, + "step": 2975 + }, + { + "epoch": 0.40957886044591246, + "grad_norm": 1.6412817649522309, + "learning_rate": 1.3348430896869155e-05, + "loss": 1.0638, + "step": 2976 + }, + { + "epoch": 0.4097164877511698, + "grad_norm": 1.6881577342044665, + "learning_rate": 1.334423044976983e-05, + "loss": 1.1079, + "step": 2977 + }, + { + "epoch": 0.4098541150564272, + "grad_norm": 1.8595392528409433, + "learning_rate": 1.3340029338217693e-05, + "loss": 1.138, + "step": 2978 + }, + { + "epoch": 0.40999174236168456, + "grad_norm": 1.7842628613493872, + "learning_rate": 1.3335827563047446e-05, + "loss": 1.1872, + "step": 2979 + }, + { + "epoch": 0.4101293696669419, + "grad_norm": 1.6875242484948774, + "learning_rate": 1.3331625125093925e-05, + "loss": 0.9986, + "step": 2980 + }, + { + "epoch": 0.41026699697219926, + "grad_norm": 1.6459299499613893, + "learning_rate": 1.3327422025192095e-05, + "loss": 1.0929, + "step": 2981 + }, + { + "epoch": 0.41040462427745666, + "grad_norm": 1.8001413612098554, + "learning_rate": 1.3323218264177058e-05, + "loss": 1.1045, + "step": 2982 + }, + { + "epoch": 0.410542251582714, + "grad_norm": 1.6681634669086285, + "learning_rate": 1.331901384288404e-05, + "loss": 1.0364, + "step": 2983 + }, + { + "epoch": 0.41067987888797136, + "grad_norm": 1.8334633338239053, + "learning_rate": 1.3314808762148406e-05, + "loss": 1.0853, + "step": 2984 + }, + { + "epoch": 0.4108175061932287, + "grad_norm": 1.8698260083817937, + "learning_rate": 1.3310603022805646e-05, + "loss": 1.1129, + "step": 2985 + }, + { + "epoch": 0.4109551334984861, + "grad_norm": 1.7163843005138193, + "learning_rate": 1.330639662569138e-05, + "loss": 1.0077, + "step": 2986 + }, + { + "epoch": 0.41109276080374346, + "grad_norm": 1.6802320794184566, + "learning_rate": 1.3302189571641366e-05, + "loss": 1.0256, + "step": 2987 + }, + { + "epoch": 0.4112303881090008, + "grad_norm": 1.7918702046657091, + "learning_rate": 1.3297981861491485e-05, + "loss": 1.0599, + "step": 2988 + }, + 
{ + "epoch": 0.4113680154142582, + "grad_norm": 1.602094742710218, + "learning_rate": 1.3293773496077753e-05, + "loss": 1.0624, + "step": 2989 + }, + { + "epoch": 0.41150564271951556, + "grad_norm": 1.7846450937860054, + "learning_rate": 1.3289564476236311e-05, + "loss": 1.0882, + "step": 2990 + }, + { + "epoch": 0.4116432700247729, + "grad_norm": 1.689622120044213, + "learning_rate": 1.328535480280344e-05, + "loss": 1.1801, + "step": 2991 + }, + { + "epoch": 0.41178089733003026, + "grad_norm": 1.7913570278271993, + "learning_rate": 1.3281144476615544e-05, + "loss": 1.0672, + "step": 2992 + }, + { + "epoch": 0.41191852463528766, + "grad_norm": 1.804477441428126, + "learning_rate": 1.327693349850915e-05, + "loss": 1.0626, + "step": 2993 + }, + { + "epoch": 0.412056151940545, + "grad_norm": 1.7246027880223527, + "learning_rate": 1.3272721869320929e-05, + "loss": 1.06, + "step": 2994 + }, + { + "epoch": 0.41219377924580236, + "grad_norm": 1.6350279679985293, + "learning_rate": 1.3268509589887671e-05, + "loss": 1.0573, + "step": 2995 + }, + { + "epoch": 0.4123314065510597, + "grad_norm": 1.6410190286345985, + "learning_rate": 1.3264296661046296e-05, + "loss": 1.0321, + "step": 2996 + }, + { + "epoch": 0.4124690338563171, + "grad_norm": 1.630683855486899, + "learning_rate": 1.3260083083633859e-05, + "loss": 1.078, + "step": 2997 + }, + { + "epoch": 0.41260666116157446, + "grad_norm": 1.6009085224037036, + "learning_rate": 1.325586885848754e-05, + "loss": 1.0214, + "step": 2998 + }, + { + "epoch": 0.4127442884668318, + "grad_norm": 1.793651585201714, + "learning_rate": 1.3251653986444649e-05, + "loss": 1.0719, + "step": 2999 + }, + { + "epoch": 0.41288191577208916, + "grad_norm": 1.7008889364397932, + "learning_rate": 1.3247438468342618e-05, + "loss": 1.0707, + "step": 3000 + }, + { + "epoch": 0.41301954307734656, + "grad_norm": 1.7144088971110383, + "learning_rate": 1.3243222305019015e-05, + "loss": 1.0366, + "step": 3001 + }, + { + "epoch": 0.4131571703826039, + "grad_norm": 1.6882872602981118, + "learning_rate": 1.3239005497311536e-05, + "loss": 0.967, + "step": 3002 + }, + { + "epoch": 0.41329479768786126, + "grad_norm": 1.6745126043975482, + "learning_rate": 1.3234788046058002e-05, + "loss": 1.1176, + "step": 3003 + }, + { + "epoch": 0.4134324249931186, + "grad_norm": 1.7006847342767089, + "learning_rate": 1.3230569952096363e-05, + "loss": 1.1309, + "step": 3004 + }, + { + "epoch": 0.413570052298376, + "grad_norm": 1.6259542321981322, + "learning_rate": 1.3226351216264695e-05, + "loss": 1.0389, + "step": 3005 + }, + { + "epoch": 0.41370767960363336, + "grad_norm": 1.686149145164333, + "learning_rate": 1.3222131839401204e-05, + "loss": 1.0479, + "step": 3006 + }, + { + "epoch": 0.4138453069088907, + "grad_norm": 1.7500062757682815, + "learning_rate": 1.3217911822344225e-05, + "loss": 1.0127, + "step": 3007 + }, + { + "epoch": 0.4139829342141481, + "grad_norm": 1.644974802227951, + "learning_rate": 1.3213691165932212e-05, + "loss": 1.0784, + "step": 3008 + }, + { + "epoch": 0.41412056151940546, + "grad_norm": 1.9036043564665068, + "learning_rate": 1.3209469871003759e-05, + "loss": 1.0577, + "step": 3009 + }, + { + "epoch": 0.4142581888246628, + "grad_norm": 1.9253326810196307, + "learning_rate": 1.3205247938397574e-05, + "loss": 1.1272, + "step": 3010 + }, + { + "epoch": 0.41439581612992016, + "grad_norm": 1.725321886211019, + "learning_rate": 1.3201025368952498e-05, + "loss": 1.0535, + "step": 3011 + }, + { + "epoch": 0.41453344343517756, + "grad_norm": 1.7146791929559169, + "learning_rate": 
1.3196802163507499e-05, + "loss": 1.0353, + "step": 3012 + }, + { + "epoch": 0.4146710707404349, + "grad_norm": 1.5358563686175672, + "learning_rate": 1.319257832290167e-05, + "loss": 1.0667, + "step": 3013 + }, + { + "epoch": 0.41480869804569226, + "grad_norm": 1.6672023937682923, + "learning_rate": 1.3188353847974228e-05, + "loss": 1.0331, + "step": 3014 + }, + { + "epoch": 0.4149463253509496, + "grad_norm": 2.399074475840285, + "learning_rate": 1.3184128739564522e-05, + "loss": 1.11, + "step": 3015 + }, + { + "epoch": 0.415083952656207, + "grad_norm": 1.6848279638269332, + "learning_rate": 1.3179902998512021e-05, + "loss": 1.0201, + "step": 3016 + }, + { + "epoch": 0.41522157996146436, + "grad_norm": 1.8561403327176902, + "learning_rate": 1.3175676625656322e-05, + "loss": 1.0567, + "step": 3017 + }, + { + "epoch": 0.4153592072667217, + "grad_norm": 1.6315369802411375, + "learning_rate": 1.3171449621837149e-05, + "loss": 1.105, + "step": 3018 + }, + { + "epoch": 0.41549683457197906, + "grad_norm": 1.7978487799811917, + "learning_rate": 1.3167221987894348e-05, + "loss": 1.0138, + "step": 3019 + }, + { + "epoch": 0.41563446187723646, + "grad_norm": 1.64728262221785, + "learning_rate": 1.316299372466789e-05, + "loss": 1.0908, + "step": 3020 + }, + { + "epoch": 0.4157720891824938, + "grad_norm": 1.7760277538958098, + "learning_rate": 1.3158764832997877e-05, + "loss": 1.1453, + "step": 3021 + }, + { + "epoch": 0.41590971648775116, + "grad_norm": 1.7750347628021885, + "learning_rate": 1.315453531372453e-05, + "loss": 1.0228, + "step": 3022 + }, + { + "epoch": 0.4160473437930085, + "grad_norm": 1.9618108569242174, + "learning_rate": 1.31503051676882e-05, + "loss": 1.1407, + "step": 3023 + }, + { + "epoch": 0.4161849710982659, + "grad_norm": 1.640611334139154, + "learning_rate": 1.3146074395729353e-05, + "loss": 1.075, + "step": 3024 + }, + { + "epoch": 0.41632259840352326, + "grad_norm": 1.7739567699435685, + "learning_rate": 1.3141842998688589e-05, + "loss": 1.1552, + "step": 3025 + }, + { + "epoch": 0.4164602257087806, + "grad_norm": 1.8208954570265325, + "learning_rate": 1.3137610977406632e-05, + "loss": 1.0867, + "step": 3026 + }, + { + "epoch": 0.416597853014038, + "grad_norm": 1.5918058194722966, + "learning_rate": 1.3133378332724318e-05, + "loss": 1.0667, + "step": 3027 + }, + { + "epoch": 0.41673548031929536, + "grad_norm": 1.5935519973891266, + "learning_rate": 1.3129145065482622e-05, + "loss": 0.9561, + "step": 3028 + }, + { + "epoch": 0.4168731076245527, + "grad_norm": 1.7914409754412541, + "learning_rate": 1.3124911176522637e-05, + "loss": 1.0361, + "step": 3029 + }, + { + "epoch": 0.41701073492981006, + "grad_norm": 1.6585946385018528, + "learning_rate": 1.3120676666685576e-05, + "loss": 1.1673, + "step": 3030 + }, + { + "epoch": 0.41714836223506746, + "grad_norm": 1.4574205058884744, + "learning_rate": 1.311644153681278e-05, + "loss": 1.0251, + "step": 3031 + }, + { + "epoch": 0.4172859895403248, + "grad_norm": 1.6170242599783526, + "learning_rate": 1.3112205787745706e-05, + "loss": 0.9872, + "step": 3032 + }, + { + "epoch": 0.41742361684558216, + "grad_norm": 1.571178139899867, + "learning_rate": 1.3107969420325945e-05, + "loss": 1.0358, + "step": 3033 + }, + { + "epoch": 0.4175612441508395, + "grad_norm": 1.6305116645574207, + "learning_rate": 1.3103732435395205e-05, + "loss": 0.9876, + "step": 3034 + }, + { + "epoch": 0.4176988714560969, + "grad_norm": 1.7678608616559375, + "learning_rate": 1.3099494833795314e-05, + "loss": 1.0423, + "step": 3035 + }, + { + "epoch": 
0.41783649876135426, + "grad_norm": 1.714414158370946, + "learning_rate": 1.3095256616368228e-05, + "loss": 1.0253, + "step": 3036 + }, + { + "epoch": 0.4179741260666116, + "grad_norm": 1.972336932522941, + "learning_rate": 1.3091017783956018e-05, + "loss": 1.0415, + "step": 3037 + }, + { + "epoch": 0.41811175337186895, + "grad_norm": 1.562178277884062, + "learning_rate": 1.308677833740089e-05, + "loss": 1.0806, + "step": 3038 + }, + { + "epoch": 0.41824938067712636, + "grad_norm": 1.8926683508220141, + "learning_rate": 1.3082538277545161e-05, + "loss": 1.0586, + "step": 3039 + }, + { + "epoch": 0.4183870079823837, + "grad_norm": 1.8309892195362432, + "learning_rate": 1.3078297605231267e-05, + "loss": 1.0923, + "step": 3040 + }, + { + "epoch": 0.41852463528764106, + "grad_norm": 1.6887500776382363, + "learning_rate": 1.307405632130178e-05, + "loss": 1.05, + "step": 3041 + }, + { + "epoch": 0.4186622625928984, + "grad_norm": 1.7806125117237206, + "learning_rate": 1.306981442659938e-05, + "loss": 1.1288, + "step": 3042 + }, + { + "epoch": 0.4187998898981558, + "grad_norm": 1.7910770335784474, + "learning_rate": 1.3065571921966875e-05, + "loss": 1.0756, + "step": 3043 + }, + { + "epoch": 0.41893751720341316, + "grad_norm": 1.838712411557385, + "learning_rate": 1.3061328808247191e-05, + "loss": 1.1604, + "step": 3044 + }, + { + "epoch": 0.4190751445086705, + "grad_norm": 1.7197796200414144, + "learning_rate": 1.3057085086283378e-05, + "loss": 1.0795, + "step": 3045 + }, + { + "epoch": 0.4192127718139279, + "grad_norm": 1.7018948218861754, + "learning_rate": 1.3052840756918607e-05, + "loss": 1.2068, + "step": 3046 + }, + { + "epoch": 0.41935039911918526, + "grad_norm": 1.6568255903403415, + "learning_rate": 1.3048595820996166e-05, + "loss": 1.04, + "step": 3047 + }, + { + "epoch": 0.4194880264244426, + "grad_norm": 1.6799032575971349, + "learning_rate": 1.3044350279359464e-05, + "loss": 1.0302, + "step": 3048 + }, + { + "epoch": 0.41962565372969995, + "grad_norm": 1.6821346953822536, + "learning_rate": 1.3040104132852036e-05, + "loss": 1.0525, + "step": 3049 + }, + { + "epoch": 0.41976328103495736, + "grad_norm": 1.7865195732332562, + "learning_rate": 1.3035857382317532e-05, + "loss": 1.0928, + "step": 3050 + }, + { + "epoch": 0.4199009083402147, + "grad_norm": 1.4638278026668325, + "learning_rate": 1.303161002859972e-05, + "loss": 1.0447, + "step": 3051 + }, + { + "epoch": 0.42003853564547206, + "grad_norm": 1.7002449982730747, + "learning_rate": 1.3027362072542493e-05, + "loss": 1.0182, + "step": 3052 + }, + { + "epoch": 0.4201761629507294, + "grad_norm": 1.6080529102479715, + "learning_rate": 1.3023113514989865e-05, + "loss": 1.0538, + "step": 3053 + }, + { + "epoch": 0.4203137902559868, + "grad_norm": 1.7625827635816944, + "learning_rate": 1.3018864356785961e-05, + "loss": 1.0532, + "step": 3054 + }, + { + "epoch": 0.42045141756124416, + "grad_norm": 1.7576122640538192, + "learning_rate": 1.3014614598775035e-05, + "loss": 1.0429, + "step": 3055 + }, + { + "epoch": 0.4205890448665015, + "grad_norm": 1.6339978276592457, + "learning_rate": 1.3010364241801453e-05, + "loss": 1.0963, + "step": 3056 + }, + { + "epoch": 0.42072667217175885, + "grad_norm": 1.6950136685311903, + "learning_rate": 1.3006113286709705e-05, + "loss": 1.0627, + "step": 3057 + }, + { + "epoch": 0.42086429947701626, + "grad_norm": 1.5560320034254649, + "learning_rate": 1.3001861734344394e-05, + "loss": 1.058, + "step": 3058 + }, + { + "epoch": 0.4210019267822736, + "grad_norm": 1.7062727742830026, + "learning_rate": 
1.2997609585550248e-05, + "loss": 1.0159, + "step": 3059 + }, + { + "epoch": 0.42113955408753095, + "grad_norm": 1.745887472527452, + "learning_rate": 1.299335684117211e-05, + "loss": 1.0592, + "step": 3060 + }, + { + "epoch": 0.4212771813927883, + "grad_norm": 1.6818398345153933, + "learning_rate": 1.2989103502054943e-05, + "loss": 1.118, + "step": 3061 + }, + { + "epoch": 0.4214148086980457, + "grad_norm": 1.827710372264519, + "learning_rate": 1.298484956904383e-05, + "loss": 1.0067, + "step": 3062 + }, + { + "epoch": 0.42155243600330305, + "grad_norm": 1.8441029517937946, + "learning_rate": 1.2980595042983961e-05, + "loss": 1.1122, + "step": 3063 + }, + { + "epoch": 0.4216900633085604, + "grad_norm": 1.6289015601743708, + "learning_rate": 1.2976339924720657e-05, + "loss": 1.0885, + "step": 3064 + }, + { + "epoch": 0.4218276906138178, + "grad_norm": 1.5713109877966056, + "learning_rate": 1.2972084215099353e-05, + "loss": 1.038, + "step": 3065 + }, + { + "epoch": 0.42196531791907516, + "grad_norm": 1.525857752531764, + "learning_rate": 1.2967827914965598e-05, + "loss": 1.0143, + "step": 3066 + }, + { + "epoch": 0.4221029452243325, + "grad_norm": 1.6058407106345536, + "learning_rate": 1.2963571025165062e-05, + "loss": 1.1011, + "step": 3067 + }, + { + "epoch": 0.42224057252958985, + "grad_norm": 1.8189877180181475, + "learning_rate": 1.2959313546543529e-05, + "loss": 1.0948, + "step": 3068 + }, + { + "epoch": 0.42237819983484726, + "grad_norm": 1.579561229311487, + "learning_rate": 1.2955055479946906e-05, + "loss": 1.0679, + "step": 3069 + }, + { + "epoch": 0.4225158271401046, + "grad_norm": 2.0246197722684447, + "learning_rate": 1.2950796826221206e-05, + "loss": 1.2288, + "step": 3070 + }, + { + "epoch": 0.42265345444536195, + "grad_norm": 1.8903250652978796, + "learning_rate": 1.2946537586212568e-05, + "loss": 1.0872, + "step": 3071 + }, + { + "epoch": 0.4227910817506193, + "grad_norm": 1.720880088606622, + "learning_rate": 1.294227776076725e-05, + "loss": 0.9987, + "step": 3072 + }, + { + "epoch": 0.4229287090558767, + "grad_norm": 1.9539893777382384, + "learning_rate": 1.2938017350731614e-05, + "loss": 1.0599, + "step": 3073 + }, + { + "epoch": 0.42306633636113405, + "grad_norm": 1.7244222840933896, + "learning_rate": 1.2933756356952145e-05, + "loss": 1.0882, + "step": 3074 + }, + { + "epoch": 0.4232039636663914, + "grad_norm": 1.7286828272122046, + "learning_rate": 1.2929494780275449e-05, + "loss": 1.1122, + "step": 3075 + }, + { + "epoch": 0.42334159097164875, + "grad_norm": 1.6237419450437434, + "learning_rate": 1.2925232621548242e-05, + "loss": 1.0258, + "step": 3076 + }, + { + "epoch": 0.42347921827690616, + "grad_norm": 1.5627382435020283, + "learning_rate": 1.292096988161735e-05, + "loss": 1.0188, + "step": 3077 + }, + { + "epoch": 0.4236168455821635, + "grad_norm": 1.6123659145228613, + "learning_rate": 1.2916706561329733e-05, + "loss": 1.0319, + "step": 3078 + }, + { + "epoch": 0.42375447288742085, + "grad_norm": 1.7700479727198817, + "learning_rate": 1.2912442661532446e-05, + "loss": 1.0838, + "step": 3079 + }, + { + "epoch": 0.4238921001926782, + "grad_norm": 1.752295399227553, + "learning_rate": 1.2908178183072666e-05, + "loss": 1.0925, + "step": 3080 + }, + { + "epoch": 0.4240297274979356, + "grad_norm": 1.7457267540198458, + "learning_rate": 1.2903913126797692e-05, + "loss": 1.0508, + "step": 3081 + }, + { + "epoch": 0.42416735480319295, + "grad_norm": 1.5320268243290713, + "learning_rate": 1.2899647493554927e-05, + "loss": 0.9961, + "step": 3082 + }, + { + "epoch": 
0.4243049821084503, + "grad_norm": 1.5532457792908723, + "learning_rate": 1.2895381284191898e-05, + "loss": 1.0869, + "step": 3083 + }, + { + "epoch": 0.4244426094137077, + "grad_norm": 1.7727562876627874, + "learning_rate": 1.2891114499556243e-05, + "loss": 0.9572, + "step": 3084 + }, + { + "epoch": 0.42458023671896505, + "grad_norm": 1.5036474824967767, + "learning_rate": 1.288684714049571e-05, + "loss": 1.0621, + "step": 3085 + }, + { + "epoch": 0.4247178640242224, + "grad_norm": 1.6808772078439156, + "learning_rate": 1.288257920785817e-05, + "loss": 0.9915, + "step": 3086 + }, + { + "epoch": 0.42485549132947975, + "grad_norm": 1.5788369949490637, + "learning_rate": 1.2878310702491597e-05, + "loss": 1.1019, + "step": 3087 + }, + { + "epoch": 0.42499311863473715, + "grad_norm": 1.7304591414068993, + "learning_rate": 1.2874041625244087e-05, + "loss": 1.1207, + "step": 3088 + }, + { + "epoch": 0.4251307459399945, + "grad_norm": 1.788340679239401, + "learning_rate": 1.286977197696385e-05, + "loss": 1.2451, + "step": 3089 + }, + { + "epoch": 0.42526837324525185, + "grad_norm": 1.8970870132144813, + "learning_rate": 1.2865501758499202e-05, + "loss": 1.0715, + "step": 3090 + }, + { + "epoch": 0.4254060005505092, + "grad_norm": 1.597682991506191, + "learning_rate": 1.2861230970698579e-05, + "loss": 1.0074, + "step": 3091 + }, + { + "epoch": 0.4255436278557666, + "grad_norm": 1.7772810202750515, + "learning_rate": 1.2856959614410527e-05, + "loss": 1.007, + "step": 3092 + }, + { + "epoch": 0.42568125516102395, + "grad_norm": 1.820221520661318, + "learning_rate": 1.2852687690483709e-05, + "loss": 1.0239, + "step": 3093 + }, + { + "epoch": 0.4258188824662813, + "grad_norm": 1.8300867184214384, + "learning_rate": 1.2848415199766896e-05, + "loss": 1.1073, + "step": 3094 + }, + { + "epoch": 0.42595650977153865, + "grad_norm": 1.9000215652436345, + "learning_rate": 1.2844142143108976e-05, + "loss": 1.1761, + "step": 3095 + }, + { + "epoch": 0.42609413707679605, + "grad_norm": 1.7602665475561385, + "learning_rate": 1.283986852135894e-05, + "loss": 1.085, + "step": 3096 + }, + { + "epoch": 0.4262317643820534, + "grad_norm": 1.4883104177985613, + "learning_rate": 1.2835594335365904e-05, + "loss": 1.0512, + "step": 3097 + }, + { + "epoch": 0.42636939168731075, + "grad_norm": 1.7071133665255267, + "learning_rate": 1.2831319585979088e-05, + "loss": 0.9966, + "step": 3098 + }, + { + "epoch": 0.4265070189925681, + "grad_norm": 1.776204696723145, + "learning_rate": 1.2827044274047825e-05, + "loss": 1.0764, + "step": 3099 + }, + { + "epoch": 0.4266446462978255, + "grad_norm": 1.725909259019703, + "learning_rate": 1.2822768400421564e-05, + "loss": 1.1123, + "step": 3100 + }, + { + "epoch": 0.42678227360308285, + "grad_norm": 1.5073815134995774, + "learning_rate": 1.2818491965949862e-05, + "loss": 1.0791, + "step": 3101 + }, + { + "epoch": 0.4269199009083402, + "grad_norm": 1.5743084088660422, + "learning_rate": 1.2814214971482389e-05, + "loss": 1.0221, + "step": 3102 + }, + { + "epoch": 0.4270575282135976, + "grad_norm": 1.5566053543231022, + "learning_rate": 1.280993741786892e-05, + "loss": 1.0166, + "step": 3103 + }, + { + "epoch": 0.42719515551885495, + "grad_norm": 1.804668527474538, + "learning_rate": 1.2805659305959352e-05, + "loss": 1.0647, + "step": 3104 + }, + { + "epoch": 0.4273327828241123, + "grad_norm": 1.6486344133142343, + "learning_rate": 1.2801380636603686e-05, + "loss": 1.0884, + "step": 3105 + }, + { + "epoch": 0.42747041012936965, + "grad_norm": 1.531564978705585, + "learning_rate": 
1.2797101410652033e-05, + "loss": 1.0703, + "step": 3106 + }, + { + "epoch": 0.42760803743462705, + "grad_norm": 1.7568537503522137, + "learning_rate": 1.279282162895462e-05, + "loss": 1.0662, + "step": 3107 + }, + { + "epoch": 0.4277456647398844, + "grad_norm": 1.6489692997287986, + "learning_rate": 1.2788541292361774e-05, + "loss": 1.0562, + "step": 3108 + }, + { + "epoch": 0.42788329204514175, + "grad_norm": 1.6027806389338546, + "learning_rate": 1.2784260401723947e-05, + "loss": 1.037, + "step": 3109 + }, + { + "epoch": 0.4280209193503991, + "grad_norm": 1.5865446336100815, + "learning_rate": 1.2779978957891692e-05, + "loss": 1.0713, + "step": 3110 + }, + { + "epoch": 0.4281585466556565, + "grad_norm": 1.6586520929151236, + "learning_rate": 1.2775696961715671e-05, + "loss": 0.9449, + "step": 3111 + }, + { + "epoch": 0.42829617396091385, + "grad_norm": 1.7463607641763577, + "learning_rate": 1.2771414414046658e-05, + "loss": 1.0658, + "step": 3112 + }, + { + "epoch": 0.4284338012661712, + "grad_norm": 1.9651552457368968, + "learning_rate": 1.276713131573554e-05, + "loss": 1.0476, + "step": 3113 + }, + { + "epoch": 0.42857142857142855, + "grad_norm": 1.6249884113305426, + "learning_rate": 1.2762847667633305e-05, + "loss": 1.0193, + "step": 3114 + }, + { + "epoch": 0.42870905587668595, + "grad_norm": 1.6616066471384072, + "learning_rate": 1.2758563470591057e-05, + "loss": 1.0358, + "step": 3115 + }, + { + "epoch": 0.4288466831819433, + "grad_norm": 1.7948450094151074, + "learning_rate": 1.2754278725460008e-05, + "loss": 1.1198, + "step": 3116 + }, + { + "epoch": 0.42898431048720065, + "grad_norm": 1.6728406241485596, + "learning_rate": 1.2749993433091479e-05, + "loss": 1.0511, + "step": 3117 + }, + { + "epoch": 0.429121937792458, + "grad_norm": 1.702515640176959, + "learning_rate": 1.27457075943369e-05, + "loss": 1.1458, + "step": 3118 + }, + { + "epoch": 0.4292595650977154, + "grad_norm": 1.8574280686784768, + "learning_rate": 1.2741421210047803e-05, + "loss": 0.969, + "step": 3119 + }, + { + "epoch": 0.42939719240297275, + "grad_norm": 1.7999194187457404, + "learning_rate": 1.273713428107584e-05, + "loss": 1.0494, + "step": 3120 + }, + { + "epoch": 0.4295348197082301, + "grad_norm": 1.7416193703403509, + "learning_rate": 1.2732846808272762e-05, + "loss": 1.1068, + "step": 3121 + }, + { + "epoch": 0.4296724470134875, + "grad_norm": 1.643127031413654, + "learning_rate": 1.2728558792490433e-05, + "loss": 1.0124, + "step": 3122 + }, + { + "epoch": 0.42981007431874485, + "grad_norm": 1.8549211720104217, + "learning_rate": 1.272427023458082e-05, + "loss": 1.0371, + "step": 3123 + }, + { + "epoch": 0.4299477016240022, + "grad_norm": 1.85952636340522, + "learning_rate": 1.2719981135396002e-05, + "loss": 1.0825, + "step": 3124 + }, + { + "epoch": 0.43008532892925955, + "grad_norm": 1.7360753216908393, + "learning_rate": 1.2715691495788167e-05, + "loss": 1.0565, + "step": 3125 + }, + { + "epoch": 0.43022295623451695, + "grad_norm": 1.6938326389465288, + "learning_rate": 1.2711401316609609e-05, + "loss": 1.0943, + "step": 3126 + }, + { + "epoch": 0.4303605835397743, + "grad_norm": 1.8172848863364526, + "learning_rate": 1.270711059871272e-05, + "loss": 1.0686, + "step": 3127 + }, + { + "epoch": 0.43049821084503165, + "grad_norm": 1.6048678965865795, + "learning_rate": 1.270281934295001e-05, + "loss": 0.9601, + "step": 3128 + }, + { + "epoch": 0.430635838150289, + "grad_norm": 1.6098443949117176, + "learning_rate": 1.2698527550174096e-05, + "loss": 1.0907, + "step": 3129 + }, + { + "epoch": 
0.4307734654555464, + "grad_norm": 1.9938263775233502, + "learning_rate": 1.2694235221237695e-05, + "loss": 1.0383, + "step": 3130 + }, + { + "epoch": 0.43091109276080375, + "grad_norm": 1.646435798285442, + "learning_rate": 1.2689942356993637e-05, + "loss": 0.9769, + "step": 3131 + }, + { + "epoch": 0.4310487200660611, + "grad_norm": 1.8229832346215644, + "learning_rate": 1.2685648958294854e-05, + "loss": 1.155, + "step": 3132 + }, + { + "epoch": 0.43118634737131845, + "grad_norm": 1.687962810813789, + "learning_rate": 1.2681355025994386e-05, + "loss": 1.0828, + "step": 3133 + }, + { + "epoch": 0.43132397467657585, + "grad_norm": 1.8961903710574464, + "learning_rate": 1.2677060560945377e-05, + "loss": 1.074, + "step": 3134 + }, + { + "epoch": 0.4314616019818332, + "grad_norm": 1.543217766170686, + "learning_rate": 1.2672765564001082e-05, + "loss": 0.9539, + "step": 3135 + }, + { + "epoch": 0.43159922928709055, + "grad_norm": 1.7379587821564906, + "learning_rate": 1.2668470036014852e-05, + "loss": 1.0339, + "step": 3136 + }, + { + "epoch": 0.4317368565923479, + "grad_norm": 1.7511599167144067, + "learning_rate": 1.2664173977840155e-05, + "loss": 1.1296, + "step": 3137 + }, + { + "epoch": 0.4318744838976053, + "grad_norm": 1.6327434234842504, + "learning_rate": 1.2659877390330557e-05, + "loss": 1.0541, + "step": 3138 + }, + { + "epoch": 0.43201211120286265, + "grad_norm": 1.7944906594830923, + "learning_rate": 1.265558027433973e-05, + "loss": 1.141, + "step": 3139 + }, + { + "epoch": 0.43214973850812, + "grad_norm": 1.7984560448078661, + "learning_rate": 1.2651282630721454e-05, + "loss": 1.1396, + "step": 3140 + }, + { + "epoch": 0.4322873658133774, + "grad_norm": 1.6224796215661335, + "learning_rate": 1.2646984460329612e-05, + "loss": 1.0961, + "step": 3141 + }, + { + "epoch": 0.43242499311863475, + "grad_norm": 1.542697735749195, + "learning_rate": 1.2642685764018195e-05, + "loss": 1.0846, + "step": 3142 + }, + { + "epoch": 0.4325626204238921, + "grad_norm": 1.7516892322251574, + "learning_rate": 1.2638386542641286e-05, + "loss": 0.9619, + "step": 3143 + }, + { + "epoch": 0.43270024772914945, + "grad_norm": 1.6781228288564856, + "learning_rate": 1.2634086797053089e-05, + "loss": 1.0081, + "step": 3144 + }, + { + "epoch": 0.43283787503440685, + "grad_norm": 1.8321574667939375, + "learning_rate": 1.2629786528107901e-05, + "loss": 1.0235, + "step": 3145 + }, + { + "epoch": 0.4329755023396642, + "grad_norm": 1.6262013509718087, + "learning_rate": 1.2625485736660126e-05, + "loss": 0.9767, + "step": 3146 + }, + { + "epoch": 0.43311312964492155, + "grad_norm": 1.6357209504785235, + "learning_rate": 1.2621184423564275e-05, + "loss": 0.9934, + "step": 3147 + }, + { + "epoch": 0.4332507569501789, + "grad_norm": 1.5489993817368388, + "learning_rate": 1.261688258967496e-05, + "loss": 1.0205, + "step": 3148 + }, + { + "epoch": 0.4333883842554363, + "grad_norm": 1.7006853093592256, + "learning_rate": 1.2612580235846895e-05, + "loss": 0.997, + "step": 3149 + }, + { + "epoch": 0.43352601156069365, + "grad_norm": 1.5811003569160225, + "learning_rate": 1.2608277362934901e-05, + "loss": 1.027, + "step": 3150 + }, + { + "epoch": 0.433663638865951, + "grad_norm": 1.7177391956181791, + "learning_rate": 1.2603973971793897e-05, + "loss": 1.0833, + "step": 3151 + }, + { + "epoch": 0.43380126617120834, + "grad_norm": 1.8362146115801377, + "learning_rate": 1.2599670063278911e-05, + "loss": 1.038, + "step": 3152 + }, + { + "epoch": 0.43393889347646575, + "grad_norm": 1.6568752419241706, + "learning_rate": 
1.2595365638245068e-05, + "loss": 1.0115, + "step": 3153 + }, + { + "epoch": 0.4340765207817231, + "grad_norm": 1.7787602054076295, + "learning_rate": 1.2591060697547596e-05, + "loss": 1.0963, + "step": 3154 + }, + { + "epoch": 0.43421414808698044, + "grad_norm": 1.71318078175494, + "learning_rate": 1.2586755242041835e-05, + "loss": 1.1773, + "step": 3155 + }, + { + "epoch": 0.4343517753922378, + "grad_norm": 1.6298724444752155, + "learning_rate": 1.2582449272583218e-05, + "loss": 1.0541, + "step": 3156 + }, + { + "epoch": 0.4344894026974952, + "grad_norm": 1.5946830347426724, + "learning_rate": 1.2578142790027275e-05, + "loss": 1.0508, + "step": 3157 + }, + { + "epoch": 0.43462703000275255, + "grad_norm": 1.8644424747640742, + "learning_rate": 1.2573835795229656e-05, + "loss": 1.0091, + "step": 3158 + }, + { + "epoch": 0.4347646573080099, + "grad_norm": 1.7629854196859822, + "learning_rate": 1.2569528289046096e-05, + "loss": 1.0989, + "step": 3159 + }, + { + "epoch": 0.4349022846132673, + "grad_norm": 1.7849150933978042, + "learning_rate": 1.2565220272332435e-05, + "loss": 1.0707, + "step": 3160 + }, + { + "epoch": 0.43503991191852465, + "grad_norm": 1.6640070034228307, + "learning_rate": 1.2560911745944617e-05, + "loss": 1.1032, + "step": 3161 + }, + { + "epoch": 0.435177539223782, + "grad_norm": 1.886310816136597, + "learning_rate": 1.2556602710738695e-05, + "loss": 1.0434, + "step": 3162 + }, + { + "epoch": 0.43531516652903934, + "grad_norm": 1.792270173244599, + "learning_rate": 1.255229316757081e-05, + "loss": 1.0184, + "step": 3163 + }, + { + "epoch": 0.43545279383429675, + "grad_norm": 1.5772509109304522, + "learning_rate": 1.2547983117297208e-05, + "loss": 1.0575, + "step": 3164 + }, + { + "epoch": 0.4355904211395541, + "grad_norm": 1.7610745759451705, + "learning_rate": 1.2543672560774241e-05, + "loss": 0.9908, + "step": 3165 + }, + { + "epoch": 0.43572804844481144, + "grad_norm": 1.6190827877993157, + "learning_rate": 1.2539361498858354e-05, + "loss": 1.071, + "step": 3166 + }, + { + "epoch": 0.4358656757500688, + "grad_norm": 1.557941579429338, + "learning_rate": 1.2535049932406098e-05, + "loss": 0.9981, + "step": 3167 + }, + { + "epoch": 0.4360033030553262, + "grad_norm": 1.6404936552889935, + "learning_rate": 1.253073786227412e-05, + "loss": 1.0329, + "step": 3168 + }, + { + "epoch": 0.43614093036058355, + "grad_norm": 1.5588288919919873, + "learning_rate": 1.2526425289319173e-05, + "loss": 1.0018, + "step": 3169 + }, + { + "epoch": 0.4362785576658409, + "grad_norm": 1.6500157198685181, + "learning_rate": 1.25221122143981e-05, + "loss": 0.9798, + "step": 3170 + }, + { + "epoch": 0.43641618497109824, + "grad_norm": 1.6880322636698197, + "learning_rate": 1.2517798638367858e-05, + "loss": 1.0596, + "step": 3171 + }, + { + "epoch": 0.43655381227635565, + "grad_norm": 1.6648805509143199, + "learning_rate": 1.251348456208549e-05, + "loss": 1.0727, + "step": 3172 + }, + { + "epoch": 0.436691439581613, + "grad_norm": 1.7659517570629897, + "learning_rate": 1.2509169986408145e-05, + "loss": 1.0299, + "step": 3173 + }, + { + "epoch": 0.43682906688687034, + "grad_norm": 1.7395465184471834, + "learning_rate": 1.2504854912193075e-05, + "loss": 1.0318, + "step": 3174 + }, + { + "epoch": 0.4369666941921277, + "grad_norm": 1.6029235128883876, + "learning_rate": 1.2500539340297614e-05, + "loss": 0.9996, + "step": 3175 + }, + { + "epoch": 0.4371043214973851, + "grad_norm": 1.6627560283267624, + "learning_rate": 1.2496223271579219e-05, + "loss": 1.0411, + "step": 3176 + }, + { + "epoch": 
0.43724194880264244, + "grad_norm": 1.6977250298361346, + "learning_rate": 1.2491906706895427e-05, + "loss": 0.9759, + "step": 3177 + }, + { + "epoch": 0.4373795761078998, + "grad_norm": 1.7833305712647323, + "learning_rate": 1.2487589647103887e-05, + "loss": 1.0375, + "step": 3178 + }, + { + "epoch": 0.4375172034131572, + "grad_norm": 1.849835517983004, + "learning_rate": 1.2483272093062332e-05, + "loss": 1.0026, + "step": 3179 + }, + { + "epoch": 0.43765483071841454, + "grad_norm": 1.7608875779329944, + "learning_rate": 1.2478954045628606e-05, + "loss": 1.0985, + "step": 3180 + }, + { + "epoch": 0.4377924580236719, + "grad_norm": 1.7544916347044048, + "learning_rate": 1.2474635505660645e-05, + "loss": 1.0367, + "step": 3181 + }, + { + "epoch": 0.43793008532892924, + "grad_norm": 1.55109606450465, + "learning_rate": 1.2470316474016482e-05, + "loss": 0.9968, + "step": 3182 + }, + { + "epoch": 0.43806771263418665, + "grad_norm": 1.9658896178337446, + "learning_rate": 1.246599695155425e-05, + "loss": 1.0278, + "step": 3183 + }, + { + "epoch": 0.438205339939444, + "grad_norm": 1.7328909496489404, + "learning_rate": 1.2461676939132179e-05, + "loss": 1.0244, + "step": 3184 + }, + { + "epoch": 0.43834296724470134, + "grad_norm": 1.6622757547894675, + "learning_rate": 1.2457356437608596e-05, + "loss": 0.9542, + "step": 3185 + }, + { + "epoch": 0.4384805945499587, + "grad_norm": 1.5325297556261395, + "learning_rate": 1.245303544784193e-05, + "loss": 1.0552, + "step": 3186 + }, + { + "epoch": 0.4386182218552161, + "grad_norm": 1.6754719636753852, + "learning_rate": 1.2448713970690696e-05, + "loss": 1.0384, + "step": 3187 + }, + { + "epoch": 0.43875584916047344, + "grad_norm": 2.086655518413788, + "learning_rate": 1.2444392007013518e-05, + "loss": 1.0947, + "step": 3188 + }, + { + "epoch": 0.4388934764657308, + "grad_norm": 1.700161558159017, + "learning_rate": 1.2440069557669106e-05, + "loss": 1.0756, + "step": 3189 + }, + { + "epoch": 0.43903110377098814, + "grad_norm": 1.9117027265303874, + "learning_rate": 1.2435746623516276e-05, + "loss": 1.0411, + "step": 3190 + }, + { + "epoch": 0.43916873107624554, + "grad_norm": 1.6254447132741074, + "learning_rate": 1.2431423205413933e-05, + "loss": 0.9813, + "step": 3191 + }, + { + "epoch": 0.4393063583815029, + "grad_norm": 1.6843316275313973, + "learning_rate": 1.2427099304221079e-05, + "loss": 1.0611, + "step": 3192 + }, + { + "epoch": 0.43944398568676024, + "grad_norm": 1.524002664379864, + "learning_rate": 1.2422774920796818e-05, + "loss": 1.0109, + "step": 3193 + }, + { + "epoch": 0.4395816129920176, + "grad_norm": 1.7493421633312474, + "learning_rate": 1.2418450056000346e-05, + "loss": 1.1647, + "step": 3194 + }, + { + "epoch": 0.439719240297275, + "grad_norm": 1.6230740957301961, + "learning_rate": 1.241412471069095e-05, + "loss": 1.0323, + "step": 3195 + }, + { + "epoch": 0.43985686760253234, + "grad_norm": 1.5636832552465756, + "learning_rate": 1.2409798885728023e-05, + "loss": 1.0169, + "step": 3196 + }, + { + "epoch": 0.4399944949077897, + "grad_norm": 1.636831351027792, + "learning_rate": 1.2405472581971042e-05, + "loss": 1.1052, + "step": 3197 + }, + { + "epoch": 0.4401321222130471, + "grad_norm": 1.9941306692535827, + "learning_rate": 1.2401145800279588e-05, + "loss": 1.0797, + "step": 3198 + }, + { + "epoch": 0.44026974951830444, + "grad_norm": 1.6933492278073254, + "learning_rate": 1.239681854151333e-05, + "loss": 1.0639, + "step": 3199 + }, + { + "epoch": 0.4404073768235618, + "grad_norm": 1.761482654922105, + "learning_rate": 
1.2392490806532038e-05, + "loss": 1.0718, + "step": 3200 + }, + { + "epoch": 0.44054500412881914, + "grad_norm": 1.5820362891530688, + "learning_rate": 1.238816259619557e-05, + "loss": 1.0551, + "step": 3201 + }, + { + "epoch": 0.44068263143407654, + "grad_norm": 1.674842342949645, + "learning_rate": 1.2383833911363885e-05, + "loss": 0.9517, + "step": 3202 + }, + { + "epoch": 0.4408202587393339, + "grad_norm": 1.737347397302642, + "learning_rate": 1.2379504752897034e-05, + "loss": 1.0854, + "step": 3203 + }, + { + "epoch": 0.44095788604459124, + "grad_norm": 1.7246422831833141, + "learning_rate": 1.2375175121655161e-05, + "loss": 0.9239, + "step": 3204 + }, + { + "epoch": 0.4410955133498486, + "grad_norm": 1.733967508578194, + "learning_rate": 1.2370845018498503e-05, + "loss": 1.0965, + "step": 3205 + }, + { + "epoch": 0.441233140655106, + "grad_norm": 1.6078103889529902, + "learning_rate": 1.2366514444287396e-05, + "loss": 1.0201, + "step": 3206 + }, + { + "epoch": 0.44137076796036334, + "grad_norm": 1.7381363979598115, + "learning_rate": 1.2362183399882262e-05, + "loss": 1.1206, + "step": 3207 + }, + { + "epoch": 0.4415083952656207, + "grad_norm": 1.7496823738127036, + "learning_rate": 1.2357851886143616e-05, + "loss": 1.0512, + "step": 3208 + }, + { + "epoch": 0.44164602257087804, + "grad_norm": 1.7271127489763485, + "learning_rate": 1.235351990393208e-05, + "loss": 1.0508, + "step": 3209 + }, + { + "epoch": 0.44178364987613544, + "grad_norm": 1.611808272457047, + "learning_rate": 1.2349187454108354e-05, + "loss": 1.0606, + "step": 3210 + }, + { + "epoch": 0.4419212771813928, + "grad_norm": 1.776021783150459, + "learning_rate": 1.234485453753324e-05, + "loss": 1.0235, + "step": 3211 + }, + { + "epoch": 0.44205890448665014, + "grad_norm": 1.76542122779535, + "learning_rate": 1.2340521155067623e-05, + "loss": 0.9886, + "step": 3212 + }, + { + "epoch": 0.4421965317919075, + "grad_norm": 1.7284881609504283, + "learning_rate": 1.2336187307572493e-05, + "loss": 1.0095, + "step": 3213 + }, + { + "epoch": 0.4423341590971649, + "grad_norm": 1.8033416942477851, + "learning_rate": 1.2331852995908927e-05, + "loss": 1.0488, + "step": 3214 + }, + { + "epoch": 0.44247178640242224, + "grad_norm": 1.7772731380019744, + "learning_rate": 1.2327518220938086e-05, + "loss": 1.0057, + "step": 3215 + }, + { + "epoch": 0.4426094137076796, + "grad_norm": 1.648924059394994, + "learning_rate": 1.2323182983521236e-05, + "loss": 0.9851, + "step": 3216 + }, + { + "epoch": 0.442747041012937, + "grad_norm": 1.837133707876487, + "learning_rate": 1.2318847284519726e-05, + "loss": 1.127, + "step": 3217 + }, + { + "epoch": 0.44288466831819434, + "grad_norm": 1.6841317459593383, + "learning_rate": 1.2314511124795003e-05, + "loss": 1.0502, + "step": 3218 + }, + { + "epoch": 0.4430222956234517, + "grad_norm": 1.657717780390998, + "learning_rate": 1.2310174505208604e-05, + "loss": 1.0316, + "step": 3219 + }, + { + "epoch": 0.44315992292870904, + "grad_norm": 1.8515675456237604, + "learning_rate": 1.2305837426622153e-05, + "loss": 1.0282, + "step": 3220 + }, + { + "epoch": 0.44329755023396644, + "grad_norm": 1.6111843970120465, + "learning_rate": 1.2301499889897368e-05, + "loss": 1.0707, + "step": 3221 + }, + { + "epoch": 0.4434351775392238, + "grad_norm": 1.650082239657082, + "learning_rate": 1.2297161895896062e-05, + "loss": 1.1191, + "step": 3222 + }, + { + "epoch": 0.44357280484448114, + "grad_norm": 1.8678479527451124, + "learning_rate": 1.2292823445480126e-05, + "loss": 1.0852, + "step": 3223 + }, + { + "epoch": 
0.4437104321497385, + "grad_norm": 1.6235834596643877, + "learning_rate": 1.228848453951156e-05, + "loss": 0.9877, + "step": 3224 + }, + { + "epoch": 0.4438480594549959, + "grad_norm": 2.1832705376431036, + "learning_rate": 1.2284145178852445e-05, + "loss": 1.1397, + "step": 3225 + }, + { + "epoch": 0.44398568676025324, + "grad_norm": 1.7026121411926158, + "learning_rate": 1.2279805364364945e-05, + "loss": 1.043, + "step": 3226 + }, + { + "epoch": 0.4441233140655106, + "grad_norm": 1.6998623698786843, + "learning_rate": 1.2275465096911327e-05, + "loss": 1.038, + "step": 3227 + }, + { + "epoch": 0.44426094137076794, + "grad_norm": 1.8479290861957356, + "learning_rate": 1.2271124377353945e-05, + "loss": 1.1217, + "step": 3228 + }, + { + "epoch": 0.44439856867602534, + "grad_norm": 1.8607164462218913, + "learning_rate": 1.2266783206555235e-05, + "loss": 1.2002, + "step": 3229 + }, + { + "epoch": 0.4445361959812827, + "grad_norm": 1.650611949226281, + "learning_rate": 1.2262441585377731e-05, + "loss": 1.0317, + "step": 3230 + }, + { + "epoch": 0.44467382328654004, + "grad_norm": 1.5279001064836084, + "learning_rate": 1.2258099514684052e-05, + "loss": 0.9919, + "step": 3231 + }, + { + "epoch": 0.4448114505917974, + "grad_norm": 1.6281976127874973, + "learning_rate": 1.225375699533691e-05, + "loss": 0.95, + "step": 3232 + }, + { + "epoch": 0.4449490778970548, + "grad_norm": 1.6345456989676252, + "learning_rate": 1.2249414028199104e-05, + "loss": 0.9755, + "step": 3233 + }, + { + "epoch": 0.44508670520231214, + "grad_norm": 1.6395157354413419, + "learning_rate": 1.2245070614133522e-05, + "loss": 1.0486, + "step": 3234 + }, + { + "epoch": 0.4452243325075695, + "grad_norm": 1.7404472833699915, + "learning_rate": 1.224072675400314e-05, + "loss": 1.0204, + "step": 3235 + }, + { + "epoch": 0.4453619598128269, + "grad_norm": 1.701781681798363, + "learning_rate": 1.2236382448671027e-05, + "loss": 1.0122, + "step": 3236 + }, + { + "epoch": 0.44549958711808424, + "grad_norm": 1.560882896897504, + "learning_rate": 1.2232037699000332e-05, + "loss": 1.0201, + "step": 3237 + }, + { + "epoch": 0.4456372144233416, + "grad_norm": 1.5766243079034652, + "learning_rate": 1.2227692505854303e-05, + "loss": 1.0768, + "step": 3238 + }, + { + "epoch": 0.44577484172859894, + "grad_norm": 1.7290501288363898, + "learning_rate": 1.222334687009626e-05, + "loss": 1.0398, + "step": 3239 + }, + { + "epoch": 0.44591246903385634, + "grad_norm": 1.6275044121561326, + "learning_rate": 1.2219000792589633e-05, + "loss": 0.9727, + "step": 3240 + }, + { + "epoch": 0.4460500963391137, + "grad_norm": 1.7118732517388435, + "learning_rate": 1.2214654274197922e-05, + "loss": 0.9847, + "step": 3241 + }, + { + "epoch": 0.44618772364437104, + "grad_norm": 1.6170963787510475, + "learning_rate": 1.2210307315784721e-05, + "loss": 1.0, + "step": 3242 + }, + { + "epoch": 0.4463253509496284, + "grad_norm": 1.620718718673401, + "learning_rate": 1.2205959918213714e-05, + "loss": 1.0055, + "step": 3243 + }, + { + "epoch": 0.4464629782548858, + "grad_norm": 1.589082499762054, + "learning_rate": 1.2201612082348666e-05, + "loss": 1.0485, + "step": 3244 + }, + { + "epoch": 0.44660060556014314, + "grad_norm": 1.8054830779124131, + "learning_rate": 1.2197263809053435e-05, + "loss": 1.0317, + "step": 3245 + }, + { + "epoch": 0.4467382328654005, + "grad_norm": 1.8013118709417002, + "learning_rate": 1.2192915099191964e-05, + "loss": 1.1345, + "step": 3246 + }, + { + "epoch": 0.44687586017065783, + "grad_norm": 1.8993159126135937, + "learning_rate": 
1.2188565953628278e-05, + "loss": 1.1772, + "step": 3247 + }, + { + "epoch": 0.44701348747591524, + "grad_norm": 1.6425834295678363, + "learning_rate": 1.2184216373226494e-05, + "loss": 1.0359, + "step": 3248 + }, + { + "epoch": 0.4471511147811726, + "grad_norm": 1.749221294967735, + "learning_rate": 1.2179866358850818e-05, + "loss": 1.0139, + "step": 3249 + }, + { + "epoch": 0.44728874208642994, + "grad_norm": 1.8998095837523783, + "learning_rate": 1.2175515911365535e-05, + "loss": 1.097, + "step": 3250 + }, + { + "epoch": 0.4474263693916873, + "grad_norm": 1.5855957774891942, + "learning_rate": 1.217116503163502e-05, + "loss": 0.9543, + "step": 3251 + }, + { + "epoch": 0.4475639966969447, + "grad_norm": 1.6167812546582958, + "learning_rate": 1.2166813720523734e-05, + "loss": 0.99, + "step": 3252 + }, + { + "epoch": 0.44770162400220204, + "grad_norm": 1.6844539503387257, + "learning_rate": 1.216246197889622e-05, + "loss": 1.0674, + "step": 3253 + }, + { + "epoch": 0.4478392513074594, + "grad_norm": 1.6531401848885043, + "learning_rate": 1.2158109807617114e-05, + "loss": 0.9557, + "step": 3254 + }, + { + "epoch": 0.4479768786127168, + "grad_norm": 1.6572737161849602, + "learning_rate": 1.2153757207551129e-05, + "loss": 1.0477, + "step": 3255 + }, + { + "epoch": 0.44811450591797414, + "grad_norm": 1.9124510820055534, + "learning_rate": 1.2149404179563069e-05, + "loss": 1.1294, + "step": 3256 + }, + { + "epoch": 0.4482521332232315, + "grad_norm": 1.6534364697395338, + "learning_rate": 1.2145050724517819e-05, + "loss": 1.0068, + "step": 3257 + }, + { + "epoch": 0.44838976052848883, + "grad_norm": 1.962284770379394, + "learning_rate": 1.2140696843280355e-05, + "loss": 1.1436, + "step": 3258 + }, + { + "epoch": 0.44852738783374624, + "grad_norm": 1.683058512893367, + "learning_rate": 1.2136342536715731e-05, + "loss": 1.0346, + "step": 3259 + }, + { + "epoch": 0.4486650151390036, + "grad_norm": 1.6929402925395962, + "learning_rate": 1.2131987805689089e-05, + "loss": 0.9749, + "step": 3260 + }, + { + "epoch": 0.44880264244426094, + "grad_norm": 1.7051742034293074, + "learning_rate": 1.2127632651065652e-05, + "loss": 1.0327, + "step": 3261 + }, + { + "epoch": 0.4489402697495183, + "grad_norm": 1.613591598627337, + "learning_rate": 1.2123277073710733e-05, + "loss": 1.0038, + "step": 3262 + }, + { + "epoch": 0.4490778970547757, + "grad_norm": 1.7745919001342014, + "learning_rate": 1.211892107448972e-05, + "loss": 1.1127, + "step": 3263 + }, + { + "epoch": 0.44921552436003304, + "grad_norm": 1.8376395575154916, + "learning_rate": 1.2114564654268097e-05, + "loss": 1.0368, + "step": 3264 + }, + { + "epoch": 0.4493531516652904, + "grad_norm": 1.825012022181292, + "learning_rate": 1.2110207813911424e-05, + "loss": 1.0819, + "step": 3265 + }, + { + "epoch": 0.44949077897054773, + "grad_norm": 1.5116503982193217, + "learning_rate": 1.210585055428534e-05, + "loss": 1.0371, + "step": 3266 + }, + { + "epoch": 0.44962840627580514, + "grad_norm": 1.643664894329445, + "learning_rate": 1.2101492876255581e-05, + "loss": 1.0227, + "step": 3267 + }, + { + "epoch": 0.4497660335810625, + "grad_norm": 1.6713361797160597, + "learning_rate": 1.2097134780687954e-05, + "loss": 1.0699, + "step": 3268 + }, + { + "epoch": 0.44990366088631983, + "grad_norm": 1.8169204619927837, + "learning_rate": 1.209277626844835e-05, + "loss": 0.9811, + "step": 3269 + }, + { + "epoch": 0.4500412881915772, + "grad_norm": 1.6525341918012835, + "learning_rate": 1.2088417340402749e-05, + "loss": 1.0237, + "step": 3270 + }, + { + "epoch": 
0.4501789154968346, + "grad_norm": 1.757007263730867, + "learning_rate": 1.208405799741721e-05, + "loss": 1.0882, + "step": 3271 + }, + { + "epoch": 0.45031654280209193, + "grad_norm": 1.8662296996129175, + "learning_rate": 1.2079698240357876e-05, + "loss": 1.0269, + "step": 3272 + }, + { + "epoch": 0.4504541701073493, + "grad_norm": 1.7294007337929087, + "learning_rate": 1.2075338070090967e-05, + "loss": 1.0011, + "step": 3273 + }, + { + "epoch": 0.4505917974126067, + "grad_norm": 1.7574734200970403, + "learning_rate": 1.2070977487482793e-05, + "loss": 1.1127, + "step": 3274 + }, + { + "epoch": 0.45072942471786404, + "grad_norm": 1.8981318739917472, + "learning_rate": 1.2066616493399744e-05, + "loss": 1.0285, + "step": 3275 + }, + { + "epoch": 0.4508670520231214, + "grad_norm": 1.523934879663697, + "learning_rate": 1.2062255088708282e-05, + "loss": 0.986, + "step": 3276 + }, + { + "epoch": 0.45100467932837873, + "grad_norm": 1.6938745946870397, + "learning_rate": 1.2057893274274965e-05, + "loss": 1.1151, + "step": 3277 + }, + { + "epoch": 0.45114230663363614, + "grad_norm": 1.6222238094214725, + "learning_rate": 1.2053531050966422e-05, + "loss": 1.0518, + "step": 3278 + }, + { + "epoch": 0.4512799339388935, + "grad_norm": 1.769985464183407, + "learning_rate": 1.204916841964937e-05, + "loss": 1.0843, + "step": 3279 + }, + { + "epoch": 0.45141756124415083, + "grad_norm": 1.6932381614669898, + "learning_rate": 1.2044805381190602e-05, + "loss": 0.9843, + "step": 3280 + }, + { + "epoch": 0.4515551885494082, + "grad_norm": 1.646395215440809, + "learning_rate": 1.2040441936456998e-05, + "loss": 1.0624, + "step": 3281 + }, + { + "epoch": 0.4516928158546656, + "grad_norm": 1.9077963678666485, + "learning_rate": 1.203607808631551e-05, + "loss": 1.0897, + "step": 3282 + }, + { + "epoch": 0.45183044315992293, + "grad_norm": 1.4679761450586144, + "learning_rate": 1.2031713831633178e-05, + "loss": 1.0368, + "step": 3283 + }, + { + "epoch": 0.4519680704651803, + "grad_norm": 1.5926135535533286, + "learning_rate": 1.2027349173277118e-05, + "loss": 1.0364, + "step": 3284 + }, + { + "epoch": 0.45210569777043763, + "grad_norm": 1.494292963672221, + "learning_rate": 1.202298411211453e-05, + "loss": 1.0076, + "step": 3285 + }, + { + "epoch": 0.45224332507569504, + "grad_norm": 1.6864107443958332, + "learning_rate": 1.201861864901269e-05, + "loss": 1.0986, + "step": 3286 + }, + { + "epoch": 0.4523809523809524, + "grad_norm": 1.8805721551700996, + "learning_rate": 1.2014252784838955e-05, + "loss": 1.1704, + "step": 3287 + }, + { + "epoch": 0.45251857968620973, + "grad_norm": 1.853903433378315, + "learning_rate": 1.2009886520460765e-05, + "loss": 1.1493, + "step": 3288 + }, + { + "epoch": 0.4526562069914671, + "grad_norm": 1.586150426027311, + "learning_rate": 1.2005519856745638e-05, + "loss": 1.1387, + "step": 3289 + }, + { + "epoch": 0.4527938342967245, + "grad_norm": 1.9058522499999693, + "learning_rate": 1.2001152794561164e-05, + "loss": 1.157, + "step": 3290 + }, + { + "epoch": 0.45293146160198183, + "grad_norm": 1.739575629903443, + "learning_rate": 1.1996785334775023e-05, + "loss": 1.1242, + "step": 3291 + }, + { + "epoch": 0.4530690889072392, + "grad_norm": 1.6240616286441973, + "learning_rate": 1.1992417478254972e-05, + "loss": 1.0479, + "step": 3292 + }, + { + "epoch": 0.4532067162124966, + "grad_norm": 1.6226496643111894, + "learning_rate": 1.1988049225868839e-05, + "loss": 1.0278, + "step": 3293 + }, + { + "epoch": 0.45334434351775393, + "grad_norm": 1.7352197338209585, + "learning_rate": 
1.1983680578484535e-05, + "loss": 1.0205, + "step": 3294 + }, + { + "epoch": 0.4534819708230113, + "grad_norm": 1.459082502483015, + "learning_rate": 1.197931153697006e-05, + "loss": 1.0409, + "step": 3295 + }, + { + "epoch": 0.45361959812826863, + "grad_norm": 1.683616513197909, + "learning_rate": 1.1974942102193472e-05, + "loss": 1.0829, + "step": 3296 + }, + { + "epoch": 0.45375722543352603, + "grad_norm": 1.5487414148744094, + "learning_rate": 1.1970572275022925e-05, + "loss": 1.0594, + "step": 3297 + }, + { + "epoch": 0.4538948527387834, + "grad_norm": 1.7601979137626567, + "learning_rate": 1.1966202056326638e-05, + "loss": 1.0983, + "step": 3298 + }, + { + "epoch": 0.45403248004404073, + "grad_norm": 1.697816890378664, + "learning_rate": 1.1961831446972921e-05, + "loss": 1.0527, + "step": 3299 + }, + { + "epoch": 0.4541701073492981, + "grad_norm": 1.8005918305171795, + "learning_rate": 1.1957460447830147e-05, + "loss": 1.0426, + "step": 3300 + }, + { + "epoch": 0.4543077346545555, + "grad_norm": 1.6480841493777627, + "learning_rate": 1.1953089059766778e-05, + "loss": 1.0703, + "step": 3301 + }, + { + "epoch": 0.45444536195981283, + "grad_norm": 1.7281580576427387, + "learning_rate": 1.1948717283651347e-05, + "loss": 1.0769, + "step": 3302 + }, + { + "epoch": 0.4545829892650702, + "grad_norm": 1.7986794938202983, + "learning_rate": 1.1944345120352467e-05, + "loss": 1.1647, + "step": 3303 + }, + { + "epoch": 0.45472061657032753, + "grad_norm": 1.919631116432969, + "learning_rate": 1.1939972570738827e-05, + "loss": 1.1783, + "step": 3304 + }, + { + "epoch": 0.45485824387558493, + "grad_norm": 1.6448367853200376, + "learning_rate": 1.1935599635679194e-05, + "loss": 1.0931, + "step": 3305 + }, + { + "epoch": 0.4549958711808423, + "grad_norm": 1.702070872290541, + "learning_rate": 1.1931226316042409e-05, + "loss": 1.0966, + "step": 3306 + }, + { + "epoch": 0.45513349848609963, + "grad_norm": 1.790366601900612, + "learning_rate": 1.1926852612697386e-05, + "loss": 1.0871, + "step": 3307 + }, + { + "epoch": 0.455271125791357, + "grad_norm": 1.7029154995010782, + "learning_rate": 1.1922478526513128e-05, + "loss": 1.1683, + "step": 3308 + }, + { + "epoch": 0.4554087530966144, + "grad_norm": 1.7941516183797472, + "learning_rate": 1.1918104058358701e-05, + "loss": 1.0944, + "step": 3309 + }, + { + "epoch": 0.45554638040187173, + "grad_norm": 1.6624531880032065, + "learning_rate": 1.1913729209103253e-05, + "loss": 1.0, + "step": 3310 + }, + { + "epoch": 0.4556840077071291, + "grad_norm": 1.4266514133697288, + "learning_rate": 1.1909353979616009e-05, + "loss": 1.0173, + "step": 3311 + }, + { + "epoch": 0.4558216350123865, + "grad_norm": 1.785067928838574, + "learning_rate": 1.1904978370766267e-05, + "loss": 1.1155, + "step": 3312 + }, + { + "epoch": 0.45595926231764383, + "grad_norm": 1.7162788001945006, + "learning_rate": 1.1900602383423397e-05, + "loss": 1.1032, + "step": 3313 + }, + { + "epoch": 0.4560968896229012, + "grad_norm": 1.8060119790215499, + "learning_rate": 1.189622601845685e-05, + "loss": 1.0749, + "step": 3314 + }, + { + "epoch": 0.45623451692815853, + "grad_norm": 1.6414380294504327, + "learning_rate": 1.1891849276736151e-05, + "loss": 1.0991, + "step": 3315 + }, + { + "epoch": 0.45637214423341593, + "grad_norm": 1.690254162608865, + "learning_rate": 1.1887472159130895e-05, + "loss": 1.1257, + "step": 3316 + }, + { + "epoch": 0.4565097715386733, + "grad_norm": 1.8865705721325123, + "learning_rate": 1.188309466651076e-05, + "loss": 1.1003, + "step": 3317 + }, + { + "epoch": 
0.45664739884393063, + "grad_norm": 1.895780507482217, + "learning_rate": 1.187871679974549e-05, + "loss": 1.1299, + "step": 3318 + }, + { + "epoch": 0.456785026149188, + "grad_norm": 1.998893476419672, + "learning_rate": 1.1874338559704909e-05, + "loss": 1.0949, + "step": 3319 + }, + { + "epoch": 0.4569226534544454, + "grad_norm": 1.5010609951701976, + "learning_rate": 1.1869959947258913e-05, + "loss": 1.1002, + "step": 3320 + }, + { + "epoch": 0.45706028075970273, + "grad_norm": 1.925106912353012, + "learning_rate": 1.1865580963277474e-05, + "loss": 1.2157, + "step": 3321 + }, + { + "epoch": 0.4571979080649601, + "grad_norm": 1.7281530797737379, + "learning_rate": 1.1861201608630632e-05, + "loss": 1.1124, + "step": 3322 + }, + { + "epoch": 0.4573355353702174, + "grad_norm": 1.640029272841053, + "learning_rate": 1.185682188418851e-05, + "loss": 1.0297, + "step": 3323 + }, + { + "epoch": 0.45747316267547483, + "grad_norm": 1.6208714907666426, + "learning_rate": 1.1852441790821297e-05, + "loss": 1.0323, + "step": 3324 + }, + { + "epoch": 0.4576107899807322, + "grad_norm": 1.6224707938246572, + "learning_rate": 1.1848061329399252e-05, + "loss": 1.0842, + "step": 3325 + }, + { + "epoch": 0.45774841728598953, + "grad_norm": 1.7832473801855395, + "learning_rate": 1.1843680500792723e-05, + "loss": 0.9929, + "step": 3326 + }, + { + "epoch": 0.4578860445912469, + "grad_norm": 1.602852670317834, + "learning_rate": 1.1839299305872114e-05, + "loss": 0.9024, + "step": 3327 + }, + { + "epoch": 0.4580236718965043, + "grad_norm": 1.7475116456703832, + "learning_rate": 1.1834917745507911e-05, + "loss": 1.107, + "step": 3328 + }, + { + "epoch": 0.45816129920176163, + "grad_norm": 1.6957599958994916, + "learning_rate": 1.183053582057067e-05, + "loss": 1.0747, + "step": 3329 + }, + { + "epoch": 0.458298926507019, + "grad_norm": 1.8365986028534491, + "learning_rate": 1.1826153531931018e-05, + "loss": 1.0982, + "step": 3330 + }, + { + "epoch": 0.4584365538122764, + "grad_norm": 1.7364651026027218, + "learning_rate": 1.1821770880459655e-05, + "loss": 1.0513, + "step": 3331 + }, + { + "epoch": 0.45857418111753373, + "grad_norm": 1.834959931027001, + "learning_rate": 1.1817387867027357e-05, + "loss": 1.0597, + "step": 3332 + }, + { + "epoch": 0.4587118084227911, + "grad_norm": 1.9666407652609499, + "learning_rate": 1.1813004492504967e-05, + "loss": 1.0657, + "step": 3333 + }, + { + "epoch": 0.4588494357280484, + "grad_norm": 1.6711532701038987, + "learning_rate": 1.1808620757763403e-05, + "loss": 1.0397, + "step": 3334 + }, + { + "epoch": 0.45898706303330583, + "grad_norm": 1.681638195922923, + "learning_rate": 1.180423666367365e-05, + "loss": 0.9255, + "step": 3335 + }, + { + "epoch": 0.4591246903385632, + "grad_norm": 1.6853010186423814, + "learning_rate": 1.1799852211106768e-05, + "loss": 1.0736, + "step": 3336 + }, + { + "epoch": 0.45926231764382053, + "grad_norm": 1.7557304107832628, + "learning_rate": 1.1795467400933891e-05, + "loss": 1.001, + "step": 3337 + }, + { + "epoch": 0.4593999449490779, + "grad_norm": 1.713567640099152, + "learning_rate": 1.1791082234026218e-05, + "loss": 1.02, + "step": 3338 + }, + { + "epoch": 0.4595375722543353, + "grad_norm": 1.8168813446596255, + "learning_rate": 1.1786696711255024e-05, + "loss": 1.1123, + "step": 3339 + }, + { + "epoch": 0.45967519955959263, + "grad_norm": 1.702815453978539, + "learning_rate": 1.1782310833491648e-05, + "loss": 0.9685, + "step": 3340 + }, + { + "epoch": 0.45981282686485, + "grad_norm": 1.6684501250686197, + "learning_rate": 
1.1777924601607507e-05, + "loss": 0.9983, + "step": 3341 + }, + { + "epoch": 0.4599504541701073, + "grad_norm": 1.8863530593523867, + "learning_rate": 1.1773538016474085e-05, + "loss": 1.1947, + "step": 3342 + }, + { + "epoch": 0.46008808147536473, + "grad_norm": 1.7037891330902808, + "learning_rate": 1.1769151078962936e-05, + "loss": 1.0483, + "step": 3343 + }, + { + "epoch": 0.4602257087806221, + "grad_norm": 1.8045849099551563, + "learning_rate": 1.1764763789945686e-05, + "loss": 1.0213, + "step": 3344 + }, + { + "epoch": 0.4603633360858794, + "grad_norm": 1.7849923401604424, + "learning_rate": 1.1760376150294026e-05, + "loss": 0.9848, + "step": 3345 + }, + { + "epoch": 0.4605009633911368, + "grad_norm": 1.8904809312620632, + "learning_rate": 1.1755988160879723e-05, + "loss": 1.0098, + "step": 3346 + }, + { + "epoch": 0.4606385906963942, + "grad_norm": 1.790989122054825, + "learning_rate": 1.175159982257461e-05, + "loss": 1.0134, + "step": 3347 + }, + { + "epoch": 0.4607762180016515, + "grad_norm": 1.9736653760979912, + "learning_rate": 1.174721113625059e-05, + "loss": 1.1192, + "step": 3348 + }, + { + "epoch": 0.4609138453069089, + "grad_norm": 1.7632300782153767, + "learning_rate": 1.1742822102779632e-05, + "loss": 1.0644, + "step": 3349 + }, + { + "epoch": 0.4610514726121663, + "grad_norm": 1.8224722920191472, + "learning_rate": 1.1738432723033778e-05, + "loss": 1.0307, + "step": 3350 + }, + { + "epoch": 0.46118909991742363, + "grad_norm": 1.7387714339318252, + "learning_rate": 1.173404299788514e-05, + "loss": 1.055, + "step": 3351 + }, + { + "epoch": 0.461326727222681, + "grad_norm": 1.625768817429624, + "learning_rate": 1.1729652928205894e-05, + "loss": 1.0091, + "step": 3352 + }, + { + "epoch": 0.4614643545279383, + "grad_norm": 1.5866465824220013, + "learning_rate": 1.172526251486829e-05, + "loss": 1.106, + "step": 3353 + }, + { + "epoch": 0.46160198183319573, + "grad_norm": 1.772910385935587, + "learning_rate": 1.1720871758744639e-05, + "loss": 1.0737, + "step": 3354 + }, + { + "epoch": 0.4617396091384531, + "grad_norm": 1.7727922271345744, + "learning_rate": 1.1716480660707326e-05, + "loss": 0.982, + "step": 3355 + }, + { + "epoch": 0.4618772364437104, + "grad_norm": 1.7444197284708047, + "learning_rate": 1.1712089221628797e-05, + "loss": 1.0881, + "step": 3356 + }, + { + "epoch": 0.4620148637489678, + "grad_norm": 1.7351443967909226, + "learning_rate": 1.1707697442381581e-05, + "loss": 1.1493, + "step": 3357 + }, + { + "epoch": 0.4621524910542252, + "grad_norm": 1.695813585179952, + "learning_rate": 1.1703305323838257e-05, + "loss": 1.0356, + "step": 3358 + }, + { + "epoch": 0.4622901183594825, + "grad_norm": 1.9009294636616443, + "learning_rate": 1.169891286687148e-05, + "loss": 1.0604, + "step": 3359 + }, + { + "epoch": 0.4624277456647399, + "grad_norm": 1.5970016688460025, + "learning_rate": 1.1694520072353977e-05, + "loss": 1.1069, + "step": 3360 + }, + { + "epoch": 0.4625653729699972, + "grad_norm": 1.6844374968728684, + "learning_rate": 1.1690126941158526e-05, + "loss": 1.042, + "step": 3361 + }, + { + "epoch": 0.46270300027525463, + "grad_norm": 1.6726448337287079, + "learning_rate": 1.168573347415799e-05, + "loss": 1.0164, + "step": 3362 + }, + { + "epoch": 0.462840627580512, + "grad_norm": 1.6783630302453567, + "learning_rate": 1.1681339672225287e-05, + "loss": 1.0263, + "step": 3363 + }, + { + "epoch": 0.4629782548857693, + "grad_norm": 1.6010225956441717, + "learning_rate": 1.1676945536233404e-05, + "loss": 1.1162, + "step": 3364 + }, + { + "epoch": 
0.4631158821910267, + "grad_norm": 1.9479987567053825, + "learning_rate": 1.1672551067055397e-05, + "loss": 1.0369, + "step": 3365 + }, + { + "epoch": 0.4632535094962841, + "grad_norm": 1.9946391623919404, + "learning_rate": 1.1668156265564395e-05, + "loss": 1.0509, + "step": 3366 + }, + { + "epoch": 0.4633911368015414, + "grad_norm": 1.7654512266198432, + "learning_rate": 1.166376113263357e-05, + "loss": 1.0193, + "step": 3367 + }, + { + "epoch": 0.4635287641067988, + "grad_norm": 1.671398476523646, + "learning_rate": 1.1659365669136187e-05, + "loss": 1.0309, + "step": 3368 + }, + { + "epoch": 0.4636663914120562, + "grad_norm": 1.683970298323594, + "learning_rate": 1.165496987594556e-05, + "loss": 1.0473, + "step": 3369 + }, + { + "epoch": 0.4638040187173135, + "grad_norm": 1.7217534447874112, + "learning_rate": 1.1650573753935067e-05, + "loss": 1.0535, + "step": 3370 + }, + { + "epoch": 0.4639416460225709, + "grad_norm": 1.7075654309394153, + "learning_rate": 1.1646177303978167e-05, + "loss": 1.0059, + "step": 3371 + }, + { + "epoch": 0.4640792733278282, + "grad_norm": 1.6761899959669933, + "learning_rate": 1.164178052694837e-05, + "loss": 1.0365, + "step": 3372 + }, + { + "epoch": 0.4642169006330856, + "grad_norm": 1.6937540134544466, + "learning_rate": 1.1637383423719255e-05, + "loss": 1.0402, + "step": 3373 + }, + { + "epoch": 0.464354527938343, + "grad_norm": 1.6037158561675933, + "learning_rate": 1.163298599516447e-05, + "loss": 1.1022, + "step": 3374 + }, + { + "epoch": 0.4644921552436003, + "grad_norm": 1.8083468429130618, + "learning_rate": 1.1628588242157719e-05, + "loss": 1.1337, + "step": 3375 + }, + { + "epoch": 0.4646297825488577, + "grad_norm": 1.7244388191904836, + "learning_rate": 1.1624190165572777e-05, + "loss": 1.0884, + "step": 3376 + }, + { + "epoch": 0.4647674098541151, + "grad_norm": 1.6416748204742255, + "learning_rate": 1.1619791766283481e-05, + "loss": 1.0445, + "step": 3377 + }, + { + "epoch": 0.4649050371593724, + "grad_norm": 1.5745217862949055, + "learning_rate": 1.1615393045163734e-05, + "loss": 1.0214, + "step": 3378 + }, + { + "epoch": 0.4650426644646298, + "grad_norm": 1.6715018007330258, + "learning_rate": 1.1610994003087504e-05, + "loss": 1.1757, + "step": 3379 + }, + { + "epoch": 0.4651802917698871, + "grad_norm": 1.6877155258332566, + "learning_rate": 1.1606594640928815e-05, + "loss": 1.0116, + "step": 3380 + }, + { + "epoch": 0.4653179190751445, + "grad_norm": 1.65126399078884, + "learning_rate": 1.1602194959561761e-05, + "loss": 1.0235, + "step": 3381 + }, + { + "epoch": 0.4654555463804019, + "grad_norm": 1.6992252131651249, + "learning_rate": 1.1597794959860505e-05, + "loss": 1.0953, + "step": 3382 + }, + { + "epoch": 0.4655931736856592, + "grad_norm": 1.7955430817009295, + "learning_rate": 1.1593394642699258e-05, + "loss": 1.1016, + "step": 3383 + }, + { + "epoch": 0.46573080099091657, + "grad_norm": 1.5867435642542689, + "learning_rate": 1.1588994008952307e-05, + "loss": 1.0749, + "step": 3384 + }, + { + "epoch": 0.465868428296174, + "grad_norm": 1.5699320398519483, + "learning_rate": 1.1584593059494e-05, + "loss": 0.9877, + "step": 3385 + }, + { + "epoch": 0.4660060556014313, + "grad_norm": 1.5605191647765546, + "learning_rate": 1.158019179519874e-05, + "loss": 0.9822, + "step": 3386 + }, + { + "epoch": 0.46614368290668867, + "grad_norm": 1.7702375449651875, + "learning_rate": 1.1575790216941e-05, + "loss": 1.0368, + "step": 3387 + }, + { + "epoch": 0.4662813102119461, + "grad_norm": 1.6651215668810446, + "learning_rate": 
1.1571388325595319e-05, + "loss": 1.0527, + "step": 3388 + }, + { + "epoch": 0.4664189375172034, + "grad_norm": 1.7320979204382785, + "learning_rate": 1.1566986122036283e-05, + "loss": 1.1581, + "step": 3389 + }, + { + "epoch": 0.4665565648224608, + "grad_norm": 1.7421398836450757, + "learning_rate": 1.1562583607138556e-05, + "loss": 1.0984, + "step": 3390 + }, + { + "epoch": 0.4666941921277181, + "grad_norm": 1.5684890151003559, + "learning_rate": 1.1558180781776856e-05, + "loss": 1.0758, + "step": 3391 + }, + { + "epoch": 0.4668318194329755, + "grad_norm": 1.7065370494719578, + "learning_rate": 1.1553777646825963e-05, + "loss": 1.0616, + "step": 3392 + }, + { + "epoch": 0.4669694467382329, + "grad_norm": 1.6955138726608856, + "learning_rate": 1.154937420316072e-05, + "loss": 1.0649, + "step": 3393 + }, + { + "epoch": 0.4671070740434902, + "grad_norm": 1.5949273381469316, + "learning_rate": 1.1544970451656035e-05, + "loss": 1.0605, + "step": 3394 + }, + { + "epoch": 0.46724470134874757, + "grad_norm": 1.722080743848845, + "learning_rate": 1.1540566393186868e-05, + "loss": 1.0546, + "step": 3395 + }, + { + "epoch": 0.467382328654005, + "grad_norm": 1.8149888496462947, + "learning_rate": 1.1536162028628242e-05, + "loss": 1.0735, + "step": 3396 + }, + { + "epoch": 0.4675199559592623, + "grad_norm": 1.5560808303868876, + "learning_rate": 1.1531757358855253e-05, + "loss": 1.0552, + "step": 3397 + }, + { + "epoch": 0.46765758326451967, + "grad_norm": 1.6820785392575153, + "learning_rate": 1.1527352384743047e-05, + "loss": 0.9632, + "step": 3398 + }, + { + "epoch": 0.467795210569777, + "grad_norm": 1.9090923868548884, + "learning_rate": 1.1522947107166825e-05, + "loss": 1.0176, + "step": 3399 + }, + { + "epoch": 0.4679328378750344, + "grad_norm": 1.7521702061489202, + "learning_rate": 1.151854152700186e-05, + "loss": 1.017, + "step": 3400 + }, + { + "epoch": 0.4680704651802918, + "grad_norm": 1.9316621401191454, + "learning_rate": 1.151413564512348e-05, + "loss": 1.1116, + "step": 3401 + }, + { + "epoch": 0.4682080924855491, + "grad_norm": 1.7498879378255123, + "learning_rate": 1.1509729462407075e-05, + "loss": 1.0817, + "step": 3402 + }, + { + "epoch": 0.46834571979080647, + "grad_norm": 1.645633367704172, + "learning_rate": 1.150532297972809e-05, + "loss": 1.0759, + "step": 3403 + }, + { + "epoch": 0.4684833470960639, + "grad_norm": 1.799876086541932, + "learning_rate": 1.1500916197962036e-05, + "loss": 1.1274, + "step": 3404 + }, + { + "epoch": 0.4686209744013212, + "grad_norm": 1.6577174883923727, + "learning_rate": 1.1496509117984478e-05, + "loss": 1.0407, + "step": 3405 + }, + { + "epoch": 0.46875860170657857, + "grad_norm": 1.5964320157311451, + "learning_rate": 1.1492101740671044e-05, + "loss": 1.0631, + "step": 3406 + }, + { + "epoch": 0.468896229011836, + "grad_norm": 1.9886570476945782, + "learning_rate": 1.1487694066897419e-05, + "loss": 1.0381, + "step": 3407 + }, + { + "epoch": 0.4690338563170933, + "grad_norm": 1.9360353433469417, + "learning_rate": 1.1483286097539348e-05, + "loss": 1.0262, + "step": 3408 + }, + { + "epoch": 0.46917148362235067, + "grad_norm": 1.6249804209024834, + "learning_rate": 1.147887783347263e-05, + "loss": 1.047, + "step": 3409 + }, + { + "epoch": 0.469309110927608, + "grad_norm": 1.6494391231711798, + "learning_rate": 1.1474469275573135e-05, + "loss": 1.0139, + "step": 3410 + }, + { + "epoch": 0.4694467382328654, + "grad_norm": 1.6812721140218512, + "learning_rate": 1.1470060424716773e-05, + "loss": 0.9263, + "step": 3411 + }, + { + "epoch": 
0.46958436553812277, + "grad_norm": 1.7196591614099348, + "learning_rate": 1.1465651281779529e-05, + "loss": 1.0832, + "step": 3412 + }, + { + "epoch": 0.4697219928433801, + "grad_norm": 1.9418529126139144, + "learning_rate": 1.146124184763744e-05, + "loss": 1.0232, + "step": 3413 + }, + { + "epoch": 0.46985962014863747, + "grad_norm": 1.690681684076394, + "learning_rate": 1.14568321231666e-05, + "loss": 0.9815, + "step": 3414 + }, + { + "epoch": 0.4699972474538949, + "grad_norm": 1.7913465004547984, + "learning_rate": 1.1452422109243155e-05, + "loss": 1.0586, + "step": 3415 + }, + { + "epoch": 0.4701348747591522, + "grad_norm": 1.7517903052122032, + "learning_rate": 1.144801180674332e-05, + "loss": 1.0372, + "step": 3416 + }, + { + "epoch": 0.47027250206440957, + "grad_norm": 1.8409775017330277, + "learning_rate": 1.1443601216543359e-05, + "loss": 1.1062, + "step": 3417 + }, + { + "epoch": 0.4704101293696669, + "grad_norm": 1.7501999706791864, + "learning_rate": 1.1439190339519597e-05, + "loss": 1.0958, + "step": 3418 + }, + { + "epoch": 0.4705477566749243, + "grad_norm": 1.848297711350787, + "learning_rate": 1.1434779176548414e-05, + "loss": 1.0542, + "step": 3419 + }, + { + "epoch": 0.47068538398018167, + "grad_norm": 1.6400020609396726, + "learning_rate": 1.1430367728506251e-05, + "loss": 1.0036, + "step": 3420 + }, + { + "epoch": 0.470823011285439, + "grad_norm": 1.87957309623098, + "learning_rate": 1.1425955996269597e-05, + "loss": 1.0706, + "step": 3421 + }, + { + "epoch": 0.47096063859069637, + "grad_norm": 1.7924968076770076, + "learning_rate": 1.1421543980715009e-05, + "loss": 1.1028, + "step": 3422 + }, + { + "epoch": 0.47109826589595377, + "grad_norm": 1.9061538869423025, + "learning_rate": 1.1417131682719088e-05, + "loss": 1.0631, + "step": 3423 + }, + { + "epoch": 0.4712358932012111, + "grad_norm": 1.7290513556324654, + "learning_rate": 1.1412719103158503e-05, + "loss": 1.0742, + "step": 3424 + }, + { + "epoch": 0.47137352050646847, + "grad_norm": 1.878145688288672, + "learning_rate": 1.140830624290997e-05, + "loss": 1.079, + "step": 3425 + }, + { + "epoch": 0.4715111478117259, + "grad_norm": 1.7697409137465385, + "learning_rate": 1.1403893102850263e-05, + "loss": 1.0036, + "step": 3426 + }, + { + "epoch": 0.4716487751169832, + "grad_norm": 1.6463625722631452, + "learning_rate": 1.1399479683856211e-05, + "loss": 0.9507, + "step": 3427 + }, + { + "epoch": 0.47178640242224057, + "grad_norm": 1.7485552427334696, + "learning_rate": 1.1395065986804705e-05, + "loss": 1.0057, + "step": 3428 + }, + { + "epoch": 0.4719240297274979, + "grad_norm": 1.804487396361699, + "learning_rate": 1.1390652012572684e-05, + "loss": 1.0873, + "step": 3429 + }, + { + "epoch": 0.4720616570327553, + "grad_norm": 1.7211790340482807, + "learning_rate": 1.1386237762037147e-05, + "loss": 0.9663, + "step": 3430 + }, + { + "epoch": 0.47219928433801267, + "grad_norm": 1.9535106987321724, + "learning_rate": 1.138182323607514e-05, + "loss": 1.113, + "step": 3431 + }, + { + "epoch": 0.47233691164327, + "grad_norm": 2.016452622312912, + "learning_rate": 1.137740843556377e-05, + "loss": 1.1021, + "step": 3432 + }, + { + "epoch": 0.47247453894852737, + "grad_norm": 1.8736887109904863, + "learning_rate": 1.1372993361380199e-05, + "loss": 1.0398, + "step": 3433 + }, + { + "epoch": 0.47261216625378477, + "grad_norm": 2.0374779129456417, + "learning_rate": 1.1368578014401638e-05, + "loss": 1.0328, + "step": 3434 + }, + { + "epoch": 0.4727497935590421, + "grad_norm": 1.7800147182598518, + "learning_rate": 
1.1364162395505365e-05, + "loss": 1.0623, + "step": 3435 + }, + { + "epoch": 0.47288742086429947, + "grad_norm": 2.035339219883238, + "learning_rate": 1.1359746505568694e-05, + "loss": 1.0733, + "step": 3436 + }, + { + "epoch": 0.4730250481695568, + "grad_norm": 1.7423984305643005, + "learning_rate": 1.1355330345469006e-05, + "loss": 1.0837, + "step": 3437 + }, + { + "epoch": 0.4731626754748142, + "grad_norm": 1.7753299306354817, + "learning_rate": 1.1350913916083733e-05, + "loss": 1.006, + "step": 3438 + }, + { + "epoch": 0.47330030278007157, + "grad_norm": 1.9189751182574306, + "learning_rate": 1.1346497218290354e-05, + "loss": 1.067, + "step": 3439 + }, + { + "epoch": 0.4734379300853289, + "grad_norm": 1.6247065491550436, + "learning_rate": 1.134208025296641e-05, + "loss": 1.014, + "step": 3440 + }, + { + "epoch": 0.47357555739058627, + "grad_norm": 2.032874036597277, + "learning_rate": 1.133766302098949e-05, + "loss": 1.125, + "step": 3441 + }, + { + "epoch": 0.47371318469584367, + "grad_norm": 1.7252935866372037, + "learning_rate": 1.1333245523237239e-05, + "loss": 1.0058, + "step": 3442 + }, + { + "epoch": 0.473850812001101, + "grad_norm": 1.6383320135625083, + "learning_rate": 1.1328827760587353e-05, + "loss": 1.0339, + "step": 3443 + }, + { + "epoch": 0.47398843930635837, + "grad_norm": 1.8619014040152602, + "learning_rate": 1.1324409733917581e-05, + "loss": 0.9868, + "step": 3444 + }, + { + "epoch": 0.47412606661161577, + "grad_norm": 1.7512250477107274, + "learning_rate": 1.1319991444105722e-05, + "loss": 1.1194, + "step": 3445 + }, + { + "epoch": 0.4742636939168731, + "grad_norm": 1.7105090113238877, + "learning_rate": 1.1315572892029635e-05, + "loss": 1.1826, + "step": 3446 + }, + { + "epoch": 0.47440132122213047, + "grad_norm": 1.7033523678449303, + "learning_rate": 1.1311154078567224e-05, + "loss": 1.1011, + "step": 3447 + }, + { + "epoch": 0.4745389485273878, + "grad_norm": 1.7664853235559337, + "learning_rate": 1.130673500459644e-05, + "loss": 1.0836, + "step": 3448 + }, + { + "epoch": 0.4746765758326452, + "grad_norm": 1.6825428252960772, + "learning_rate": 1.13023156709953e-05, + "loss": 1.1703, + "step": 3449 + }, + { + "epoch": 0.47481420313790257, + "grad_norm": 1.5872166021080731, + "learning_rate": 1.1297896078641864e-05, + "loss": 1.117, + "step": 3450 + }, + { + "epoch": 0.4749518304431599, + "grad_norm": 1.6726504994266929, + "learning_rate": 1.1293476228414242e-05, + "loss": 1.0637, + "step": 3451 + }, + { + "epoch": 0.47508945774841727, + "grad_norm": 2.335500491252527, + "learning_rate": 1.12890561211906e-05, + "loss": 1.061, + "step": 3452 + }, + { + "epoch": 0.47522708505367467, + "grad_norm": 1.7906724471640978, + "learning_rate": 1.1284635757849151e-05, + "loss": 1.0241, + "step": 3453 + }, + { + "epoch": 0.475364712358932, + "grad_norm": 1.8597762926035852, + "learning_rate": 1.1280215139268163e-05, + "loss": 0.9982, + "step": 3454 + }, + { + "epoch": 0.47550233966418937, + "grad_norm": 1.7953922989735378, + "learning_rate": 1.127579426632595e-05, + "loss": 1.0546, + "step": 3455 + }, + { + "epoch": 0.4756399669694467, + "grad_norm": 1.4927847270143364, + "learning_rate": 1.127137313990088e-05, + "loss": 1.0, + "step": 3456 + }, + { + "epoch": 0.4757775942747041, + "grad_norm": 1.8142620792748545, + "learning_rate": 1.1266951760871372e-05, + "loss": 1.1491, + "step": 3457 + }, + { + "epoch": 0.47591522157996147, + "grad_norm": 1.9029938595405198, + "learning_rate": 1.1262530130115887e-05, + "loss": 1.0352, + "step": 3458 + }, + { + "epoch": 
0.4760528488852188, + "grad_norm": 1.7609993143946812, + "learning_rate": 1.1258108248512951e-05, + "loss": 1.0354, + "step": 3459 + }, + { + "epoch": 0.47619047619047616, + "grad_norm": 1.789998098092864, + "learning_rate": 1.1253686116941129e-05, + "loss": 0.9582, + "step": 3460 + }, + { + "epoch": 0.47632810349573357, + "grad_norm": 1.8523481631060819, + "learning_rate": 1.1249263736279035e-05, + "loss": 1.2396, + "step": 3461 + }, + { + "epoch": 0.4764657308009909, + "grad_norm": 1.7705459937680121, + "learning_rate": 1.124484110740534e-05, + "loss": 1.0169, + "step": 3462 + }, + { + "epoch": 0.47660335810624826, + "grad_norm": 1.5814134703884635, + "learning_rate": 1.1240418231198753e-05, + "loss": 1.0413, + "step": 3463 + }, + { + "epoch": 0.47674098541150567, + "grad_norm": 1.7999615523373897, + "learning_rate": 1.1235995108538045e-05, + "loss": 1.0507, + "step": 3464 + }, + { + "epoch": 0.476878612716763, + "grad_norm": 2.2425542359294135, + "learning_rate": 1.1231571740302025e-05, + "loss": 1.058, + "step": 3465 + }, + { + "epoch": 0.47701624002202037, + "grad_norm": 1.5572219730913666, + "learning_rate": 1.122714812736956e-05, + "loss": 1.0715, + "step": 3466 + }, + { + "epoch": 0.4771538673272777, + "grad_norm": 1.6446105254722247, + "learning_rate": 1.1222724270619563e-05, + "loss": 1.0847, + "step": 3467 + }, + { + "epoch": 0.4772914946325351, + "grad_norm": 1.8424139326415117, + "learning_rate": 1.1218300170930987e-05, + "loss": 1.0688, + "step": 3468 + }, + { + "epoch": 0.47742912193779247, + "grad_norm": 1.696170750012753, + "learning_rate": 1.1213875829182845e-05, + "loss": 1.0131, + "step": 3469 + }, + { + "epoch": 0.4775667492430498, + "grad_norm": 1.6668185977659102, + "learning_rate": 1.1209451246254193e-05, + "loss": 1.0555, + "step": 3470 + }, + { + "epoch": 0.47770437654830716, + "grad_norm": 1.6368871789609842, + "learning_rate": 1.1205026423024132e-05, + "loss": 1.0083, + "step": 3471 + }, + { + "epoch": 0.47784200385356457, + "grad_norm": 1.8113324600535936, + "learning_rate": 1.1200601360371816e-05, + "loss": 1.098, + "step": 3472 + }, + { + "epoch": 0.4779796311588219, + "grad_norm": 1.6570845094441033, + "learning_rate": 1.1196176059176446e-05, + "loss": 1.0723, + "step": 3473 + }, + { + "epoch": 0.47811725846407926, + "grad_norm": 1.793862760962084, + "learning_rate": 1.1191750520317264e-05, + "loss": 1.0552, + "step": 3474 + }, + { + "epoch": 0.4782548857693366, + "grad_norm": 1.5711821602784453, + "learning_rate": 1.1187324744673569e-05, + "loss": 1.0843, + "step": 3475 + }, + { + "epoch": 0.478392513074594, + "grad_norm": 1.8213396747572819, + "learning_rate": 1.1182898733124699e-05, + "loss": 1.0545, + "step": 3476 + }, + { + "epoch": 0.47853014037985137, + "grad_norm": 1.6779529333384606, + "learning_rate": 1.1178472486550043e-05, + "loss": 0.9788, + "step": 3477 + }, + { + "epoch": 0.4786677676851087, + "grad_norm": 1.8718176639067832, + "learning_rate": 1.1174046005829038e-05, + "loss": 1.0809, + "step": 3478 + }, + { + "epoch": 0.47880539499036606, + "grad_norm": 1.6851169197608131, + "learning_rate": 1.116961929184116e-05, + "loss": 0.9601, + "step": 3479 + }, + { + "epoch": 0.47894302229562347, + "grad_norm": 1.538320518285011, + "learning_rate": 1.1165192345465941e-05, + "loss": 1.0137, + "step": 3480 + }, + { + "epoch": 0.4790806496008808, + "grad_norm": 1.747098512522145, + "learning_rate": 1.1160765167582947e-05, + "loss": 1.0023, + "step": 3481 + }, + { + "epoch": 0.47921827690613816, + "grad_norm": 1.8676841228507637, + "learning_rate": 
1.115633775907181e-05, + "loss": 0.9468, + "step": 3482 + }, + { + "epoch": 0.47935590421139557, + "grad_norm": 1.7011510029279595, + "learning_rate": 1.1151910120812185e-05, + "loss": 1.0207, + "step": 3483 + }, + { + "epoch": 0.4794935315166529, + "grad_norm": 1.7519314114814235, + "learning_rate": 1.1147482253683788e-05, + "loss": 1.0562, + "step": 3484 + }, + { + "epoch": 0.47963115882191026, + "grad_norm": 1.6128264537500054, + "learning_rate": 1.1143054158566376e-05, + "loss": 0.9831, + "step": 3485 + }, + { + "epoch": 0.4797687861271676, + "grad_norm": 1.7753107798096068, + "learning_rate": 1.1138625836339746e-05, + "loss": 1.0524, + "step": 3486 + }, + { + "epoch": 0.479906413432425, + "grad_norm": 1.664649603429524, + "learning_rate": 1.1134197287883752e-05, + "loss": 1.1971, + "step": 3487 + }, + { + "epoch": 0.48004404073768236, + "grad_norm": 1.5558478448324111, + "learning_rate": 1.112976851407828e-05, + "loss": 1.0491, + "step": 3488 + }, + { + "epoch": 0.4801816680429397, + "grad_norm": 1.8130513660564276, + "learning_rate": 1.1125339515803271e-05, + "loss": 1.0594, + "step": 3489 + }, + { + "epoch": 0.48031929534819706, + "grad_norm": 1.523778412805424, + "learning_rate": 1.1120910293938703e-05, + "loss": 1.0539, + "step": 3490 + }, + { + "epoch": 0.48045692265345447, + "grad_norm": 1.945464037068747, + "learning_rate": 1.1116480849364603e-05, + "loss": 1.1278, + "step": 3491 + }, + { + "epoch": 0.4805945499587118, + "grad_norm": 1.56474393624702, + "learning_rate": 1.1112051182961042e-05, + "loss": 1.0121, + "step": 3492 + }, + { + "epoch": 0.48073217726396916, + "grad_norm": 1.6116149806382005, + "learning_rate": 1.1107621295608133e-05, + "loss": 1.1033, + "step": 3493 + }, + { + "epoch": 0.4808698045692265, + "grad_norm": 1.6622348856836084, + "learning_rate": 1.1103191188186038e-05, + "loss": 1.1077, + "step": 3494 + }, + { + "epoch": 0.4810074318744839, + "grad_norm": 1.744583728989335, + "learning_rate": 1.1098760861574951e-05, + "loss": 1.0788, + "step": 3495 + }, + { + "epoch": 0.48114505917974126, + "grad_norm": 1.6344367802786446, + "learning_rate": 1.1094330316655119e-05, + "loss": 1.0135, + "step": 3496 + }, + { + "epoch": 0.4812826864849986, + "grad_norm": 1.580084652148695, + "learning_rate": 1.1089899554306835e-05, + "loss": 1.0447, + "step": 3497 + }, + { + "epoch": 0.48142031379025596, + "grad_norm": 1.7172256726484543, + "learning_rate": 1.108546857541043e-05, + "loss": 1.0163, + "step": 3498 + }, + { + "epoch": 0.48155794109551336, + "grad_norm": 1.5833836865657778, + "learning_rate": 1.1081037380846276e-05, + "loss": 1.0574, + "step": 3499 + }, + { + "epoch": 0.4816955684007707, + "grad_norm": 1.6859862996145802, + "learning_rate": 1.1076605971494793e-05, + "loss": 1.1456, + "step": 3500 + }, + { + "epoch": 0.48183319570602806, + "grad_norm": 1.6299681597553304, + "learning_rate": 1.107217434823644e-05, + "loss": 1.0395, + "step": 3501 + }, + { + "epoch": 0.48197082301128547, + "grad_norm": 1.8394087068023697, + "learning_rate": 1.106774251195172e-05, + "loss": 1.0919, + "step": 3502 + }, + { + "epoch": 0.4821084503165428, + "grad_norm": 1.6338034410174067, + "learning_rate": 1.106331046352118e-05, + "loss": 1.1217, + "step": 3503 + }, + { + "epoch": 0.48224607762180016, + "grad_norm": 1.698479071616687, + "learning_rate": 1.1058878203825407e-05, + "loss": 1.0515, + "step": 3504 + }, + { + "epoch": 0.4823837049270575, + "grad_norm": 1.6692086408282805, + "learning_rate": 1.1054445733745025e-05, + "loss": 0.9861, + "step": 3505 + }, + { + "epoch": 
0.4825213322323149, + "grad_norm": 1.6743437880744207, + "learning_rate": 1.1050013054160713e-05, + "loss": 1.1255, + "step": 3506 + }, + { + "epoch": 0.48265895953757226, + "grad_norm": 1.7687771360209608, + "learning_rate": 1.1045580165953179e-05, + "loss": 1.0873, + "step": 3507 + }, + { + "epoch": 0.4827965868428296, + "grad_norm": 1.5505344116454078, + "learning_rate": 1.104114707000318e-05, + "loss": 1.0077, + "step": 3508 + }, + { + "epoch": 0.48293421414808696, + "grad_norm": 1.5418368343185167, + "learning_rate": 1.103671376719151e-05, + "loss": 1.0689, + "step": 3509 + }, + { + "epoch": 0.48307184145334436, + "grad_norm": 1.6344129576227135, + "learning_rate": 1.1032280258399006e-05, + "loss": 1.0506, + "step": 3510 + }, + { + "epoch": 0.4832094687586017, + "grad_norm": 1.6459098985265208, + "learning_rate": 1.1027846544506542e-05, + "loss": 0.9854, + "step": 3511 + }, + { + "epoch": 0.48334709606385906, + "grad_norm": 1.6926117614057368, + "learning_rate": 1.102341262639504e-05, + "loss": 0.9909, + "step": 3512 + }, + { + "epoch": 0.4834847233691164, + "grad_norm": 1.6341847012839852, + "learning_rate": 1.101897850494546e-05, + "loss": 1.0329, + "step": 3513 + }, + { + "epoch": 0.4836223506743738, + "grad_norm": 1.7087022370602607, + "learning_rate": 1.1014544181038798e-05, + "loss": 0.9712, + "step": 3514 + }, + { + "epoch": 0.48375997797963116, + "grad_norm": 1.7346152230224559, + "learning_rate": 1.1010109655556094e-05, + "loss": 1.0887, + "step": 3515 + }, + { + "epoch": 0.4838976052848885, + "grad_norm": 1.7555438649286805, + "learning_rate": 1.100567492937843e-05, + "loss": 1.0306, + "step": 3516 + }, + { + "epoch": 0.48403523259014586, + "grad_norm": 1.7303283324437906, + "learning_rate": 1.1001240003386924e-05, + "loss": 1.1079, + "step": 3517 + }, + { + "epoch": 0.48417285989540326, + "grad_norm": 1.5644133689385946, + "learning_rate": 1.0996804878462735e-05, + "loss": 1.0728, + "step": 3518 + }, + { + "epoch": 0.4843104872006606, + "grad_norm": 1.8264423942066474, + "learning_rate": 1.099236955548706e-05, + "loss": 1.0379, + "step": 3519 + }, + { + "epoch": 0.48444811450591796, + "grad_norm": 1.7265951567237316, + "learning_rate": 1.0987934035341138e-05, + "loss": 0.992, + "step": 3520 + }, + { + "epoch": 0.48458574181117536, + "grad_norm": 2.024122042919884, + "learning_rate": 1.0983498318906245e-05, + "loss": 1.0994, + "step": 3521 + }, + { + "epoch": 0.4847233691164327, + "grad_norm": 1.6472654440087442, + "learning_rate": 1.0979062407063701e-05, + "loss": 1.0589, + "step": 3522 + }, + { + "epoch": 0.48486099642169006, + "grad_norm": 1.7069636837887578, + "learning_rate": 1.097462630069486e-05, + "loss": 1.1063, + "step": 3523 + }, + { + "epoch": 0.4849986237269474, + "grad_norm": 1.6846746197961866, + "learning_rate": 1.097019000068111e-05, + "loss": 1.0683, + "step": 3524 + }, + { + "epoch": 0.4851362510322048, + "grad_norm": 1.5930571762617842, + "learning_rate": 1.0965753507903894e-05, + "loss": 0.9549, + "step": 3525 + }, + { + "epoch": 0.48527387833746216, + "grad_norm": 1.5248622019745357, + "learning_rate": 1.0961316823244672e-05, + "loss": 1.0529, + "step": 3526 + }, + { + "epoch": 0.4854115056427195, + "grad_norm": 1.751758760245993, + "learning_rate": 1.0956879947584954e-05, + "loss": 0.9967, + "step": 3527 + }, + { + "epoch": 0.48554913294797686, + "grad_norm": 1.7160198382755134, + "learning_rate": 1.0952442881806291e-05, + "loss": 1.0415, + "step": 3528 + }, + { + "epoch": 0.48568676025323426, + "grad_norm": 1.8182142133453385, + "learning_rate": 
1.0948005626790266e-05, + "loss": 1.0591, + "step": 3529 + }, + { + "epoch": 0.4858243875584916, + "grad_norm": 1.8036628384516902, + "learning_rate": 1.0943568183418499e-05, + "loss": 0.9981, + "step": 3530 + }, + { + "epoch": 0.48596201486374896, + "grad_norm": 1.6956712328530221, + "learning_rate": 1.0939130552572648e-05, + "loss": 1.0465, + "step": 3531 + }, + { + "epoch": 0.4860996421690063, + "grad_norm": 1.9483672952401996, + "learning_rate": 1.0934692735134413e-05, + "loss": 1.0801, + "step": 3532 + }, + { + "epoch": 0.4862372694742637, + "grad_norm": 1.5872547128618153, + "learning_rate": 1.0930254731985526e-05, + "loss": 1.0565, + "step": 3533 + }, + { + "epoch": 0.48637489677952106, + "grad_norm": 1.7210338012273994, + "learning_rate": 1.092581654400776e-05, + "loss": 1.0056, + "step": 3534 + }, + { + "epoch": 0.4865125240847784, + "grad_norm": 1.6637425662780916, + "learning_rate": 1.0921378172082915e-05, + "loss": 1.0566, + "step": 3535 + }, + { + "epoch": 0.48665015139003576, + "grad_norm": 1.9408092110193347, + "learning_rate": 1.091693961709284e-05, + "loss": 1.0515, + "step": 3536 + }, + { + "epoch": 0.48678777869529316, + "grad_norm": 1.743589855088594, + "learning_rate": 1.0912500879919416e-05, + "loss": 1.1286, + "step": 3537 + }, + { + "epoch": 0.4869254060005505, + "grad_norm": 1.7108384842812852, + "learning_rate": 1.0908061961444559e-05, + "loss": 1.0454, + "step": 3538 + }, + { + "epoch": 0.48706303330580786, + "grad_norm": 1.7834015400923728, + "learning_rate": 1.090362286255022e-05, + "loss": 0.9996, + "step": 3539 + }, + { + "epoch": 0.48720066061106526, + "grad_norm": 1.6258761384419342, + "learning_rate": 1.0899183584118387e-05, + "loss": 1.0489, + "step": 3540 + }, + { + "epoch": 0.4873382879163226, + "grad_norm": 1.8075868724474395, + "learning_rate": 1.0894744127031084e-05, + "loss": 1.0045, + "step": 3541 + }, + { + "epoch": 0.48747591522157996, + "grad_norm": 1.6588153561627, + "learning_rate": 1.0890304492170373e-05, + "loss": 1.0444, + "step": 3542 + }, + { + "epoch": 0.4876135425268373, + "grad_norm": 1.5877071243322192, + "learning_rate": 1.0885864680418343e-05, + "loss": 1.0361, + "step": 3543 + }, + { + "epoch": 0.4877511698320947, + "grad_norm": 1.8180082646382312, + "learning_rate": 1.088142469265713e-05, + "loss": 1.0328, + "step": 3544 + }, + { + "epoch": 0.48788879713735206, + "grad_norm": 1.9133564700784407, + "learning_rate": 1.0876984529768895e-05, + "loss": 1.0465, + "step": 3545 + }, + { + "epoch": 0.4880264244426094, + "grad_norm": 1.6525276133072366, + "learning_rate": 1.0872544192635836e-05, + "loss": 1.0492, + "step": 3546 + }, + { + "epoch": 0.48816405174786676, + "grad_norm": 1.7712878232281224, + "learning_rate": 1.0868103682140191e-05, + "loss": 1.0153, + "step": 3547 + }, + { + "epoch": 0.48830167905312416, + "grad_norm": 1.7745035085087129, + "learning_rate": 1.086366299916423e-05, + "loss": 1.0928, + "step": 3548 + }, + { + "epoch": 0.4884393063583815, + "grad_norm": 1.6005132836849814, + "learning_rate": 1.0859222144590252e-05, + "loss": 1.047, + "step": 3549 + }, + { + "epoch": 0.48857693366363886, + "grad_norm": 1.8217660920924623, + "learning_rate": 1.0854781119300596e-05, + "loss": 1.1032, + "step": 3550 + }, + { + "epoch": 0.4887145609688962, + "grad_norm": 1.777210319809963, + "learning_rate": 1.085033992417763e-05, + "loss": 1.0592, + "step": 3551 + }, + { + "epoch": 0.4888521882741536, + "grad_norm": 1.6165252974250917, + "learning_rate": 1.084589856010376e-05, + "loss": 1.0842, + "step": 3552 + }, + { + "epoch": 
0.48898981557941096, + "grad_norm": 1.4999912340679995, + "learning_rate": 1.0841457027961429e-05, + "loss": 0.997, + "step": 3553 + }, + { + "epoch": 0.4891274428846683, + "grad_norm": 1.8106229842277155, + "learning_rate": 1.0837015328633102e-05, + "loss": 1.0678, + "step": 3554 + }, + { + "epoch": 0.48926507018992565, + "grad_norm": 1.901233887151323, + "learning_rate": 1.0832573463001289e-05, + "loss": 1.0901, + "step": 3555 + }, + { + "epoch": 0.48940269749518306, + "grad_norm": 1.6277103884309303, + "learning_rate": 1.0828131431948523e-05, + "loss": 1.0862, + "step": 3556 + }, + { + "epoch": 0.4895403248004404, + "grad_norm": 1.6314947151289938, + "learning_rate": 1.0823689236357379e-05, + "loss": 0.9439, + "step": 3557 + }, + { + "epoch": 0.48967795210569776, + "grad_norm": 1.6237013771517066, + "learning_rate": 1.081924687711046e-05, + "loss": 1.0498, + "step": 3558 + }, + { + "epoch": 0.48981557941095516, + "grad_norm": 1.7363682642100624, + "learning_rate": 1.0814804355090399e-05, + "loss": 1.1205, + "step": 3559 + }, + { + "epoch": 0.4899532067162125, + "grad_norm": 1.541585670538475, + "learning_rate": 1.0810361671179867e-05, + "loss": 0.9952, + "step": 3560 + }, + { + "epoch": 0.49009083402146986, + "grad_norm": 1.650520818593143, + "learning_rate": 1.0805918826261563e-05, + "loss": 1.072, + "step": 3561 + }, + { + "epoch": 0.4902284613267272, + "grad_norm": 1.609325540762437, + "learning_rate": 1.0801475821218219e-05, + "loss": 0.982, + "step": 3562 + }, + { + "epoch": 0.4903660886319846, + "grad_norm": 1.6053868007785101, + "learning_rate": 1.0797032656932602e-05, + "loss": 1.0525, + "step": 3563 + }, + { + "epoch": 0.49050371593724196, + "grad_norm": 1.6045610581867689, + "learning_rate": 1.0792589334287506e-05, + "loss": 1.1038, + "step": 3564 + }, + { + "epoch": 0.4906413432424993, + "grad_norm": 1.6326746237203646, + "learning_rate": 1.078814585416576e-05, + "loss": 1.0913, + "step": 3565 + }, + { + "epoch": 0.49077897054775665, + "grad_norm": 1.5337801014644312, + "learning_rate": 1.0783702217450219e-05, + "loss": 0.9896, + "step": 3566 + }, + { + "epoch": 0.49091659785301406, + "grad_norm": 1.6948687633743464, + "learning_rate": 1.0779258425023773e-05, + "loss": 1.0409, + "step": 3567 + }, + { + "epoch": 0.4910542251582714, + "grad_norm": 1.6231738021872701, + "learning_rate": 1.0774814477769348e-05, + "loss": 1.008, + "step": 3568 + }, + { + "epoch": 0.49119185246352876, + "grad_norm": 1.5716834903100676, + "learning_rate": 1.0770370376569892e-05, + "loss": 0.9943, + "step": 3569 + }, + { + "epoch": 0.4913294797687861, + "grad_norm": 1.722729925653256, + "learning_rate": 1.0765926122308387e-05, + "loss": 1.1153, + "step": 3570 + }, + { + "epoch": 0.4914671070740435, + "grad_norm": 1.7875398513381686, + "learning_rate": 1.0761481715867847e-05, + "loss": 1.0571, + "step": 3571 + }, + { + "epoch": 0.49160473437930086, + "grad_norm": 2.2108313271017193, + "learning_rate": 1.0757037158131314e-05, + "loss": 1.1396, + "step": 3572 + }, + { + "epoch": 0.4917423616845582, + "grad_norm": 1.9209424684229361, + "learning_rate": 1.0752592449981858e-05, + "loss": 1.0804, + "step": 3573 + }, + { + "epoch": 0.49187998898981555, + "grad_norm": 1.668238514864507, + "learning_rate": 1.0748147592302584e-05, + "loss": 1.0199, + "step": 3574 + }, + { + "epoch": 0.49201761629507296, + "grad_norm": 1.9882455416410778, + "learning_rate": 1.0743702585976628e-05, + "loss": 1.0336, + "step": 3575 + }, + { + "epoch": 0.4921552436003303, + "grad_norm": 1.7027484847816787, + "learning_rate": 
1.0739257431887144e-05, + "loss": 1.0194, + "step": 3576 + }, + { + "epoch": 0.49229287090558765, + "grad_norm": 1.8411809651606363, + "learning_rate": 1.0734812130917331e-05, + "loss": 1.104, + "step": 3577 + }, + { + "epoch": 0.49243049821084506, + "grad_norm": 1.8707777196196447, + "learning_rate": 1.0730366683950406e-05, + "loss": 0.9991, + "step": 3578 + }, + { + "epoch": 0.4925681255161024, + "grad_norm": 1.5934745086991755, + "learning_rate": 1.0725921091869616e-05, + "loss": 1.0111, + "step": 3579 + }, + { + "epoch": 0.49270575282135975, + "grad_norm": 1.6936956222628783, + "learning_rate": 1.0721475355558243e-05, + "loss": 1.0579, + "step": 3580 + }, + { + "epoch": 0.4928433801266171, + "grad_norm": 1.6433330423273211, + "learning_rate": 1.0717029475899593e-05, + "loss": 1.1173, + "step": 3581 + }, + { + "epoch": 0.4929810074318745, + "grad_norm": 1.4375624327102545, + "learning_rate": 1.0712583453777002e-05, + "loss": 0.9984, + "step": 3582 + }, + { + "epoch": 0.49311863473713186, + "grad_norm": 1.7047067038360502, + "learning_rate": 1.070813729007383e-05, + "loss": 0.9384, + "step": 3583 + }, + { + "epoch": 0.4932562620423892, + "grad_norm": 1.9033695426736614, + "learning_rate": 1.0703690985673474e-05, + "loss": 0.9702, + "step": 3584 + }, + { + "epoch": 0.49339388934764655, + "grad_norm": 1.6028695622701497, + "learning_rate": 1.069924454145935e-05, + "loss": 1.0602, + "step": 3585 + }, + { + "epoch": 0.49353151665290396, + "grad_norm": 1.5792029918591102, + "learning_rate": 1.069479795831491e-05, + "loss": 1.0214, + "step": 3586 + }, + { + "epoch": 0.4936691439581613, + "grad_norm": 1.7874895050963413, + "learning_rate": 1.0690351237123626e-05, + "loss": 0.9222, + "step": 3587 + }, + { + "epoch": 0.49380677126341865, + "grad_norm": 1.7626879257229984, + "learning_rate": 1.0685904378768998e-05, + "loss": 1.0143, + "step": 3588 + }, + { + "epoch": 0.493944398568676, + "grad_norm": 1.8953974613526487, + "learning_rate": 1.0681457384134564e-05, + "loss": 1.088, + "step": 3589 + }, + { + "epoch": 0.4940820258739334, + "grad_norm": 1.787501322781793, + "learning_rate": 1.0677010254103872e-05, + "loss": 1.081, + "step": 3590 + }, + { + "epoch": 0.49421965317919075, + "grad_norm": 1.7094726927793387, + "learning_rate": 1.0672562989560511e-05, + "loss": 1.0665, + "step": 3591 + }, + { + "epoch": 0.4943572804844481, + "grad_norm": 1.7366339545950398, + "learning_rate": 1.066811559138809e-05, + "loss": 1.0937, + "step": 3592 + }, + { + "epoch": 0.49449490778970545, + "grad_norm": 1.5584257757784012, + "learning_rate": 1.0663668060470246e-05, + "loss": 0.9538, + "step": 3593 + }, + { + "epoch": 0.49463253509496286, + "grad_norm": 1.637444794043393, + "learning_rate": 1.0659220397690643e-05, + "loss": 1.0215, + "step": 3594 + }, + { + "epoch": 0.4947701624002202, + "grad_norm": 1.790669341553123, + "learning_rate": 1.0654772603932972e-05, + "loss": 1.0808, + "step": 3595 + }, + { + "epoch": 0.49490778970547755, + "grad_norm": 1.747385323715335, + "learning_rate": 1.0650324680080948e-05, + "loss": 1.1295, + "step": 3596 + }, + { + "epoch": 0.49504541701073496, + "grad_norm": 1.8021376377240488, + "learning_rate": 1.064587662701831e-05, + "loss": 1.0112, + "step": 3597 + }, + { + "epoch": 0.4951830443159923, + "grad_norm": 1.7306591852080317, + "learning_rate": 1.0641428445628827e-05, + "loss": 1.1666, + "step": 3598 + }, + { + "epoch": 0.49532067162124965, + "grad_norm": 1.7837803004465724, + "learning_rate": 1.0636980136796294e-05, + "loss": 1.1454, + "step": 3599 + }, + { + "epoch": 
0.495458298926507, + "grad_norm": 1.637576550159746, + "learning_rate": 1.0632531701404524e-05, + "loss": 1.0093, + "step": 3600 + }, + { + "epoch": 0.4955959262317644, + "grad_norm": 1.721484005039665, + "learning_rate": 1.0628083140337368e-05, + "loss": 1.0718, + "step": 3601 + }, + { + "epoch": 0.49573355353702175, + "grad_norm": 1.6340611641789315, + "learning_rate": 1.0623634454478686e-05, + "loss": 1.0315, + "step": 3602 + }, + { + "epoch": 0.4958711808422791, + "grad_norm": 1.4875693776699317, + "learning_rate": 1.0619185644712377e-05, + "loss": 0.9972, + "step": 3603 + }, + { + "epoch": 0.49600880814753645, + "grad_norm": 1.536553497918394, + "learning_rate": 1.0614736711922356e-05, + "loss": 1.0261, + "step": 3604 + }, + { + "epoch": 0.49614643545279385, + "grad_norm": 1.6635709470715365, + "learning_rate": 1.0610287656992564e-05, + "loss": 1.0309, + "step": 3605 + }, + { + "epoch": 0.4962840627580512, + "grad_norm": 1.674577456774639, + "learning_rate": 1.0605838480806967e-05, + "loss": 0.984, + "step": 3606 + }, + { + "epoch": 0.49642169006330855, + "grad_norm": 1.773679587939119, + "learning_rate": 1.060138918424956e-05, + "loss": 1.0404, + "step": 3607 + }, + { + "epoch": 0.4965593173685659, + "grad_norm": 1.7963060828139978, + "learning_rate": 1.0596939768204353e-05, + "loss": 1.0476, + "step": 3608 + }, + { + "epoch": 0.4966969446738233, + "grad_norm": 1.5995070628774557, + "learning_rate": 1.0592490233555383e-05, + "loss": 1.0898, + "step": 3609 + }, + { + "epoch": 0.49683457197908065, + "grad_norm": 2.0680028649885878, + "learning_rate": 1.0588040581186716e-05, + "loss": 0.9877, + "step": 3610 + }, + { + "epoch": 0.496972199284338, + "grad_norm": 1.5483714623871836, + "learning_rate": 1.0583590811982437e-05, + "loss": 1.0199, + "step": 3611 + }, + { + "epoch": 0.49710982658959535, + "grad_norm": 1.63478872273824, + "learning_rate": 1.057914092682665e-05, + "loss": 1.0808, + "step": 3612 + }, + { + "epoch": 0.49724745389485275, + "grad_norm": 1.5937401841631034, + "learning_rate": 1.0574690926603488e-05, + "loss": 1.0132, + "step": 3613 + }, + { + "epoch": 0.4973850812001101, + "grad_norm": 1.725287176899473, + "learning_rate": 1.0570240812197103e-05, + "loss": 1.1035, + "step": 3614 + }, + { + "epoch": 0.49752270850536745, + "grad_norm": 1.698308149921419, + "learning_rate": 1.0565790584491677e-05, + "loss": 1.0335, + "step": 3615 + }, + { + "epoch": 0.49766033581062485, + "grad_norm": 1.6124922349424369, + "learning_rate": 1.0561340244371407e-05, + "loss": 1.0846, + "step": 3616 + }, + { + "epoch": 0.4977979631158822, + "grad_norm": 1.8394423116958303, + "learning_rate": 1.0556889792720516e-05, + "loss": 0.9412, + "step": 3617 + }, + { + "epoch": 0.49793559042113955, + "grad_norm": 1.9080921434643454, + "learning_rate": 1.0552439230423244e-05, + "loss": 1.001, + "step": 3618 + }, + { + "epoch": 0.4980732177263969, + "grad_norm": 1.810735264072154, + "learning_rate": 1.0547988558363858e-05, + "loss": 1.0359, + "step": 3619 + }, + { + "epoch": 0.4982108450316543, + "grad_norm": 1.6328037385370924, + "learning_rate": 1.0543537777426649e-05, + "loss": 1.054, + "step": 3620 + }, + { + "epoch": 0.49834847233691165, + "grad_norm": 1.4789925779419533, + "learning_rate": 1.0539086888495922e-05, + "loss": 1.0309, + "step": 3621 + }, + { + "epoch": 0.498486099642169, + "grad_norm": 1.8183255533610405, + "learning_rate": 1.053463589245601e-05, + "loss": 1.077, + "step": 3622 + }, + { + "epoch": 0.49862372694742635, + "grad_norm": 1.617203455947018, + "learning_rate": 
1.0530184790191267e-05, + "loss": 1.0888, + "step": 3623 + }, + { + "epoch": 0.49876135425268375, + "grad_norm": 1.632080188536248, + "learning_rate": 1.0525733582586061e-05, + "loss": 1.0153, + "step": 3624 + }, + { + "epoch": 0.4988989815579411, + "grad_norm": 1.8022685267978615, + "learning_rate": 1.052128227052479e-05, + "loss": 1.1296, + "step": 3625 + }, + { + "epoch": 0.49903660886319845, + "grad_norm": 1.8534712718244832, + "learning_rate": 1.0516830854891866e-05, + "loss": 1.144, + "step": 3626 + }, + { + "epoch": 0.4991742361684558, + "grad_norm": 1.6285972283508823, + "learning_rate": 1.0512379336571725e-05, + "loss": 1.0246, + "step": 3627 + }, + { + "epoch": 0.4993118634737132, + "grad_norm": 2.093019253537683, + "learning_rate": 1.0507927716448827e-05, + "loss": 1.1888, + "step": 3628 + }, + { + "epoch": 0.49944949077897055, + "grad_norm": 1.570896400760891, + "learning_rate": 1.050347599540764e-05, + "loss": 0.9462, + "step": 3629 + }, + { + "epoch": 0.4995871180842279, + "grad_norm": 1.6907771311454358, + "learning_rate": 1.0499024174332667e-05, + "loss": 1.0303, + "step": 3630 + }, + { + "epoch": 0.49972474538948525, + "grad_norm": 1.9629858081703044, + "learning_rate": 1.0494572254108422e-05, + "loss": 0.9924, + "step": 3631 + }, + { + "epoch": 0.49986237269474265, + "grad_norm": 1.736487979873794, + "learning_rate": 1.0490120235619439e-05, + "loss": 1.0266, + "step": 3632 + }, + { + "epoch": 0.5, + "grad_norm": 1.866803282577337, + "learning_rate": 1.0485668119750273e-05, + "loss": 1.0246, + "step": 3633 + }, + { + "epoch": 0.5001376273052573, + "grad_norm": 1.803134982184472, + "learning_rate": 1.0481215907385503e-05, + "loss": 1.0757, + "step": 3634 + }, + { + "epoch": 0.5002752546105147, + "grad_norm": 1.818991274709966, + "learning_rate": 1.0476763599409719e-05, + "loss": 1.1309, + "step": 3635 + }, + { + "epoch": 0.500412881915772, + "grad_norm": 2.0056917166173824, + "learning_rate": 1.0472311196707532e-05, + "loss": 1.0962, + "step": 3636 + }, + { + "epoch": 0.5005505092210295, + "grad_norm": 1.7948790292155148, + "learning_rate": 1.0467858700163575e-05, + "loss": 1.0443, + "step": 3637 + }, + { + "epoch": 0.5006881365262869, + "grad_norm": 1.620657912612342, + "learning_rate": 1.0463406110662502e-05, + "loss": 1.0692, + "step": 3638 + }, + { + "epoch": 0.5008257638315442, + "grad_norm": 1.6784728545621452, + "learning_rate": 1.0458953429088975e-05, + "loss": 1.0731, + "step": 3639 + }, + { + "epoch": 0.5009633911368016, + "grad_norm": 1.8236351897225729, + "learning_rate": 1.0454500656327683e-05, + "loss": 1.0534, + "step": 3640 + }, + { + "epoch": 0.5011010184420589, + "grad_norm": 1.6964682727433784, + "learning_rate": 1.0450047793263337e-05, + "loss": 1.0647, + "step": 3641 + }, + { + "epoch": 0.5012386457473162, + "grad_norm": 1.8288714228239622, + "learning_rate": 1.044559484078065e-05, + "loss": 0.9893, + "step": 3642 + }, + { + "epoch": 0.5013762730525736, + "grad_norm": 1.6805213707480573, + "learning_rate": 1.0441141799764371e-05, + "loss": 1.0351, + "step": 3643 + }, + { + "epoch": 0.5015139003578309, + "grad_norm": 1.7384175133945252, + "learning_rate": 1.0436688671099252e-05, + "loss": 0.9991, + "step": 3644 + }, + { + "epoch": 0.5016515276630884, + "grad_norm": 1.6194220832537984, + "learning_rate": 1.0432235455670072e-05, + "loss": 0.9723, + "step": 3645 + }, + { + "epoch": 0.5017891549683458, + "grad_norm": 1.7205513483578003, + "learning_rate": 1.0427782154361625e-05, + "loss": 1.0988, + "step": 3646 + }, + { + "epoch": 0.5019267822736031, + 
"grad_norm": 1.6024933545225586, + "learning_rate": 1.042332876805872e-05, + "loss": 1.1487, + "step": 3647 + }, + { + "epoch": 0.5020644095788604, + "grad_norm": 1.6459542067469202, + "learning_rate": 1.0418875297646183e-05, + "loss": 1.0045, + "step": 3648 + }, + { + "epoch": 0.5022020368841178, + "grad_norm": 1.7443434476130137, + "learning_rate": 1.0414421744008857e-05, + "loss": 1.0653, + "step": 3649 + }, + { + "epoch": 0.5023396641893751, + "grad_norm": 1.6814560218440744, + "learning_rate": 1.0409968108031605e-05, + "loss": 1.0421, + "step": 3650 + }, + { + "epoch": 0.5024772914946325, + "grad_norm": 1.6464292491425856, + "learning_rate": 1.0405514390599303e-05, + "loss": 1.0209, + "step": 3651 + }, + { + "epoch": 0.5026149187998898, + "grad_norm": 1.7979900805457167, + "learning_rate": 1.0401060592596838e-05, + "loss": 1.0851, + "step": 3652 + }, + { + "epoch": 0.5027525461051473, + "grad_norm": 2.1100172281340193, + "learning_rate": 1.0396606714909127e-05, + "loss": 1.0334, + "step": 3653 + }, + { + "epoch": 0.5028901734104047, + "grad_norm": 1.9233464480399822, + "learning_rate": 1.039215275842109e-05, + "loss": 1.1319, + "step": 3654 + }, + { + "epoch": 0.503027800715662, + "grad_norm": 1.6498657479995467, + "learning_rate": 1.0387698724017668e-05, + "loss": 1.0685, + "step": 3655 + }, + { + "epoch": 0.5031654280209193, + "grad_norm": 1.8524281908235674, + "learning_rate": 1.0383244612583815e-05, + "loss": 1.1121, + "step": 3656 + }, + { + "epoch": 0.5033030553261767, + "grad_norm": 1.5900931978272586, + "learning_rate": 1.0378790425004506e-05, + "loss": 1.0318, + "step": 3657 + }, + { + "epoch": 0.503440682631434, + "grad_norm": 1.606794452020487, + "learning_rate": 1.0374336162164722e-05, + "loss": 1.001, + "step": 3658 + }, + { + "epoch": 0.5035783099366914, + "grad_norm": 1.765166752234181, + "learning_rate": 1.0369881824949469e-05, + "loss": 1.0793, + "step": 3659 + }, + { + "epoch": 0.5037159372419489, + "grad_norm": 1.8339332660364427, + "learning_rate": 1.0365427414243756e-05, + "loss": 1.0924, + "step": 3660 + }, + { + "epoch": 0.5038535645472062, + "grad_norm": 1.612316780324593, + "learning_rate": 1.0360972930932622e-05, + "loss": 1.1001, + "step": 3661 + }, + { + "epoch": 0.5039911918524635, + "grad_norm": 1.551479507101269, + "learning_rate": 1.0356518375901109e-05, + "loss": 1.0443, + "step": 3662 + }, + { + "epoch": 0.5041288191577209, + "grad_norm": 1.7203104169229857, + "learning_rate": 1.035206375003427e-05, + "loss": 1.046, + "step": 3663 + }, + { + "epoch": 0.5042664464629782, + "grad_norm": 1.6533651930309448, + "learning_rate": 1.0347609054217187e-05, + "loss": 1.0369, + "step": 3664 + }, + { + "epoch": 0.5044040737682356, + "grad_norm": 1.956336430699767, + "learning_rate": 1.0343154289334942e-05, + "loss": 1.06, + "step": 3665 + }, + { + "epoch": 0.5045417010734929, + "grad_norm": 1.6841171227539313, + "learning_rate": 1.0338699456272639e-05, + "loss": 1.0529, + "step": 3666 + }, + { + "epoch": 0.5046793283787503, + "grad_norm": 1.7529286707237446, + "learning_rate": 1.0334244555915388e-05, + "loss": 1.0199, + "step": 3667 + }, + { + "epoch": 0.5048169556840078, + "grad_norm": 1.5963719489699255, + "learning_rate": 1.032978958914832e-05, + "loss": 1.0522, + "step": 3668 + }, + { + "epoch": 0.5049545829892651, + "grad_norm": 1.6442938121182555, + "learning_rate": 1.0325334556856572e-05, + "loss": 1.0516, + "step": 3669 + }, + { + "epoch": 0.5050922102945224, + "grad_norm": 1.6496228970468823, + "learning_rate": 1.0320879459925306e-05, + "loss": 1.0674, + 
"step": 3670 + }, + { + "epoch": 0.5052298375997798, + "grad_norm": 1.7738652646046575, + "learning_rate": 1.031642429923968e-05, + "loss": 1.1196, + "step": 3671 + }, + { + "epoch": 0.5053674649050371, + "grad_norm": 1.7512180479554356, + "learning_rate": 1.031196907568488e-05, + "loss": 1.0533, + "step": 3672 + }, + { + "epoch": 0.5055050922102945, + "grad_norm": 1.6690540317633202, + "learning_rate": 1.0307513790146092e-05, + "loss": 0.9972, + "step": 3673 + }, + { + "epoch": 0.5056427195155518, + "grad_norm": 1.6251525794378752, + "learning_rate": 1.0303058443508524e-05, + "loss": 1.0118, + "step": 3674 + }, + { + "epoch": 0.5057803468208093, + "grad_norm": 1.7283757766951422, + "learning_rate": 1.0298603036657394e-05, + "loss": 1.0931, + "step": 3675 + }, + { + "epoch": 0.5059179741260666, + "grad_norm": 1.7087621949750793, + "learning_rate": 1.0294147570477924e-05, + "loss": 0.9911, + "step": 3676 + }, + { + "epoch": 0.506055601431324, + "grad_norm": 1.6233840094476675, + "learning_rate": 1.0289692045855362e-05, + "loss": 1.0266, + "step": 3677 + }, + { + "epoch": 0.5061932287365813, + "grad_norm": 1.82417963132422, + "learning_rate": 1.0285236463674952e-05, + "loss": 0.9795, + "step": 3678 + }, + { + "epoch": 0.5063308560418387, + "grad_norm": 2.5928922843401794, + "learning_rate": 1.0280780824821965e-05, + "loss": 1.0613, + "step": 3679 + }, + { + "epoch": 0.506468483347096, + "grad_norm": 1.7288797356142398, + "learning_rate": 1.027632513018167e-05, + "loss": 1.1681, + "step": 3680 + }, + { + "epoch": 0.5066061106523534, + "grad_norm": 1.7681771958526726, + "learning_rate": 1.0271869380639355e-05, + "loss": 1.0144, + "step": 3681 + }, + { + "epoch": 0.5067437379576107, + "grad_norm": 1.6423966704100725, + "learning_rate": 1.0267413577080315e-05, + "loss": 1.0661, + "step": 3682 + }, + { + "epoch": 0.5068813652628682, + "grad_norm": 1.570715117590332, + "learning_rate": 1.0262957720389857e-05, + "loss": 1.0323, + "step": 3683 + }, + { + "epoch": 0.5070189925681255, + "grad_norm": 1.6558910770797217, + "learning_rate": 1.02585018114533e-05, + "loss": 1.0297, + "step": 3684 + }, + { + "epoch": 0.5071566198733829, + "grad_norm": 1.7295672012698569, + "learning_rate": 1.025404585115597e-05, + "loss": 1.0854, + "step": 3685 + }, + { + "epoch": 0.5072942471786402, + "grad_norm": 1.5846774849157363, + "learning_rate": 1.0249589840383211e-05, + "loss": 1.0253, + "step": 3686 + }, + { + "epoch": 0.5074318744838976, + "grad_norm": 1.7786646903483323, + "learning_rate": 1.0245133780020364e-05, + "loss": 1.0074, + "step": 3687 + }, + { + "epoch": 0.5075695017891549, + "grad_norm": 1.9360098635358838, + "learning_rate": 1.0240677670952792e-05, + "loss": 1.1628, + "step": 3688 + }, + { + "epoch": 0.5077071290944123, + "grad_norm": 1.6677157386515384, + "learning_rate": 1.023622151406586e-05, + "loss": 1.0564, + "step": 3689 + }, + { + "epoch": 0.5078447563996696, + "grad_norm": 2.0006921397486583, + "learning_rate": 1.0231765310244948e-05, + "loss": 1.1413, + "step": 3690 + }, + { + "epoch": 0.5079823837049271, + "grad_norm": 1.543490019914018, + "learning_rate": 1.022730906037544e-05, + "loss": 1.0165, + "step": 3691 + }, + { + "epoch": 0.5081200110101844, + "grad_norm": 1.7960222886831883, + "learning_rate": 1.0222852765342732e-05, + "loss": 1.1044, + "step": 3692 + }, + { + "epoch": 0.5082576383154418, + "grad_norm": 1.6839564983728792, + "learning_rate": 1.021839642603223e-05, + "loss": 1.032, + "step": 3693 + }, + { + "epoch": 0.5083952656206991, + "grad_norm": 1.4523234570971106, + 
"learning_rate": 1.021394004332935e-05, + "loss": 1.0247, + "step": 3694 + }, + { + "epoch": 0.5085328929259565, + "grad_norm": 1.4614194327142296, + "learning_rate": 1.0209483618119513e-05, + "loss": 1.0572, + "step": 3695 + }, + { + "epoch": 0.5086705202312138, + "grad_norm": 1.6763058207341053, + "learning_rate": 1.0205027151288147e-05, + "loss": 1.0782, + "step": 3696 + }, + { + "epoch": 0.5088081475364712, + "grad_norm": 1.8280593860327268, + "learning_rate": 1.0200570643720693e-05, + "loss": 1.0466, + "step": 3697 + }, + { + "epoch": 0.5089457748417286, + "grad_norm": 1.7538140873688142, + "learning_rate": 1.0196114096302597e-05, + "loss": 1.1403, + "step": 3698 + }, + { + "epoch": 0.509083402146986, + "grad_norm": 1.7658685185606402, + "learning_rate": 1.0191657509919315e-05, + "loss": 1.0587, + "step": 3699 + }, + { + "epoch": 0.5092210294522433, + "grad_norm": 1.7729097365408693, + "learning_rate": 1.0187200885456306e-05, + "loss": 1.0477, + "step": 3700 + }, + { + "epoch": 0.5093586567575007, + "grad_norm": 1.820431589560232, + "learning_rate": 1.0182744223799046e-05, + "loss": 0.9971, + "step": 3701 + }, + { + "epoch": 0.509496284062758, + "grad_norm": 1.743844584621045, + "learning_rate": 1.0178287525833012e-05, + "loss": 1.0243, + "step": 3702 + }, + { + "epoch": 0.5096339113680154, + "grad_norm": 1.6903413632210242, + "learning_rate": 1.017383079244369e-05, + "loss": 1.0348, + "step": 3703 + }, + { + "epoch": 0.5097715386732727, + "grad_norm": 1.8460537715040826, + "learning_rate": 1.0169374024516562e-05, + "loss": 0.9714, + "step": 3704 + }, + { + "epoch": 0.5099091659785301, + "grad_norm": 1.5125088212257936, + "learning_rate": 1.0164917222937138e-05, + "loss": 0.9888, + "step": 3705 + }, + { + "epoch": 0.5100467932837875, + "grad_norm": 1.825005120629029, + "learning_rate": 1.0160460388590918e-05, + "loss": 0.9849, + "step": 3706 + }, + { + "epoch": 0.5101844205890449, + "grad_norm": 1.8582237919090017, + "learning_rate": 1.0156003522363413e-05, + "loss": 1.1005, + "step": 3707 + }, + { + "epoch": 0.5103220478943022, + "grad_norm": 1.7306031823587031, + "learning_rate": 1.0151546625140144e-05, + "loss": 1.0635, + "step": 3708 + }, + { + "epoch": 0.5104596751995596, + "grad_norm": 1.5780905729950525, + "learning_rate": 1.0147089697806638e-05, + "loss": 1.0444, + "step": 3709 + }, + { + "epoch": 0.5105973025048169, + "grad_norm": 1.7503046433238172, + "learning_rate": 1.0142632741248421e-05, + "loss": 1.0751, + "step": 3710 + }, + { + "epoch": 0.5107349298100743, + "grad_norm": 1.656146099387431, + "learning_rate": 1.0138175756351032e-05, + "loss": 1.0329, + "step": 3711 + }, + { + "epoch": 0.5108725571153316, + "grad_norm": 1.7012765662785974, + "learning_rate": 1.0133718744000009e-05, + "loss": 0.9921, + "step": 3712 + }, + { + "epoch": 0.5110101844205891, + "grad_norm": 1.599312054472005, + "learning_rate": 1.0129261705080906e-05, + "loss": 0.9828, + "step": 3713 + }, + { + "epoch": 0.5111478117258464, + "grad_norm": 1.5802052450242108, + "learning_rate": 1.0124804640479268e-05, + "loss": 0.9721, + "step": 3714 + }, + { + "epoch": 0.5112854390311038, + "grad_norm": 1.5633795859045756, + "learning_rate": 1.0120347551080658e-05, + "loss": 1.0904, + "step": 3715 + }, + { + "epoch": 0.5114230663363611, + "grad_norm": 1.6904453085234274, + "learning_rate": 1.0115890437770634e-05, + "loss": 1.1297, + "step": 3716 + }, + { + "epoch": 0.5115606936416185, + "grad_norm": 1.7482556045701696, + "learning_rate": 1.011143330143477e-05, + "loss": 1.0572, + "step": 3717 + }, + { + 
"epoch": 0.5116983209468758, + "grad_norm": 1.639525588162019, + "learning_rate": 1.0106976142958635e-05, + "loss": 1.0319, + "step": 3718 + }, + { + "epoch": 0.5118359482521332, + "grad_norm": 1.7377113405788274, + "learning_rate": 1.0102518963227803e-05, + "loss": 1.089, + "step": 3719 + }, + { + "epoch": 0.5119735755573905, + "grad_norm": 1.7632065698167017, + "learning_rate": 1.0098061763127858e-05, + "loss": 1.0105, + "step": 3720 + }, + { + "epoch": 0.512111202862648, + "grad_norm": 1.9817509295490594, + "learning_rate": 1.009360454354438e-05, + "loss": 1.1615, + "step": 3721 + }, + { + "epoch": 0.5122488301679053, + "grad_norm": 2.155299597310412, + "learning_rate": 1.0089147305362963e-05, + "loss": 1.1497, + "step": 3722 + }, + { + "epoch": 0.5123864574731627, + "grad_norm": 1.7152715888780967, + "learning_rate": 1.0084690049469192e-05, + "loss": 1.0044, + "step": 3723 + }, + { + "epoch": 0.51252408477842, + "grad_norm": 1.7350698569933563, + "learning_rate": 1.0080232776748674e-05, + "loss": 1.0503, + "step": 3724 + }, + { + "epoch": 0.5126617120836774, + "grad_norm": 1.735764841018283, + "learning_rate": 1.0075775488086996e-05, + "loss": 0.9355, + "step": 3725 + }, + { + "epoch": 0.5127993393889347, + "grad_norm": 1.8867100601426237, + "learning_rate": 1.007131818436977e-05, + "loss": 1.0739, + "step": 3726 + }, + { + "epoch": 0.5129369666941921, + "grad_norm": 1.6994828166701672, + "learning_rate": 1.0066860866482595e-05, + "loss": 0.9865, + "step": 3727 + }, + { + "epoch": 0.5130745939994494, + "grad_norm": 1.5722621818538653, + "learning_rate": 1.006240353531108e-05, + "loss": 0.9691, + "step": 3728 + }, + { + "epoch": 0.5132122213047069, + "grad_norm": 1.773267881388477, + "learning_rate": 1.0057946191740839e-05, + "loss": 1.1118, + "step": 3729 + }, + { + "epoch": 0.5133498486099642, + "grad_norm": 1.6831046517602444, + "learning_rate": 1.0053488836657483e-05, + "loss": 0.9948, + "step": 3730 + }, + { + "epoch": 0.5134874759152216, + "grad_norm": 1.7493845421698786, + "learning_rate": 1.0049031470946624e-05, + "loss": 1.0342, + "step": 3731 + }, + { + "epoch": 0.5136251032204789, + "grad_norm": 1.7496108141988185, + "learning_rate": 1.0044574095493884e-05, + "loss": 1.0449, + "step": 3732 + }, + { + "epoch": 0.5137627305257363, + "grad_norm": 1.7186088852375372, + "learning_rate": 1.0040116711184884e-05, + "loss": 0.9886, + "step": 3733 + }, + { + "epoch": 0.5139003578309936, + "grad_norm": 1.840951697227051, + "learning_rate": 1.0035659318905244e-05, + "loss": 0.9859, + "step": 3734 + }, + { + "epoch": 0.514037985136251, + "grad_norm": 1.5660185396231594, + "learning_rate": 1.0031201919540583e-05, + "loss": 0.9309, + "step": 3735 + }, + { + "epoch": 0.5141756124415084, + "grad_norm": 1.6383801012737529, + "learning_rate": 1.0026744513976527e-05, + "loss": 1.0711, + "step": 3736 + }, + { + "epoch": 0.5143132397467658, + "grad_norm": 1.6443499115211788, + "learning_rate": 1.0022287103098702e-05, + "loss": 0.9736, + "step": 3737 + }, + { + "epoch": 0.5144508670520231, + "grad_norm": 1.5188753468483267, + "learning_rate": 1.0017829687792732e-05, + "loss": 1.0048, + "step": 3738 + }, + { + "epoch": 0.5145884943572805, + "grad_norm": 1.7140193032228657, + "learning_rate": 1.0013372268944253e-05, + "loss": 1.0075, + "step": 3739 + }, + { + "epoch": 0.5147261216625378, + "grad_norm": 1.6243350415644062, + "learning_rate": 1.0008914847438884e-05, + "loss": 1.0515, + "step": 3740 + }, + { + "epoch": 0.5148637489677952, + "grad_norm": 1.8033473776499138, + "learning_rate": 
1.0004457424162258e-05, + "loss": 1.0539, + "step": 3741 + }, + { + "epoch": 0.5150013762730525, + "grad_norm": 1.6825516404366865, + "learning_rate": 1e-05, + "loss": 0.9723, + "step": 3742 + }, + { + "epoch": 0.5151390035783099, + "grad_norm": 1.704995417090172, + "learning_rate": 9.995542575837747e-06, + "loss": 1.0115, + "step": 3743 + }, + { + "epoch": 0.5152766308835673, + "grad_norm": 1.6369464660271422, + "learning_rate": 9.991085152561119e-06, + "loss": 1.0148, + "step": 3744 + }, + { + "epoch": 0.5154142581888247, + "grad_norm": 1.624171201480478, + "learning_rate": 9.986627731055752e-06, + "loss": 0.9912, + "step": 3745 + }, + { + "epoch": 0.515551885494082, + "grad_norm": 1.9238153794159611, + "learning_rate": 9.98217031220727e-06, + "loss": 1.0713, + "step": 3746 + }, + { + "epoch": 0.5156895127993394, + "grad_norm": 1.72421206296874, + "learning_rate": 9.977712896901303e-06, + "loss": 1.0526, + "step": 3747 + }, + { + "epoch": 0.5158271401045967, + "grad_norm": 1.8604156533553424, + "learning_rate": 9.973255486023477e-06, + "loss": 1.1955, + "step": 3748 + }, + { + "epoch": 0.5159647674098541, + "grad_norm": 1.676058822024568, + "learning_rate": 9.968798080459424e-06, + "loss": 1.0224, + "step": 3749 + }, + { + "epoch": 0.5161023947151114, + "grad_norm": 1.6284108733913727, + "learning_rate": 9.96434068109476e-06, + "loss": 1.0415, + "step": 3750 + }, + { + "epoch": 0.5162400220203689, + "grad_norm": 1.89726297905937, + "learning_rate": 9.959883288815119e-06, + "loss": 1.015, + "step": 3751 + }, + { + "epoch": 0.5163776493256262, + "grad_norm": 1.7503170137033843, + "learning_rate": 9.955425904506114e-06, + "loss": 1.103, + "step": 3752 + }, + { + "epoch": 0.5165152766308836, + "grad_norm": 1.8725881109303228, + "learning_rate": 9.95096852905338e-06, + "loss": 1.005, + "step": 3753 + }, + { + "epoch": 0.5166529039361409, + "grad_norm": 1.6992899913800072, + "learning_rate": 9.94651116334252e-06, + "loss": 1.0641, + "step": 3754 + }, + { + "epoch": 0.5167905312413983, + "grad_norm": 1.582651809741018, + "learning_rate": 9.942053808259163e-06, + "loss": 1.0406, + "step": 3755 + }, + { + "epoch": 0.5169281585466556, + "grad_norm": 2.1302244688840317, + "learning_rate": 9.93759646468892e-06, + "loss": 1.0427, + "step": 3756 + }, + { + "epoch": 0.517065785851913, + "grad_norm": 1.6577359388648831, + "learning_rate": 9.933139133517409e-06, + "loss": 0.9762, + "step": 3757 + }, + { + "epoch": 0.5172034131571703, + "grad_norm": 1.796510940150195, + "learning_rate": 9.92868181563023e-06, + "loss": 1.0503, + "step": 3758 + }, + { + "epoch": 0.5173410404624278, + "grad_norm": 1.6269280324764142, + "learning_rate": 9.924224511913006e-06, + "loss": 1.0682, + "step": 3759 + }, + { + "epoch": 0.5174786677676851, + "grad_norm": 2.0315694171854175, + "learning_rate": 9.919767223251333e-06, + "loss": 1.1341, + "step": 3760 + }, + { + "epoch": 0.5176162950729425, + "grad_norm": 1.7248910405210414, + "learning_rate": 9.91530995053081e-06, + "loss": 1.0222, + "step": 3761 + }, + { + "epoch": 0.5177539223781998, + "grad_norm": 1.5296155761976447, + "learning_rate": 9.910852694637043e-06, + "loss": 1.0625, + "step": 3762 + }, + { + "epoch": 0.5178915496834572, + "grad_norm": 1.6801950441859106, + "learning_rate": 9.906395456455622e-06, + "loss": 1.0309, + "step": 3763 + }, + { + "epoch": 0.5180291769887145, + "grad_norm": 1.767967122473914, + "learning_rate": 9.901938236872149e-06, + "loss": 1.0404, + "step": 3764 + }, + { + "epoch": 0.5181668042939719, + "grad_norm": 1.9789534892954859, + 
"learning_rate": 9.8974810367722e-06, + "loss": 1.0079, + "step": 3765 + }, + { + "epoch": 0.5183044315992292, + "grad_norm": 1.6968858918967822, + "learning_rate": 9.89302385704137e-06, + "loss": 1.0021, + "step": 3766 + }, + { + "epoch": 0.5184420589044867, + "grad_norm": 1.5794358422814134, + "learning_rate": 9.88856669856523e-06, + "loss": 0.989, + "step": 3767 + }, + { + "epoch": 0.518579686209744, + "grad_norm": 1.806437091633027, + "learning_rate": 9.884109562229368e-06, + "loss": 1.0371, + "step": 3768 + }, + { + "epoch": 0.5187173135150014, + "grad_norm": 1.7189994892870109, + "learning_rate": 9.879652448919344e-06, + "loss": 1.0357, + "step": 3769 + }, + { + "epoch": 0.5188549408202587, + "grad_norm": 1.8139837047636342, + "learning_rate": 9.875195359520734e-06, + "loss": 1.0272, + "step": 3770 + }, + { + "epoch": 0.5189925681255161, + "grad_norm": 1.8320132643809295, + "learning_rate": 9.870738294919096e-06, + "loss": 1.0207, + "step": 3771 + }, + { + "epoch": 0.5191301954307734, + "grad_norm": 1.886779616600022, + "learning_rate": 9.866281255999993e-06, + "loss": 0.9705, + "step": 3772 + }, + { + "epoch": 0.5192678227360308, + "grad_norm": 1.6853487947782546, + "learning_rate": 9.861824243648968e-06, + "loss": 1.0485, + "step": 3773 + }, + { + "epoch": 0.5194054500412882, + "grad_norm": 1.6942470178959521, + "learning_rate": 9.857367258751582e-06, + "loss": 1.0498, + "step": 3774 + }, + { + "epoch": 0.5195430773465456, + "grad_norm": 1.8883628942388777, + "learning_rate": 9.852910302193367e-06, + "loss": 1.0257, + "step": 3775 + }, + { + "epoch": 0.5196807046518029, + "grad_norm": 1.698204080891631, + "learning_rate": 9.848453374859857e-06, + "loss": 1.1391, + "step": 3776 + }, + { + "epoch": 0.5198183319570603, + "grad_norm": 1.780949060296349, + "learning_rate": 9.84399647763659e-06, + "loss": 0.9374, + "step": 3777 + }, + { + "epoch": 0.5199559592623176, + "grad_norm": 1.7159764787602076, + "learning_rate": 9.839539611409086e-06, + "loss": 1.0077, + "step": 3778 + }, + { + "epoch": 0.520093586567575, + "grad_norm": 1.7752053989592338, + "learning_rate": 9.835082777062868e-06, + "loss": 1.1353, + "step": 3779 + }, + { + "epoch": 0.5202312138728323, + "grad_norm": 1.679182268597926, + "learning_rate": 9.83062597548344e-06, + "loss": 1.0714, + "step": 3780 + }, + { + "epoch": 0.5203688411780897, + "grad_norm": 1.6947975609275099, + "learning_rate": 9.826169207556318e-06, + "loss": 1.0394, + "step": 3781 + }, + { + "epoch": 0.5205064684833471, + "grad_norm": 1.6759159654407623, + "learning_rate": 9.82171247416699e-06, + "loss": 0.9814, + "step": 3782 + }, + { + "epoch": 0.5206440957886045, + "grad_norm": 1.788822794581056, + "learning_rate": 9.817255776200957e-06, + "loss": 1.0709, + "step": 3783 + }, + { + "epoch": 0.5207817230938618, + "grad_norm": 1.746786872320396, + "learning_rate": 9.812799114543695e-06, + "loss": 1.0359, + "step": 3784 + }, + { + "epoch": 0.5209193503991192, + "grad_norm": 1.666359513302726, + "learning_rate": 9.808342490080689e-06, + "loss": 1.1051, + "step": 3785 + }, + { + "epoch": 0.5210569777043765, + "grad_norm": 1.9743488023445492, + "learning_rate": 9.803885903697407e-06, + "loss": 0.9999, + "step": 3786 + }, + { + "epoch": 0.5211946050096339, + "grad_norm": 1.4362515492111947, + "learning_rate": 9.79942935627931e-06, + "loss": 1.0175, + "step": 3787 + }, + { + "epoch": 0.5213322323148912, + "grad_norm": 1.8622280027250133, + "learning_rate": 9.794972848711855e-06, + "loss": 0.9889, + "step": 3788 + }, + { + "epoch": 0.5214698596201487, + 
"grad_norm": 1.880202556161118, + "learning_rate": 9.79051638188049e-06, + "loss": 1.0473, + "step": 3789 + }, + { + "epoch": 0.521607486925406, + "grad_norm": 1.935376240850844, + "learning_rate": 9.78605995667065e-06, + "loss": 1.0181, + "step": 3790 + }, + { + "epoch": 0.5217451142306634, + "grad_norm": 1.6872372205994932, + "learning_rate": 9.781603573967771e-06, + "loss": 1.011, + "step": 3791 + }, + { + "epoch": 0.5218827415359207, + "grad_norm": 1.5869172939716962, + "learning_rate": 9.777147234657271e-06, + "loss": 1.0231, + "step": 3792 + }, + { + "epoch": 0.5220203688411781, + "grad_norm": 1.8067251515147564, + "learning_rate": 9.772690939624563e-06, + "loss": 1.0737, + "step": 3793 + }, + { + "epoch": 0.5221579961464354, + "grad_norm": 1.6464598640446624, + "learning_rate": 9.768234689755057e-06, + "loss": 0.993, + "step": 3794 + }, + { + "epoch": 0.5222956234516928, + "grad_norm": 1.7486532209739112, + "learning_rate": 9.763778485934143e-06, + "loss": 1.0085, + "step": 3795 + }, + { + "epoch": 0.5224332507569501, + "grad_norm": 1.8556316497350904, + "learning_rate": 9.759322329047214e-06, + "loss": 1.0327, + "step": 3796 + }, + { + "epoch": 0.5225708780622076, + "grad_norm": 1.6525516351671823, + "learning_rate": 9.75486621997964e-06, + "loss": 1.0926, + "step": 3797 + }, + { + "epoch": 0.5227085053674649, + "grad_norm": 1.6439850856264127, + "learning_rate": 9.750410159616794e-06, + "loss": 1.1132, + "step": 3798 + }, + { + "epoch": 0.5228461326727223, + "grad_norm": 1.5584784145356112, + "learning_rate": 9.745954148844032e-06, + "loss": 1.0024, + "step": 3799 + }, + { + "epoch": 0.5229837599779796, + "grad_norm": 1.8288025718733074, + "learning_rate": 9.741498188546703e-06, + "loss": 0.9761, + "step": 3800 + }, + { + "epoch": 0.523121387283237, + "grad_norm": 1.5838380640509493, + "learning_rate": 9.737042279610146e-06, + "loss": 1.0688, + "step": 3801 + }, + { + "epoch": 0.5232590145884943, + "grad_norm": 1.6152130732351877, + "learning_rate": 9.732586422919689e-06, + "loss": 1.0151, + "step": 3802 + }, + { + "epoch": 0.5233966418937517, + "grad_norm": 1.5112628263593764, + "learning_rate": 9.728130619360647e-06, + "loss": 0.9623, + "step": 3803 + }, + { + "epoch": 0.523534269199009, + "grad_norm": 1.732071231659601, + "learning_rate": 9.723674869818332e-06, + "loss": 0.9979, + "step": 3804 + }, + { + "epoch": 0.5236718965042665, + "grad_norm": 1.6913098661366945, + "learning_rate": 9.719219175178037e-06, + "loss": 0.994, + "step": 3805 + }, + { + "epoch": 0.5238095238095238, + "grad_norm": 1.7326210408760323, + "learning_rate": 9.71476353632505e-06, + "loss": 1.0873, + "step": 3806 + }, + { + "epoch": 0.5239471511147812, + "grad_norm": 1.6004285950649455, + "learning_rate": 9.710307954144643e-06, + "loss": 1.0545, + "step": 3807 + }, + { + "epoch": 0.5240847784200385, + "grad_norm": 1.6560546504490241, + "learning_rate": 9.70585242952208e-06, + "loss": 1.0376, + "step": 3808 + }, + { + "epoch": 0.5242224057252959, + "grad_norm": 1.7193129761288588, + "learning_rate": 9.701396963342611e-06, + "loss": 1.0495, + "step": 3809 + }, + { + "epoch": 0.5243600330305532, + "grad_norm": 1.667947819133195, + "learning_rate": 9.696941556491477e-06, + "loss": 1.0057, + "step": 3810 + }, + { + "epoch": 0.5244976603358106, + "grad_norm": 1.90895465679153, + "learning_rate": 9.692486209853911e-06, + "loss": 1.1257, + "step": 3811 + }, + { + "epoch": 0.524635287641068, + "grad_norm": 1.6730046156745004, + "learning_rate": 9.688030924315124e-06, + "loss": 1.0954, + "step": 3812 + }, + { + 
"epoch": 0.5247729149463254, + "grad_norm": 1.8130463515578326, + "learning_rate": 9.683575700760323e-06, + "loss": 1.0986, + "step": 3813 + }, + { + "epoch": 0.5249105422515827, + "grad_norm": 1.541011123419202, + "learning_rate": 9.679120540074697e-06, + "loss": 0.8903, + "step": 3814 + }, + { + "epoch": 0.5250481695568401, + "grad_norm": 1.7049055466375391, + "learning_rate": 9.67466544314343e-06, + "loss": 1.0217, + "step": 3815 + }, + { + "epoch": 0.5251857968620974, + "grad_norm": 1.6971724643081114, + "learning_rate": 9.670210410851684e-06, + "loss": 1.0272, + "step": 3816 + }, + { + "epoch": 0.5253234241673548, + "grad_norm": 1.8837959227824357, + "learning_rate": 9.665755444084614e-06, + "loss": 1.0624, + "step": 3817 + }, + { + "epoch": 0.5254610514726121, + "grad_norm": 1.7845727136599527, + "learning_rate": 9.661300543727363e-06, + "loss": 1.0018, + "step": 3818 + }, + { + "epoch": 0.5255986787778695, + "grad_norm": 1.7056200203654783, + "learning_rate": 9.656845710665061e-06, + "loss": 1.0259, + "step": 3819 + }, + { + "epoch": 0.5257363060831269, + "grad_norm": 1.7276654122842436, + "learning_rate": 9.652390945782813e-06, + "loss": 1.0123, + "step": 3820 + }, + { + "epoch": 0.5258739333883843, + "grad_norm": 1.4996999017539958, + "learning_rate": 9.647936249965731e-06, + "loss": 1.0215, + "step": 3821 + }, + { + "epoch": 0.5260115606936416, + "grad_norm": 1.6252114665201842, + "learning_rate": 9.643481624098893e-06, + "loss": 1.0444, + "step": 3822 + }, + { + "epoch": 0.526149187998899, + "grad_norm": 1.8939508955753133, + "learning_rate": 9.63902706906738e-06, + "loss": 1.1951, + "step": 3823 + }, + { + "epoch": 0.5262868153041563, + "grad_norm": 1.6392983815275937, + "learning_rate": 9.634572585756247e-06, + "loss": 1.048, + "step": 3824 + }, + { + "epoch": 0.5264244426094137, + "grad_norm": 1.837708054304148, + "learning_rate": 9.630118175050535e-06, + "loss": 0.9705, + "step": 3825 + }, + { + "epoch": 0.526562069914671, + "grad_norm": 1.753698623714684, + "learning_rate": 9.625663837835283e-06, + "loss": 1.0801, + "step": 3826 + }, + { + "epoch": 0.5266996972199285, + "grad_norm": 1.6337344831551184, + "learning_rate": 9.621209574995497e-06, + "loss": 1.0135, + "step": 3827 + }, + { + "epoch": 0.5268373245251858, + "grad_norm": 1.5601032232157375, + "learning_rate": 9.61675538741619e-06, + "loss": 1.0361, + "step": 3828 + }, + { + "epoch": 0.5269749518304432, + "grad_norm": 1.8366964942383963, + "learning_rate": 9.612301275982335e-06, + "loss": 1.0835, + "step": 3829 + }, + { + "epoch": 0.5271125791357005, + "grad_norm": 2.1299054437352525, + "learning_rate": 9.607847241578914e-06, + "loss": 1.0124, + "step": 3830 + }, + { + "epoch": 0.5272502064409579, + "grad_norm": 1.7093939932872375, + "learning_rate": 9.603393285090874e-06, + "loss": 1.0807, + "step": 3831 + }, + { + "epoch": 0.5273878337462152, + "grad_norm": 1.6674394246610071, + "learning_rate": 9.598939407403163e-06, + "loss": 1.0061, + "step": 3832 + }, + { + "epoch": 0.5275254610514726, + "grad_norm": 1.5982903143499696, + "learning_rate": 9.594485609400698e-06, + "loss": 0.9418, + "step": 3833 + }, + { + "epoch": 0.5276630883567299, + "grad_norm": 1.8152079823054406, + "learning_rate": 9.590031891968397e-06, + "loss": 1.0061, + "step": 3834 + }, + { + "epoch": 0.5278007156619874, + "grad_norm": 1.6266171384650536, + "learning_rate": 9.585578255991143e-06, + "loss": 1.0738, + "step": 3835 + }, + { + "epoch": 0.5279383429672447, + "grad_norm": 1.9092604004182847, + "learning_rate": 9.58112470235382e-06, + 
"loss": 0.9816, + "step": 3836 + }, + { + "epoch": 0.5280759702725021, + "grad_norm": 2.046959008468833, + "learning_rate": 9.57667123194128e-06, + "loss": 1.1644, + "step": 3837 + }, + { + "epoch": 0.5282135975777594, + "grad_norm": 1.697050879017924, + "learning_rate": 9.572217845638378e-06, + "loss": 1.0657, + "step": 3838 + }, + { + "epoch": 0.5283512248830168, + "grad_norm": 1.482869175498403, + "learning_rate": 9.56776454432993e-06, + "loss": 0.9644, + "step": 3839 + }, + { + "epoch": 0.5284888521882741, + "grad_norm": 1.65618306977903, + "learning_rate": 9.56331132890075e-06, + "loss": 1.0686, + "step": 3840 + }, + { + "epoch": 0.5286264794935315, + "grad_norm": 1.7472309716485506, + "learning_rate": 9.558858200235635e-06, + "loss": 1.0017, + "step": 3841 + }, + { + "epoch": 0.5287641067987888, + "grad_norm": 1.7006867970345885, + "learning_rate": 9.554405159219353e-06, + "loss": 1.0906, + "step": 3842 + }, + { + "epoch": 0.5289017341040463, + "grad_norm": 1.8403493687227561, + "learning_rate": 9.549952206736668e-06, + "loss": 1.1752, + "step": 3843 + }, + { + "epoch": 0.5290393614093036, + "grad_norm": 1.7239930064535378, + "learning_rate": 9.545499343672318e-06, + "loss": 1.0315, + "step": 3844 + }, + { + "epoch": 0.529176988714561, + "grad_norm": 1.9005594730355417, + "learning_rate": 9.54104657091103e-06, + "loss": 1.0045, + "step": 3845 + }, + { + "epoch": 0.5293146160198183, + "grad_norm": 1.5552004223658424, + "learning_rate": 9.536593889337502e-06, + "loss": 1.0153, + "step": 3846 + }, + { + "epoch": 0.5294522433250757, + "grad_norm": 1.89501673805388, + "learning_rate": 9.532141299836426e-06, + "loss": 1.1348, + "step": 3847 + }, + { + "epoch": 0.529589870630333, + "grad_norm": 1.7934443481546254, + "learning_rate": 9.527688803292472e-06, + "loss": 1.0594, + "step": 3848 + }, + { + "epoch": 0.5297274979355904, + "grad_norm": 1.65381651631811, + "learning_rate": 9.523236400590284e-06, + "loss": 1.1535, + "step": 3849 + }, + { + "epoch": 0.5298651252408478, + "grad_norm": 1.7849886985874959, + "learning_rate": 9.518784092614497e-06, + "loss": 0.9903, + "step": 3850 + }, + { + "epoch": 0.5300027525461052, + "grad_norm": 1.7434102318373084, + "learning_rate": 9.514331880249728e-06, + "loss": 0.9623, + "step": 3851 + }, + { + "epoch": 0.5301403798513625, + "grad_norm": 1.7120951184504127, + "learning_rate": 9.509879764380563e-06, + "loss": 0.979, + "step": 3852 + }, + { + "epoch": 0.5302780071566199, + "grad_norm": 1.6255376843054818, + "learning_rate": 9.505427745891582e-06, + "loss": 1.0198, + "step": 3853 + }, + { + "epoch": 0.5304156344618772, + "grad_norm": 1.7738051929558583, + "learning_rate": 9.500975825667333e-06, + "loss": 1.0102, + "step": 3854 + }, + { + "epoch": 0.5305532617671346, + "grad_norm": 1.605912422522909, + "learning_rate": 9.496524004592364e-06, + "loss": 0.9952, + "step": 3855 + }, + { + "epoch": 0.5306908890723919, + "grad_norm": 1.638841243339728, + "learning_rate": 9.49207228355118e-06, + "loss": 0.9361, + "step": 3856 + }, + { + "epoch": 0.5308285163776493, + "grad_norm": 1.6066175502071258, + "learning_rate": 9.487620663428276e-06, + "loss": 1.0568, + "step": 3857 + }, + { + "epoch": 0.5309661436829067, + "grad_norm": 1.6546692713402367, + "learning_rate": 9.48316914510814e-06, + "loss": 1.0065, + "step": 3858 + }, + { + "epoch": 0.5311037709881641, + "grad_norm": 1.5744529875649458, + "learning_rate": 9.478717729475214e-06, + "loss": 1.0474, + "step": 3859 + }, + { + "epoch": 0.5312413982934214, + "grad_norm": 1.7758170501412414, + "learning_rate": 
9.474266417413944e-06, + "loss": 1.0316, + "step": 3860 + }, + { + "epoch": 0.5313790255986788, + "grad_norm": 1.6729868841164004, + "learning_rate": 9.469815209808738e-06, + "loss": 1.0092, + "step": 3861 + }, + { + "epoch": 0.5315166529039361, + "grad_norm": 1.5665269084764804, + "learning_rate": 9.465364107543991e-06, + "loss": 1.023, + "step": 3862 + }, + { + "epoch": 0.5316542802091935, + "grad_norm": 1.6297225665815323, + "learning_rate": 9.46091311150408e-06, + "loss": 1.0334, + "step": 3863 + }, + { + "epoch": 0.5317919075144508, + "grad_norm": 1.624633544965137, + "learning_rate": 9.456462222573354e-06, + "loss": 1.0832, + "step": 3864 + }, + { + "epoch": 0.5319295348197083, + "grad_norm": 1.5305370778043634, + "learning_rate": 9.452011441636142e-06, + "loss": 0.9794, + "step": 3865 + }, + { + "epoch": 0.5320671621249656, + "grad_norm": 1.699367530579064, + "learning_rate": 9.44756076957676e-06, + "loss": 1.0278, + "step": 3866 + }, + { + "epoch": 0.532204789430223, + "grad_norm": 1.5978923902682878, + "learning_rate": 9.443110207279486e-06, + "loss": 1.051, + "step": 3867 + }, + { + "epoch": 0.5323424167354803, + "grad_norm": 1.6452691714047998, + "learning_rate": 9.438659755628595e-06, + "loss": 1.0462, + "step": 3868 + }, + { + "epoch": 0.5324800440407377, + "grad_norm": 1.6006061765582986, + "learning_rate": 9.434209415508322e-06, + "loss": 0.9676, + "step": 3869 + }, + { + "epoch": 0.532617671345995, + "grad_norm": 1.6581083042037867, + "learning_rate": 9.4297591878029e-06, + "loss": 1.0157, + "step": 3870 + }, + { + "epoch": 0.5327552986512524, + "grad_norm": 1.6416976965233043, + "learning_rate": 9.425309073396519e-06, + "loss": 1.0774, + "step": 3871 + }, + { + "epoch": 0.5328929259565097, + "grad_norm": 1.794811193612008, + "learning_rate": 9.420859073173355e-06, + "loss": 0.9986, + "step": 3872 + }, + { + "epoch": 0.5330305532617672, + "grad_norm": 1.7253972789669378, + "learning_rate": 9.416409188017568e-06, + "loss": 1.0253, + "step": 3873 + }, + { + "epoch": 0.5331681805670245, + "grad_norm": 1.5708128159296446, + "learning_rate": 9.411959418813286e-06, + "loss": 1.0938, + "step": 3874 + }, + { + "epoch": 0.5333058078722819, + "grad_norm": 2.0736462514555547, + "learning_rate": 9.40750976644462e-06, + "loss": 0.9924, + "step": 3875 + }, + { + "epoch": 0.5334434351775392, + "grad_norm": 1.6615070298183594, + "learning_rate": 9.40306023179565e-06, + "loss": 1.0301, + "step": 3876 + }, + { + "epoch": 0.5335810624827966, + "grad_norm": 1.8777588434328962, + "learning_rate": 9.398610815750446e-06, + "loss": 0.9795, + "step": 3877 + }, + { + "epoch": 0.5337186897880539, + "grad_norm": 1.5007003198464026, + "learning_rate": 9.394161519193035e-06, + "loss": 0.9139, + "step": 3878 + }, + { + "epoch": 0.5338563170933113, + "grad_norm": 1.7134556342587728, + "learning_rate": 9.389712343007439e-06, + "loss": 1.0428, + "step": 3879 + }, + { + "epoch": 0.5339939443985686, + "grad_norm": 1.7271494353068595, + "learning_rate": 9.385263288077645e-06, + "loss": 0.9963, + "step": 3880 + }, + { + "epoch": 0.5341315717038261, + "grad_norm": 1.8380041169482233, + "learning_rate": 9.380814355287626e-06, + "loss": 0.968, + "step": 3881 + }, + { + "epoch": 0.5342691990090834, + "grad_norm": 1.5918904838607604, + "learning_rate": 9.376365545521314e-06, + "loss": 1.0005, + "step": 3882 + }, + { + "epoch": 0.5344068263143408, + "grad_norm": 1.7785911580799227, + "learning_rate": 9.371916859662635e-06, + "loss": 1.0918, + "step": 3883 + }, + { + "epoch": 0.5345444536195981, + "grad_norm": 
1.711909601874512, + "learning_rate": 9.367468298595474e-06, + "loss": 1.1005, + "step": 3884 + }, + { + "epoch": 0.5346820809248555, + "grad_norm": 1.6937343320985883, + "learning_rate": 9.36301986320371e-06, + "loss": 0.9808, + "step": 3885 + }, + { + "epoch": 0.5348197082301128, + "grad_norm": 1.5833376851184493, + "learning_rate": 9.358571554371174e-06, + "loss": 0.8992, + "step": 3886 + }, + { + "epoch": 0.5349573355353702, + "grad_norm": 1.7887014569868183, + "learning_rate": 9.354123372981692e-06, + "loss": 0.9902, + "step": 3887 + }, + { + "epoch": 0.5350949628406276, + "grad_norm": 2.0857967285007284, + "learning_rate": 9.349675319919057e-06, + "loss": 0.9584, + "step": 3888 + }, + { + "epoch": 0.535232590145885, + "grad_norm": 1.9131669645790712, + "learning_rate": 9.34522739606703e-06, + "loss": 1.1007, + "step": 3889 + }, + { + "epoch": 0.5353702174511423, + "grad_norm": 1.8640943455233843, + "learning_rate": 9.34077960230936e-06, + "loss": 1.0409, + "step": 3890 + }, + { + "epoch": 0.5355078447563997, + "grad_norm": 1.6482922248771166, + "learning_rate": 9.336331939529755e-06, + "loss": 1.0128, + "step": 3891 + }, + { + "epoch": 0.535645472061657, + "grad_norm": 1.6118289315008025, + "learning_rate": 9.331884408611915e-06, + "loss": 1.0664, + "step": 3892 + }, + { + "epoch": 0.5357830993669144, + "grad_norm": 1.8266556287769047, + "learning_rate": 9.327437010439492e-06, + "loss": 0.9581, + "step": 3893 + }, + { + "epoch": 0.5359207266721717, + "grad_norm": 1.5437768551855613, + "learning_rate": 9.322989745896131e-06, + "loss": 1.0562, + "step": 3894 + }, + { + "epoch": 0.5360583539774291, + "grad_norm": 1.529280484862347, + "learning_rate": 9.318542615865437e-06, + "loss": 0.8906, + "step": 3895 + }, + { + "epoch": 0.5361959812826865, + "grad_norm": 1.4687591709202528, + "learning_rate": 9.314095621231003e-06, + "loss": 0.9726, + "step": 3896 + }, + { + "epoch": 0.5363336085879439, + "grad_norm": 1.9214992882949178, + "learning_rate": 9.309648762876376e-06, + "loss": 1.0279, + "step": 3897 + }, + { + "epoch": 0.5364712358932012, + "grad_norm": 1.7351924040997424, + "learning_rate": 9.305202041685092e-06, + "loss": 0.937, + "step": 3898 + }, + { + "epoch": 0.5366088631984586, + "grad_norm": 1.7687317033171404, + "learning_rate": 9.300755458540648e-06, + "loss": 0.9939, + "step": 3899 + }, + { + "epoch": 0.5367464905037159, + "grad_norm": 1.5872159660138214, + "learning_rate": 9.29630901432653e-06, + "loss": 0.997, + "step": 3900 + }, + { + "epoch": 0.5368841178089733, + "grad_norm": 1.7404538361882436, + "learning_rate": 9.29186270992617e-06, + "loss": 1.0434, + "step": 3901 + }, + { + "epoch": 0.5370217451142306, + "grad_norm": 1.637431221175942, + "learning_rate": 9.287416546223e-06, + "loss": 1.1033, + "step": 3902 + }, + { + "epoch": 0.5371593724194881, + "grad_norm": 1.6419849416190846, + "learning_rate": 9.282970524100412e-06, + "loss": 1.08, + "step": 3903 + }, + { + "epoch": 0.5372969997247454, + "grad_norm": 1.7016117074662158, + "learning_rate": 9.278524644441758e-06, + "loss": 1.0184, + "step": 3904 + }, + { + "epoch": 0.5374346270300028, + "grad_norm": 1.6932237338058265, + "learning_rate": 9.274078908130389e-06, + "loss": 1.0189, + "step": 3905 + }, + { + "epoch": 0.5375722543352601, + "grad_norm": 1.5523720946995796, + "learning_rate": 9.269633316049597e-06, + "loss": 1.0553, + "step": 3906 + }, + { + "epoch": 0.5377098816405175, + "grad_norm": 1.6741630595932375, + "learning_rate": 9.265187869082674e-06, + "loss": 1.0548, + "step": 3907 + }, + { + "epoch": 
0.5378475089457748, + "grad_norm": 1.5354530327789282, + "learning_rate": 9.260742568112857e-06, + "loss": 1.0171, + "step": 3908 + }, + { + "epoch": 0.5379851362510322, + "grad_norm": 1.775194832420557, + "learning_rate": 9.256297414023376e-06, + "loss": 0.9707, + "step": 3909 + }, + { + "epoch": 0.5381227635562895, + "grad_norm": 1.5134466892664977, + "learning_rate": 9.251852407697417e-06, + "loss": 1.0266, + "step": 3910 + }, + { + "epoch": 0.538260390861547, + "grad_norm": 1.797947051439264, + "learning_rate": 9.247407550018145e-06, + "loss": 1.0801, + "step": 3911 + }, + { + "epoch": 0.5383980181668043, + "grad_norm": 1.8512508465850501, + "learning_rate": 9.242962841868688e-06, + "loss": 1.1125, + "step": 3912 + }, + { + "epoch": 0.5385356454720617, + "grad_norm": 1.6270419428118281, + "learning_rate": 9.238518284132156e-06, + "loss": 1.0266, + "step": 3913 + }, + { + "epoch": 0.538673272777319, + "grad_norm": 2.026777366120372, + "learning_rate": 9.234073877691613e-06, + "loss": 1.1091, + "step": 3914 + }, + { + "epoch": 0.5388109000825764, + "grad_norm": 1.6851614827533623, + "learning_rate": 9.229629623430112e-06, + "loss": 1.0031, + "step": 3915 + }, + { + "epoch": 0.5389485273878337, + "grad_norm": 1.6286985112139007, + "learning_rate": 9.225185522230654e-06, + "loss": 1.0377, + "step": 3916 + }, + { + "epoch": 0.5390861546930911, + "grad_norm": 1.827713262636613, + "learning_rate": 9.220741574976229e-06, + "loss": 1.0928, + "step": 3917 + }, + { + "epoch": 0.5392237819983484, + "grad_norm": 1.8207472933077034, + "learning_rate": 9.216297782549784e-06, + "loss": 0.9845, + "step": 3918 + }, + { + "epoch": 0.5393614093036059, + "grad_norm": 1.8617041462397839, + "learning_rate": 9.211854145834243e-06, + "loss": 1.1651, + "step": 3919 + }, + { + "epoch": 0.5394990366088632, + "grad_norm": 1.8468211678927453, + "learning_rate": 9.207410665712499e-06, + "loss": 0.9784, + "step": 3920 + }, + { + "epoch": 0.5396366639141206, + "grad_norm": 1.542069759394412, + "learning_rate": 9.2029673430674e-06, + "loss": 1.0396, + "step": 3921 + }, + { + "epoch": 0.5397742912193779, + "grad_norm": 1.4505777641325768, + "learning_rate": 9.198524178781785e-06, + "loss": 0.9671, + "step": 3922 + }, + { + "epoch": 0.5399119185246353, + "grad_norm": 1.6050589474619645, + "learning_rate": 9.19408117373844e-06, + "loss": 0.9645, + "step": 3923 + }, + { + "epoch": 0.5400495458298926, + "grad_norm": 1.8800979240650662, + "learning_rate": 9.189638328820138e-06, + "loss": 1.0226, + "step": 3924 + }, + { + "epoch": 0.54018717313515, + "grad_norm": 1.8980282072915977, + "learning_rate": 9.185195644909603e-06, + "loss": 1.0913, + "step": 3925 + }, + { + "epoch": 0.5403248004404074, + "grad_norm": 1.6419855042391214, + "learning_rate": 9.180753122889542e-06, + "loss": 1.0352, + "step": 3926 + }, + { + "epoch": 0.5404624277456648, + "grad_norm": 1.8408462225792674, + "learning_rate": 9.176310763642621e-06, + "loss": 1.0152, + "step": 3927 + }, + { + "epoch": 0.5406000550509221, + "grad_norm": 1.629404883011823, + "learning_rate": 9.171868568051479e-06, + "loss": 1.0165, + "step": 3928 + }, + { + "epoch": 0.5407376823561795, + "grad_norm": 1.5762819283854779, + "learning_rate": 9.167426536998713e-06, + "loss": 1.0764, + "step": 3929 + }, + { + "epoch": 0.5408753096614368, + "grad_norm": 1.7235475583029523, + "learning_rate": 9.162984671366901e-06, + "loss": 0.9726, + "step": 3930 + }, + { + "epoch": 0.5410129369666942, + "grad_norm": 1.5201467071012251, + "learning_rate": 9.158542972038573e-06, + "loss": 1.0861, + 
"step": 3931 + }, + { + "epoch": 0.5411505642719515, + "grad_norm": 1.5836489834186152, + "learning_rate": 9.154101439896243e-06, + "loss": 1.0395, + "step": 3932 + }, + { + "epoch": 0.5412881915772089, + "grad_norm": 1.5900271413266611, + "learning_rate": 9.149660075822373e-06, + "loss": 1.0234, + "step": 3933 + }, + { + "epoch": 0.5414258188824663, + "grad_norm": 1.5888588231503058, + "learning_rate": 9.145218880699408e-06, + "loss": 0.9906, + "step": 3934 + }, + { + "epoch": 0.5415634461877237, + "grad_norm": 1.8325390179702046, + "learning_rate": 9.140777855409752e-06, + "loss": 0.9132, + "step": 3935 + }, + { + "epoch": 0.541701073492981, + "grad_norm": 1.6377214717238329, + "learning_rate": 9.136337000835773e-06, + "loss": 1.0307, + "step": 3936 + }, + { + "epoch": 0.5418387007982384, + "grad_norm": 1.607856121647772, + "learning_rate": 9.131896317859812e-06, + "loss": 0.9973, + "step": 3937 + }, + { + "epoch": 0.5419763281034957, + "grad_norm": 1.9817131767073952, + "learning_rate": 9.127455807364165e-06, + "loss": 1.0445, + "step": 3938 + }, + { + "epoch": 0.5421139554087531, + "grad_norm": 1.5868201020121806, + "learning_rate": 9.123015470231111e-06, + "loss": 1.0087, + "step": 3939 + }, + { + "epoch": 0.5422515827140104, + "grad_norm": 1.7277173434537934, + "learning_rate": 9.118575307342874e-06, + "loss": 1.0655, + "step": 3940 + }, + { + "epoch": 0.5423892100192679, + "grad_norm": 2.0310904777546903, + "learning_rate": 9.114135319581659e-06, + "loss": 1.0148, + "step": 3941 + }, + { + "epoch": 0.5425268373245252, + "grad_norm": 1.9426031707748488, + "learning_rate": 9.109695507829628e-06, + "loss": 1.1568, + "step": 3942 + }, + { + "epoch": 0.5426644646297826, + "grad_norm": 1.8031406907537484, + "learning_rate": 9.105255872968917e-06, + "loss": 0.9951, + "step": 3943 + }, + { + "epoch": 0.5428020919350399, + "grad_norm": 1.6136701055228408, + "learning_rate": 9.100816415881613e-06, + "loss": 1.0321, + "step": 3944 + }, + { + "epoch": 0.5429397192402973, + "grad_norm": 1.6153210574067458, + "learning_rate": 9.096377137449783e-06, + "loss": 1.034, + "step": 3945 + }, + { + "epoch": 0.5430773465455546, + "grad_norm": 1.6891714347457558, + "learning_rate": 9.091938038555441e-06, + "loss": 1.011, + "step": 3946 + }, + { + "epoch": 0.543214973850812, + "grad_norm": 1.6380296814974908, + "learning_rate": 9.087499120080586e-06, + "loss": 0.9859, + "step": 3947 + }, + { + "epoch": 0.5433526011560693, + "grad_norm": 1.6594119497567779, + "learning_rate": 9.083060382907161e-06, + "loss": 1.0138, + "step": 3948 + }, + { + "epoch": 0.5434902284613268, + "grad_norm": 1.6752641511851678, + "learning_rate": 9.078621827917086e-06, + "loss": 1.018, + "step": 3949 + }, + { + "epoch": 0.5436278557665841, + "grad_norm": 1.8886647851390153, + "learning_rate": 9.074183455992244e-06, + "loss": 1.0103, + "step": 3950 + }, + { + "epoch": 0.5437654830718415, + "grad_norm": 1.767866556426858, + "learning_rate": 9.069745268014476e-06, + "loss": 1.0941, + "step": 3951 + }, + { + "epoch": 0.5439031103770988, + "grad_norm": 1.906685930012221, + "learning_rate": 9.065307264865592e-06, + "loss": 0.9813, + "step": 3952 + }, + { + "epoch": 0.5440407376823562, + "grad_norm": 1.8137655451651837, + "learning_rate": 9.060869447427353e-06, + "loss": 1.0682, + "step": 3953 + }, + { + "epoch": 0.5441783649876135, + "grad_norm": 1.6938215858836734, + "learning_rate": 9.056431816581506e-06, + "loss": 1.0512, + "step": 3954 + }, + { + "epoch": 0.5443159922928709, + "grad_norm": 1.7498041504404622, + "learning_rate": 
9.051994373209738e-06, + "loss": 1.036, + "step": 3955 + }, + { + "epoch": 0.5444536195981282, + "grad_norm": 1.6251545894987196, + "learning_rate": 9.047557118193712e-06, + "loss": 1.0488, + "step": 3956 + }, + { + "epoch": 0.5445912469033857, + "grad_norm": 2.195265894990174, + "learning_rate": 9.043120052415049e-06, + "loss": 1.1594, + "step": 3957 + }, + { + "epoch": 0.544728874208643, + "grad_norm": 1.6321302368606516, + "learning_rate": 9.038683176755333e-06, + "loss": 0.9998, + "step": 3958 + }, + { + "epoch": 0.5448665015139004, + "grad_norm": 1.6100665080175312, + "learning_rate": 9.034246492096108e-06, + "loss": 1.0128, + "step": 3959 + }, + { + "epoch": 0.5450041288191577, + "grad_norm": 1.9271504717703556, + "learning_rate": 9.02980999931889e-06, + "loss": 1.0048, + "step": 3960 + }, + { + "epoch": 0.5451417561244151, + "grad_norm": 1.693137169320187, + "learning_rate": 9.02537369930514e-06, + "loss": 1.0224, + "step": 3961 + }, + { + "epoch": 0.5452793834296724, + "grad_norm": 1.6776886025620386, + "learning_rate": 9.020937592936302e-06, + "loss": 1.0745, + "step": 3962 + }, + { + "epoch": 0.5454170107349298, + "grad_norm": 2.1728619659702506, + "learning_rate": 9.016501681093755e-06, + "loss": 1.1489, + "step": 3963 + }, + { + "epoch": 0.5455546380401872, + "grad_norm": 1.5943403502270752, + "learning_rate": 9.012065964658865e-06, + "loss": 1.0116, + "step": 3964 + }, + { + "epoch": 0.5456922653454446, + "grad_norm": 1.8256872249987628, + "learning_rate": 9.007630444512944e-06, + "loss": 1.0708, + "step": 3965 + }, + { + "epoch": 0.5458298926507019, + "grad_norm": 1.7966555703844447, + "learning_rate": 9.00319512153727e-06, + "loss": 0.9358, + "step": 3966 + }, + { + "epoch": 0.5459675199559593, + "grad_norm": 1.679911444438586, + "learning_rate": 8.998759996613083e-06, + "loss": 0.999, + "step": 3967 + }, + { + "epoch": 0.5461051472612166, + "grad_norm": 1.976250180889497, + "learning_rate": 8.994325070621572e-06, + "loss": 1.0064, + "step": 3968 + }, + { + "epoch": 0.546242774566474, + "grad_norm": 1.7529014872738853, + "learning_rate": 8.98989034444391e-06, + "loss": 0.9588, + "step": 3969 + }, + { + "epoch": 0.5463804018717313, + "grad_norm": 1.6986757017523486, + "learning_rate": 8.985455818961206e-06, + "loss": 1.076, + "step": 3970 + }, + { + "epoch": 0.5465180291769887, + "grad_norm": 1.8171871229262688, + "learning_rate": 8.981021495054544e-06, + "loss": 0.9973, + "step": 3971 + }, + { + "epoch": 0.5466556564822461, + "grad_norm": 1.6591342846280948, + "learning_rate": 8.976587373604963e-06, + "loss": 1.0198, + "step": 3972 + }, + { + "epoch": 0.5467932837875035, + "grad_norm": 1.664420597783526, + "learning_rate": 8.972153455493462e-06, + "loss": 0.996, + "step": 3973 + }, + { + "epoch": 0.5469309110927608, + "grad_norm": 1.466244263978851, + "learning_rate": 8.967719741600997e-06, + "loss": 0.9687, + "step": 3974 + }, + { + "epoch": 0.5470685383980182, + "grad_norm": 1.63065074731512, + "learning_rate": 8.963286232808494e-06, + "loss": 1.1125, + "step": 3975 + }, + { + "epoch": 0.5472061657032755, + "grad_norm": 1.7816756175677586, + "learning_rate": 8.958852929996821e-06, + "loss": 1.0265, + "step": 3976 + }, + { + "epoch": 0.5473437930085329, + "grad_norm": 1.6621845010171312, + "learning_rate": 8.954419834046823e-06, + "loss": 1.0096, + "step": 3977 + }, + { + "epoch": 0.5474814203137902, + "grad_norm": 1.5516500163791924, + "learning_rate": 8.949986945839289e-06, + "loss": 1.0378, + "step": 3978 + }, + { + "epoch": 0.5476190476190477, + "grad_norm": 
1.937129623332254, + "learning_rate": 8.945554266254978e-06, + "loss": 1.0714, + "step": 3979 + }, + { + "epoch": 0.547756674924305, + "grad_norm": 1.657712255431642, + "learning_rate": 8.941121796174596e-06, + "loss": 1.0442, + "step": 3980 + }, + { + "epoch": 0.5478943022295624, + "grad_norm": 1.6511210146796935, + "learning_rate": 8.936689536478822e-06, + "loss": 0.9567, + "step": 3981 + }, + { + "epoch": 0.5480319295348197, + "grad_norm": 1.7331073572877322, + "learning_rate": 8.93225748804828e-06, + "loss": 0.9835, + "step": 3982 + }, + { + "epoch": 0.5481695568400771, + "grad_norm": 1.6032467015442413, + "learning_rate": 8.927825651763563e-06, + "loss": 0.9593, + "step": 3983 + }, + { + "epoch": 0.5483071841453344, + "grad_norm": 1.8320194597201023, + "learning_rate": 8.923394028505212e-06, + "loss": 1.0053, + "step": 3984 + }, + { + "epoch": 0.5484448114505918, + "grad_norm": 1.6095440819565592, + "learning_rate": 8.918962619153727e-06, + "loss": 1.0478, + "step": 3985 + }, + { + "epoch": 0.5485824387558491, + "grad_norm": 1.8318282956182468, + "learning_rate": 8.914531424589575e-06, + "loss": 1.0706, + "step": 3986 + }, + { + "epoch": 0.5487200660611066, + "grad_norm": 1.7532991842675543, + "learning_rate": 8.910100445693168e-06, + "loss": 1.0921, + "step": 3987 + }, + { + "epoch": 0.5488576933663639, + "grad_norm": 1.722389035071202, + "learning_rate": 8.905669683344884e-06, + "loss": 1.0027, + "step": 3988 + }, + { + "epoch": 0.5489953206716213, + "grad_norm": 1.806594161119985, + "learning_rate": 8.901239138425054e-06, + "loss": 1.0553, + "step": 3989 + }, + { + "epoch": 0.5491329479768786, + "grad_norm": 1.9463313871694514, + "learning_rate": 8.896808811813969e-06, + "loss": 1.0043, + "step": 3990 + }, + { + "epoch": 0.549270575282136, + "grad_norm": 1.644955832287456, + "learning_rate": 8.892378704391867e-06, + "loss": 1.0799, + "step": 3991 + }, + { + "epoch": 0.5494082025873933, + "grad_norm": 1.6820277595725697, + "learning_rate": 8.88794881703896e-06, + "loss": 0.9991, + "step": 3992 + }, + { + "epoch": 0.5495458298926507, + "grad_norm": 1.6806249224646335, + "learning_rate": 8.883519150635398e-06, + "loss": 1.0465, + "step": 3993 + }, + { + "epoch": 0.549683457197908, + "grad_norm": 2.075053601257837, + "learning_rate": 8.8790897060613e-06, + "loss": 1.0677, + "step": 3994 + }, + { + "epoch": 0.5498210845031655, + "grad_norm": 1.932985751040996, + "learning_rate": 8.87466048419673e-06, + "loss": 0.9824, + "step": 3995 + }, + { + "epoch": 0.5499587118084228, + "grad_norm": 1.7139076546024135, + "learning_rate": 8.870231485921721e-06, + "loss": 1.0104, + "step": 3996 + }, + { + "epoch": 0.5500963391136802, + "grad_norm": 1.6743793269743235, + "learning_rate": 8.865802712116248e-06, + "loss": 0.9583, + "step": 3997 + }, + { + "epoch": 0.5502339664189375, + "grad_norm": 1.8631323122129777, + "learning_rate": 8.861374163660255e-06, + "loss": 1.103, + "step": 3998 + }, + { + "epoch": 0.5503715937241949, + "grad_norm": 1.8053725691802114, + "learning_rate": 8.856945841433629e-06, + "loss": 1.0138, + "step": 3999 + }, + { + "epoch": 0.5505092210294522, + "grad_norm": 1.7692770233224753, + "learning_rate": 8.852517746316214e-06, + "loss": 0.904, + "step": 4000 + }, + { + "epoch": 0.5506468483347096, + "grad_norm": 2.3958745104845653, + "learning_rate": 8.84808987918782e-06, + "loss": 1.0869, + "step": 4001 + }, + { + "epoch": 0.550784475639967, + "grad_norm": 1.7069923624706607, + "learning_rate": 8.843662240928194e-06, + "loss": 0.965, + "step": 4002 + }, + { + "epoch": 
0.5509221029452244, + "grad_norm": 1.8362772562101821, + "learning_rate": 8.839234832417055e-06, + "loss": 1.1439, + "step": 4003 + }, + { + "epoch": 0.5510597302504817, + "grad_norm": 1.6376008618183546, + "learning_rate": 8.834807654534062e-06, + "loss": 1.0178, + "step": 4004 + }, + { + "epoch": 0.5511973575557391, + "grad_norm": 1.8150890728629336, + "learning_rate": 8.830380708158844e-06, + "loss": 0.9878, + "step": 4005 + }, + { + "epoch": 0.5513349848609964, + "grad_norm": 1.8397883561360153, + "learning_rate": 8.825953994170966e-06, + "loss": 1.0777, + "step": 4006 + }, + { + "epoch": 0.5514726121662538, + "grad_norm": 1.7939942560549935, + "learning_rate": 8.821527513449959e-06, + "loss": 1.0434, + "step": 4007 + }, + { + "epoch": 0.5516102394715111, + "grad_norm": 1.7230518711173215, + "learning_rate": 8.817101266875303e-06, + "loss": 1.0185, + "step": 4008 + }, + { + "epoch": 0.5517478667767685, + "grad_norm": 1.7652588136734766, + "learning_rate": 8.812675255326435e-06, + "loss": 1.0446, + "step": 4009 + }, + { + "epoch": 0.5518854940820259, + "grad_norm": 1.969856149240467, + "learning_rate": 8.808249479682736e-06, + "loss": 0.968, + "step": 4010 + }, + { + "epoch": 0.5520231213872833, + "grad_norm": 2.5667842507868297, + "learning_rate": 8.803823940823557e-06, + "loss": 0.9724, + "step": 4011 + }, + { + "epoch": 0.5521607486925406, + "grad_norm": 1.5113473973455593, + "learning_rate": 8.799398639628186e-06, + "loss": 1.0037, + "step": 4012 + }, + { + "epoch": 0.552298375997798, + "grad_norm": 1.810026846678308, + "learning_rate": 8.79497357697587e-06, + "loss": 0.8971, + "step": 4013 + }, + { + "epoch": 0.5524360033030553, + "grad_norm": 1.639963655110258, + "learning_rate": 8.790548753745808e-06, + "loss": 1.038, + "step": 4014 + }, + { + "epoch": 0.5525736306083127, + "grad_norm": 1.8695529145548873, + "learning_rate": 8.786124170817158e-06, + "loss": 1.0049, + "step": 4015 + }, + { + "epoch": 0.55271125791357, + "grad_norm": 1.746157513417196, + "learning_rate": 8.781699829069018e-06, + "loss": 0.9324, + "step": 4016 + }, + { + "epoch": 0.5528488852188275, + "grad_norm": 1.6869901825141598, + "learning_rate": 8.777275729380442e-06, + "loss": 0.977, + "step": 4017 + }, + { + "epoch": 0.5529865125240848, + "grad_norm": 1.965230850630973, + "learning_rate": 8.772851872630442e-06, + "loss": 1.0562, + "step": 4018 + }, + { + "epoch": 0.5531241398293422, + "grad_norm": 1.6411055931948964, + "learning_rate": 8.768428259697978e-06, + "loss": 1.1002, + "step": 4019 + }, + { + "epoch": 0.5532617671345995, + "grad_norm": 1.8347479565068978, + "learning_rate": 8.76400489146196e-06, + "loss": 0.9842, + "step": 4020 + }, + { + "epoch": 0.5533993944398569, + "grad_norm": 1.6047107626068835, + "learning_rate": 8.759581768801248e-06, + "loss": 1.0443, + "step": 4021 + }, + { + "epoch": 0.5535370217451142, + "grad_norm": 1.6718159477573014, + "learning_rate": 8.755158892594665e-06, + "loss": 0.9755, + "step": 4022 + }, + { + "epoch": 0.5536746490503716, + "grad_norm": 1.54774555392231, + "learning_rate": 8.750736263720966e-06, + "loss": 1.0119, + "step": 4023 + }, + { + "epoch": 0.5538122763556289, + "grad_norm": 1.7992675325515233, + "learning_rate": 8.746313883058875e-06, + "loss": 1.0391, + "step": 4024 + }, + { + "epoch": 0.5539499036608864, + "grad_norm": 1.6449476719333789, + "learning_rate": 8.741891751487049e-06, + "loss": 1.0209, + "step": 4025 + }, + { + "epoch": 0.5540875309661437, + "grad_norm": 1.847939023220919, + "learning_rate": 8.737469869884115e-06, + "loss": 1.062, + 
"step": 4026 + }, + { + "epoch": 0.5542251582714011, + "grad_norm": 1.7278692815253256, + "learning_rate": 8.733048239128632e-06, + "loss": 1.1497, + "step": 4027 + }, + { + "epoch": 0.5543627855766584, + "grad_norm": 1.723829063014673, + "learning_rate": 8.728626860099123e-06, + "loss": 1.0605, + "step": 4028 + }, + { + "epoch": 0.5545004128819158, + "grad_norm": 1.7343319783835072, + "learning_rate": 8.72420573367405e-06, + "loss": 1.0697, + "step": 4029 + }, + { + "epoch": 0.5546380401871731, + "grad_norm": 1.6170309632616842, + "learning_rate": 8.71978486073184e-06, + "loss": 0.9294, + "step": 4030 + }, + { + "epoch": 0.5547756674924305, + "grad_norm": 1.677417727288273, + "learning_rate": 8.715364242150854e-06, + "loss": 0.9211, + "step": 4031 + }, + { + "epoch": 0.5549132947976878, + "grad_norm": 1.648150433954116, + "learning_rate": 8.710943878809404e-06, + "loss": 0.9992, + "step": 4032 + }, + { + "epoch": 0.5550509221029453, + "grad_norm": 1.8157585018859017, + "learning_rate": 8.706523771585763e-06, + "loss": 0.9549, + "step": 4033 + }, + { + "epoch": 0.5551885494082026, + "grad_norm": 1.5317489786835896, + "learning_rate": 8.702103921358141e-06, + "loss": 0.9875, + "step": 4034 + }, + { + "epoch": 0.55532617671346, + "grad_norm": 1.8266023298073102, + "learning_rate": 8.697684329004703e-06, + "loss": 1.0267, + "step": 4035 + }, + { + "epoch": 0.5554638040187173, + "grad_norm": 1.6517954445642442, + "learning_rate": 8.693264995403562e-06, + "loss": 1.0624, + "step": 4036 + }, + { + "epoch": 0.5556014313239747, + "grad_norm": 1.6643250004330805, + "learning_rate": 8.688845921432783e-06, + "loss": 1.0087, + "step": 4037 + }, + { + "epoch": 0.555739058629232, + "grad_norm": 1.82944166980534, + "learning_rate": 8.684427107970366e-06, + "loss": 1.0424, + "step": 4038 + }, + { + "epoch": 0.5558766859344894, + "grad_norm": 1.6016502000775719, + "learning_rate": 8.680008555894281e-06, + "loss": 0.9601, + "step": 4039 + }, + { + "epoch": 0.5560143132397468, + "grad_norm": 1.8162718506778595, + "learning_rate": 8.67559026608242e-06, + "loss": 1.0208, + "step": 4040 + }, + { + "epoch": 0.5561519405450042, + "grad_norm": 1.7925171268797053, + "learning_rate": 8.67117223941265e-06, + "loss": 1.0698, + "step": 4041 + }, + { + "epoch": 0.5562895678502615, + "grad_norm": 1.9367795024546761, + "learning_rate": 8.666754476762763e-06, + "loss": 0.9788, + "step": 4042 + }, + { + "epoch": 0.5564271951555189, + "grad_norm": 1.721739609579454, + "learning_rate": 8.662336979010511e-06, + "loss": 0.9788, + "step": 4043 + }, + { + "epoch": 0.5565648224607762, + "grad_norm": 1.67068907990827, + "learning_rate": 8.65791974703359e-06, + "loss": 0.995, + "step": 4044 + }, + { + "epoch": 0.5567024497660336, + "grad_norm": 1.6841709022819278, + "learning_rate": 8.65350278170965e-06, + "loss": 0.9932, + "step": 4045 + }, + { + "epoch": 0.5568400770712909, + "grad_norm": 1.6726611603998105, + "learning_rate": 8.649086083916269e-06, + "loss": 1.0123, + "step": 4046 + }, + { + "epoch": 0.5569777043765483, + "grad_norm": 1.879855732792412, + "learning_rate": 8.644669654530995e-06, + "loss": 1.0081, + "step": 4047 + }, + { + "epoch": 0.5571153316818057, + "grad_norm": 4.559539425623358, + "learning_rate": 8.640253494431311e-06, + "loss": 1.0354, + "step": 4048 + }, + { + "epoch": 0.5572529589870631, + "grad_norm": 1.6266265189142677, + "learning_rate": 8.635837604494638e-06, + "loss": 0.9671, + "step": 4049 + }, + { + "epoch": 0.5573905862923204, + "grad_norm": 1.8928121903374728, + "learning_rate": 
8.631421985598363e-06, + "loss": 0.9685, + "step": 4050 + }, + { + "epoch": 0.5575282135975778, + "grad_norm": 1.65852396430693, + "learning_rate": 8.627006638619803e-06, + "loss": 1.0248, + "step": 4051 + }, + { + "epoch": 0.5576658409028351, + "grad_norm": 1.6967778889823284, + "learning_rate": 8.622591564436235e-06, + "loss": 0.9628, + "step": 4052 + }, + { + "epoch": 0.5578034682080925, + "grad_norm": 1.5656230937495763, + "learning_rate": 8.618176763924864e-06, + "loss": 1.0085, + "step": 4053 + }, + { + "epoch": 0.5579410955133498, + "grad_norm": 1.8058466656836607, + "learning_rate": 8.613762237962858e-06, + "loss": 1.0253, + "step": 4054 + }, + { + "epoch": 0.5580787228186073, + "grad_norm": 1.8885789266120219, + "learning_rate": 8.609347987427318e-06, + "loss": 1.0161, + "step": 4055 + }, + { + "epoch": 0.5582163501238646, + "grad_norm": 1.6454330256265008, + "learning_rate": 8.604934013195297e-06, + "loss": 0.9388, + "step": 4056 + }, + { + "epoch": 0.558353977429122, + "grad_norm": 1.6538748153710856, + "learning_rate": 8.60052031614379e-06, + "loss": 1.0463, + "step": 4057 + }, + { + "epoch": 0.5584916047343793, + "grad_norm": 1.7003689336770682, + "learning_rate": 8.59610689714974e-06, + "loss": 0.9918, + "step": 4058 + }, + { + "epoch": 0.5586292320396367, + "grad_norm": 1.8742696488359554, + "learning_rate": 8.591693757090034e-06, + "loss": 1.11, + "step": 4059 + }, + { + "epoch": 0.558766859344894, + "grad_norm": 1.61900153167684, + "learning_rate": 8.587280896841499e-06, + "loss": 0.9137, + "step": 4060 + }, + { + "epoch": 0.5589044866501514, + "grad_norm": 1.6611891086280002, + "learning_rate": 8.582868317280911e-06, + "loss": 0.9523, + "step": 4061 + }, + { + "epoch": 0.5590421139554087, + "grad_norm": 1.9671402986853632, + "learning_rate": 8.578456019284994e-06, + "loss": 1.0609, + "step": 4062 + }, + { + "epoch": 0.5591797412606662, + "grad_norm": 1.862561616468973, + "learning_rate": 8.574044003730408e-06, + "loss": 0.978, + "step": 4063 + }, + { + "epoch": 0.5593173685659235, + "grad_norm": 1.6964521811301743, + "learning_rate": 8.569632271493752e-06, + "loss": 1.0476, + "step": 4064 + }, + { + "epoch": 0.5594549958711809, + "grad_norm": 1.6443698193151444, + "learning_rate": 8.56522082345159e-06, + "loss": 0.9877, + "step": 4065 + }, + { + "epoch": 0.5595926231764382, + "grad_norm": 1.6216029750041632, + "learning_rate": 8.560809660480404e-06, + "loss": 0.9407, + "step": 4066 + }, + { + "epoch": 0.5597302504816956, + "grad_norm": 2.102355596975558, + "learning_rate": 8.556398783456646e-06, + "loss": 1.0712, + "step": 4067 + }, + { + "epoch": 0.5598678777869529, + "grad_norm": 1.8415325131689604, + "learning_rate": 8.551988193256683e-06, + "loss": 1.0887, + "step": 4068 + }, + { + "epoch": 0.5600055050922103, + "grad_norm": 2.0849150967039405, + "learning_rate": 8.54757789075685e-06, + "loss": 1.0445, + "step": 4069 + }, + { + "epoch": 0.5601431323974676, + "grad_norm": 1.9241149133496436, + "learning_rate": 8.543167876833404e-06, + "loss": 1.0984, + "step": 4070 + }, + { + "epoch": 0.5602807597027251, + "grad_norm": 1.7858312818164144, + "learning_rate": 8.538758152362562e-06, + "loss": 1.0158, + "step": 4071 + }, + { + "epoch": 0.5604183870079824, + "grad_norm": 1.8552281674567, + "learning_rate": 8.534348718220471e-06, + "loss": 0.959, + "step": 4072 + }, + { + "epoch": 0.5605560143132398, + "grad_norm": 1.9698397455504308, + "learning_rate": 8.529939575283229e-06, + "loss": 0.9777, + "step": 4073 + }, + { + "epoch": 0.5606936416184971, + "grad_norm": 
1.7615288978532848, + "learning_rate": 8.52553072442687e-06, + "loss": 1.0806, + "step": 4074 + }, + { + "epoch": 0.5608312689237545, + "grad_norm": 1.7076912862558042, + "learning_rate": 8.521122166527371e-06, + "loss": 0.9263, + "step": 4075 + }, + { + "epoch": 0.5609688962290118, + "grad_norm": 1.5597410190011463, + "learning_rate": 8.516713902460654e-06, + "loss": 1.0283, + "step": 4076 + }, + { + "epoch": 0.5611065235342692, + "grad_norm": 1.810549438251086, + "learning_rate": 8.512305933102584e-06, + "loss": 1.061, + "step": 4077 + }, + { + "epoch": 0.5612441508395266, + "grad_norm": 1.7597212102633768, + "learning_rate": 8.507898259328955e-06, + "loss": 1.0767, + "step": 4078 + }, + { + "epoch": 0.561381778144784, + "grad_norm": 1.7646516749701504, + "learning_rate": 8.503490882015523e-06, + "loss": 1.0038, + "step": 4079 + }, + { + "epoch": 0.5615194054500413, + "grad_norm": 1.6985309707618284, + "learning_rate": 8.499083802037966e-06, + "loss": 1.0404, + "step": 4080 + }, + { + "epoch": 0.5616570327552987, + "grad_norm": 1.8058245361566534, + "learning_rate": 8.494677020271913e-06, + "loss": 0.9264, + "step": 4081 + }, + { + "epoch": 0.561794660060556, + "grad_norm": 1.726167554726961, + "learning_rate": 8.490270537592928e-06, + "loss": 0.9795, + "step": 4082 + }, + { + "epoch": 0.5619322873658134, + "grad_norm": 1.5351131897418806, + "learning_rate": 8.485864354876521e-06, + "loss": 1.0113, + "step": 4083 + }, + { + "epoch": 0.5620699146710707, + "grad_norm": 1.8562635186896628, + "learning_rate": 8.481458472998145e-06, + "loss": 0.9286, + "step": 4084 + }, + { + "epoch": 0.562207541976328, + "grad_norm": 1.5587993968423375, + "learning_rate": 8.477052892833177e-06, + "loss": 1.0345, + "step": 4085 + }, + { + "epoch": 0.5623451692815855, + "grad_norm": 1.6956688642396152, + "learning_rate": 8.47264761525696e-06, + "loss": 0.935, + "step": 4086 + }, + { + "epoch": 0.5624827965868429, + "grad_norm": 1.737043827251866, + "learning_rate": 8.468242641144748e-06, + "loss": 0.9354, + "step": 4087 + }, + { + "epoch": 0.5626204238921002, + "grad_norm": 2.359954573574087, + "learning_rate": 8.46383797137176e-06, + "loss": 1.0025, + "step": 4088 + }, + { + "epoch": 0.5627580511973576, + "grad_norm": 1.741436545414816, + "learning_rate": 8.459433606813137e-06, + "loss": 1.0246, + "step": 4089 + }, + { + "epoch": 0.5628956785026149, + "grad_norm": 2.11279603890354, + "learning_rate": 8.455029548343969e-06, + "loss": 1.1304, + "step": 4090 + }, + { + "epoch": 0.5630333058078723, + "grad_norm": 1.6667892461492047, + "learning_rate": 8.45062579683928e-06, + "loss": 1.0209, + "step": 4091 + }, + { + "epoch": 0.5631709331131296, + "grad_norm": 1.8375287889582337, + "learning_rate": 8.446222353174039e-06, + "loss": 1.0425, + "step": 4092 + }, + { + "epoch": 0.5633085604183871, + "grad_norm": 1.7491430393656549, + "learning_rate": 8.441819218223144e-06, + "loss": 1.0108, + "step": 4093 + }, + { + "epoch": 0.5634461877236444, + "grad_norm": 1.670382579953507, + "learning_rate": 8.437416392861445e-06, + "loss": 1.0904, + "step": 4094 + }, + { + "epoch": 0.5635838150289018, + "grad_norm": 1.8840698238412177, + "learning_rate": 8.433013877963722e-06, + "loss": 1.0433, + "step": 4095 + }, + { + "epoch": 0.5637214423341591, + "grad_norm": 2.2659205124774218, + "learning_rate": 8.428611674404686e-06, + "loss": 1.0398, + "step": 4096 + }, + { + "epoch": 0.5638590696394165, + "grad_norm": 1.824576646614936, + "learning_rate": 8.424209783059001e-06, + "loss": 1.0627, + "step": 4097 + }, + { + "epoch": 
0.5639966969446738, + "grad_norm": 1.904794571862147, + "learning_rate": 8.419808204801261e-06, + "loss": 1.0601, + "step": 4098 + }, + { + "epoch": 0.5641343242499312, + "grad_norm": 1.6467001724313644, + "learning_rate": 8.415406940506005e-06, + "loss": 0.99, + "step": 4099 + }, + { + "epoch": 0.5642719515551885, + "grad_norm": 1.5494121172512694, + "learning_rate": 8.411005991047695e-06, + "loss": 0.9969, + "step": 4100 + }, + { + "epoch": 0.564409578860446, + "grad_norm": 1.6290232300187526, + "learning_rate": 8.406605357300747e-06, + "loss": 1.0118, + "step": 4101 + }, + { + "epoch": 0.5645472061657033, + "grad_norm": 1.7657400921338566, + "learning_rate": 8.402205040139498e-06, + "loss": 0.9674, + "step": 4102 + }, + { + "epoch": 0.5646848334709607, + "grad_norm": 1.585490905574864, + "learning_rate": 8.397805040438242e-06, + "loss": 1.0422, + "step": 4103 + }, + { + "epoch": 0.564822460776218, + "grad_norm": 1.7485420981466837, + "learning_rate": 8.393405359071188e-06, + "loss": 0.9111, + "step": 4104 + }, + { + "epoch": 0.5649600880814754, + "grad_norm": 1.80029257820778, + "learning_rate": 8.3890059969125e-06, + "loss": 0.9125, + "step": 4105 + }, + { + "epoch": 0.5650977153867327, + "grad_norm": 1.7879356397236177, + "learning_rate": 8.384606954836264e-06, + "loss": 1.0473, + "step": 4106 + }, + { + "epoch": 0.56523534269199, + "grad_norm": 1.7142478817277533, + "learning_rate": 8.38020823371652e-06, + "loss": 0.968, + "step": 4107 + }, + { + "epoch": 0.5653729699972474, + "grad_norm": 1.6927398443145103, + "learning_rate": 8.375809834427225e-06, + "loss": 0.9704, + "step": 4108 + }, + { + "epoch": 0.5655105973025049, + "grad_norm": 1.720216511232695, + "learning_rate": 8.371411757842285e-06, + "loss": 1.0429, + "step": 4109 + }, + { + "epoch": 0.5656482246077622, + "grad_norm": 1.837175632656309, + "learning_rate": 8.367014004835531e-06, + "loss": 1.0877, + "step": 4110 + }, + { + "epoch": 0.5657858519130196, + "grad_norm": 1.6763421175317594, + "learning_rate": 8.362616576280747e-06, + "loss": 0.981, + "step": 4111 + }, + { + "epoch": 0.5659234792182769, + "grad_norm": 1.9256625709035307, + "learning_rate": 8.358219473051632e-06, + "loss": 0.9022, + "step": 4112 + }, + { + "epoch": 0.5660611065235343, + "grad_norm": 1.69977757583853, + "learning_rate": 8.353822696021835e-06, + "loss": 0.9143, + "step": 4113 + }, + { + "epoch": 0.5661987338287916, + "grad_norm": 1.71099276646812, + "learning_rate": 8.349426246064936e-06, + "loss": 0.9974, + "step": 4114 + }, + { + "epoch": 0.566336361134049, + "grad_norm": 1.5619621400764025, + "learning_rate": 8.345030124054446e-06, + "loss": 0.9613, + "step": 4115 + }, + { + "epoch": 0.5664739884393064, + "grad_norm": 1.8272880564800456, + "learning_rate": 8.340634330863818e-06, + "loss": 1.0615, + "step": 4116 + }, + { + "epoch": 0.5666116157445638, + "grad_norm": 1.7163420357547579, + "learning_rate": 8.336238867366431e-06, + "loss": 1.0695, + "step": 4117 + }, + { + "epoch": 0.5667492430498211, + "grad_norm": 2.032561028652858, + "learning_rate": 8.331843734435612e-06, + "loss": 0.9495, + "step": 4118 + }, + { + "epoch": 0.5668868703550785, + "grad_norm": 2.0101585082255515, + "learning_rate": 8.327448932944601e-06, + "loss": 0.956, + "step": 4119 + }, + { + "epoch": 0.5670244976603358, + "grad_norm": 1.7803288666189858, + "learning_rate": 8.323054463766598e-06, + "loss": 0.9781, + "step": 4120 + }, + { + "epoch": 0.5671621249655932, + "grad_norm": 1.6629451221539324, + "learning_rate": 8.318660327774716e-06, + "loss": 1.024, + "step": 4121 
+ }, + { + "epoch": 0.5672997522708505, + "grad_norm": 1.824961916746767, + "learning_rate": 8.314266525842014e-06, + "loss": 1.0462, + "step": 4122 + }, + { + "epoch": 0.5674373795761078, + "grad_norm": 1.7673697102343662, + "learning_rate": 8.309873058841474e-06, + "loss": 1.0283, + "step": 4123 + }, + { + "epoch": 0.5675750068813653, + "grad_norm": 1.6882592944303085, + "learning_rate": 8.305479927646028e-06, + "loss": 1.0205, + "step": 4124 + }, + { + "epoch": 0.5677126341866227, + "grad_norm": 1.5938230552812813, + "learning_rate": 8.301087133128518e-06, + "loss": 0.9735, + "step": 4125 + }, + { + "epoch": 0.56785026149188, + "grad_norm": 1.6891376968182923, + "learning_rate": 8.296694676161745e-06, + "loss": 0.9967, + "step": 4126 + }, + { + "epoch": 0.5679878887971374, + "grad_norm": 1.7463496108187126, + "learning_rate": 8.292302557618422e-06, + "loss": 1.0411, + "step": 4127 + }, + { + "epoch": 0.5681255161023947, + "grad_norm": 2.142899485341186, + "learning_rate": 8.287910778371206e-06, + "loss": 1.0389, + "step": 4128 + }, + { + "epoch": 0.568263143407652, + "grad_norm": 1.9476377944016832, + "learning_rate": 8.28351933929268e-06, + "loss": 1.0298, + "step": 4129 + }, + { + "epoch": 0.5684007707129094, + "grad_norm": 1.6706843649527119, + "learning_rate": 8.279128241255365e-06, + "loss": 1.0298, + "step": 4130 + }, + { + "epoch": 0.5685383980181669, + "grad_norm": 1.8046434549279606, + "learning_rate": 8.274737485131715e-06, + "loss": 1.0265, + "step": 4131 + }, + { + "epoch": 0.5686760253234242, + "grad_norm": 1.6909768333276554, + "learning_rate": 8.270347071794107e-06, + "loss": 0.9827, + "step": 4132 + }, + { + "epoch": 0.5688136526286816, + "grad_norm": 1.794147547346384, + "learning_rate": 8.265957002114864e-06, + "loss": 0.9373, + "step": 4133 + }, + { + "epoch": 0.5689512799339389, + "grad_norm": 1.8415423873415095, + "learning_rate": 8.261567276966223e-06, + "loss": 0.9539, + "step": 4134 + }, + { + "epoch": 0.5690889072391963, + "grad_norm": 1.831705029545054, + "learning_rate": 8.257177897220371e-06, + "loss": 1.0131, + "step": 4135 + }, + { + "epoch": 0.5692265345444536, + "grad_norm": 1.818652899879206, + "learning_rate": 8.252788863749414e-06, + "loss": 0.9498, + "step": 4136 + }, + { + "epoch": 0.569364161849711, + "grad_norm": 1.641762885434339, + "learning_rate": 8.248400177425392e-06, + "loss": 1.022, + "step": 4137 + }, + { + "epoch": 0.5695017891549683, + "grad_norm": 1.78633652596997, + "learning_rate": 8.244011839120277e-06, + "loss": 1.0665, + "step": 4138 + }, + { + "epoch": 0.5696394164602258, + "grad_norm": 1.6074253916566155, + "learning_rate": 8.239623849705976e-06, + "loss": 1.0811, + "step": 4139 + }, + { + "epoch": 0.5697770437654831, + "grad_norm": 1.7452729177445854, + "learning_rate": 8.235236210054315e-06, + "loss": 0.9746, + "step": 4140 + }, + { + "epoch": 0.5699146710707405, + "grad_norm": 1.8319463797494118, + "learning_rate": 8.230848921037066e-06, + "loss": 0.9687, + "step": 4141 + }, + { + "epoch": 0.5700522983759978, + "grad_norm": 1.9204861996323552, + "learning_rate": 8.226461983525917e-06, + "loss": 0.9407, + "step": 4142 + }, + { + "epoch": 0.5701899256812551, + "grad_norm": 1.6694819668121021, + "learning_rate": 8.222075398392498e-06, + "loss": 0.9357, + "step": 4143 + }, + { + "epoch": 0.5703275529865125, + "grad_norm": 1.546947872661611, + "learning_rate": 8.217689166508357e-06, + "loss": 0.986, + "step": 4144 + }, + { + "epoch": 0.5704651802917698, + "grad_norm": 1.6841022492177011, + "learning_rate": 8.21330328874498e-06, + 
"loss": 0.9404, + "step": 4145 + }, + { + "epoch": 0.5706028075970272, + "grad_norm": 1.757863698683949, + "learning_rate": 8.208917765973787e-06, + "loss": 1.0384, + "step": 4146 + }, + { + "epoch": 0.5707404349022847, + "grad_norm": 1.5671479923831644, + "learning_rate": 8.204532599066112e-06, + "loss": 0.994, + "step": 4147 + }, + { + "epoch": 0.570878062207542, + "grad_norm": 1.7908940269865012, + "learning_rate": 8.200147788893238e-06, + "loss": 1.0381, + "step": 4148 + }, + { + "epoch": 0.5710156895127994, + "grad_norm": 1.5031083094303492, + "learning_rate": 8.195763336326354e-06, + "loss": 0.9687, + "step": 4149 + }, + { + "epoch": 0.5711533168180567, + "grad_norm": 1.647351778653535, + "learning_rate": 8.191379242236603e-06, + "loss": 0.9369, + "step": 4150 + }, + { + "epoch": 0.571290944123314, + "grad_norm": 1.6423571612387926, + "learning_rate": 8.186995507495036e-06, + "loss": 1.0494, + "step": 4151 + }, + { + "epoch": 0.5714285714285714, + "grad_norm": 1.6667378774333477, + "learning_rate": 8.182612132972646e-06, + "loss": 0.9383, + "step": 4152 + }, + { + "epoch": 0.5715661987338287, + "grad_norm": 1.747671069083027, + "learning_rate": 8.178229119540345e-06, + "loss": 1.0125, + "step": 4153 + }, + { + "epoch": 0.5717038260390862, + "grad_norm": 1.6848249564610762, + "learning_rate": 8.173846468068987e-06, + "loss": 0.9725, + "step": 4154 + }, + { + "epoch": 0.5718414533443436, + "grad_norm": 1.7014445639576605, + "learning_rate": 8.169464179429332e-06, + "loss": 1.0055, + "step": 4155 + }, + { + "epoch": 0.5719790806496009, + "grad_norm": 1.852314589150778, + "learning_rate": 8.16508225449209e-06, + "loss": 0.9877, + "step": 4156 + }, + { + "epoch": 0.5721167079548583, + "grad_norm": 3.3906933936873083, + "learning_rate": 8.160700694127887e-06, + "loss": 0.9692, + "step": 4157 + }, + { + "epoch": 0.5722543352601156, + "grad_norm": 1.8309410676235658, + "learning_rate": 8.15631949920728e-06, + "loss": 1.0929, + "step": 4158 + }, + { + "epoch": 0.572391962565373, + "grad_norm": 1.752374117749652, + "learning_rate": 8.15193867060075e-06, + "loss": 0.9239, + "step": 4159 + }, + { + "epoch": 0.5725295898706303, + "grad_norm": 2.071523066067662, + "learning_rate": 8.147558209178708e-06, + "loss": 0.9809, + "step": 4160 + }, + { + "epoch": 0.5726672171758876, + "grad_norm": 2.0002157567798, + "learning_rate": 8.143178115811495e-06, + "loss": 1.0785, + "step": 4161 + }, + { + "epoch": 0.5728048444811451, + "grad_norm": 1.870131482615628, + "learning_rate": 8.13879839136937e-06, + "loss": 1.0327, + "step": 4162 + }, + { + "epoch": 0.5729424717864025, + "grad_norm": 1.8950512765929906, + "learning_rate": 8.13441903672253e-06, + "loss": 0.9886, + "step": 4163 + }, + { + "epoch": 0.5730800990916598, + "grad_norm": 1.7654884561549298, + "learning_rate": 8.130040052741089e-06, + "loss": 1.0525, + "step": 4164 + }, + { + "epoch": 0.5732177263969171, + "grad_norm": 1.8178888660495405, + "learning_rate": 8.125661440295094e-06, + "loss": 1.0443, + "step": 4165 + }, + { + "epoch": 0.5733553537021745, + "grad_norm": 1.7033579224334903, + "learning_rate": 8.121283200254513e-06, + "loss": 1.0342, + "step": 4166 + }, + { + "epoch": 0.5734929810074318, + "grad_norm": 1.7209275288783732, + "learning_rate": 8.116905333489242e-06, + "loss": 0.9357, + "step": 4167 + }, + { + "epoch": 0.5736306083126892, + "grad_norm": 1.870786556336648, + "learning_rate": 8.112527840869105e-06, + "loss": 1.0286, + "step": 4168 + }, + { + "epoch": 0.5737682356179467, + "grad_norm": 1.7912247826211616, + "learning_rate": 
8.108150723263852e-06, + "loss": 1.0459, + "step": 4169 + }, + { + "epoch": 0.573905862923204, + "grad_norm": 1.755538482703091, + "learning_rate": 8.103773981543151e-06, + "loss": 0.9169, + "step": 4170 + }, + { + "epoch": 0.5740434902284614, + "grad_norm": 2.6369218349891885, + "learning_rate": 8.099397616576605e-06, + "loss": 1.0694, + "step": 4171 + }, + { + "epoch": 0.5741811175337187, + "grad_norm": 1.5730415568174594, + "learning_rate": 8.095021629233733e-06, + "loss": 0.9562, + "step": 4172 + }, + { + "epoch": 0.574318744838976, + "grad_norm": 1.5959210576067544, + "learning_rate": 8.090646020383993e-06, + "loss": 0.9689, + "step": 4173 + }, + { + "epoch": 0.5744563721442334, + "grad_norm": 1.717630115731798, + "learning_rate": 8.086270790896747e-06, + "loss": 0.986, + "step": 4174 + }, + { + "epoch": 0.5745939994494907, + "grad_norm": 1.7834953519071666, + "learning_rate": 8.0818959416413e-06, + "loss": 0.9045, + "step": 4175 + }, + { + "epoch": 0.5747316267547481, + "grad_norm": 1.8576152238066745, + "learning_rate": 8.077521473486877e-06, + "loss": 0.9996, + "step": 4176 + }, + { + "epoch": 0.5748692540600056, + "grad_norm": 1.7341241637037148, + "learning_rate": 8.073147387302616e-06, + "loss": 1.0178, + "step": 4177 + }, + { + "epoch": 0.5750068813652629, + "grad_norm": 1.6271349682690879, + "learning_rate": 8.068773683957598e-06, + "loss": 0.9513, + "step": 4178 + }, + { + "epoch": 0.5751445086705202, + "grad_norm": 1.9750022725718093, + "learning_rate": 8.064400364320809e-06, + "loss": 1.052, + "step": 4179 + }, + { + "epoch": 0.5752821359757776, + "grad_norm": 1.7249060856776555, + "learning_rate": 8.060027429261176e-06, + "loss": 1.0218, + "step": 4180 + }, + { + "epoch": 0.575419763281035, + "grad_norm": 1.7620318514282844, + "learning_rate": 8.055654879647535e-06, + "loss": 1.0455, + "step": 4181 + }, + { + "epoch": 0.5755573905862923, + "grad_norm": 1.6750596616229183, + "learning_rate": 8.051282716348655e-06, + "loss": 0.9941, + "step": 4182 + }, + { + "epoch": 0.5756950178915496, + "grad_norm": 1.6714530412314512, + "learning_rate": 8.046910940233224e-06, + "loss": 1.0569, + "step": 4183 + }, + { + "epoch": 0.575832645196807, + "grad_norm": 1.6481958595720827, + "learning_rate": 8.042539552169856e-06, + "loss": 0.9535, + "step": 4184 + }, + { + "epoch": 0.5759702725020645, + "grad_norm": 1.5239819349429768, + "learning_rate": 8.03816855302708e-06, + "loss": 0.9833, + "step": 4185 + }, + { + "epoch": 0.5761078998073218, + "grad_norm": 1.7220436770127434, + "learning_rate": 8.033797943673363e-06, + "loss": 1.0686, + "step": 4186 + }, + { + "epoch": 0.5762455271125791, + "grad_norm": 1.7925415287677329, + "learning_rate": 8.029427724977077e-06, + "loss": 1.0022, + "step": 4187 + }, + { + "epoch": 0.5763831544178365, + "grad_norm": 1.7053016051731829, + "learning_rate": 8.025057897806531e-06, + "loss": 0.9145, + "step": 4188 + }, + { + "epoch": 0.5765207817230938, + "grad_norm": 1.9748909127090817, + "learning_rate": 8.020688463029942e-06, + "loss": 0.998, + "step": 4189 + }, + { + "epoch": 0.5766584090283512, + "grad_norm": 2.051342883570065, + "learning_rate": 8.016319421515467e-06, + "loss": 0.9439, + "step": 4190 + }, + { + "epoch": 0.5767960363336085, + "grad_norm": 1.79927213892093, + "learning_rate": 8.011950774131167e-06, + "loss": 1.0192, + "step": 4191 + }, + { + "epoch": 0.576933663638866, + "grad_norm": 1.6074888927522777, + "learning_rate": 8.007582521745031e-06, + "loss": 0.9507, + "step": 4192 + }, + { + "epoch": 0.5770712909441233, + "grad_norm": 
1.6705054548553586, + "learning_rate": 8.00321466522498e-06, + "loss": 0.995, + "step": 4193 + }, + { + "epoch": 0.5772089182493807, + "grad_norm": 1.6991705761238471, + "learning_rate": 7.998847205438838e-06, + "loss": 1.0305, + "step": 4194 + }, + { + "epoch": 0.577346545554638, + "grad_norm": 2.0318020481804138, + "learning_rate": 7.994480143254369e-06, + "loss": 1.0029, + "step": 4195 + }, + { + "epoch": 0.5774841728598954, + "grad_norm": 1.7024632485724989, + "learning_rate": 7.990113479539237e-06, + "loss": 0.9726, + "step": 4196 + }, + { + "epoch": 0.5776218001651527, + "grad_norm": 1.799983809221181, + "learning_rate": 7.985747215161048e-06, + "loss": 0.9961, + "step": 4197 + }, + { + "epoch": 0.5777594274704101, + "grad_norm": 1.7015472207834037, + "learning_rate": 7.981381350987313e-06, + "loss": 0.9778, + "step": 4198 + }, + { + "epoch": 0.5778970547756674, + "grad_norm": 1.560337422992669, + "learning_rate": 7.977015887885473e-06, + "loss": 0.9807, + "step": 4199 + }, + { + "epoch": 0.5780346820809249, + "grad_norm": 1.6587536028509373, + "learning_rate": 7.97265082672288e-06, + "loss": 0.9441, + "step": 4200 + }, + { + "epoch": 0.5781723093861822, + "grad_norm": 1.9037971739152533, + "learning_rate": 7.968286168366824e-06, + "loss": 1.0559, + "step": 4201 + }, + { + "epoch": 0.5783099366914396, + "grad_norm": 1.8890985826009206, + "learning_rate": 7.963921913684488e-06, + "loss": 0.9952, + "step": 4202 + }, + { + "epoch": 0.578447563996697, + "grad_norm": 1.6974743388793723, + "learning_rate": 7.959558063543005e-06, + "loss": 0.9835, + "step": 4203 + }, + { + "epoch": 0.5785851913019543, + "grad_norm": 1.669679861450674, + "learning_rate": 7.955194618809396e-06, + "loss": 0.9588, + "step": 4204 + }, + { + "epoch": 0.5787228186072116, + "grad_norm": 1.8994863316113748, + "learning_rate": 7.950831580350633e-06, + "loss": 1.0297, + "step": 4205 + }, + { + "epoch": 0.578860445912469, + "grad_norm": 1.650843678180431, + "learning_rate": 7.94646894903358e-06, + "loss": 1.0407, + "step": 4206 + }, + { + "epoch": 0.5789980732177264, + "grad_norm": 1.8180652299205629, + "learning_rate": 7.942106725725038e-06, + "loss": 1.0354, + "step": 4207 + }, + { + "epoch": 0.5791357005229838, + "grad_norm": 1.5641663870802276, + "learning_rate": 7.937744911291723e-06, + "loss": 0.9799, + "step": 4208 + }, + { + "epoch": 0.5792733278282411, + "grad_norm": 1.6109763841552938, + "learning_rate": 7.933383506600261e-06, + "loss": 1.0174, + "step": 4209 + }, + { + "epoch": 0.5794109551334985, + "grad_norm": 1.811855611098829, + "learning_rate": 7.92902251251721e-06, + "loss": 0.9682, + "step": 4210 + }, + { + "epoch": 0.5795485824387558, + "grad_norm": 1.895223018215018, + "learning_rate": 7.924661929909035e-06, + "loss": 1.0217, + "step": 4211 + }, + { + "epoch": 0.5796862097440132, + "grad_norm": 1.6468542091015417, + "learning_rate": 7.92030175964213e-06, + "loss": 1.0009, + "step": 4212 + }, + { + "epoch": 0.5798238370492705, + "grad_norm": 2.0673536864865696, + "learning_rate": 7.915942002582793e-06, + "loss": 0.9827, + "step": 4213 + }, + { + "epoch": 0.5799614643545279, + "grad_norm": 1.650148406061492, + "learning_rate": 7.911582659597253e-06, + "loss": 1.0068, + "step": 4214 + }, + { + "epoch": 0.5800990916597853, + "grad_norm": 2.0893035906691617, + "learning_rate": 7.90722373155165e-06, + "loss": 0.9624, + "step": 4215 + }, + { + "epoch": 0.5802367189650427, + "grad_norm": 1.8386097947898845, + "learning_rate": 7.90286521931205e-06, + "loss": 0.9638, + "step": 4216 + }, + { + "epoch": 
0.5803743462703, + "grad_norm": 1.666757602230867, + "learning_rate": 7.898507123744419e-06, + "loss": 1.0621, + "step": 4217 + }, + { + "epoch": 0.5805119735755574, + "grad_norm": 1.9407347766851486, + "learning_rate": 7.894149445714661e-06, + "loss": 1.0124, + "step": 4218 + }, + { + "epoch": 0.5806496008808147, + "grad_norm": 1.922149174688596, + "learning_rate": 7.889792186088578e-06, + "loss": 1.0014, + "step": 4219 + }, + { + "epoch": 0.5807872281860721, + "grad_norm": 1.573704489778096, + "learning_rate": 7.885435345731905e-06, + "loss": 1.0084, + "step": 4220 + }, + { + "epoch": 0.5809248554913294, + "grad_norm": 1.7760277958691533, + "learning_rate": 7.88107892551028e-06, + "loss": 0.9841, + "step": 4221 + }, + { + "epoch": 0.5810624827965868, + "grad_norm": 1.634474543912554, + "learning_rate": 7.87672292628927e-06, + "loss": 0.9303, + "step": 4222 + }, + { + "epoch": 0.5812001101018442, + "grad_norm": 1.6689815214624326, + "learning_rate": 7.872367348934353e-06, + "loss": 1.0037, + "step": 4223 + }, + { + "epoch": 0.5813377374071016, + "grad_norm": 1.6955558305552014, + "learning_rate": 7.868012194310914e-06, + "loss": 1.0099, + "step": 4224 + }, + { + "epoch": 0.5814753647123589, + "grad_norm": 1.8226906694150415, + "learning_rate": 7.863657463284274e-06, + "loss": 1.0254, + "step": 4225 + }, + { + "epoch": 0.5816129920176163, + "grad_norm": 1.7254136983608634, + "learning_rate": 7.859303156719646e-06, + "loss": 1.0531, + "step": 4226 + }, + { + "epoch": 0.5817506193228736, + "grad_norm": 1.9269842478890744, + "learning_rate": 7.854949275482185e-06, + "loss": 1.0381, + "step": 4227 + }, + { + "epoch": 0.581888246628131, + "grad_norm": 1.7153618793183898, + "learning_rate": 7.850595820436933e-06, + "loss": 0.9376, + "step": 4228 + }, + { + "epoch": 0.5820258739333883, + "grad_norm": 1.7399612048476527, + "learning_rate": 7.846242792448873e-06, + "loss": 0.9228, + "step": 4229 + }, + { + "epoch": 0.5821635012386458, + "grad_norm": 2.0173364543126393, + "learning_rate": 7.84189019238289e-06, + "loss": 1.0168, + "step": 4230 + }, + { + "epoch": 0.5823011285439031, + "grad_norm": 1.7744042579516992, + "learning_rate": 7.837538021103783e-06, + "loss": 0.9436, + "step": 4231 + }, + { + "epoch": 0.5824387558491605, + "grad_norm": 1.938055025754311, + "learning_rate": 7.833186279476268e-06, + "loss": 1.1145, + "step": 4232 + }, + { + "epoch": 0.5825763831544178, + "grad_norm": 2.078467394147425, + "learning_rate": 7.828834968364984e-06, + "loss": 0.9641, + "step": 4233 + }, + { + "epoch": 0.5827140104596752, + "grad_norm": 1.7046173033468408, + "learning_rate": 7.824484088634467e-06, + "loss": 0.9885, + "step": 4234 + }, + { + "epoch": 0.5828516377649325, + "grad_norm": 1.8967074193608617, + "learning_rate": 7.820133641149186e-06, + "loss": 1.0123, + "step": 4235 + }, + { + "epoch": 0.5829892650701899, + "grad_norm": 1.6776998313675089, + "learning_rate": 7.815783626773506e-06, + "loss": 1.0483, + "step": 4236 + }, + { + "epoch": 0.5831268923754472, + "grad_norm": 1.5852957697810253, + "learning_rate": 7.811434046371724e-06, + "loss": 0.9784, + "step": 4237 + }, + { + "epoch": 0.5832645196807047, + "grad_norm": 1.868697205954871, + "learning_rate": 7.80708490080804e-06, + "loss": 1.0508, + "step": 4238 + }, + { + "epoch": 0.583402146985962, + "grad_norm": 1.627225740070741, + "learning_rate": 7.802736190946567e-06, + "loss": 0.9671, + "step": 4239 + }, + { + "epoch": 0.5835397742912194, + "grad_norm": 1.5673256140407603, + "learning_rate": 7.798387917651339e-06, + "loss": 0.9335, + 
"step": 4240 + }, + { + "epoch": 0.5836774015964767, + "grad_norm": 1.5446418939604443, + "learning_rate": 7.794040081786289e-06, + "loss": 0.9735, + "step": 4241 + }, + { + "epoch": 0.5838150289017341, + "grad_norm": 1.6567090020848032, + "learning_rate": 7.789692684215284e-06, + "loss": 1.0328, + "step": 4242 + }, + { + "epoch": 0.5839526562069914, + "grad_norm": 1.6741849837057594, + "learning_rate": 7.785345725802081e-06, + "loss": 0.9779, + "step": 4243 + }, + { + "epoch": 0.5840902835122488, + "grad_norm": 1.7116160809032748, + "learning_rate": 7.780999207410372e-06, + "loss": 0.9251, + "step": 4244 + }, + { + "epoch": 0.5842279108175062, + "grad_norm": 1.9401787891800675, + "learning_rate": 7.776653129903744e-06, + "loss": 1.0389, + "step": 4245 + }, + { + "epoch": 0.5843655381227636, + "grad_norm": 1.7148381799436883, + "learning_rate": 7.772307494145704e-06, + "loss": 1.0162, + "step": 4246 + }, + { + "epoch": 0.5845031654280209, + "grad_norm": 1.6309918658806077, + "learning_rate": 7.767962300999668e-06, + "loss": 0.9246, + "step": 4247 + }, + { + "epoch": 0.5846407927332783, + "grad_norm": 1.7996394202373762, + "learning_rate": 7.763617551328976e-06, + "loss": 0.9874, + "step": 4248 + }, + { + "epoch": 0.5847784200385356, + "grad_norm": 1.7006538590892448, + "learning_rate": 7.759273245996858e-06, + "loss": 1.0514, + "step": 4249 + }, + { + "epoch": 0.584916047343793, + "grad_norm": 1.7406472972814693, + "learning_rate": 7.754929385866481e-06, + "loss": 0.9165, + "step": 4250 + }, + { + "epoch": 0.5850536746490503, + "grad_norm": 1.6930528010112595, + "learning_rate": 7.750585971800898e-06, + "loss": 0.9681, + "step": 4251 + }, + { + "epoch": 0.5851913019543077, + "grad_norm": 1.6927088210177024, + "learning_rate": 7.746243004663094e-06, + "loss": 0.9921, + "step": 4252 + }, + { + "epoch": 0.5853289292595651, + "grad_norm": 1.8772656769757505, + "learning_rate": 7.74190048531595e-06, + "loss": 1.003, + "step": 4253 + }, + { + "epoch": 0.5854665565648225, + "grad_norm": 1.7615518290231331, + "learning_rate": 7.737558414622272e-06, + "loss": 0.8959, + "step": 4254 + }, + { + "epoch": 0.5856041838700798, + "grad_norm": 1.8312780138517997, + "learning_rate": 7.73321679344477e-06, + "loss": 1.0272, + "step": 4255 + }, + { + "epoch": 0.5857418111753372, + "grad_norm": 1.7553243670160097, + "learning_rate": 7.728875622646059e-06, + "loss": 1.0001, + "step": 4256 + }, + { + "epoch": 0.5858794384805945, + "grad_norm": 1.7364185368831442, + "learning_rate": 7.724534903088678e-06, + "loss": 0.935, + "step": 4257 + }, + { + "epoch": 0.5860170657858519, + "grad_norm": 1.6934138870216737, + "learning_rate": 7.720194635635058e-06, + "loss": 1.0027, + "step": 4258 + }, + { + "epoch": 0.5861546930911092, + "grad_norm": 1.7045791871198879, + "learning_rate": 7.715854821147562e-06, + "loss": 0.9646, + "step": 4259 + }, + { + "epoch": 0.5862923203963666, + "grad_norm": 1.87977994917619, + "learning_rate": 7.711515460488442e-06, + "loss": 0.9629, + "step": 4260 + }, + { + "epoch": 0.586429947701624, + "grad_norm": 1.7605783507012922, + "learning_rate": 7.707176554519876e-06, + "loss": 0.9016, + "step": 4261 + }, + { + "epoch": 0.5865675750068814, + "grad_norm": 1.7361617546998254, + "learning_rate": 7.702838104103941e-06, + "loss": 0.9905, + "step": 4262 + }, + { + "epoch": 0.5867052023121387, + "grad_norm": 1.752338629092878, + "learning_rate": 7.698500110102634e-06, + "loss": 0.9444, + "step": 4263 + }, + { + "epoch": 0.5868428296173961, + "grad_norm": 1.9392847696184061, + "learning_rate": 
7.694162573377848e-06, + "loss": 1.077, + "step": 4264 + }, + { + "epoch": 0.5869804569226534, + "grad_norm": 1.8650372606218177, + "learning_rate": 7.689825494791398e-06, + "loss": 1.0288, + "step": 4265 + }, + { + "epoch": 0.5871180842279108, + "grad_norm": 1.583125478349895, + "learning_rate": 7.685488875204995e-06, + "loss": 0.939, + "step": 4266 + }, + { + "epoch": 0.5872557115331681, + "grad_norm": 1.8703814047459895, + "learning_rate": 7.681152715480276e-06, + "loss": 0.929, + "step": 4267 + }, + { + "epoch": 0.5873933388384256, + "grad_norm": 1.6012132804403048, + "learning_rate": 7.676817016478767e-06, + "loss": 1.0117, + "step": 4268 + }, + { + "epoch": 0.5875309661436829, + "grad_norm": 1.785121090363919, + "learning_rate": 7.672481779061917e-06, + "loss": 1.0689, + "step": 4269 + }, + { + "epoch": 0.5876685934489403, + "grad_norm": 1.8195958691366931, + "learning_rate": 7.668147004091075e-06, + "loss": 0.9496, + "step": 4270 + }, + { + "epoch": 0.5878062207541976, + "grad_norm": 1.9262855377804398, + "learning_rate": 7.663812692427509e-06, + "loss": 1.0324, + "step": 4271 + }, + { + "epoch": 0.587943848059455, + "grad_norm": 1.8464947465232997, + "learning_rate": 7.659478844932382e-06, + "loss": 0.9605, + "step": 4272 + }, + { + "epoch": 0.5880814753647123, + "grad_norm": 1.9281895897313235, + "learning_rate": 7.655145462466764e-06, + "loss": 0.9295, + "step": 4273 + }, + { + "epoch": 0.5882191026699697, + "grad_norm": 1.6188570430219926, + "learning_rate": 7.650812545891651e-06, + "loss": 0.9431, + "step": 4274 + }, + { + "epoch": 0.588356729975227, + "grad_norm": 1.7908935362669791, + "learning_rate": 7.646480096067923e-06, + "loss": 0.907, + "step": 4275 + }, + { + "epoch": 0.5884943572804845, + "grad_norm": 1.76610690775602, + "learning_rate": 7.642148113856387e-06, + "loss": 0.9282, + "step": 4276 + }, + { + "epoch": 0.5886319845857418, + "grad_norm": 1.9919960725753476, + "learning_rate": 7.637816600117743e-06, + "loss": 0.9463, + "step": 4277 + }, + { + "epoch": 0.5887696118909992, + "grad_norm": 1.77592318698332, + "learning_rate": 7.633485555712609e-06, + "loss": 0.9517, + "step": 4278 + }, + { + "epoch": 0.5889072391962565, + "grad_norm": 1.6463673180196006, + "learning_rate": 7.6291549815014975e-06, + "loss": 0.9296, + "step": 4279 + }, + { + "epoch": 0.5890448665015139, + "grad_norm": 1.7845877395106373, + "learning_rate": 7.624824878344842e-06, + "loss": 1.0362, + "step": 4280 + }, + { + "epoch": 0.5891824938067712, + "grad_norm": 1.6804502036020807, + "learning_rate": 7.620495247102966e-06, + "loss": 0.9934, + "step": 4281 + }, + { + "epoch": 0.5893201211120286, + "grad_norm": 2.0105003746974073, + "learning_rate": 7.616166088636117e-06, + "loss": 0.9691, + "step": 4282 + }, + { + "epoch": 0.589457748417286, + "grad_norm": 1.94255482906479, + "learning_rate": 7.611837403804432e-06, + "loss": 1.0411, + "step": 4283 + }, + { + "epoch": 0.5895953757225434, + "grad_norm": 1.9526622203390758, + "learning_rate": 7.607509193467965e-06, + "loss": 0.9645, + "step": 4284 + }, + { + "epoch": 0.5897330030278007, + "grad_norm": 1.7510249169065462, + "learning_rate": 7.603181458486673e-06, + "loss": 0.9754, + "step": 4285 + }, + { + "epoch": 0.5898706303330581, + "grad_norm": 1.5534972720759255, + "learning_rate": 7.598854199720415e-06, + "loss": 0.9458, + "step": 4286 + }, + { + "epoch": 0.5900082576383154, + "grad_norm": 1.6916293805454217, + "learning_rate": 7.594527418028962e-06, + "loss": 0.9258, + "step": 4287 + }, + { + "epoch": 0.5901458849435728, + "grad_norm": 
1.7484705691551685, + "learning_rate": 7.590201114271979e-06, + "loss": 1.0162, + "step": 4288 + }, + { + "epoch": 0.5902835122488301, + "grad_norm": 1.936463477628001, + "learning_rate": 7.585875289309053e-06, + "loss": 1.0701, + "step": 4289 + }, + { + "epoch": 0.5904211395540875, + "grad_norm": 1.7480762669811787, + "learning_rate": 7.581549943999656e-06, + "loss": 0.9536, + "step": 4290 + }, + { + "epoch": 0.5905587668593449, + "grad_norm": 1.7414952981528666, + "learning_rate": 7.577225079203183e-06, + "loss": 0.9749, + "step": 4291 + }, + { + "epoch": 0.5906963941646023, + "grad_norm": 1.8002406990987645, + "learning_rate": 7.572900695778923e-06, + "loss": 0.9568, + "step": 4292 + }, + { + "epoch": 0.5908340214698596, + "grad_norm": 1.8158583573650602, + "learning_rate": 7.568576794586071e-06, + "loss": 0.9824, + "step": 4293 + }, + { + "epoch": 0.590971648775117, + "grad_norm": 1.9571750142903197, + "learning_rate": 7.564253376483725e-06, + "loss": 0.9835, + "step": 4294 + }, + { + "epoch": 0.5911092760803743, + "grad_norm": 2.124615875128621, + "learning_rate": 7.559930442330896e-06, + "loss": 1.0909, + "step": 4295 + }, + { + "epoch": 0.5912469033856317, + "grad_norm": 2.065636774527557, + "learning_rate": 7.555607992986484e-06, + "loss": 0.9987, + "step": 4296 + }, + { + "epoch": 0.591384530690889, + "grad_norm": 1.8207563325056715, + "learning_rate": 7.551286029309306e-06, + "loss": 1.0214, + "step": 4297 + }, + { + "epoch": 0.5915221579961464, + "grad_norm": 1.9015359550159594, + "learning_rate": 7.546964552158072e-06, + "loss": 0.9814, + "step": 4298 + }, + { + "epoch": 0.5916597853014038, + "grad_norm": 1.748315759143318, + "learning_rate": 7.542643562391406e-06, + "loss": 0.9164, + "step": 4299 + }, + { + "epoch": 0.5917974126066612, + "grad_norm": 1.6151843132686623, + "learning_rate": 7.538323060867822e-06, + "loss": 0.9285, + "step": 4300 + }, + { + "epoch": 0.5919350399119185, + "grad_norm": 1.9536274504785336, + "learning_rate": 7.534003048445753e-06, + "loss": 0.9954, + "step": 4301 + }, + { + "epoch": 0.5920726672171759, + "grad_norm": 1.6478925321154179, + "learning_rate": 7.52968352598352e-06, + "loss": 0.9485, + "step": 4302 + }, + { + "epoch": 0.5922102945224332, + "grad_norm": 1.7689648480227445, + "learning_rate": 7.525364494339359e-06, + "loss": 0.9763, + "step": 4303 + }, + { + "epoch": 0.5923479218276906, + "grad_norm": 1.6825615598365653, + "learning_rate": 7.521045954371399e-06, + "loss": 0.9946, + "step": 4304 + }, + { + "epoch": 0.5924855491329479, + "grad_norm": 1.8664072718276692, + "learning_rate": 7.516727906937671e-06, + "loss": 0.9539, + "step": 4305 + }, + { + "epoch": 0.5926231764382054, + "grad_norm": 1.7551972823446285, + "learning_rate": 7.5124103528961165e-06, + "loss": 0.863, + "step": 4306 + }, + { + "epoch": 0.5927608037434627, + "grad_norm": 1.76477509268471, + "learning_rate": 7.508093293104575e-06, + "loss": 0.9961, + "step": 4307 + }, + { + "epoch": 0.5928984310487201, + "grad_norm": 1.9779645762381661, + "learning_rate": 7.503776728420784e-06, + "loss": 1.0584, + "step": 4308 + }, + { + "epoch": 0.5930360583539774, + "grad_norm": 1.6307980395562403, + "learning_rate": 7.4994606597023865e-06, + "loss": 0.9022, + "step": 4309 + }, + { + "epoch": 0.5931736856592348, + "grad_norm": 1.8205406900020848, + "learning_rate": 7.495145087806932e-06, + "loss": 1.0011, + "step": 4310 + }, + { + "epoch": 0.5933113129644921, + "grad_norm": 1.818710751117435, + "learning_rate": 7.490830013591856e-06, + "loss": 0.9528, + "step": 4311 + }, + { + 
"epoch": 0.5934489402697495, + "grad_norm": 1.6928624422864835, + "learning_rate": 7.486515437914513e-06, + "loss": 0.9287, + "step": 4312 + }, + { + "epoch": 0.5935865675750068, + "grad_norm": 2.1283489058648577, + "learning_rate": 7.4822013616321435e-06, + "loss": 0.9616, + "step": 4313 + }, + { + "epoch": 0.5937241948802643, + "grad_norm": 1.6318718667112082, + "learning_rate": 7.4778877856019025e-06, + "loss": 0.949, + "step": 4314 + }, + { + "epoch": 0.5938618221855216, + "grad_norm": 2.145799510878873, + "learning_rate": 7.47357471068083e-06, + "loss": 0.9532, + "step": 4315 + }, + { + "epoch": 0.593999449490779, + "grad_norm": 1.738205940763011, + "learning_rate": 7.469262137725881e-06, + "loss": 0.9303, + "step": 4316 + }, + { + "epoch": 0.5941370767960363, + "grad_norm": 1.7285295609264977, + "learning_rate": 7.464950067593902e-06, + "loss": 0.9771, + "step": 4317 + }, + { + "epoch": 0.5942747041012937, + "grad_norm": 1.772813683712298, + "learning_rate": 7.4606385011416485e-06, + "loss": 1.0406, + "step": 4318 + }, + { + "epoch": 0.594412331406551, + "grad_norm": 1.8228535288113377, + "learning_rate": 7.456327439225763e-06, + "loss": 0.889, + "step": 4319 + }, + { + "epoch": 0.5945499587118084, + "grad_norm": 1.5869825715352297, + "learning_rate": 7.4520168827027926e-06, + "loss": 0.9672, + "step": 4320 + }, + { + "epoch": 0.5946875860170658, + "grad_norm": 1.8583496870657272, + "learning_rate": 7.447706832429194e-06, + "loss": 1.0053, + "step": 4321 + }, + { + "epoch": 0.5948252133223232, + "grad_norm": 1.7952782508807401, + "learning_rate": 7.4433972892613075e-06, + "loss": 0.9502, + "step": 4322 + }, + { + "epoch": 0.5949628406275805, + "grad_norm": 1.8544550101033679, + "learning_rate": 7.4390882540553845e-06, + "loss": 0.9822, + "step": 4323 + }, + { + "epoch": 0.5951004679328379, + "grad_norm": 1.8104325405066757, + "learning_rate": 7.434779727667568e-06, + "loss": 1.0139, + "step": 4324 + }, + { + "epoch": 0.5952380952380952, + "grad_norm": 1.6269655726096586, + "learning_rate": 7.43047171095391e-06, + "loss": 0.9645, + "step": 4325 + }, + { + "epoch": 0.5953757225433526, + "grad_norm": 1.7851718786574644, + "learning_rate": 7.426164204770346e-06, + "loss": 0.9876, + "step": 4326 + }, + { + "epoch": 0.5955133498486099, + "grad_norm": 1.8209927123482244, + "learning_rate": 7.421857209972727e-06, + "loss": 0.9199, + "step": 4327 + }, + { + "epoch": 0.5956509771538673, + "grad_norm": 1.838872212816762, + "learning_rate": 7.417550727416785e-06, + "loss": 1.0306, + "step": 4328 + }, + { + "epoch": 0.5957886044591247, + "grad_norm": 1.6264105095807433, + "learning_rate": 7.413244757958168e-06, + "loss": 0.9611, + "step": 4329 + }, + { + "epoch": 0.5959262317643821, + "grad_norm": 1.8726173544750835, + "learning_rate": 7.4089393024524045e-06, + "loss": 1.0441, + "step": 4330 + }, + { + "epoch": 0.5960638590696394, + "grad_norm": 1.6498568096359165, + "learning_rate": 7.404634361754936e-06, + "loss": 0.9539, + "step": 4331 + }, + { + "epoch": 0.5962014863748968, + "grad_norm": 1.7129491213979007, + "learning_rate": 7.400329936721092e-06, + "loss": 0.9375, + "step": 4332 + }, + { + "epoch": 0.5963391136801541, + "grad_norm": 1.8402535169501426, + "learning_rate": 7.396026028206105e-06, + "loss": 1.0239, + "step": 4333 + }, + { + "epoch": 0.5964767409854115, + "grad_norm": 1.8244820685360077, + "learning_rate": 7.391722637065099e-06, + "loss": 0.9607, + "step": 4334 + }, + { + "epoch": 0.5966143682906688, + "grad_norm": 1.8909826010998754, + "learning_rate": 7.387419764153108e-06, + 
"loss": 0.9027, + "step": 4335 + }, + { + "epoch": 0.5967519955959262, + "grad_norm": 1.832300508056641, + "learning_rate": 7.383117410325045e-06, + "loss": 0.9552, + "step": 4336 + }, + { + "epoch": 0.5968896229011836, + "grad_norm": 1.763287267146626, + "learning_rate": 7.3788155764357275e-06, + "loss": 0.9944, + "step": 4337 + }, + { + "epoch": 0.597027250206441, + "grad_norm": 1.7605664383262927, + "learning_rate": 7.374514263339878e-06, + "loss": 1.0226, + "step": 4338 + }, + { + "epoch": 0.5971648775116983, + "grad_norm": 1.7132576845740801, + "learning_rate": 7.370213471892102e-06, + "loss": 0.9699, + "step": 4339 + }, + { + "epoch": 0.5973025048169557, + "grad_norm": 1.8194253714874216, + "learning_rate": 7.365913202946917e-06, + "loss": 0.9055, + "step": 4340 + }, + { + "epoch": 0.597440132122213, + "grad_norm": 1.7900792349213197, + "learning_rate": 7.361613457358716e-06, + "loss": 0.9567, + "step": 4341 + }, + { + "epoch": 0.5975777594274704, + "grad_norm": 1.6912020893008777, + "learning_rate": 7.35731423598181e-06, + "loss": 0.9962, + "step": 4342 + }, + { + "epoch": 0.5977153867327277, + "grad_norm": 1.6675106707434213, + "learning_rate": 7.353015539670387e-06, + "loss": 0.9514, + "step": 4343 + }, + { + "epoch": 0.5978530140379852, + "grad_norm": 1.6020805739304795, + "learning_rate": 7.348717369278549e-06, + "loss": 0.9323, + "step": 4344 + }, + { + "epoch": 0.5979906413432425, + "grad_norm": 1.6162506496601305, + "learning_rate": 7.344419725660271e-06, + "loss": 0.9382, + "step": 4345 + }, + { + "epoch": 0.5981282686484999, + "grad_norm": 1.5864611427240152, + "learning_rate": 7.340122609669445e-06, + "loss": 0.9801, + "step": 4346 + }, + { + "epoch": 0.5982658959537572, + "grad_norm": 2.016888861173644, + "learning_rate": 7.335826022159848e-06, + "loss": 0.9876, + "step": 4347 + }, + { + "epoch": 0.5984035232590146, + "grad_norm": 1.8947269984006183, + "learning_rate": 7.331529963985151e-06, + "loss": 1.0123, + "step": 4348 + }, + { + "epoch": 0.5985411505642719, + "grad_norm": 1.8931015543537988, + "learning_rate": 7.32723443599892e-06, + "loss": 0.9218, + "step": 4349 + }, + { + "epoch": 0.5986787778695293, + "grad_norm": 1.7812862669412417, + "learning_rate": 7.322939439054626e-06, + "loss": 0.9584, + "step": 4350 + }, + { + "epoch": 0.5988164051747866, + "grad_norm": 1.6772236031703374, + "learning_rate": 7.31864497400562e-06, + "loss": 0.9587, + "step": 4351 + }, + { + "epoch": 0.5989540324800441, + "grad_norm": 1.852507241107247, + "learning_rate": 7.3143510417051485e-06, + "loss": 0.9767, + "step": 4352 + }, + { + "epoch": 0.5990916597853014, + "grad_norm": 1.7192984263418716, + "learning_rate": 7.310057643006365e-06, + "loss": 1.0229, + "step": 4353 + }, + { + "epoch": 0.5992292870905588, + "grad_norm": 1.7365212215787655, + "learning_rate": 7.305764778762308e-06, + "loss": 0.955, + "step": 4354 + }, + { + "epoch": 0.5993669143958161, + "grad_norm": 1.7604515272942076, + "learning_rate": 7.301472449825909e-06, + "loss": 0.8157, + "step": 4355 + }, + { + "epoch": 0.5995045417010735, + "grad_norm": 2.0147004853341417, + "learning_rate": 7.297180657049992e-06, + "loss": 1.0091, + "step": 4356 + }, + { + "epoch": 0.5996421690063308, + "grad_norm": 1.7015248211305911, + "learning_rate": 7.2928894012872865e-06, + "loss": 0.9926, + "step": 4357 + }, + { + "epoch": 0.5997797963115882, + "grad_norm": 1.5925282014668032, + "learning_rate": 7.288598683390395e-06, + "loss": 0.971, + "step": 4358 + }, + { + "epoch": 0.5999174236168456, + "grad_norm": 1.8033144291052599, + 
"learning_rate": 7.2843085042118355e-06, + "loss": 0.9811, + "step": 4359 + }, + { + "epoch": 0.600055050922103, + "grad_norm": 1.9802816832883887, + "learning_rate": 7.280018864603997e-06, + "loss": 0.9697, + "step": 4360 + }, + { + "epoch": 0.6001926782273603, + "grad_norm": 1.7960098141977567, + "learning_rate": 7.275729765419184e-06, + "loss": 1.0457, + "step": 4361 + }, + { + "epoch": 0.6003303055326177, + "grad_norm": 1.974676796621578, + "learning_rate": 7.2714412075095696e-06, + "loss": 1.0156, + "step": 4362 + }, + { + "epoch": 0.600467932837875, + "grad_norm": 1.7742499980059303, + "learning_rate": 7.267153191727239e-06, + "loss": 0.9013, + "step": 4363 + }, + { + "epoch": 0.6006055601431324, + "grad_norm": 1.8747885029689622, + "learning_rate": 7.2628657189241594e-06, + "loss": 0.9863, + "step": 4364 + }, + { + "epoch": 0.6007431874483897, + "grad_norm": 1.8903990814655263, + "learning_rate": 7.258578789952198e-06, + "loss": 0.9869, + "step": 4365 + }, + { + "epoch": 0.6008808147536471, + "grad_norm": 1.5162077621732435, + "learning_rate": 7.254292405663101e-06, + "loss": 0.9737, + "step": 4366 + }, + { + "epoch": 0.6010184420589045, + "grad_norm": 2.2763789014702063, + "learning_rate": 7.250006566908523e-06, + "loss": 0.9729, + "step": 4367 + }, + { + "epoch": 0.6011560693641619, + "grad_norm": 1.6096229140701266, + "learning_rate": 7.245721274539996e-06, + "loss": 0.9436, + "step": 4368 + }, + { + "epoch": 0.6012936966694192, + "grad_norm": 1.6776632166602625, + "learning_rate": 7.241436529408947e-06, + "loss": 1.041, + "step": 4369 + }, + { + "epoch": 0.6014313239746766, + "grad_norm": 1.825915858017262, + "learning_rate": 7.2371523323667e-06, + "loss": 1.0194, + "step": 4370 + }, + { + "epoch": 0.6015689512799339, + "grad_norm": 1.7594090565730396, + "learning_rate": 7.232868684264463e-06, + "loss": 0.9519, + "step": 4371 + }, + { + "epoch": 0.6017065785851913, + "grad_norm": 1.8547440392176489, + "learning_rate": 7.228585585953345e-06, + "loss": 1.0095, + "step": 4372 + }, + { + "epoch": 0.6018442058904486, + "grad_norm": 1.6182771667105258, + "learning_rate": 7.22430303828433e-06, + "loss": 0.9941, + "step": 4373 + }, + { + "epoch": 0.601981833195706, + "grad_norm": 2.185582252121093, + "learning_rate": 7.220021042108312e-06, + "loss": 0.936, + "step": 4374 + }, + { + "epoch": 0.6021194605009634, + "grad_norm": 1.7552957629711297, + "learning_rate": 7.215739598276054e-06, + "loss": 0.9299, + "step": 4375 + }, + { + "epoch": 0.6022570878062208, + "grad_norm": 1.663866373429022, + "learning_rate": 7.211458707638229e-06, + "loss": 0.918, + "step": 4376 + }, + { + "epoch": 0.6023947151114781, + "grad_norm": 1.8833735116909562, + "learning_rate": 7.207178371045385e-06, + "loss": 0.9441, + "step": 4377 + }, + { + "epoch": 0.6025323424167355, + "grad_norm": 1.9364103803296826, + "learning_rate": 7.202898589347969e-06, + "loss": 1.0047, + "step": 4378 + }, + { + "epoch": 0.6026699697219928, + "grad_norm": 2.0507533279218255, + "learning_rate": 7.198619363396315e-06, + "loss": 0.9313, + "step": 4379 + }, + { + "epoch": 0.6028075970272502, + "grad_norm": 1.825470156568926, + "learning_rate": 7.1943406940406494e-06, + "loss": 0.9446, + "step": 4380 + }, + { + "epoch": 0.6029452243325075, + "grad_norm": 1.685280082325007, + "learning_rate": 7.19006258213108e-06, + "loss": 0.9564, + "step": 4381 + }, + { + "epoch": 0.603082851637765, + "grad_norm": 1.735757128505911, + "learning_rate": 7.185785028517615e-06, + "loss": 1.0317, + "step": 4382 + }, + { + "epoch": 0.6032204789430223, + 
"grad_norm": 2.22157381683179, + "learning_rate": 7.181508034050142e-06, + "loss": 0.8981, + "step": 4383 + }, + { + "epoch": 0.6033581062482797, + "grad_norm": 2.104574167825755, + "learning_rate": 7.177231599578438e-06, + "loss": 0.9444, + "step": 4384 + }, + { + "epoch": 0.603495733553537, + "grad_norm": 1.6069556052540663, + "learning_rate": 7.172955725952178e-06, + "loss": 0.9892, + "step": 4385 + }, + { + "epoch": 0.6036333608587944, + "grad_norm": 1.7541124574746447, + "learning_rate": 7.168680414020916e-06, + "loss": 1.0048, + "step": 4386 + }, + { + "epoch": 0.6037709881640517, + "grad_norm": 1.7680190544540175, + "learning_rate": 7.164405664634102e-06, + "loss": 0.9538, + "step": 4387 + }, + { + "epoch": 0.6039086154693091, + "grad_norm": 1.9532630312841865, + "learning_rate": 7.160131478641064e-06, + "loss": 0.9807, + "step": 4388 + }, + { + "epoch": 0.6040462427745664, + "grad_norm": 1.880946859245638, + "learning_rate": 7.155857856891031e-06, + "loss": 0.8654, + "step": 4389 + }, + { + "epoch": 0.6041838700798239, + "grad_norm": 1.7073062917141437, + "learning_rate": 7.1515848002331045e-06, + "loss": 0.926, + "step": 4390 + }, + { + "epoch": 0.6043214973850812, + "grad_norm": 1.5883682846273928, + "learning_rate": 7.147312309516294e-06, + "loss": 0.9029, + "step": 4391 + }, + { + "epoch": 0.6044591246903386, + "grad_norm": 1.9180296965259906, + "learning_rate": 7.1430403855894734e-06, + "loss": 0.9242, + "step": 4392 + }, + { + "epoch": 0.6045967519955959, + "grad_norm": 1.8985200947725587, + "learning_rate": 7.138769029301423e-06, + "loss": 1.018, + "step": 4393 + }, + { + "epoch": 0.6047343793008533, + "grad_norm": 1.7806839157955971, + "learning_rate": 7.1344982415008005e-06, + "loss": 0.9889, + "step": 4394 + }, + { + "epoch": 0.6048720066061106, + "grad_norm": 2.001200197233345, + "learning_rate": 7.1302280230361535e-06, + "loss": 0.9604, + "step": 4395 + }, + { + "epoch": 0.605009633911368, + "grad_norm": 1.6312555611515147, + "learning_rate": 7.1259583747559126e-06, + "loss": 0.9958, + "step": 4396 + }, + { + "epoch": 0.6051472612166254, + "grad_norm": 1.8313068912147679, + "learning_rate": 7.121689297508405e-06, + "loss": 0.9894, + "step": 4397 + }, + { + "epoch": 0.6052848885218828, + "grad_norm": 1.8854203258406599, + "learning_rate": 7.117420792141831e-06, + "loss": 0.9894, + "step": 4398 + }, + { + "epoch": 0.6054225158271401, + "grad_norm": 1.7206228962587846, + "learning_rate": 7.1131528595042905e-06, + "loss": 0.9641, + "step": 4399 + }, + { + "epoch": 0.6055601431323975, + "grad_norm": 1.8719189460207613, + "learning_rate": 7.108885500443761e-06, + "loss": 0.911, + "step": 4400 + }, + { + "epoch": 0.6056977704376548, + "grad_norm": 1.7999444785641199, + "learning_rate": 7.104618715808105e-06, + "loss": 0.9951, + "step": 4401 + }, + { + "epoch": 0.6058353977429122, + "grad_norm": 1.8340658289209342, + "learning_rate": 7.100352506445076e-06, + "loss": 0.9934, + "step": 4402 + }, + { + "epoch": 0.6059730250481695, + "grad_norm": 1.9565492393285713, + "learning_rate": 7.096086873202311e-06, + "loss": 1.0034, + "step": 4403 + }, + { + "epoch": 0.6061106523534269, + "grad_norm": 1.7186967210968194, + "learning_rate": 7.091821816927339e-06, + "loss": 0.9682, + "step": 4404 + }, + { + "epoch": 0.6062482796586843, + "grad_norm": 1.806181552404213, + "learning_rate": 7.0875573384675585e-06, + "loss": 0.9595, + "step": 4405 + }, + { + "epoch": 0.6063859069639417, + "grad_norm": 1.7817547170038963, + "learning_rate": 7.083293438670272e-06, + "loss": 0.9962, + "step": 4406 
+ }, + { + "epoch": 0.606523534269199, + "grad_norm": 1.6326658728526442, + "learning_rate": 7.07903011838265e-06, + "loss": 0.9133, + "step": 4407 + }, + { + "epoch": 0.6066611615744564, + "grad_norm": 2.07312511727519, + "learning_rate": 7.074767378451761e-06, + "loss": 0.9907, + "step": 4408 + }, + { + "epoch": 0.6067987888797137, + "grad_norm": 1.7664910893696837, + "learning_rate": 7.070505219724553e-06, + "loss": 0.9772, + "step": 4409 + }, + { + "epoch": 0.6069364161849711, + "grad_norm": 1.6898218280238155, + "learning_rate": 7.066243643047857e-06, + "loss": 0.9921, + "step": 4410 + }, + { + "epoch": 0.6070740434902284, + "grad_norm": 1.6555706143603486, + "learning_rate": 7.061982649268389e-06, + "loss": 0.9304, + "step": 4411 + }, + { + "epoch": 0.6072116707954858, + "grad_norm": 2.1150285239476267, + "learning_rate": 7.057722239232754e-06, + "loss": 0.9662, + "step": 4412 + }, + { + "epoch": 0.6073492981007432, + "grad_norm": 1.5626970202872368, + "learning_rate": 7.053462413787431e-06, + "loss": 0.9969, + "step": 4413 + }, + { + "epoch": 0.6074869254060006, + "grad_norm": 1.6246223800556885, + "learning_rate": 7.049203173778798e-06, + "loss": 0.9481, + "step": 4414 + }, + { + "epoch": 0.6076245527112579, + "grad_norm": 2.092719836286789, + "learning_rate": 7.044944520053099e-06, + "loss": 1.0512, + "step": 4415 + }, + { + "epoch": 0.6077621800165153, + "grad_norm": 1.7756217499405682, + "learning_rate": 7.040686453456474e-06, + "loss": 1.0425, + "step": 4416 + }, + { + "epoch": 0.6078998073217726, + "grad_norm": 1.6013349947595639, + "learning_rate": 7.036428974834941e-06, + "loss": 0.9337, + "step": 4417 + }, + { + "epoch": 0.60803743462703, + "grad_norm": 1.8076674601389076, + "learning_rate": 7.0321720850344036e-06, + "loss": 0.9515, + "step": 4418 + }, + { + "epoch": 0.6081750619322873, + "grad_norm": 1.9579803938295546, + "learning_rate": 7.0279157849006516e-06, + "loss": 1.0061, + "step": 4419 + }, + { + "epoch": 0.6083126892375448, + "grad_norm": 1.6689484957128382, + "learning_rate": 7.023660075279345e-06, + "loss": 0.9718, + "step": 4420 + }, + { + "epoch": 0.6084503165428021, + "grad_norm": 1.8904325452021673, + "learning_rate": 7.019404957016045e-06, + "loss": 0.913, + "step": 4421 + }, + { + "epoch": 0.6085879438480595, + "grad_norm": 1.7361300613475077, + "learning_rate": 7.015150430956175e-06, + "loss": 0.9936, + "step": 4422 + }, + { + "epoch": 0.6087255711533168, + "grad_norm": 1.9477970003946525, + "learning_rate": 7.01089649794506e-06, + "loss": 0.9446, + "step": 4423 + }, + { + "epoch": 0.6088631984585742, + "grad_norm": 1.952911230965377, + "learning_rate": 7.006643158827891e-06, + "loss": 0.9884, + "step": 4424 + }, + { + "epoch": 0.6090008257638315, + "grad_norm": 1.746085588578271, + "learning_rate": 7.0023904144497536e-06, + "loss": 0.9611, + "step": 4425 + }, + { + "epoch": 0.6091384530690889, + "grad_norm": 1.975769632736285, + "learning_rate": 6.998138265655605e-06, + "loss": 1.0011, + "step": 4426 + }, + { + "epoch": 0.6092760803743462, + "grad_norm": 1.8857546055821717, + "learning_rate": 6.993886713290299e-06, + "loss": 0.9735, + "step": 4427 + }, + { + "epoch": 0.6094137076796037, + "grad_norm": 1.6482638552499609, + "learning_rate": 6.989635758198547e-06, + "loss": 0.9907, + "step": 4428 + }, + { + "epoch": 0.609551334984861, + "grad_norm": 1.6880083447059313, + "learning_rate": 6.985385401224968e-06, + "loss": 1.0295, + "step": 4429 + }, + { + "epoch": 0.6096889622901184, + "grad_norm": 1.8446399210200966, + "learning_rate": 
6.981135643214039e-06, + "loss": 0.9844, + "step": 4430 + }, + { + "epoch": 0.6098265895953757, + "grad_norm": 1.8342555441409474, + "learning_rate": 6.976886485010139e-06, + "loss": 0.942, + "step": 4431 + }, + { + "epoch": 0.6099642169006331, + "grad_norm": 1.7909740404788146, + "learning_rate": 6.9726379274575104e-06, + "loss": 0.9842, + "step": 4432 + }, + { + "epoch": 0.6101018442058904, + "grad_norm": 2.0052596270347687, + "learning_rate": 6.9683899714002826e-06, + "loss": 0.9397, + "step": 4433 + }, + { + "epoch": 0.6102394715111478, + "grad_norm": 1.7081525349106654, + "learning_rate": 6.964142617682475e-06, + "loss": 0.9751, + "step": 4434 + }, + { + "epoch": 0.6103770988164052, + "grad_norm": 1.8273247300410294, + "learning_rate": 6.959895867147967e-06, + "loss": 0.96, + "step": 4435 + }, + { + "epoch": 0.6105147261216626, + "grad_norm": 1.797382200856444, + "learning_rate": 6.955649720640541e-06, + "loss": 0.9753, + "step": 4436 + }, + { + "epoch": 0.6106523534269199, + "grad_norm": 1.6907715972499473, + "learning_rate": 6.951404179003838e-06, + "loss": 0.9645, + "step": 4437 + }, + { + "epoch": 0.6107899807321773, + "grad_norm": 1.7803995997392006, + "learning_rate": 6.947159243081398e-06, + "loss": 0.9678, + "step": 4438 + }, + { + "epoch": 0.6109276080374346, + "grad_norm": 2.2509109966297958, + "learning_rate": 6.942914913716624e-06, + "loss": 0.9934, + "step": 4439 + }, + { + "epoch": 0.611065235342692, + "grad_norm": 1.6895251721199356, + "learning_rate": 6.938671191752812e-06, + "loss": 1.0122, + "step": 4440 + }, + { + "epoch": 0.6112028626479493, + "grad_norm": 1.9484730731077469, + "learning_rate": 6.934428078033126e-06, + "loss": 0.8772, + "step": 4441 + }, + { + "epoch": 0.6113404899532067, + "grad_norm": 1.6524308979224689, + "learning_rate": 6.930185573400623e-06, + "loss": 0.9599, + "step": 4442 + }, + { + "epoch": 0.6114781172584641, + "grad_norm": 1.6925061691546168, + "learning_rate": 6.925943678698221e-06, + "loss": 0.9266, + "step": 4443 + }, + { + "epoch": 0.6116157445637215, + "grad_norm": 1.768200928568819, + "learning_rate": 6.921702394768734e-06, + "loss": 0.9887, + "step": 4444 + }, + { + "epoch": 0.6117533718689788, + "grad_norm": 1.6590213237060751, + "learning_rate": 6.9174617224548416e-06, + "loss": 0.9093, + "step": 4445 + }, + { + "epoch": 0.6118909991742362, + "grad_norm": 1.7090619134342846, + "learning_rate": 6.9132216625991125e-06, + "loss": 0.9562, + "step": 4446 + }, + { + "epoch": 0.6120286264794935, + "grad_norm": 1.97552275977963, + "learning_rate": 6.908982216043983e-06, + "loss": 0.9476, + "step": 4447 + }, + { + "epoch": 0.6121662537847509, + "grad_norm": 1.7198883601249262, + "learning_rate": 6.904743383631774e-06, + "loss": 1.0351, + "step": 4448 + }, + { + "epoch": 0.6123038810900082, + "grad_norm": 1.7869008364227306, + "learning_rate": 6.90050516620469e-06, + "loss": 0.9178, + "step": 4449 + }, + { + "epoch": 0.6124415083952656, + "grad_norm": 1.705614021275434, + "learning_rate": 6.8962675646047985e-06, + "loss": 0.9502, + "step": 4450 + }, + { + "epoch": 0.612579135700523, + "grad_norm": 1.5587707606082601, + "learning_rate": 6.89203057967406e-06, + "loss": 0.9387, + "step": 4451 + }, + { + "epoch": 0.6127167630057804, + "grad_norm": 1.8932076345798052, + "learning_rate": 6.887794212254297e-06, + "loss": 1.0069, + "step": 4452 + }, + { + "epoch": 0.6128543903110377, + "grad_norm": 1.580039796501389, + "learning_rate": 6.8835584631872276e-06, + "loss": 0.959, + "step": 4453 + }, + { + "epoch": 0.6129920176162951, + "grad_norm": 
1.8924956166714384, + "learning_rate": 6.879323333314427e-06, + "loss": 1.0559, + "step": 4454 + }, + { + "epoch": 0.6131296449215524, + "grad_norm": 1.9875788518435897, + "learning_rate": 6.875088823477365e-06, + "loss": 1.0566, + "step": 4455 + }, + { + "epoch": 0.6132672722268098, + "grad_norm": 1.5732188298905556, + "learning_rate": 6.8708549345173795e-06, + "loss": 0.8837, + "step": 4456 + }, + { + "epoch": 0.6134048995320671, + "grad_norm": 1.6918133696364572, + "learning_rate": 6.866621667275683e-06, + "loss": 0.9745, + "step": 4457 + }, + { + "epoch": 0.6135425268373246, + "grad_norm": 1.8224494788772838, + "learning_rate": 6.86238902259337e-06, + "loss": 0.9576, + "step": 4458 + }, + { + "epoch": 0.6136801541425819, + "grad_norm": 1.4540768667079262, + "learning_rate": 6.8581570013114125e-06, + "loss": 0.9997, + "step": 4459 + }, + { + "epoch": 0.6138177814478393, + "grad_norm": 2.1547969127174826, + "learning_rate": 6.853925604270648e-06, + "loss": 1.0207, + "step": 4460 + }, + { + "epoch": 0.6139554087530966, + "grad_norm": 1.744539613788188, + "learning_rate": 6.849694832311804e-06, + "loss": 0.9955, + "step": 4461 + }, + { + "epoch": 0.614093036058354, + "grad_norm": 1.7096847131376491, + "learning_rate": 6.84546468627547e-06, + "loss": 0.8209, + "step": 4462 + }, + { + "epoch": 0.6142306633636113, + "grad_norm": 1.8980210482945494, + "learning_rate": 6.841235167002127e-06, + "loss": 0.9711, + "step": 4463 + }, + { + "epoch": 0.6143682906688687, + "grad_norm": 2.124712588386739, + "learning_rate": 6.837006275332114e-06, + "loss": 1.0104, + "step": 4464 + }, + { + "epoch": 0.614505917974126, + "grad_norm": 1.7323357989751254, + "learning_rate": 6.832778012105657e-06, + "loss": 1.0109, + "step": 4465 + }, + { + "epoch": 0.6146435452793835, + "grad_norm": 1.5890815648818166, + "learning_rate": 6.828550378162857e-06, + "loss": 0.8971, + "step": 4466 + }, + { + "epoch": 0.6147811725846408, + "grad_norm": 2.1736918727993526, + "learning_rate": 6.8243233743436806e-06, + "loss": 0.9837, + "step": 4467 + }, + { + "epoch": 0.6149187998898982, + "grad_norm": 1.6121732670162712, + "learning_rate": 6.820097001487984e-06, + "loss": 0.9142, + "step": 4468 + }, + { + "epoch": 0.6150564271951555, + "grad_norm": 1.6670486922126246, + "learning_rate": 6.8158712604354806e-06, + "loss": 0.9048, + "step": 4469 + }, + { + "epoch": 0.6151940545004129, + "grad_norm": 1.6911687757933196, + "learning_rate": 6.811646152025777e-06, + "loss": 0.9842, + "step": 4470 + }, + { + "epoch": 0.6153316818056702, + "grad_norm": 1.8159766276600469, + "learning_rate": 6.807421677098334e-06, + "loss": 0.9047, + "step": 4471 + }, + { + "epoch": 0.6154693091109276, + "grad_norm": 1.8077067506063895, + "learning_rate": 6.8031978364925045e-06, + "loss": 1.0123, + "step": 4472 + }, + { + "epoch": 0.615606936416185, + "grad_norm": 1.893495562227542, + "learning_rate": 6.798974631047504e-06, + "loss": 0.9302, + "step": 4473 + }, + { + "epoch": 0.6157445637214424, + "grad_norm": 1.6024456499052941, + "learning_rate": 6.7947520616024295e-06, + "loss": 0.9906, + "step": 4474 + }, + { + "epoch": 0.6158821910266997, + "grad_norm": 1.7644480193381793, + "learning_rate": 6.7905301289962425e-06, + "loss": 0.8543, + "step": 4475 + }, + { + "epoch": 0.6160198183319571, + "grad_norm": 1.7900186539467675, + "learning_rate": 6.78630883406779e-06, + "loss": 0.9619, + "step": 4476 + }, + { + "epoch": 0.6161574456372144, + "grad_norm": 1.99877642023352, + "learning_rate": 6.782088177655777e-06, + "loss": 0.9704, + "step": 4477 + }, + { + 
"epoch": 0.6162950729424718, + "grad_norm": 1.7145831055222354, + "learning_rate": 6.777868160598799e-06, + "loss": 0.9768, + "step": 4478 + }, + { + "epoch": 0.6164327002477291, + "grad_norm": 1.7617800142825604, + "learning_rate": 6.77364878373531e-06, + "loss": 0.8817, + "step": 4479 + }, + { + "epoch": 0.6165703275529865, + "grad_norm": 1.8139685877842213, + "learning_rate": 6.76943004790364e-06, + "loss": 0.9247, + "step": 4480 + }, + { + "epoch": 0.6167079548582439, + "grad_norm": 1.7115076760300016, + "learning_rate": 6.765211953942004e-06, + "loss": 1.0141, + "step": 4481 + }, + { + "epoch": 0.6168455821635013, + "grad_norm": 1.7982185982642773, + "learning_rate": 6.7609945026884674e-06, + "loss": 0.9318, + "step": 4482 + }, + { + "epoch": 0.6169832094687586, + "grad_norm": 1.7299220290593724, + "learning_rate": 6.75677769498099e-06, + "loss": 0.8956, + "step": 4483 + }, + { + "epoch": 0.617120836774016, + "grad_norm": 1.850374189195348, + "learning_rate": 6.752561531657386e-06, + "loss": 1.0139, + "step": 4484 + }, + { + "epoch": 0.6172584640792733, + "grad_norm": 1.821946795404957, + "learning_rate": 6.748346013555358e-06, + "loss": 0.9814, + "step": 4485 + }, + { + "epoch": 0.6173960913845307, + "grad_norm": 2.08896702829991, + "learning_rate": 6.744131141512462e-06, + "loss": 1.0086, + "step": 4486 + }, + { + "epoch": 0.617533718689788, + "grad_norm": 1.7118137650396255, + "learning_rate": 6.739916916366142e-06, + "loss": 0.8922, + "step": 4487 + }, + { + "epoch": 0.6176713459950454, + "grad_norm": 1.7211392664959346, + "learning_rate": 6.735703338953704e-06, + "loss": 0.9187, + "step": 4488 + }, + { + "epoch": 0.6178089733003028, + "grad_norm": 1.9276258108013018, + "learning_rate": 6.731490410112334e-06, + "loss": 0.8237, + "step": 4489 + }, + { + "epoch": 0.6179466006055602, + "grad_norm": 1.8139520730128054, + "learning_rate": 6.727278130679072e-06, + "loss": 0.9943, + "step": 4490 + }, + { + "epoch": 0.6180842279108175, + "grad_norm": 1.8441529772662606, + "learning_rate": 6.723066501490853e-06, + "loss": 0.9508, + "step": 4491 + }, + { + "epoch": 0.6182218552160749, + "grad_norm": 1.6653433667927398, + "learning_rate": 6.718855523384458e-06, + "loss": 0.9674, + "step": 4492 + }, + { + "epoch": 0.6183594825213322, + "grad_norm": 1.7288863719573833, + "learning_rate": 6.714645197196561e-06, + "loss": 0.9297, + "step": 4493 + }, + { + "epoch": 0.6184971098265896, + "grad_norm": 1.6829657495423669, + "learning_rate": 6.710435523763689e-06, + "loss": 0.9254, + "step": 4494 + }, + { + "epoch": 0.6186347371318469, + "grad_norm": 1.7299596814673934, + "learning_rate": 6.70622650392225e-06, + "loss": 0.8965, + "step": 4495 + }, + { + "epoch": 0.6187723644371044, + "grad_norm": 1.7981768030661358, + "learning_rate": 6.7020181385085195e-06, + "loss": 0.9646, + "step": 4496 + }, + { + "epoch": 0.6189099917423617, + "grad_norm": 2.3041212474505937, + "learning_rate": 6.697810428358638e-06, + "loss": 0.9812, + "step": 4497 + }, + { + "epoch": 0.6190476190476191, + "grad_norm": 1.9122901584221164, + "learning_rate": 6.693603374308624e-06, + "loss": 0.9456, + "step": 4498 + }, + { + "epoch": 0.6191852463528764, + "grad_norm": 1.7736810331251673, + "learning_rate": 6.689396977194358e-06, + "loss": 0.953, + "step": 4499 + }, + { + "epoch": 0.6193228736581338, + "grad_norm": 1.7676134033034956, + "learning_rate": 6.685191237851598e-06, + "loss": 0.981, + "step": 4500 + }, + { + "epoch": 0.6194605009633911, + "grad_norm": 1.7979574732727979, + "learning_rate": 6.680986157115961e-06, + 
"loss": 0.9373, + "step": 4501 + }, + { + "epoch": 0.6195981282686485, + "grad_norm": 1.6432602785409371, + "learning_rate": 6.676781735822944e-06, + "loss": 0.9636, + "step": 4502 + }, + { + "epoch": 0.6197357555739058, + "grad_norm": 1.6935718056399358, + "learning_rate": 6.672577974807907e-06, + "loss": 0.9365, + "step": 4503 + }, + { + "epoch": 0.6198733828791633, + "grad_norm": 1.6598125217366497, + "learning_rate": 6.6683748749060784e-06, + "loss": 0.9962, + "step": 4504 + }, + { + "epoch": 0.6200110101844206, + "grad_norm": 1.6046752343593662, + "learning_rate": 6.664172436952554e-06, + "loss": 0.9597, + "step": 4505 + }, + { + "epoch": 0.620148637489678, + "grad_norm": 1.953949662707906, + "learning_rate": 6.659970661782309e-06, + "loss": 0.8744, + "step": 4506 + }, + { + "epoch": 0.6202862647949353, + "grad_norm": 1.6218952101345383, + "learning_rate": 6.65576955023017e-06, + "loss": 0.9892, + "step": 4507 + }, + { + "epoch": 0.6204238921001927, + "grad_norm": 1.9563776290406325, + "learning_rate": 6.651569103130847e-06, + "loss": 1.0087, + "step": 4508 + }, + { + "epoch": 0.62056151940545, + "grad_norm": 1.6391478543875306, + "learning_rate": 6.647369321318904e-06, + "loss": 0.9507, + "step": 4509 + }, + { + "epoch": 0.6206991467107074, + "grad_norm": 1.6207910585644774, + "learning_rate": 6.643170205628787e-06, + "loss": 0.8633, + "step": 4510 + }, + { + "epoch": 0.6208367740159648, + "grad_norm": 1.6319776164770134, + "learning_rate": 6.638971756894803e-06, + "loss": 0.9128, + "step": 4511 + }, + { + "epoch": 0.6209744013212222, + "grad_norm": 1.7280362428740252, + "learning_rate": 6.634773975951118e-06, + "loss": 0.9664, + "step": 4512 + }, + { + "epoch": 0.6211120286264795, + "grad_norm": 1.8667595451984118, + "learning_rate": 6.630576863631783e-06, + "loss": 0.9421, + "step": 4513 + }, + { + "epoch": 0.6212496559317369, + "grad_norm": 1.5948902789487673, + "learning_rate": 6.6263804207706995e-06, + "loss": 0.8514, + "step": 4514 + }, + { + "epoch": 0.6213872832369942, + "grad_norm": 1.5296976537817324, + "learning_rate": 6.622184648201651e-06, + "loss": 0.9862, + "step": 4515 + }, + { + "epoch": 0.6215249105422516, + "grad_norm": 1.7670016729894655, + "learning_rate": 6.61798954675827e-06, + "loss": 0.9553, + "step": 4516 + }, + { + "epoch": 0.6216625378475089, + "grad_norm": 1.7798781127399919, + "learning_rate": 6.6137951172740745e-06, + "loss": 0.9537, + "step": 4517 + }, + { + "epoch": 0.6218001651527663, + "grad_norm": 1.3624423703696258, + "learning_rate": 6.609601360582436e-06, + "loss": 0.9837, + "step": 4518 + }, + { + "epoch": 0.6219377924580237, + "grad_norm": 1.8268211969022745, + "learning_rate": 6.605408277516599e-06, + "loss": 0.9496, + "step": 4519 + }, + { + "epoch": 0.6220754197632811, + "grad_norm": 1.5860370335182972, + "learning_rate": 6.6012158689096685e-06, + "loss": 0.9463, + "step": 4520 + }, + { + "epoch": 0.6222130470685384, + "grad_norm": 1.685284875608895, + "learning_rate": 6.5970241355946244e-06, + "loss": 0.9448, + "step": 4521 + }, + { + "epoch": 0.6223506743737958, + "grad_norm": 1.733551905001973, + "learning_rate": 6.592833078404298e-06, + "loss": 0.9065, + "step": 4522 + }, + { + "epoch": 0.6224883016790531, + "grad_norm": 1.6223166778702045, + "learning_rate": 6.588642698171404e-06, + "loss": 0.937, + "step": 4523 + }, + { + "epoch": 0.6226259289843105, + "grad_norm": 2.073413283770982, + "learning_rate": 6.584452995728506e-06, + "loss": 1.01, + "step": 4524 + }, + { + "epoch": 0.6227635562895678, + "grad_norm": 1.9076989896953125, + 
"learning_rate": 6.5802639719080475e-06, + "loss": 0.9775, + "step": 4525 + }, + { + "epoch": 0.6229011835948252, + "grad_norm": 2.061698074796893, + "learning_rate": 6.576075627542324e-06, + "loss": 0.9633, + "step": 4526 + }, + { + "epoch": 0.6230388109000826, + "grad_norm": 2.150175309088167, + "learning_rate": 6.571887963463506e-06, + "loss": 0.9074, + "step": 4527 + }, + { + "epoch": 0.62317643820534, + "grad_norm": 1.9783326260263014, + "learning_rate": 6.567700980503626e-06, + "loss": 0.9102, + "step": 4528 + }, + { + "epoch": 0.6233140655105973, + "grad_norm": 1.9963996691508377, + "learning_rate": 6.5635146794945736e-06, + "loss": 0.9373, + "step": 4529 + }, + { + "epoch": 0.6234516928158547, + "grad_norm": 1.9874746409003496, + "learning_rate": 6.5593290612681184e-06, + "loss": 1.0362, + "step": 4530 + }, + { + "epoch": 0.623589320121112, + "grad_norm": 1.829447604064256, + "learning_rate": 6.5551441266558766e-06, + "loss": 0.8754, + "step": 4531 + }, + { + "epoch": 0.6237269474263694, + "grad_norm": 1.8293180758493182, + "learning_rate": 6.550959876489346e-06, + "loss": 0.9472, + "step": 4532 + }, + { + "epoch": 0.6238645747316267, + "grad_norm": 1.840591837441197, + "learning_rate": 6.546776311599874e-06, + "loss": 1.0114, + "step": 4533 + }, + { + "epoch": 0.6240022020368842, + "grad_norm": 2.0662437512294747, + "learning_rate": 6.542593432818678e-06, + "loss": 0.9999, + "step": 4534 + }, + { + "epoch": 0.6241398293421415, + "grad_norm": 1.9158264094918873, + "learning_rate": 6.538411240976841e-06, + "loss": 0.8872, + "step": 4535 + }, + { + "epoch": 0.6242774566473989, + "grad_norm": 1.6829820373568665, + "learning_rate": 6.534229736905308e-06, + "loss": 0.905, + "step": 4536 + }, + { + "epoch": 0.6244150839526562, + "grad_norm": 1.8190642229261782, + "learning_rate": 6.530048921434883e-06, + "loss": 0.9393, + "step": 4537 + }, + { + "epoch": 0.6245527112579136, + "grad_norm": 1.9568265821547344, + "learning_rate": 6.525868795396242e-06, + "loss": 0.9962, + "step": 4538 + }, + { + "epoch": 0.6246903385631709, + "grad_norm": 1.8598101357555294, + "learning_rate": 6.521689359619912e-06, + "loss": 0.8569, + "step": 4539 + }, + { + "epoch": 0.6248279658684283, + "grad_norm": 1.9915750387644138, + "learning_rate": 6.517510614936296e-06, + "loss": 0.9665, + "step": 4540 + }, + { + "epoch": 0.6249655931736856, + "grad_norm": 1.6363578414706583, + "learning_rate": 6.5133325621756485e-06, + "loss": 0.9135, + "step": 4541 + }, + { + "epoch": 0.6251032204789431, + "grad_norm": 1.7323935384462967, + "learning_rate": 6.509155202168095e-06, + "loss": 0.8315, + "step": 4542 + }, + { + "epoch": 0.6252408477842004, + "grad_norm": 2.2013494508324647, + "learning_rate": 6.5049785357436205e-06, + "loss": 0.8225, + "step": 4543 + }, + { + "epoch": 0.6253784750894578, + "grad_norm": 1.7597843871901941, + "learning_rate": 6.500802563732065e-06, + "loss": 0.8615, + "step": 4544 + }, + { + "epoch": 0.6255161023947151, + "grad_norm": 1.7705422104814144, + "learning_rate": 6.496627286963145e-06, + "loss": 0.9228, + "step": 4545 + }, + { + "epoch": 0.6256537296999725, + "grad_norm": 1.5983986625591127, + "learning_rate": 6.4924527062664235e-06, + "loss": 0.948, + "step": 4546 + }, + { + "epoch": 0.6257913570052298, + "grad_norm": 1.8358494483854382, + "learning_rate": 6.4882788224713415e-06, + "loss": 0.9824, + "step": 4547 + }, + { + "epoch": 0.6259289843104872, + "grad_norm": 1.7224441243078317, + "learning_rate": 6.484105636407181e-06, + "loss": 0.9076, + "step": 4548 + }, + { + "epoch": 
0.6260666116157446, + "grad_norm": 1.8110841628987042, + "learning_rate": 6.479933148903106e-06, + "loss": 0.9458, + "step": 4549 + }, + { + "epoch": 0.626204238921002, + "grad_norm": 1.9328605269516776, + "learning_rate": 6.475761360788127e-06, + "loss": 0.9925, + "step": 4550 + }, + { + "epoch": 0.6263418662262593, + "grad_norm": 1.7210512511024718, + "learning_rate": 6.471590272891131e-06, + "loss": 1.013, + "step": 4551 + }, + { + "epoch": 0.6264794935315167, + "grad_norm": 1.5195711747140455, + "learning_rate": 6.467419886040842e-06, + "loss": 0.9257, + "step": 4552 + }, + { + "epoch": 0.626617120836774, + "grad_norm": 1.8144414991938833, + "learning_rate": 6.463250201065872e-06, + "loss": 0.9851, + "step": 4553 + }, + { + "epoch": 0.6267547481420314, + "grad_norm": 1.7400891548170292, + "learning_rate": 6.4590812187946685e-06, + "loss": 0.8825, + "step": 4554 + }, + { + "epoch": 0.6268923754472887, + "grad_norm": 1.685406882312589, + "learning_rate": 6.454912940055562e-06, + "loss": 0.9014, + "step": 4555 + }, + { + "epoch": 0.6270300027525461, + "grad_norm": 2.083533024765111, + "learning_rate": 6.450745365676723e-06, + "loss": 0.9408, + "step": 4556 + }, + { + "epoch": 0.6271676300578035, + "grad_norm": 1.85691320224333, + "learning_rate": 6.446578496486198e-06, + "loss": 1.0137, + "step": 4557 + }, + { + "epoch": 0.6273052573630609, + "grad_norm": 1.6404055103543915, + "learning_rate": 6.442412333311885e-06, + "loss": 0.9602, + "step": 4558 + }, + { + "epoch": 0.6274428846683182, + "grad_norm": 1.692965959448579, + "learning_rate": 6.4382468769815425e-06, + "loss": 0.9705, + "step": 4559 + }, + { + "epoch": 0.6275805119735756, + "grad_norm": 1.920463055552961, + "learning_rate": 6.434082128322793e-06, + "loss": 0.9434, + "step": 4560 + }, + { + "epoch": 0.6277181392788329, + "grad_norm": 1.609780908307586, + "learning_rate": 6.429918088163108e-06, + "loss": 0.822, + "step": 4561 + }, + { + "epoch": 0.6278557665840903, + "grad_norm": 1.729881477276873, + "learning_rate": 6.425754757329834e-06, + "loss": 1.0114, + "step": 4562 + }, + { + "epoch": 0.6279933938893476, + "grad_norm": 1.6764058134535513, + "learning_rate": 6.4215921366501595e-06, + "loss": 0.9317, + "step": 4563 + }, + { + "epoch": 0.628131021194605, + "grad_norm": 1.8227690862787205, + "learning_rate": 6.417430226951147e-06, + "loss": 0.8814, + "step": 4564 + }, + { + "epoch": 0.6282686484998624, + "grad_norm": 2.370103510048762, + "learning_rate": 6.413269029059708e-06, + "loss": 0.9715, + "step": 4565 + }, + { + "epoch": 0.6284062758051198, + "grad_norm": 1.9714898548898367, + "learning_rate": 6.409108543802616e-06, + "loss": 0.9869, + "step": 4566 + }, + { + "epoch": 0.6285439031103771, + "grad_norm": 1.6301782557324682, + "learning_rate": 6.404948772006501e-06, + "loss": 0.9634, + "step": 4567 + }, + { + "epoch": 0.6286815304156345, + "grad_norm": 1.7232377612483616, + "learning_rate": 6.400789714497857e-06, + "loss": 1.0178, + "step": 4568 + }, + { + "epoch": 0.6288191577208918, + "grad_norm": 1.6399466304210857, + "learning_rate": 6.396631372103025e-06, + "loss": 0.8581, + "step": 4569 + }, + { + "epoch": 0.6289567850261492, + "grad_norm": 1.7227874479247651, + "learning_rate": 6.39247374564822e-06, + "loss": 0.906, + "step": 4570 + }, + { + "epoch": 0.6290944123314065, + "grad_norm": 1.7129212534598153, + "learning_rate": 6.388316835959494e-06, + "loss": 0.941, + "step": 4571 + }, + { + "epoch": 0.629232039636664, + "grad_norm": 1.8264263292231637, + "learning_rate": 6.38416064386278e-06, + "loss": 0.992, + 
"step": 4572 + }, + { + "epoch": 0.6293696669419213, + "grad_norm": 1.7966949252964057, + "learning_rate": 6.380005170183845e-06, + "loss": 0.8975, + "step": 4573 + }, + { + "epoch": 0.6295072942471787, + "grad_norm": 1.6673329806483295, + "learning_rate": 6.375850415748334e-06, + "loss": 0.9535, + "step": 4574 + }, + { + "epoch": 0.629644921552436, + "grad_norm": 1.6944808000957843, + "learning_rate": 6.371696381381738e-06, + "loss": 0.9815, + "step": 4575 + }, + { + "epoch": 0.6297825488576934, + "grad_norm": 2.15224034144866, + "learning_rate": 6.367543067909398e-06, + "loss": 1.0617, + "step": 4576 + }, + { + "epoch": 0.6299201761629507, + "grad_norm": 1.6628423210212386, + "learning_rate": 6.363390476156535e-06, + "loss": 0.9383, + "step": 4577 + }, + { + "epoch": 0.630057803468208, + "grad_norm": 1.9157529346847277, + "learning_rate": 6.359238606948199e-06, + "loss": 0.9862, + "step": 4578 + }, + { + "epoch": 0.6301954307734654, + "grad_norm": 1.7379222830485246, + "learning_rate": 6.355087461109319e-06, + "loss": 0.9597, + "step": 4579 + }, + { + "epoch": 0.6303330580787229, + "grad_norm": 1.9179897370554069, + "learning_rate": 6.350937039464665e-06, + "loss": 0.8966, + "step": 4580 + }, + { + "epoch": 0.6304706853839802, + "grad_norm": 1.840516061012518, + "learning_rate": 6.346787342838872e-06, + "loss": 0.9306, + "step": 4581 + }, + { + "epoch": 0.6306083126892376, + "grad_norm": 1.7019370051973535, + "learning_rate": 6.342638372056424e-06, + "loss": 0.9521, + "step": 4582 + }, + { + "epoch": 0.6307459399944949, + "grad_norm": 1.9441645010860646, + "learning_rate": 6.338490127941672e-06, + "loss": 0.9325, + "step": 4583 + }, + { + "epoch": 0.6308835672997523, + "grad_norm": 1.9475061161166787, + "learning_rate": 6.334342611318809e-06, + "loss": 0.9587, + "step": 4584 + }, + { + "epoch": 0.6310211946050096, + "grad_norm": 1.7841902203061712, + "learning_rate": 6.330195823011893e-06, + "loss": 0.9737, + "step": 4585 + }, + { + "epoch": 0.631158821910267, + "grad_norm": 1.5401862918848828, + "learning_rate": 6.32604976384483e-06, + "loss": 0.852, + "step": 4586 + }, + { + "epoch": 0.6312964492155244, + "grad_norm": 1.7047538176401293, + "learning_rate": 6.321904434641393e-06, + "loss": 0.9467, + "step": 4587 + }, + { + "epoch": 0.6314340765207818, + "grad_norm": 1.8788755055549675, + "learning_rate": 6.317759836225192e-06, + "loss": 0.9741, + "step": 4588 + }, + { + "epoch": 0.6315717038260391, + "grad_norm": 1.8784692919010602, + "learning_rate": 6.313615969419709e-06, + "loss": 0.946, + "step": 4589 + }, + { + "epoch": 0.6317093311312965, + "grad_norm": 1.8456604305112205, + "learning_rate": 6.309472835048269e-06, + "loss": 0.9952, + "step": 4590 + }, + { + "epoch": 0.6318469584365538, + "grad_norm": 1.6877454775084553, + "learning_rate": 6.305330433934065e-06, + "loss": 0.9624, + "step": 4591 + }, + { + "epoch": 0.6319845857418112, + "grad_norm": 1.6633000884945794, + "learning_rate": 6.301188766900128e-06, + "loss": 0.9883, + "step": 4592 + }, + { + "epoch": 0.6321222130470685, + "grad_norm": 1.803549913995877, + "learning_rate": 6.297047834769348e-06, + "loss": 0.9171, + "step": 4593 + }, + { + "epoch": 0.6322598403523259, + "grad_norm": 1.8479320922201288, + "learning_rate": 6.2929076383644794e-06, + "loss": 0.9336, + "step": 4594 + }, + { + "epoch": 0.6323974676575833, + "grad_norm": 2.1215087666031884, + "learning_rate": 6.288768178508113e-06, + "loss": 0.8441, + "step": 4595 + }, + { + "epoch": 0.6325350949628407, + "grad_norm": 2.1747931518428607, + "learning_rate": 
6.284629456022712e-06, + "loss": 0.9496, + "step": 4596 + }, + { + "epoch": 0.632672722268098, + "grad_norm": 1.9032727138305383, + "learning_rate": 6.280491471730579e-06, + "loss": 0.9382, + "step": 4597 + }, + { + "epoch": 0.6328103495733554, + "grad_norm": 1.7584107220230383, + "learning_rate": 6.276354226453879e-06, + "loss": 0.8954, + "step": 4598 + }, + { + "epoch": 0.6329479768786127, + "grad_norm": 1.741230785725227, + "learning_rate": 6.27221772101462e-06, + "loss": 0.9235, + "step": 4599 + }, + { + "epoch": 0.63308560418387, + "grad_norm": 1.7397058796892202, + "learning_rate": 6.268081956234674e-06, + "loss": 0.9996, + "step": 4600 + }, + { + "epoch": 0.6332232314891274, + "grad_norm": 2.0605057841510077, + "learning_rate": 6.263946932935756e-06, + "loss": 0.9307, + "step": 4601 + }, + { + "epoch": 0.6333608587943848, + "grad_norm": 1.9098459535955392, + "learning_rate": 6.259812651939445e-06, + "loss": 0.9737, + "step": 4602 + }, + { + "epoch": 0.6334984860996422, + "grad_norm": 1.9034677852534794, + "learning_rate": 6.255679114067159e-06, + "loss": 0.9931, + "step": 4603 + }, + { + "epoch": 0.6336361134048996, + "grad_norm": 2.0639150194046647, + "learning_rate": 6.25154632014018e-06, + "loss": 1.0295, + "step": 4604 + }, + { + "epoch": 0.6337737407101569, + "grad_norm": 2.3136478769144464, + "learning_rate": 6.247414270979637e-06, + "loss": 0.9085, + "step": 4605 + }, + { + "epoch": 0.6339113680154143, + "grad_norm": 1.8152414942433093, + "learning_rate": 6.243282967406512e-06, + "loss": 0.9932, + "step": 4606 + }, + { + "epoch": 0.6340489953206716, + "grad_norm": 1.6842077952622179, + "learning_rate": 6.23915241024164e-06, + "loss": 0.9545, + "step": 4607 + }, + { + "epoch": 0.634186622625929, + "grad_norm": 1.8967381480379688, + "learning_rate": 6.235022600305698e-06, + "loss": 0.9918, + "step": 4608 + }, + { + "epoch": 0.6343242499311863, + "grad_norm": 1.664866459014443, + "learning_rate": 6.230893538419233e-06, + "loss": 0.9336, + "step": 4609 + }, + { + "epoch": 0.6344618772364438, + "grad_norm": 2.2302153639348594, + "learning_rate": 6.226765225402624e-06, + "loss": 0.9025, + "step": 4610 + }, + { + "epoch": 0.6345995045417011, + "grad_norm": 1.9569268871282208, + "learning_rate": 6.222637662076116e-06, + "loss": 0.9543, + "step": 4611 + }, + { + "epoch": 0.6347371318469585, + "grad_norm": 1.7715802281594577, + "learning_rate": 6.218510849259798e-06, + "loss": 0.9082, + "step": 4612 + }, + { + "epoch": 0.6348747591522158, + "grad_norm": 2.0242036357095765, + "learning_rate": 6.214384787773615e-06, + "loss": 0.9581, + "step": 4613 + }, + { + "epoch": 0.6350123864574732, + "grad_norm": 1.7431041495277726, + "learning_rate": 6.2102594784373505e-06, + "loss": 0.9796, + "step": 4614 + }, + { + "epoch": 0.6351500137627305, + "grad_norm": 1.6086591294961665, + "learning_rate": 6.206134922070657e-06, + "loss": 0.9637, + "step": 4615 + }, + { + "epoch": 0.6352876410679879, + "grad_norm": 1.6835302459481012, + "learning_rate": 6.202011119493017e-06, + "loss": 0.9507, + "step": 4616 + }, + { + "epoch": 0.6354252683732452, + "grad_norm": 1.7933742370024826, + "learning_rate": 6.197888071523782e-06, + "loss": 0.9404, + "step": 4617 + }, + { + "epoch": 0.6355628956785027, + "grad_norm": 1.6417429052534318, + "learning_rate": 6.193765778982139e-06, + "loss": 0.8917, + "step": 4618 + }, + { + "epoch": 0.63570052298376, + "grad_norm": 1.6273818894142318, + "learning_rate": 6.189644242687136e-06, + "loss": 0.9408, + "step": 4619 + }, + { + "epoch": 0.6358381502890174, + "grad_norm": 
1.8426928385151873, + "learning_rate": 6.185523463457663e-06, + "loss": 0.9577, + "step": 4620 + }, + { + "epoch": 0.6359757775942747, + "grad_norm": 1.657653882119092, + "learning_rate": 6.181403442112465e-06, + "loss": 0.9274, + "step": 4621 + }, + { + "epoch": 0.636113404899532, + "grad_norm": 1.810994741495603, + "learning_rate": 6.177284179470129e-06, + "loss": 0.92, + "step": 4622 + }, + { + "epoch": 0.6362510322047894, + "grad_norm": 1.800592057941373, + "learning_rate": 6.173165676349103e-06, + "loss": 0.8795, + "step": 4623 + }, + { + "epoch": 0.6363886595100468, + "grad_norm": 1.8843958809791, + "learning_rate": 6.1690479335676746e-06, + "loss": 0.9191, + "step": 4624 + }, + { + "epoch": 0.6365262868153042, + "grad_norm": 1.8889468027793805, + "learning_rate": 6.164930951943977e-06, + "loss": 0.9807, + "step": 4625 + }, + { + "epoch": 0.6366639141205616, + "grad_norm": 1.736093471525083, + "learning_rate": 6.160814732296006e-06, + "loss": 0.8687, + "step": 4626 + }, + { + "epoch": 0.6368015414258189, + "grad_norm": 1.9294585106080533, + "learning_rate": 6.156699275441593e-06, + "loss": 0.9567, + "step": 4627 + }, + { + "epoch": 0.6369391687310763, + "grad_norm": 1.7588852504478198, + "learning_rate": 6.152584582198426e-06, + "loss": 0.9945, + "step": 4628 + }, + { + "epoch": 0.6370767960363336, + "grad_norm": 1.8681902407435589, + "learning_rate": 6.148470653384035e-06, + "loss": 0.9822, + "step": 4629 + }, + { + "epoch": 0.637214423341591, + "grad_norm": 1.9439027840303185, + "learning_rate": 6.144357489815807e-06, + "loss": 0.9364, + "step": 4630 + }, + { + "epoch": 0.6373520506468483, + "grad_norm": 1.7824840622220914, + "learning_rate": 6.140245092310965e-06, + "loss": 1.0202, + "step": 4631 + }, + { + "epoch": 0.6374896779521056, + "grad_norm": 1.9785826832150997, + "learning_rate": 6.136133461686592e-06, + "loss": 1.0364, + "step": 4632 + }, + { + "epoch": 0.6376273052573631, + "grad_norm": 1.6291993178184687, + "learning_rate": 6.132022598759606e-06, + "loss": 0.9153, + "step": 4633 + }, + { + "epoch": 0.6377649325626205, + "grad_norm": 1.6061192849148591, + "learning_rate": 6.127912504346787e-06, + "loss": 0.9707, + "step": 4634 + }, + { + "epoch": 0.6379025598678778, + "grad_norm": 1.8211819100195648, + "learning_rate": 6.123803179264745e-06, + "loss": 0.9209, + "step": 4635 + }, + { + "epoch": 0.6380401871731352, + "grad_norm": 1.974506030104459, + "learning_rate": 6.119694624329955e-06, + "loss": 0.9405, + "step": 4636 + }, + { + "epoch": 0.6381778144783925, + "grad_norm": 1.6682661537780281, + "learning_rate": 6.1155868403587244e-06, + "loss": 0.8608, + "step": 4637 + }, + { + "epoch": 0.6383154417836499, + "grad_norm": 1.8300911745380444, + "learning_rate": 6.111479828167223e-06, + "loss": 0.9059, + "step": 4638 + }, + { + "epoch": 0.6384530690889072, + "grad_norm": 2.067664248250286, + "learning_rate": 6.107373588571446e-06, + "loss": 0.8844, + "step": 4639 + }, + { + "epoch": 0.6385906963941645, + "grad_norm": 1.6141235217150596, + "learning_rate": 6.103268122387257e-06, + "loss": 0.949, + "step": 4640 + }, + { + "epoch": 0.638728323699422, + "grad_norm": 1.619387738715072, + "learning_rate": 6.0991634304303525e-06, + "loss": 0.8603, + "step": 4641 + }, + { + "epoch": 0.6388659510046794, + "grad_norm": 1.7858257145957521, + "learning_rate": 6.095059513516273e-06, + "loss": 0.9699, + "step": 4642 + }, + { + "epoch": 0.6390035783099367, + "grad_norm": 2.0177160073397618, + "learning_rate": 6.090956372460417e-06, + "loss": 0.9108, + "step": 4643 + }, + { + "epoch": 
0.639141205615194, + "grad_norm": 2.3850421478173214, + "learning_rate": 6.086854008078018e-06, + "loss": 0.931, + "step": 4644 + }, + { + "epoch": 0.6392788329204514, + "grad_norm": 1.8381084393735032, + "learning_rate": 6.082752421184168e-06, + "loss": 0.8905, + "step": 4645 + }, + { + "epoch": 0.6394164602257087, + "grad_norm": 1.6825705245239608, + "learning_rate": 6.078651612593786e-06, + "loss": 0.9231, + "step": 4646 + }, + { + "epoch": 0.6395540875309661, + "grad_norm": 1.8278742209284728, + "learning_rate": 6.0745515831216564e-06, + "loss": 0.8227, + "step": 4647 + }, + { + "epoch": 0.6396917148362236, + "grad_norm": 1.7003716983282071, + "learning_rate": 6.070452333582388e-06, + "loss": 0.9219, + "step": 4648 + }, + { + "epoch": 0.6398293421414809, + "grad_norm": 1.7822249434901423, + "learning_rate": 6.0663538647904575e-06, + "loss": 0.9658, + "step": 4649 + }, + { + "epoch": 0.6399669694467383, + "grad_norm": 1.7221117461989217, + "learning_rate": 6.062256177560162e-06, + "loss": 0.9094, + "step": 4650 + }, + { + "epoch": 0.6401045967519956, + "grad_norm": 1.709032045874571, + "learning_rate": 6.058159272705665e-06, + "loss": 1.0093, + "step": 4651 + }, + { + "epoch": 0.640242224057253, + "grad_norm": 1.6033615323747255, + "learning_rate": 6.054063151040961e-06, + "loss": 0.9761, + "step": 4652 + }, + { + "epoch": 0.6403798513625103, + "grad_norm": 1.9429281380778696, + "learning_rate": 6.049967813379898e-06, + "loss": 1.0152, + "step": 4653 + }, + { + "epoch": 0.6405174786677676, + "grad_norm": 1.94251012865841, + "learning_rate": 6.045873260536157e-06, + "loss": 0.9515, + "step": 4654 + }, + { + "epoch": 0.640655105973025, + "grad_norm": 2.0562120657176126, + "learning_rate": 6.0417794933232764e-06, + "loss": 1.0149, + "step": 4655 + }, + { + "epoch": 0.6407927332782825, + "grad_norm": 1.8418920801983083, + "learning_rate": 6.03768651255463e-06, + "loss": 0.9306, + "step": 4656 + }, + { + "epoch": 0.6409303605835398, + "grad_norm": 1.9130854029445379, + "learning_rate": 6.0335943190434295e-06, + "loss": 0.8917, + "step": 4657 + }, + { + "epoch": 0.6410679878887972, + "grad_norm": 1.923950068728734, + "learning_rate": 6.029502913602747e-06, + "loss": 0.9722, + "step": 4658 + }, + { + "epoch": 0.6412056151940545, + "grad_norm": 1.6943391679054847, + "learning_rate": 6.025412297045483e-06, + "loss": 0.9813, + "step": 4659 + }, + { + "epoch": 0.6413432424993118, + "grad_norm": 1.92277797136287, + "learning_rate": 6.021322470184392e-06, + "loss": 0.9817, + "step": 4660 + }, + { + "epoch": 0.6414808698045692, + "grad_norm": 1.6847458692955486, + "learning_rate": 6.017233433832062e-06, + "loss": 0.9577, + "step": 4661 + }, + { + "epoch": 0.6416184971098265, + "grad_norm": 1.7684009714172524, + "learning_rate": 6.013145188800933e-06, + "loss": 0.9794, + "step": 4662 + }, + { + "epoch": 0.641756124415084, + "grad_norm": 1.6344184981483505, + "learning_rate": 6.009057735903279e-06, + "loss": 0.9203, + "step": 4663 + }, + { + "epoch": 0.6418937517203414, + "grad_norm": 1.6437118974050084, + "learning_rate": 6.004971075951225e-06, + "loss": 0.9245, + "step": 4664 + }, + { + "epoch": 0.6420313790255987, + "grad_norm": 1.4991400381238833, + "learning_rate": 6.000885209756729e-06, + "loss": 0.9327, + "step": 4665 + }, + { + "epoch": 0.642169006330856, + "grad_norm": 1.6376648247514098, + "learning_rate": 5.996800138131601e-06, + "loss": 0.8793, + "step": 4666 + }, + { + "epoch": 0.6423066336361134, + "grad_norm": 1.7087378081005948, + "learning_rate": 5.992715861887488e-06, + "loss": 
0.9331, + "step": 4667 + }, + { + "epoch": 0.6424442609413707, + "grad_norm": 1.8381711735532147, + "learning_rate": 5.98863238183588e-06, + "loss": 0.9613, + "step": 4668 + }, + { + "epoch": 0.6425818882466281, + "grad_norm": 1.5953842704243024, + "learning_rate": 5.984549698788107e-06, + "loss": 0.9075, + "step": 4669 + }, + { + "epoch": 0.6427195155518854, + "grad_norm": 1.5735097453533011, + "learning_rate": 5.980467813555346e-06, + "loss": 0.9128, + "step": 4670 + }, + { + "epoch": 0.6428571428571429, + "grad_norm": 1.6565054989385288, + "learning_rate": 5.9763867269486045e-06, + "loss": 0.9598, + "step": 4671 + }, + { + "epoch": 0.6429947701624003, + "grad_norm": 1.868156698608928, + "learning_rate": 5.972306439778747e-06, + "loss": 0.9345, + "step": 4672 + }, + { + "epoch": 0.6431323974676576, + "grad_norm": 1.8653699995880506, + "learning_rate": 5.9682269528564674e-06, + "loss": 1.0512, + "step": 4673 + }, + { + "epoch": 0.643270024772915, + "grad_norm": 1.6922773472644, + "learning_rate": 5.9641482669923e-06, + "loss": 0.9788, + "step": 4674 + }, + { + "epoch": 0.6434076520781723, + "grad_norm": 1.6564431915129343, + "learning_rate": 5.96007038299663e-06, + "loss": 0.93, + "step": 4675 + }, + { + "epoch": 0.6435452793834296, + "grad_norm": 1.8075376492203272, + "learning_rate": 5.9559933016796715e-06, + "loss": 0.8591, + "step": 4676 + }, + { + "epoch": 0.643682906688687, + "grad_norm": 1.715258834675489, + "learning_rate": 5.95191702385149e-06, + "loss": 0.9834, + "step": 4677 + }, + { + "epoch": 0.6438205339939443, + "grad_norm": 1.622766964087439, + "learning_rate": 5.947841550321982e-06, + "loss": 0.962, + "step": 4678 + }, + { + "epoch": 0.6439581612992018, + "grad_norm": 1.7459788265544067, + "learning_rate": 5.943766881900892e-06, + "loss": 0.9196, + "step": 4679 + }, + { + "epoch": 0.6440957886044592, + "grad_norm": 1.7014460448796573, + "learning_rate": 5.939693019397797e-06, + "loss": 0.9242, + "step": 4680 + }, + { + "epoch": 0.6442334159097165, + "grad_norm": 1.8972065704131083, + "learning_rate": 5.935619963622119e-06, + "loss": 1.013, + "step": 4681 + }, + { + "epoch": 0.6443710432149738, + "grad_norm": 2.023762240829979, + "learning_rate": 5.931547715383119e-06, + "loss": 0.9532, + "step": 4682 + }, + { + "epoch": 0.6445086705202312, + "grad_norm": 1.7810926212959912, + "learning_rate": 5.9274762754898985e-06, + "loss": 0.9196, + "step": 4683 + }, + { + "epoch": 0.6446462978254885, + "grad_norm": 1.719199410346551, + "learning_rate": 5.923405644751392e-06, + "loss": 0.9617, + "step": 4684 + }, + { + "epoch": 0.6447839251307459, + "grad_norm": 1.672050553692242, + "learning_rate": 5.919335823976386e-06, + "loss": 0.9804, + "step": 4685 + }, + { + "epoch": 0.6449215524360034, + "grad_norm": 3.1500105986941715, + "learning_rate": 5.9152668139734904e-06, + "loss": 0.9469, + "step": 4686 + }, + { + "epoch": 0.6450591797412607, + "grad_norm": 1.7401597977646142, + "learning_rate": 5.911198615551168e-06, + "loss": 0.9504, + "step": 4687 + }, + { + "epoch": 0.645196807046518, + "grad_norm": 1.508635890595875, + "learning_rate": 5.9071312295177085e-06, + "loss": 0.8874, + "step": 4688 + }, + { + "epoch": 0.6453344343517754, + "grad_norm": 1.6663587876081523, + "learning_rate": 5.90306465668125e-06, + "loss": 0.9077, + "step": 4689 + }, + { + "epoch": 0.6454720616570327, + "grad_norm": 1.8801021530484494, + "learning_rate": 5.8989988978497616e-06, + "loss": 0.92, + "step": 4690 + }, + { + "epoch": 0.6456096889622901, + "grad_norm": 1.6241074841735041, + "learning_rate": 
5.8949339538310544e-06, + "loss": 0.9418, + "step": 4691 + }, + { + "epoch": 0.6457473162675474, + "grad_norm": 1.7110146643569286, + "learning_rate": 5.890869825432784e-06, + "loss": 0.8564, + "step": 4692 + }, + { + "epoch": 0.6458849435728048, + "grad_norm": 1.6619081632275132, + "learning_rate": 5.8868065134624244e-06, + "loss": 0.9687, + "step": 4693 + }, + { + "epoch": 0.6460225708780623, + "grad_norm": 1.773332522569468, + "learning_rate": 5.882744018727313e-06, + "loss": 0.9919, + "step": 4694 + }, + { + "epoch": 0.6461601981833196, + "grad_norm": 1.8127500232206128, + "learning_rate": 5.8786823420346e-06, + "loss": 0.9672, + "step": 4695 + }, + { + "epoch": 0.646297825488577, + "grad_norm": 1.7038749585149044, + "learning_rate": 5.874621484191298e-06, + "loss": 0.9024, + "step": 4696 + }, + { + "epoch": 0.6464354527938343, + "grad_norm": 1.8796920781012176, + "learning_rate": 5.87056144600423e-06, + "loss": 0.9546, + "step": 4697 + }, + { + "epoch": 0.6465730800990916, + "grad_norm": 1.7182549181074176, + "learning_rate": 5.866502228280081e-06, + "loss": 0.9269, + "step": 4698 + }, + { + "epoch": 0.646710707404349, + "grad_norm": 1.9853619760095602, + "learning_rate": 5.862443831825355e-06, + "loss": 0.9305, + "step": 4699 + }, + { + "epoch": 0.6468483347096063, + "grad_norm": 1.827084484117126, + "learning_rate": 5.858386257446407e-06, + "loss": 0.9402, + "step": 4700 + }, + { + "epoch": 0.6469859620148638, + "grad_norm": 1.7445711613971626, + "learning_rate": 5.854329505949412e-06, + "loss": 0.9596, + "step": 4701 + }, + { + "epoch": 0.6471235893201212, + "grad_norm": 1.9044111836036044, + "learning_rate": 5.850273578140402e-06, + "loss": 0.8499, + "step": 4702 + }, + { + "epoch": 0.6472612166253785, + "grad_norm": 1.9652352776464437, + "learning_rate": 5.846218474825223e-06, + "loss": 0.9789, + "step": 4703 + }, + { + "epoch": 0.6473988439306358, + "grad_norm": 1.8453069534738111, + "learning_rate": 5.842164196809578e-06, + "loss": 0.9696, + "step": 4704 + }, + { + "epoch": 0.6475364712358932, + "grad_norm": 1.8822558013626276, + "learning_rate": 5.8381107448989926e-06, + "loss": 0.9415, + "step": 4705 + }, + { + "epoch": 0.6476740985411505, + "grad_norm": 2.084590747919869, + "learning_rate": 5.8340581198988265e-06, + "loss": 1.0655, + "step": 4706 + }, + { + "epoch": 0.6478117258464079, + "grad_norm": 1.9309659309017047, + "learning_rate": 5.8300063226142945e-06, + "loss": 0.9664, + "step": 4707 + }, + { + "epoch": 0.6479493531516652, + "grad_norm": 2.2183599264132163, + "learning_rate": 5.825955353850417e-06, + "loss": 1.013, + "step": 4708 + }, + { + "epoch": 0.6480869804569227, + "grad_norm": 1.710352144999003, + "learning_rate": 5.821905214412078e-06, + "loss": 0.9546, + "step": 4709 + }, + { + "epoch": 0.64822460776218, + "grad_norm": 1.8604922652304907, + "learning_rate": 5.81785590510398e-06, + "loss": 1.0412, + "step": 4710 + }, + { + "epoch": 0.6483622350674374, + "grad_norm": 1.9653711384531218, + "learning_rate": 5.813807426730669e-06, + "loss": 0.8976, + "step": 4711 + }, + { + "epoch": 0.6484998623726947, + "grad_norm": 1.5291957626161208, + "learning_rate": 5.809759780096518e-06, + "loss": 0.894, + "step": 4712 + }, + { + "epoch": 0.6486374896779521, + "grad_norm": 1.7653736633462207, + "learning_rate": 5.8057129660057395e-06, + "loss": 0.9613, + "step": 4713 + }, + { + "epoch": 0.6487751169832094, + "grad_norm": 1.7100631108672826, + "learning_rate": 5.801666985262382e-06, + "loss": 0.9161, + "step": 4714 + }, + { + "epoch": 0.6489127442884668, + "grad_norm": 
1.743464950414026, + "learning_rate": 5.797621838670327e-06, + "loss": 0.9422, + "step": 4715 + }, + { + "epoch": 0.6490503715937241, + "grad_norm": 1.6825908282723192, + "learning_rate": 5.793577527033287e-06, + "loss": 0.8859, + "step": 4716 + }, + { + "epoch": 0.6491879988989816, + "grad_norm": 1.5861789880968253, + "learning_rate": 5.789534051154812e-06, + "loss": 0.9208, + "step": 4717 + }, + { + "epoch": 0.649325626204239, + "grad_norm": 1.7545750361860968, + "learning_rate": 5.7854914118382875e-06, + "loss": 0.877, + "step": 4718 + }, + { + "epoch": 0.6494632535094963, + "grad_norm": 1.8820652439866667, + "learning_rate": 5.781449609886929e-06, + "loss": 0.8799, + "step": 4719 + }, + { + "epoch": 0.6496008808147536, + "grad_norm": 1.643839326782363, + "learning_rate": 5.777408646103787e-06, + "loss": 0.942, + "step": 4720 + }, + { + "epoch": 0.649738508120011, + "grad_norm": 1.8147122938479316, + "learning_rate": 5.773368521291747e-06, + "loss": 0.9166, + "step": 4721 + }, + { + "epoch": 0.6498761354252683, + "grad_norm": 1.8078875124530347, + "learning_rate": 5.7693292362535235e-06, + "loss": 0.9616, + "step": 4722 + }, + { + "epoch": 0.6500137627305257, + "grad_norm": 2.0379238169140153, + "learning_rate": 5.7652907917916716e-06, + "loss": 0.9568, + "step": 4723 + }, + { + "epoch": 0.6501513900357831, + "grad_norm": 1.7467298298810137, + "learning_rate": 5.7612531887085696e-06, + "loss": 0.9781, + "step": 4724 + }, + { + "epoch": 0.6502890173410405, + "grad_norm": 1.7668215924698745, + "learning_rate": 5.757216427806436e-06, + "loss": 0.9078, + "step": 4725 + }, + { + "epoch": 0.6504266446462978, + "grad_norm": 1.7423680113086488, + "learning_rate": 5.753180509887326e-06, + "loss": 0.9372, + "step": 4726 + }, + { + "epoch": 0.6505642719515552, + "grad_norm": 1.7393768333476862, + "learning_rate": 5.749145435753109e-06, + "loss": 0.9453, + "step": 4727 + }, + { + "epoch": 0.6507018992568125, + "grad_norm": 1.8641347058759012, + "learning_rate": 5.745111206205516e-06, + "loss": 0.8891, + "step": 4728 + }, + { + "epoch": 0.6508395265620699, + "grad_norm": 1.7571265197369372, + "learning_rate": 5.741077822046073e-06, + "loss": 0.9899, + "step": 4729 + }, + { + "epoch": 0.6509771538673272, + "grad_norm": 1.8297773399279351, + "learning_rate": 5.737045284076177e-06, + "loss": 0.8556, + "step": 4730 + }, + { + "epoch": 0.6511147811725846, + "grad_norm": 1.7359387469685557, + "learning_rate": 5.733013593097021e-06, + "loss": 0.9097, + "step": 4731 + }, + { + "epoch": 0.651252408477842, + "grad_norm": 1.9387900736890944, + "learning_rate": 5.7289827499096606e-06, + "loss": 0.8859, + "step": 4732 + }, + { + "epoch": 0.6513900357830994, + "grad_norm": 1.7203412998219079, + "learning_rate": 5.724952755314963e-06, + "loss": 0.9505, + "step": 4733 + }, + { + "epoch": 0.6515276630883567, + "grad_norm": 1.9342835919485395, + "learning_rate": 5.720923610113636e-06, + "loss": 0.9664, + "step": 4734 + }, + { + "epoch": 0.6516652903936141, + "grad_norm": 2.1792824439155836, + "learning_rate": 5.71689531510621e-06, + "loss": 0.9726, + "step": 4735 + }, + { + "epoch": 0.6518029176988714, + "grad_norm": 1.5905732080850266, + "learning_rate": 5.7128678710930576e-06, + "loss": 0.945, + "step": 4736 + }, + { + "epoch": 0.6519405450041288, + "grad_norm": 1.7088038238450163, + "learning_rate": 5.70884127887438e-06, + "loss": 0.9233, + "step": 4737 + }, + { + "epoch": 0.6520781723093861, + "grad_norm": 1.5713960702488403, + "learning_rate": 5.70481553925019e-06, + "loss": 0.9563, + "step": 4738 + }, + { + 
"epoch": 0.6522157996146436, + "grad_norm": 1.6719472569196379, + "learning_rate": 5.700790653020363e-06, + "loss": 0.9359, + "step": 4739 + }, + { + "epoch": 0.652353426919901, + "grad_norm": 1.9278555371991215, + "learning_rate": 5.696766620984584e-06, + "loss": 0.8846, + "step": 4740 + }, + { + "epoch": 0.6524910542251583, + "grad_norm": 1.7381745370144501, + "learning_rate": 5.692743443942372e-06, + "loss": 0.9174, + "step": 4741 + }, + { + "epoch": 0.6526286815304156, + "grad_norm": 1.9247989791258253, + "learning_rate": 5.6887211226930775e-06, + "loss": 0.9176, + "step": 4742 + }, + { + "epoch": 0.652766308835673, + "grad_norm": 1.7044798008380282, + "learning_rate": 5.684699658035881e-06, + "loss": 0.917, + "step": 4743 + }, + { + "epoch": 0.6529039361409303, + "grad_norm": 1.6269841549903865, + "learning_rate": 5.6806790507697926e-06, + "loss": 0.8471, + "step": 4744 + }, + { + "epoch": 0.6530415634461877, + "grad_norm": 2.348984834001434, + "learning_rate": 5.676659301693652e-06, + "loss": 1.0082, + "step": 4745 + }, + { + "epoch": 0.653179190751445, + "grad_norm": 1.9103381392471932, + "learning_rate": 5.672640411606127e-06, + "loss": 1.0059, + "step": 4746 + }, + { + "epoch": 0.6533168180567025, + "grad_norm": 2.021722357731002, + "learning_rate": 5.66862238130572e-06, + "loss": 0.8592, + "step": 4747 + }, + { + "epoch": 0.6534544453619598, + "grad_norm": 1.7710243885406995, + "learning_rate": 5.6646052115907524e-06, + "loss": 0.9133, + "step": 4748 + }, + { + "epoch": 0.6535920726672172, + "grad_norm": 1.5313561846396435, + "learning_rate": 5.660588903259385e-06, + "loss": 0.9385, + "step": 4749 + }, + { + "epoch": 0.6537296999724745, + "grad_norm": 1.6744837350781705, + "learning_rate": 5.6565734571096e-06, + "loss": 0.9253, + "step": 4750 + }, + { + "epoch": 0.6538673272777319, + "grad_norm": 1.7481336595352612, + "learning_rate": 5.65255887393922e-06, + "loss": 0.9638, + "step": 4751 + }, + { + "epoch": 0.6540049545829892, + "grad_norm": 1.6889358517692095, + "learning_rate": 5.648545154545879e-06, + "loss": 0.8283, + "step": 4752 + }, + { + "epoch": 0.6541425818882466, + "grad_norm": 1.6245962274552208, + "learning_rate": 5.644532299727051e-06, + "loss": 1.0306, + "step": 4753 + }, + { + "epoch": 0.6542802091935039, + "grad_norm": 1.8931121557903652, + "learning_rate": 5.640520310280036e-06, + "loss": 0.9319, + "step": 4754 + }, + { + "epoch": 0.6544178364987614, + "grad_norm": 1.9735605247289432, + "learning_rate": 5.636509187001958e-06, + "loss": 0.9889, + "step": 4755 + }, + { + "epoch": 0.6545554638040187, + "grad_norm": 1.9029307130516868, + "learning_rate": 5.632498930689776e-06, + "loss": 0.9192, + "step": 4756 + }, + { + "epoch": 0.6546930911092761, + "grad_norm": 1.6870658867971808, + "learning_rate": 5.628489542140269e-06, + "loss": 0.9097, + "step": 4757 + }, + { + "epoch": 0.6548307184145334, + "grad_norm": 1.9398313396404252, + "learning_rate": 5.624481022150057e-06, + "loss": 0.982, + "step": 4758 + }, + { + "epoch": 0.6549683457197908, + "grad_norm": 1.6461949603165773, + "learning_rate": 5.620473371515564e-06, + "loss": 0.9559, + "step": 4759 + }, + { + "epoch": 0.6551059730250481, + "grad_norm": 3.046982130521264, + "learning_rate": 5.6164665910330715e-06, + "loss": 0.9246, + "step": 4760 + }, + { + "epoch": 0.6552436003303055, + "grad_norm": 1.921365374427787, + "learning_rate": 5.612460681498655e-06, + "loss": 0.8761, + "step": 4761 + }, + { + "epoch": 0.655381227635563, + "grad_norm": 1.7877611803003675, + "learning_rate": 5.608455643708251e-06, + 
"loss": 0.9784, + "step": 4762 + }, + { + "epoch": 0.6555188549408203, + "grad_norm": 1.8622379279682406, + "learning_rate": 5.6044514784575865e-06, + "loss": 0.9699, + "step": 4763 + }, + { + "epoch": 0.6556564822460776, + "grad_norm": 1.798972256952548, + "learning_rate": 5.600448186542251e-06, + "loss": 0.9484, + "step": 4764 + }, + { + "epoch": 0.655794109551335, + "grad_norm": 1.7873423292286488, + "learning_rate": 5.5964457687576366e-06, + "loss": 0.9781, + "step": 4765 + }, + { + "epoch": 0.6559317368565923, + "grad_norm": 1.6487300758632697, + "learning_rate": 5.5924442258989694e-06, + "loss": 0.9011, + "step": 4766 + }, + { + "epoch": 0.6560693641618497, + "grad_norm": 2.016685073026264, + "learning_rate": 5.588443558761303e-06, + "loss": 0.9134, + "step": 4767 + }, + { + "epoch": 0.656206991467107, + "grad_norm": 1.6595368082724218, + "learning_rate": 5.584443768139514e-06, + "loss": 0.9201, + "step": 4768 + }, + { + "epoch": 0.6563446187723644, + "grad_norm": 1.9181399705764264, + "learning_rate": 5.580444854828309e-06, + "loss": 0.935, + "step": 4769 + }, + { + "epoch": 0.6564822460776218, + "grad_norm": 1.6575017485135672, + "learning_rate": 5.576446819622205e-06, + "loss": 0.8864, + "step": 4770 + }, + { + "epoch": 0.6566198733828792, + "grad_norm": 1.5596366349219368, + "learning_rate": 5.572449663315572e-06, + "loss": 0.9618, + "step": 4771 + }, + { + "epoch": 0.6567575006881365, + "grad_norm": 1.5915940632181482, + "learning_rate": 5.5684533867025836e-06, + "loss": 0.9346, + "step": 4772 + }, + { + "epoch": 0.6568951279933939, + "grad_norm": 1.6786747500734258, + "learning_rate": 5.564457990577246e-06, + "loss": 0.9246, + "step": 4773 + }, + { + "epoch": 0.6570327552986512, + "grad_norm": 2.0542785043894902, + "learning_rate": 5.560463475733391e-06, + "loss": 0.8635, + "step": 4774 + }, + { + "epoch": 0.6571703826039086, + "grad_norm": 1.8286086758025915, + "learning_rate": 5.556469842964671e-06, + "loss": 0.9226, + "step": 4775 + }, + { + "epoch": 0.6573080099091659, + "grad_norm": 1.9070787855225195, + "learning_rate": 5.552477093064569e-06, + "loss": 0.926, + "step": 4776 + }, + { + "epoch": 0.6574456372144234, + "grad_norm": 1.8197117485145144, + "learning_rate": 5.548485226826388e-06, + "loss": 0.9327, + "step": 4777 + }, + { + "epoch": 0.6575832645196807, + "grad_norm": 1.817641231659796, + "learning_rate": 5.544494245043257e-06, + "loss": 0.9512, + "step": 4778 + }, + { + "epoch": 0.6577208918249381, + "grad_norm": 1.6158656313706663, + "learning_rate": 5.5405041485081325e-06, + "loss": 0.9718, + "step": 4779 + }, + { + "epoch": 0.6578585191301954, + "grad_norm": 1.7958837150032625, + "learning_rate": 5.536514938013788e-06, + "loss": 1.0335, + "step": 4780 + }, + { + "epoch": 0.6579961464354528, + "grad_norm": 1.8111482609644687, + "learning_rate": 5.5325266143528275e-06, + "loss": 0.993, + "step": 4781 + }, + { + "epoch": 0.6581337737407101, + "grad_norm": 1.7682189690482637, + "learning_rate": 5.528539178317671e-06, + "loss": 1.0005, + "step": 4782 + }, + { + "epoch": 0.6582714010459675, + "grad_norm": 2.3681347559499333, + "learning_rate": 5.524552630700582e-06, + "loss": 0.9315, + "step": 4783 + }, + { + "epoch": 0.6584090283512248, + "grad_norm": 2.273217520412821, + "learning_rate": 5.520566972293617e-06, + "loss": 0.9656, + "step": 4784 + }, + { + "epoch": 0.6585466556564823, + "grad_norm": 1.6436123867671102, + "learning_rate": 5.516582203888679e-06, + "loss": 0.9339, + "step": 4785 + }, + { + "epoch": 0.6586842829617396, + "grad_norm": 1.7825602949204846, + 
"learning_rate": 5.512598326277485e-06, + "loss": 0.8829, + "step": 4786 + }, + { + "epoch": 0.658821910266997, + "grad_norm": 1.586233092149229, + "learning_rate": 5.50861534025158e-06, + "loss": 0.8386, + "step": 4787 + }, + { + "epoch": 0.6589595375722543, + "grad_norm": 1.5519737846349984, + "learning_rate": 5.504633246602325e-06, + "loss": 0.8585, + "step": 4788 + }, + { + "epoch": 0.6590971648775117, + "grad_norm": 2.8139886685347775, + "learning_rate": 5.500652046120904e-06, + "loss": 0.9331, + "step": 4789 + }, + { + "epoch": 0.659234792182769, + "grad_norm": 2.0849848563577753, + "learning_rate": 5.49667173959834e-06, + "loss": 0.9474, + "step": 4790 + }, + { + "epoch": 0.6593724194880264, + "grad_norm": 1.9472216192364793, + "learning_rate": 5.49269232782545e-06, + "loss": 0.9759, + "step": 4791 + }, + { + "epoch": 0.6595100467932837, + "grad_norm": 1.5223370743680098, + "learning_rate": 5.488713811592905e-06, + "loss": 0.95, + "step": 4792 + }, + { + "epoch": 0.6596476740985412, + "grad_norm": 1.7795115276815248, + "learning_rate": 5.484736191691164e-06, + "loss": 0.9644, + "step": 4793 + }, + { + "epoch": 0.6597853014037985, + "grad_norm": 1.8301772376442782, + "learning_rate": 5.480759468910539e-06, + "loss": 0.9963, + "step": 4794 + }, + { + "epoch": 0.6599229287090559, + "grad_norm": 2.0207218472046042, + "learning_rate": 5.476783644041146e-06, + "loss": 1.046, + "step": 4795 + }, + { + "epoch": 0.6600605560143132, + "grad_norm": 1.6322878989117735, + "learning_rate": 5.472808717872927e-06, + "loss": 0.9139, + "step": 4796 + }, + { + "epoch": 0.6601981833195706, + "grad_norm": 1.8705520577060168, + "learning_rate": 5.468834691195646e-06, + "loss": 0.9237, + "step": 4797 + }, + { + "epoch": 0.6603358106248279, + "grad_norm": 1.55841029130605, + "learning_rate": 5.464861564798885e-06, + "loss": 0.9807, + "step": 4798 + }, + { + "epoch": 0.6604734379300853, + "grad_norm": 2.231216787937271, + "learning_rate": 5.460889339472053e-06, + "loss": 0.8948, + "step": 4799 + }, + { + "epoch": 0.6606110652353427, + "grad_norm": 1.7642161742655231, + "learning_rate": 5.4569180160043744e-06, + "loss": 0.9431, + "step": 4800 + }, + { + "epoch": 0.6607486925406001, + "grad_norm": 1.8862589333636366, + "learning_rate": 5.452947595184899e-06, + "loss": 0.9465, + "step": 4801 + }, + { + "epoch": 0.6608863198458574, + "grad_norm": 1.684327523303295, + "learning_rate": 5.448978077802493e-06, + "loss": 0.922, + "step": 4802 + }, + { + "epoch": 0.6610239471511148, + "grad_norm": 1.586718451754625, + "learning_rate": 5.445009464645847e-06, + "loss": 0.9331, + "step": 4803 + }, + { + "epoch": 0.6611615744563721, + "grad_norm": 1.5648252421752016, + "learning_rate": 5.441041756503471e-06, + "loss": 0.985, + "step": 4804 + }, + { + "epoch": 0.6612992017616295, + "grad_norm": 1.9870894219262807, + "learning_rate": 5.43707495416369e-06, + "loss": 0.8787, + "step": 4805 + }, + { + "epoch": 0.6614368290668868, + "grad_norm": 1.8217231945777181, + "learning_rate": 5.433109058414658e-06, + "loss": 0.9183, + "step": 4806 + }, + { + "epoch": 0.6615744563721442, + "grad_norm": 1.733092520996274, + "learning_rate": 5.429144070044341e-06, + "loss": 0.9478, + "step": 4807 + }, + { + "epoch": 0.6617120836774016, + "grad_norm": 1.6807428712357377, + "learning_rate": 5.42517998984053e-06, + "loss": 0.9346, + "step": 4808 + }, + { + "epoch": 0.661849710982659, + "grad_norm": 1.9261952148208565, + "learning_rate": 5.4212168185908306e-06, + "loss": 0.9717, + "step": 4809 + }, + { + "epoch": 0.6619873382879163, + 
"grad_norm": 1.7070469195944993, + "learning_rate": 5.417254557082674e-06, + "loss": 0.9289, + "step": 4810 + }, + { + "epoch": 0.6621249655931737, + "grad_norm": 1.6527808900697794, + "learning_rate": 5.413293206103307e-06, + "loss": 0.858, + "step": 4811 + }, + { + "epoch": 0.662262592898431, + "grad_norm": 1.6352672618739776, + "learning_rate": 5.40933276643979e-06, + "loss": 0.9326, + "step": 4812 + }, + { + "epoch": 0.6624002202036884, + "grad_norm": 1.6388311857160311, + "learning_rate": 5.405373238879019e-06, + "loss": 0.8872, + "step": 4813 + }, + { + "epoch": 0.6625378475089457, + "grad_norm": 1.7798790121996448, + "learning_rate": 5.401414624207686e-06, + "loss": 0.9936, + "step": 4814 + }, + { + "epoch": 0.6626754748142032, + "grad_norm": 1.7964817196908192, + "learning_rate": 5.3974569232123276e-06, + "loss": 0.9169, + "step": 4815 + }, + { + "epoch": 0.6628131021194605, + "grad_norm": 1.7539945311181884, + "learning_rate": 5.39350013667927e-06, + "loss": 0.8869, + "step": 4816 + }, + { + "epoch": 0.6629507294247179, + "grad_norm": 1.837124684683883, + "learning_rate": 5.3895442653946815e-06, + "loss": 0.9352, + "step": 4817 + }, + { + "epoch": 0.6630883567299752, + "grad_norm": 1.884975813977823, + "learning_rate": 5.385589310144537e-06, + "loss": 0.93, + "step": 4818 + }, + { + "epoch": 0.6632259840352326, + "grad_norm": 2.0147527899623223, + "learning_rate": 5.381635271714628e-06, + "loss": 0.9091, + "step": 4819 + }, + { + "epoch": 0.6633636113404899, + "grad_norm": 1.9933332905998353, + "learning_rate": 5.37768215089058e-06, + "loss": 0.9507, + "step": 4820 + }, + { + "epoch": 0.6635012386457473, + "grad_norm": 1.6162683540936273, + "learning_rate": 5.373729948457808e-06, + "loss": 0.9423, + "step": 4821 + }, + { + "epoch": 0.6636388659510046, + "grad_norm": 1.8077022990854756, + "learning_rate": 5.369778665201576e-06, + "loss": 0.8766, + "step": 4822 + }, + { + "epoch": 0.6637764932562621, + "grad_norm": 1.8565527119308636, + "learning_rate": 5.365828301906935e-06, + "loss": 0.9864, + "step": 4823 + }, + { + "epoch": 0.6639141205615194, + "grad_norm": 1.9682076812298217, + "learning_rate": 5.361878859358782e-06, + "loss": 1.0084, + "step": 4824 + }, + { + "epoch": 0.6640517478667768, + "grad_norm": 1.8094138519308847, + "learning_rate": 5.3579303383418055e-06, + "loss": 0.9184, + "step": 4825 + }, + { + "epoch": 0.6641893751720341, + "grad_norm": 1.70571074518677, + "learning_rate": 5.35398273964053e-06, + "loss": 0.9156, + "step": 4826 + }, + { + "epoch": 0.6643270024772915, + "grad_norm": 1.8078830540605337, + "learning_rate": 5.350036064039287e-06, + "loss": 0.9213, + "step": 4827 + }, + { + "epoch": 0.6644646297825488, + "grad_norm": 1.7880119449978071, + "learning_rate": 5.346090312322227e-06, + "loss": 0.8809, + "step": 4828 + }, + { + "epoch": 0.6646022570878062, + "grad_norm": 1.7348864464791165, + "learning_rate": 5.342145485273318e-06, + "loss": 0.9622, + "step": 4829 + }, + { + "epoch": 0.6647398843930635, + "grad_norm": 1.8730222989355196, + "learning_rate": 5.338201583676341e-06, + "loss": 0.9168, + "step": 4830 + }, + { + "epoch": 0.664877511698321, + "grad_norm": 1.8779717580668454, + "learning_rate": 5.334258608314896e-06, + "loss": 0.9868, + "step": 4831 + }, + { + "epoch": 0.6650151390035783, + "grad_norm": 1.8596894935275075, + "learning_rate": 5.330316559972399e-06, + "loss": 0.9811, + "step": 4832 + }, + { + "epoch": 0.6651527663088357, + "grad_norm": 1.5671035282518169, + "learning_rate": 5.326375439432081e-06, + "loss": 0.8558, + "step": 4833 + }, + 
{ + "epoch": 0.665290393614093, + "grad_norm": 1.7469413819214, + "learning_rate": 5.322435247476987e-06, + "loss": 0.9223, + "step": 4834 + }, + { + "epoch": 0.6654280209193504, + "grad_norm": 1.7965477684775375, + "learning_rate": 5.318495984889981e-06, + "loss": 0.9751, + "step": 4835 + }, + { + "epoch": 0.6655656482246077, + "grad_norm": 1.7216704915826047, + "learning_rate": 5.31455765245374e-06, + "loss": 0.9299, + "step": 4836 + }, + { + "epoch": 0.6657032755298651, + "grad_norm": 1.872358180203065, + "learning_rate": 5.310620250950757e-06, + "loss": 0.9166, + "step": 4837 + }, + { + "epoch": 0.6658409028351225, + "grad_norm": 1.7665845013952979, + "learning_rate": 5.30668378116334e-06, + "loss": 0.9693, + "step": 4838 + }, + { + "epoch": 0.6659785301403799, + "grad_norm": 1.6258215132595784, + "learning_rate": 5.30274824387361e-06, + "loss": 0.9233, + "step": 4839 + }, + { + "epoch": 0.6661161574456372, + "grad_norm": 2.2642381979066646, + "learning_rate": 5.298813639863506e-06, + "loss": 0.9443, + "step": 4840 + }, + { + "epoch": 0.6662537847508946, + "grad_norm": 1.6263913166049058, + "learning_rate": 5.294879969914779e-06, + "loss": 0.9225, + "step": 4841 + }, + { + "epoch": 0.6663914120561519, + "grad_norm": 1.7302759276664266, + "learning_rate": 5.2909472348089966e-06, + "loss": 0.8868, + "step": 4842 + }, + { + "epoch": 0.6665290393614093, + "grad_norm": 1.681557386524565, + "learning_rate": 5.287015435327538e-06, + "loss": 0.9594, + "step": 4843 + }, + { + "epoch": 0.6666666666666666, + "grad_norm": 1.5932125643263908, + "learning_rate": 5.2830845722515965e-06, + "loss": 0.9608, + "step": 4844 + }, + { + "epoch": 0.666804293971924, + "grad_norm": 1.966749014603279, + "learning_rate": 5.27915464636219e-06, + "loss": 0.9553, + "step": 4845 + }, + { + "epoch": 0.6669419212771814, + "grad_norm": 1.856777001811676, + "learning_rate": 5.275225658440126e-06, + "loss": 0.9877, + "step": 4846 + }, + { + "epoch": 0.6670795485824388, + "grad_norm": 1.7926240840334098, + "learning_rate": 5.271297609266055e-06, + "loss": 0.9351, + "step": 4847 + }, + { + "epoch": 0.6672171758876961, + "grad_norm": 2.0506245590916583, + "learning_rate": 5.267370499620419e-06, + "loss": 0.9382, + "step": 4848 + }, + { + "epoch": 0.6673548031929535, + "grad_norm": 1.521391167830223, + "learning_rate": 5.263444330283481e-06, + "loss": 0.9135, + "step": 4849 + }, + { + "epoch": 0.6674924304982108, + "grad_norm": 1.7534530916343891, + "learning_rate": 5.2595191020353165e-06, + "loss": 0.9862, + "step": 4850 + }, + { + "epoch": 0.6676300578034682, + "grad_norm": 1.5138644780689012, + "learning_rate": 5.255594815655813e-06, + "loss": 0.8674, + "step": 4851 + }, + { + "epoch": 0.6677676851087255, + "grad_norm": 2.0913163258831897, + "learning_rate": 5.251671471924684e-06, + "loss": 0.9312, + "step": 4852 + }, + { + "epoch": 0.667905312413983, + "grad_norm": 1.6595909635099093, + "learning_rate": 5.247749071621429e-06, + "loss": 0.9631, + "step": 4853 + }, + { + "epoch": 0.6680429397192403, + "grad_norm": 1.8304869811787914, + "learning_rate": 5.2438276155253895e-06, + "loss": 0.9758, + "step": 4854 + }, + { + "epoch": 0.6681805670244977, + "grad_norm": 1.6917234365057052, + "learning_rate": 5.23990710441569e-06, + "loss": 0.975, + "step": 4855 + }, + { + "epoch": 0.668318194329755, + "grad_norm": 1.9442752869120192, + "learning_rate": 5.235987539071299e-06, + "loss": 0.9268, + "step": 4856 + }, + { + "epoch": 0.6684558216350124, + "grad_norm": 1.82269803077331, + "learning_rate": 5.232068920270963e-06, + "loss": 
1.0476, + "step": 4857 + }, + { + "epoch": 0.6685934489402697, + "grad_norm": 1.7286872373511077, + "learning_rate": 5.22815124879327e-06, + "loss": 0.991, + "step": 4858 + }, + { + "epoch": 0.6687310762455271, + "grad_norm": 1.9100540474384782, + "learning_rate": 5.224234525416605e-06, + "loss": 0.9912, + "step": 4859 + }, + { + "epoch": 0.6688687035507844, + "grad_norm": 1.7559940599718344, + "learning_rate": 5.2203187509191665e-06, + "loss": 0.9198, + "step": 4860 + }, + { + "epoch": 0.6690063308560419, + "grad_norm": 2.0016348354342153, + "learning_rate": 5.216403926078965e-06, + "loss": 1.0055, + "step": 4861 + }, + { + "epoch": 0.6691439581612992, + "grad_norm": 2.0452095138914785, + "learning_rate": 5.212490051673825e-06, + "loss": 0.968, + "step": 4862 + }, + { + "epoch": 0.6692815854665566, + "grad_norm": 1.567994824741222, + "learning_rate": 5.208577128481377e-06, + "loss": 0.8547, + "step": 4863 + }, + { + "epoch": 0.6694192127718139, + "grad_norm": 1.5134339630187161, + "learning_rate": 5.204665157279065e-06, + "loss": 0.933, + "step": 4864 + }, + { + "epoch": 0.6695568400770713, + "grad_norm": 1.666285144543641, + "learning_rate": 5.200754138844146e-06, + "loss": 0.9787, + "step": 4865 + }, + { + "epoch": 0.6696944673823286, + "grad_norm": 1.9828382305226402, + "learning_rate": 5.196844073953686e-06, + "loss": 0.9891, + "step": 4866 + }, + { + "epoch": 0.669832094687586, + "grad_norm": 2.0634522205797183, + "learning_rate": 5.1929349633845595e-06, + "loss": 0.9344, + "step": 4867 + }, + { + "epoch": 0.6699697219928433, + "grad_norm": 1.840569318680048, + "learning_rate": 5.189026807913454e-06, + "loss": 0.8883, + "step": 4868 + }, + { + "epoch": 0.6701073492981008, + "grad_norm": 1.891805822897526, + "learning_rate": 5.185119608316868e-06, + "loss": 0.9621, + "step": 4869 + }, + { + "epoch": 0.6702449766033581, + "grad_norm": 1.7694740575556107, + "learning_rate": 5.181213365371106e-06, + "loss": 0.9102, + "step": 4870 + }, + { + "epoch": 0.6703826039086155, + "grad_norm": 1.7168621508469908, + "learning_rate": 5.177308079852287e-06, + "loss": 0.8503, + "step": 4871 + }, + { + "epoch": 0.6705202312138728, + "grad_norm": 1.8110756654453317, + "learning_rate": 5.1734037525363365e-06, + "loss": 1.0106, + "step": 4872 + }, + { + "epoch": 0.6706578585191302, + "grad_norm": 1.7150932683300406, + "learning_rate": 5.169500384198991e-06, + "loss": 0.9444, + "step": 4873 + }, + { + "epoch": 0.6707954858243875, + "grad_norm": 1.6850579383751274, + "learning_rate": 5.165597975615795e-06, + "loss": 0.9829, + "step": 4874 + }, + { + "epoch": 0.6709331131296449, + "grad_norm": 1.741830437601638, + "learning_rate": 5.161696527562111e-06, + "loss": 0.8876, + "step": 4875 + }, + { + "epoch": 0.6710707404349023, + "grad_norm": 1.7626312821494987, + "learning_rate": 5.1577960408130915e-06, + "loss": 0.9304, + "step": 4876 + }, + { + "epoch": 0.6712083677401597, + "grad_norm": 1.8535563987135282, + "learning_rate": 5.153896516143724e-06, + "loss": 0.95, + "step": 4877 + }, + { + "epoch": 0.671345995045417, + "grad_norm": 1.6673524819972623, + "learning_rate": 5.149997954328774e-06, + "loss": 0.9201, + "step": 4878 + }, + { + "epoch": 0.6714836223506744, + "grad_norm": 1.7347341637172635, + "learning_rate": 5.146100356142849e-06, + "loss": 0.9301, + "step": 4879 + }, + { + "epoch": 0.6716212496559317, + "grad_norm": 1.814521382583184, + "learning_rate": 5.142203722360337e-06, + "loss": 0.963, + "step": 4880 + }, + { + "epoch": 0.6717588769611891, + "grad_norm": 1.744258831797634, + "learning_rate": 
5.138308053755446e-06, + "loss": 0.9504, + "step": 4881 + }, + { + "epoch": 0.6718965042664464, + "grad_norm": 1.8947185062577048, + "learning_rate": 5.134413351102203e-06, + "loss": 0.8865, + "step": 4882 + }, + { + "epoch": 0.6720341315717038, + "grad_norm": 1.642824926409088, + "learning_rate": 5.130519615174417e-06, + "loss": 0.9286, + "step": 4883 + }, + { + "epoch": 0.6721717588769612, + "grad_norm": 1.6325174431026037, + "learning_rate": 5.126626846745735e-06, + "loss": 0.9526, + "step": 4884 + }, + { + "epoch": 0.6723093861822186, + "grad_norm": 1.6860398501437657, + "learning_rate": 5.122735046589579e-06, + "loss": 0.9136, + "step": 4885 + }, + { + "epoch": 0.6724470134874759, + "grad_norm": 1.5908606501820293, + "learning_rate": 5.118844215479218e-06, + "loss": 0.9358, + "step": 4886 + }, + { + "epoch": 0.6725846407927333, + "grad_norm": 1.7091069150230902, + "learning_rate": 5.114954354187687e-06, + "loss": 0.9479, + "step": 4887 + }, + { + "epoch": 0.6727222680979906, + "grad_norm": 1.8393342977331808, + "learning_rate": 5.111065463487861e-06, + "loss": 1.0103, + "step": 4888 + }, + { + "epoch": 0.672859895403248, + "grad_norm": 1.6980355620182184, + "learning_rate": 5.107177544152404e-06, + "loss": 0.9685, + "step": 4889 + }, + { + "epoch": 0.6729975227085053, + "grad_norm": 1.5326246337283154, + "learning_rate": 5.103290596953795e-06, + "loss": 0.926, + "step": 4890 + }, + { + "epoch": 0.6731351500137628, + "grad_norm": 1.7614009824483032, + "learning_rate": 5.099404622664314e-06, + "loss": 0.9024, + "step": 4891 + }, + { + "epoch": 0.6732727773190201, + "grad_norm": 1.8199055096495367, + "learning_rate": 5.095519622056054e-06, + "loss": 0.8748, + "step": 4892 + }, + { + "epoch": 0.6734104046242775, + "grad_norm": 1.797990214991825, + "learning_rate": 5.09163559590091e-06, + "loss": 0.9356, + "step": 4893 + }, + { + "epoch": 0.6735480319295348, + "grad_norm": 1.922115466134818, + "learning_rate": 5.087752544970584e-06, + "loss": 0.9569, + "step": 4894 + }, + { + "epoch": 0.6736856592347922, + "grad_norm": 1.7594958535625165, + "learning_rate": 5.083870470036587e-06, + "loss": 0.9034, + "step": 4895 + }, + { + "epoch": 0.6738232865400495, + "grad_norm": 1.8920957941838494, + "learning_rate": 5.079989371870233e-06, + "loss": 0.9709, + "step": 4896 + }, + { + "epoch": 0.6739609138453069, + "grad_norm": 2.0314050550495257, + "learning_rate": 5.076109251242642e-06, + "loss": 0.9525, + "step": 4897 + }, + { + "epoch": 0.6740985411505642, + "grad_norm": 1.6096477438334647, + "learning_rate": 5.072230108924744e-06, + "loss": 0.9215, + "step": 4898 + }, + { + "epoch": 0.6742361684558217, + "grad_norm": 1.9153485492985267, + "learning_rate": 5.068351945687269e-06, + "loss": 0.9813, + "step": 4899 + }, + { + "epoch": 0.674373795761079, + "grad_norm": 1.6668128465768288, + "learning_rate": 5.064474762300755e-06, + "loss": 0.9972, + "step": 4900 + }, + { + "epoch": 0.6745114230663364, + "grad_norm": 3.741843183742772, + "learning_rate": 5.060598559535546e-06, + "loss": 0.918, + "step": 4901 + }, + { + "epoch": 0.6746490503715937, + "grad_norm": 1.9024416342728618, + "learning_rate": 5.056723338161788e-06, + "loss": 0.9131, + "step": 4902 + }, + { + "epoch": 0.6747866776768511, + "grad_norm": 1.7603050121508654, + "learning_rate": 5.052849098949439e-06, + "loss": 0.9179, + "step": 4903 + }, + { + "epoch": 0.6749243049821084, + "grad_norm": 1.6410275524886213, + "learning_rate": 5.048975842668255e-06, + "loss": 0.9476, + "step": 4904 + }, + { + "epoch": 0.6750619322873658, + "grad_norm": 
2.299042220984978, + "learning_rate": 5.0451035700877994e-06, + "loss": 0.9264, + "step": 4905 + }, + { + "epoch": 0.6751995595926231, + "grad_norm": 1.9758463227506775, + "learning_rate": 5.041232281977433e-06, + "loss": 0.9327, + "step": 4906 + }, + { + "epoch": 0.6753371868978806, + "grad_norm": 1.732042578657454, + "learning_rate": 5.037361979106343e-06, + "loss": 0.9925, + "step": 4907 + }, + { + "epoch": 0.6754748142031379, + "grad_norm": 1.6958795087144407, + "learning_rate": 5.03349266224349e-06, + "loss": 0.9383, + "step": 4908 + }, + { + "epoch": 0.6756124415083953, + "grad_norm": 1.88049279989569, + "learning_rate": 5.029624332157667e-06, + "loss": 1.0068, + "step": 4909 + }, + { + "epoch": 0.6757500688136526, + "grad_norm": 1.962142678853269, + "learning_rate": 5.0257569896174455e-06, + "loss": 0.9781, + "step": 4910 + }, + { + "epoch": 0.67588769611891, + "grad_norm": 1.7102186948348521, + "learning_rate": 5.021890635391228e-06, + "loss": 1.0075, + "step": 4911 + }, + { + "epoch": 0.6760253234241673, + "grad_norm": 1.89182648444078, + "learning_rate": 5.0180252702471935e-06, + "loss": 0.9129, + "step": 4912 + }, + { + "epoch": 0.6761629507294247, + "grad_norm": 1.895372922650316, + "learning_rate": 5.014160894953339e-06, + "loss": 0.9253, + "step": 4913 + }, + { + "epoch": 0.6763005780346821, + "grad_norm": 1.8217178997996266, + "learning_rate": 5.010297510277473e-06, + "loss": 1.0025, + "step": 4914 + }, + { + "epoch": 0.6764382053399395, + "grad_norm": 1.9861275749238183, + "learning_rate": 5.006435116987183e-06, + "loss": 0.8884, + "step": 4915 + }, + { + "epoch": 0.6765758326451968, + "grad_norm": 1.8083563593812109, + "learning_rate": 5.002573715849889e-06, + "loss": 0.9211, + "step": 4916 + }, + { + "epoch": 0.6767134599504542, + "grad_norm": 1.8625348875238106, + "learning_rate": 4.998713307632782e-06, + "loss": 0.92, + "step": 4917 + }, + { + "epoch": 0.6768510872557115, + "grad_norm": 2.0793553117396484, + "learning_rate": 4.994853893102888e-06, + "loss": 0.9728, + "step": 4918 + }, + { + "epoch": 0.6769887145609689, + "grad_norm": 2.145997614393466, + "learning_rate": 4.990995473027005e-06, + "loss": 1.0395, + "step": 4919 + }, + { + "epoch": 0.6771263418662262, + "grad_norm": 1.5999667976502856, + "learning_rate": 4.9871380481717605e-06, + "loss": 0.9383, + "step": 4920 + }, + { + "epoch": 0.6772639691714836, + "grad_norm": 1.798350427548774, + "learning_rate": 4.983281619303566e-06, + "loss": 0.9576, + "step": 4921 + }, + { + "epoch": 0.677401596476741, + "grad_norm": 1.719450648615924, + "learning_rate": 4.979426187188643e-06, + "loss": 0.9151, + "step": 4922 + }, + { + "epoch": 0.6775392237819984, + "grad_norm": 1.8590760704400113, + "learning_rate": 4.9755717525930114e-06, + "loss": 0.9367, + "step": 4923 + }, + { + "epoch": 0.6776768510872557, + "grad_norm": 1.7103590697132074, + "learning_rate": 4.971718316282497e-06, + "loss": 0.9423, + "step": 4924 + }, + { + "epoch": 0.6778144783925131, + "grad_norm": 1.8103921355254051, + "learning_rate": 4.967865879022723e-06, + "loss": 0.8955, + "step": 4925 + }, + { + "epoch": 0.6779521056977704, + "grad_norm": 1.7951062928202395, + "learning_rate": 4.964014441579116e-06, + "loss": 0.8939, + "step": 4926 + }, + { + "epoch": 0.6780897330030278, + "grad_norm": 1.6226869437066, + "learning_rate": 4.960164004716904e-06, + "loss": 0.9275, + "step": 4927 + }, + { + "epoch": 0.6782273603082851, + "grad_norm": 1.5371496661751622, + "learning_rate": 4.956314569201117e-06, + "loss": 1.0389, + "step": 4928 + }, + { + "epoch": 
0.6783649876135426, + "grad_norm": 1.6719937490941819, + "learning_rate": 4.9524661357965855e-06, + "loss": 1.0125, + "step": 4929 + }, + { + "epoch": 0.6785026149187999, + "grad_norm": 1.7697414861521257, + "learning_rate": 4.948618705267939e-06, + "loss": 0.9539, + "step": 4930 + }, + { + "epoch": 0.6786402422240573, + "grad_norm": 1.7599469289461205, + "learning_rate": 4.944772278379609e-06, + "loss": 0.9656, + "step": 4931 + }, + { + "epoch": 0.6787778695293146, + "grad_norm": 1.7408539388808886, + "learning_rate": 4.94092685589583e-06, + "loss": 0.9336, + "step": 4932 + }, + { + "epoch": 0.678915496834572, + "grad_norm": 1.7822578431839784, + "learning_rate": 4.937082438580632e-06, + "loss": 0.887, + "step": 4933 + }, + { + "epoch": 0.6790531241398293, + "grad_norm": 1.5934620256104077, + "learning_rate": 4.9332390271978515e-06, + "loss": 0.9021, + "step": 4934 + }, + { + "epoch": 0.6791907514450867, + "grad_norm": 1.6045986034569957, + "learning_rate": 4.929396622511119e-06, + "loss": 0.9195, + "step": 4935 + }, + { + "epoch": 0.679328378750344, + "grad_norm": 1.8531309235422835, + "learning_rate": 4.925555225283865e-06, + "loss": 0.972, + "step": 4936 + }, + { + "epoch": 0.6794660060556015, + "grad_norm": 1.915395223416936, + "learning_rate": 4.9217148362793335e-06, + "loss": 0.9471, + "step": 4937 + }, + { + "epoch": 0.6796036333608588, + "grad_norm": 1.922853491302613, + "learning_rate": 4.917875456260542e-06, + "loss": 0.9584, + "step": 4938 + }, + { + "epoch": 0.6797412606661162, + "grad_norm": 1.6358694617553937, + "learning_rate": 4.914037085990339e-06, + "loss": 0.8697, + "step": 4939 + }, + { + "epoch": 0.6798788879713735, + "grad_norm": 1.7488284082949492, + "learning_rate": 4.910199726231339e-06, + "loss": 0.9605, + "step": 4940 + }, + { + "epoch": 0.6800165152766309, + "grad_norm": 1.7174729768910724, + "learning_rate": 4.906363377745991e-06, + "loss": 0.9204, + "step": 4941 + }, + { + "epoch": 0.6801541425818882, + "grad_norm": 1.869540283414227, + "learning_rate": 4.902528041296506e-06, + "loss": 0.9735, + "step": 4942 + }, + { + "epoch": 0.6802917698871456, + "grad_norm": 1.7786879702782017, + "learning_rate": 4.898693717644928e-06, + "loss": 1.033, + "step": 4943 + }, + { + "epoch": 0.6804293971924029, + "grad_norm": 1.7560619812285323, + "learning_rate": 4.894860407553082e-06, + "loss": 0.9593, + "step": 4944 + }, + { + "epoch": 0.6805670244976604, + "grad_norm": 1.852669501691022, + "learning_rate": 4.891028111782583e-06, + "loss": 0.9441, + "step": 4945 + }, + { + "epoch": 0.6807046518029177, + "grad_norm": 1.7898855348291138, + "learning_rate": 4.887196831094872e-06, + "loss": 0.9291, + "step": 4946 + }, + { + "epoch": 0.6808422791081751, + "grad_norm": 1.7644383690418188, + "learning_rate": 4.883366566251158e-06, + "loss": 0.9396, + "step": 4947 + }, + { + "epoch": 0.6809799064134324, + "grad_norm": 1.7808462557909008, + "learning_rate": 4.879537318012474e-06, + "loss": 0.9524, + "step": 4948 + }, + { + "epoch": 0.6811175337186898, + "grad_norm": 1.7009945488856582, + "learning_rate": 4.875709087139626e-06, + "loss": 0.9261, + "step": 4949 + }, + { + "epoch": 0.6812551610239471, + "grad_norm": 1.7448583202706687, + "learning_rate": 4.871881874393243e-06, + "loss": 0.9503, + "step": 4950 + }, + { + "epoch": 0.6813927883292045, + "grad_norm": 1.6337230405515122, + "learning_rate": 4.868055680533734e-06, + "loss": 0.8795, + "step": 4951 + }, + { + "epoch": 0.6815304156344619, + "grad_norm": 1.8367386551234477, + "learning_rate": 4.864230506321314e-06, + "loss": 
0.8959, + "step": 4952 + }, + { + "epoch": 0.6816680429397193, + "grad_norm": 1.725004620380599, + "learning_rate": 4.860406352515991e-06, + "loss": 0.9269, + "step": 4953 + }, + { + "epoch": 0.6818056702449766, + "grad_norm": 1.8678371795404125, + "learning_rate": 4.856583219877572e-06, + "loss": 0.9849, + "step": 4954 + }, + { + "epoch": 0.681943297550234, + "grad_norm": 1.623119185277133, + "learning_rate": 4.852761109165662e-06, + "loss": 0.9306, + "step": 4955 + }, + { + "epoch": 0.6820809248554913, + "grad_norm": 1.8623989488170152, + "learning_rate": 4.848940021139661e-06, + "loss": 0.9511, + "step": 4956 + }, + { + "epoch": 0.6822185521607487, + "grad_norm": 1.611358420717568, + "learning_rate": 4.845119956558767e-06, + "loss": 0.8748, + "step": 4957 + }, + { + "epoch": 0.682356179466006, + "grad_norm": 1.595792769997048, + "learning_rate": 4.841300916181977e-06, + "loss": 0.8806, + "step": 4958 + }, + { + "epoch": 0.6824938067712634, + "grad_norm": 1.770378644564124, + "learning_rate": 4.837482900768078e-06, + "loss": 0.8901, + "step": 4959 + }, + { + "epoch": 0.6826314340765208, + "grad_norm": 2.05373699386087, + "learning_rate": 4.83366591107566e-06, + "loss": 0.9358, + "step": 4960 + }, + { + "epoch": 0.6827690613817782, + "grad_norm": 2.0458388177753055, + "learning_rate": 4.829849947863107e-06, + "loss": 0.9092, + "step": 4961 + }, + { + "epoch": 0.6829066886870355, + "grad_norm": 1.6861740382248513, + "learning_rate": 4.826035011888597e-06, + "loss": 0.9347, + "step": 4962 + }, + { + "epoch": 0.6830443159922929, + "grad_norm": 1.7151779799031506, + "learning_rate": 4.822221103910107e-06, + "loss": 0.8523, + "step": 4963 + }, + { + "epoch": 0.6831819432975502, + "grad_norm": 1.64012526341682, + "learning_rate": 4.818408224685406e-06, + "loss": 0.8926, + "step": 4964 + }, + { + "epoch": 0.6833195706028076, + "grad_norm": 1.7226300292390875, + "learning_rate": 4.814596374972064e-06, + "loss": 0.9122, + "step": 4965 + }, + { + "epoch": 0.6834571979080649, + "grad_norm": 1.9547004259546499, + "learning_rate": 4.810785555527441e-06, + "loss": 0.9161, + "step": 4966 + }, + { + "epoch": 0.6835948252133224, + "grad_norm": 1.9039369685779217, + "learning_rate": 4.806975767108696e-06, + "loss": 1.005, + "step": 4967 + }, + { + "epoch": 0.6837324525185797, + "grad_norm": 2.6622065196069027, + "learning_rate": 4.803167010472777e-06, + "loss": 0.924, + "step": 4968 + }, + { + "epoch": 0.6838700798238371, + "grad_norm": 1.566835386579845, + "learning_rate": 4.7993592863764435e-06, + "loss": 0.9378, + "step": 4969 + }, + { + "epoch": 0.6840077071290944, + "grad_norm": 1.8946839165839233, + "learning_rate": 4.795552595576224e-06, + "loss": 0.9778, + "step": 4970 + }, + { + "epoch": 0.6841453344343518, + "grad_norm": 1.9449562192240912, + "learning_rate": 4.79174693882847e-06, + "loss": 0.9486, + "step": 4971 + }, + { + "epoch": 0.6842829617396091, + "grad_norm": 2.249459941137504, + "learning_rate": 4.787942316889299e-06, + "loss": 0.9884, + "step": 4972 + }, + { + "epoch": 0.6844205890448665, + "grad_norm": 1.8506237219140045, + "learning_rate": 4.7841387305146515e-06, + "loss": 0.9586, + "step": 4973 + }, + { + "epoch": 0.6845582163501238, + "grad_norm": 1.8264772842282788, + "learning_rate": 4.780336180460232e-06, + "loss": 0.8529, + "step": 4974 + }, + { + "epoch": 0.6846958436553813, + "grad_norm": 1.741485205526366, + "learning_rate": 4.776534667481569e-06, + "loss": 0.8661, + "step": 4975 + }, + { + "epoch": 0.6848334709606386, + "grad_norm": 2.141915861588948, + "learning_rate": 
4.772734192333971e-06, + "loss": 0.9758, + "step": 4976 + }, + { + "epoch": 0.684971098265896, + "grad_norm": 1.899312132190996, + "learning_rate": 4.768934755772526e-06, + "loss": 0.8742, + "step": 4977 + }, + { + "epoch": 0.6851087255711533, + "grad_norm": 1.683148728759335, + "learning_rate": 4.765136358552148e-06, + "loss": 0.8962, + "step": 4978 + }, + { + "epoch": 0.6852463528764107, + "grad_norm": 2.29957107817339, + "learning_rate": 4.761339001427511e-06, + "loss": 0.9208, + "step": 4979 + }, + { + "epoch": 0.685383980181668, + "grad_norm": 1.6102643760526887, + "learning_rate": 4.757542685153112e-06, + "loss": 0.9178, + "step": 4980 + }, + { + "epoch": 0.6855216074869254, + "grad_norm": 1.8786862723461684, + "learning_rate": 4.7537474104832125e-06, + "loss": 0.8999, + "step": 4981 + }, + { + "epoch": 0.6856592347921827, + "grad_norm": 1.7206997719140997, + "learning_rate": 4.749953178171892e-06, + "loss": 0.8343, + "step": 4982 + }, + { + "epoch": 0.6857968620974402, + "grad_norm": 1.8596850690952889, + "learning_rate": 4.746159988973012e-06, + "loss": 0.9411, + "step": 4983 + }, + { + "epoch": 0.6859344894026975, + "grad_norm": 2.1515285955309347, + "learning_rate": 4.742367843640223e-06, + "loss": 0.9138, + "step": 4984 + }, + { + "epoch": 0.6860721167079549, + "grad_norm": 2.028593179935735, + "learning_rate": 4.738576742926975e-06, + "loss": 1.0088, + "step": 4985 + }, + { + "epoch": 0.6862097440132122, + "grad_norm": 1.76748842799167, + "learning_rate": 4.7347866875865064e-06, + "loss": 0.9316, + "step": 4986 + }, + { + "epoch": 0.6863473713184696, + "grad_norm": 2.0105651170064993, + "learning_rate": 4.73099767837185e-06, + "loss": 0.9381, + "step": 4987 + }, + { + "epoch": 0.6864849986237269, + "grad_norm": 1.6086823834985942, + "learning_rate": 4.727209716035832e-06, + "loss": 0.8929, + "step": 4988 + }, + { + "epoch": 0.6866226259289843, + "grad_norm": 1.9198759077251246, + "learning_rate": 4.723422801331066e-06, + "loss": 0.9064, + "step": 4989 + }, + { + "epoch": 0.6867602532342417, + "grad_norm": 1.6752439003143222, + "learning_rate": 4.719636935009961e-06, + "loss": 0.8506, + "step": 4990 + }, + { + "epoch": 0.6868978805394991, + "grad_norm": 1.689782379137984, + "learning_rate": 4.715852117824715e-06, + "loss": 0.9439, + "step": 4991 + }, + { + "epoch": 0.6870355078447564, + "grad_norm": 3.4026186438204697, + "learning_rate": 4.712068350527322e-06, + "loss": 0.945, + "step": 4992 + }, + { + "epoch": 0.6871731351500138, + "grad_norm": 2.0516754735560943, + "learning_rate": 4.708285633869565e-06, + "loss": 0.967, + "step": 4993 + }, + { + "epoch": 0.6873107624552711, + "grad_norm": 1.7024857442643633, + "learning_rate": 4.704503968603016e-06, + "loss": 0.8804, + "step": 4994 + }, + { + "epoch": 0.6874483897605285, + "grad_norm": 1.8275348483722258, + "learning_rate": 4.70072335547904e-06, + "loss": 0.9127, + "step": 4995 + }, + { + "epoch": 0.6875860170657858, + "grad_norm": 1.8199309236566261, + "learning_rate": 4.696943795248795e-06, + "loss": 0.9535, + "step": 4996 + }, + { + "epoch": 0.6877236443710432, + "grad_norm": 1.7490506924266702, + "learning_rate": 4.693165288663227e-06, + "loss": 0.9471, + "step": 4997 + }, + { + "epoch": 0.6878612716763006, + "grad_norm": 1.5475710490741603, + "learning_rate": 4.68938783647307e-06, + "loss": 0.9254, + "step": 4998 + }, + { + "epoch": 0.687998898981558, + "grad_norm": 1.927495751280919, + "learning_rate": 4.685611439428863e-06, + "loss": 0.9643, + "step": 4999 + }, + { + "epoch": 0.6881365262868153, + "grad_norm": 
1.996139310903076, + "learning_rate": 4.68183609828091e-06, + "loss": 0.9565, + "step": 5000 + }, + { + "epoch": 0.6882741535920727, + "grad_norm": 1.759831811215478, + "learning_rate": 4.6780618137793334e-06, + "loss": 0.935, + "step": 5001 + }, + { + "epoch": 0.68841178089733, + "grad_norm": 1.9466430789924138, + "learning_rate": 4.6742885866740165e-06, + "loss": 0.8576, + "step": 5002 + }, + { + "epoch": 0.6885494082025874, + "grad_norm": 1.6898647834180371, + "learning_rate": 4.6705164177146656e-06, + "loss": 0.968, + "step": 5003 + }, + { + "epoch": 0.6886870355078447, + "grad_norm": 1.8551985436705014, + "learning_rate": 4.6667453076507405e-06, + "loss": 0.964, + "step": 5004 + }, + { + "epoch": 0.6888246628131022, + "grad_norm": 1.5802135984593486, + "learning_rate": 4.662975257231523e-06, + "loss": 0.9254, + "step": 5005 + }, + { + "epoch": 0.6889622901183595, + "grad_norm": 1.845109994576382, + "learning_rate": 4.659206267206066e-06, + "loss": 0.973, + "step": 5006 + }, + { + "epoch": 0.6890999174236169, + "grad_norm": 1.9123954312094356, + "learning_rate": 4.655438338323217e-06, + "loss": 0.9074, + "step": 5007 + }, + { + "epoch": 0.6892375447288742, + "grad_norm": 1.688824307352302, + "learning_rate": 4.6516714713316145e-06, + "loss": 0.9427, + "step": 5008 + }, + { + "epoch": 0.6893751720341316, + "grad_norm": 1.7489037958701934, + "learning_rate": 4.6479056669796715e-06, + "loss": 0.9485, + "step": 5009 + }, + { + "epoch": 0.6895127993393889, + "grad_norm": 1.6149190711416013, + "learning_rate": 4.644140926015619e-06, + "loss": 0.8967, + "step": 5010 + }, + { + "epoch": 0.6896504266446463, + "grad_norm": 1.6458682322280005, + "learning_rate": 4.640377249187443e-06, + "loss": 0.9403, + "step": 5011 + }, + { + "epoch": 0.6897880539499036, + "grad_norm": 1.557411567682636, + "learning_rate": 4.636614637242946e-06, + "loss": 0.9067, + "step": 5012 + }, + { + "epoch": 0.6899256812551611, + "grad_norm": 1.7229437615335952, + "learning_rate": 4.632853090929704e-06, + "loss": 0.9069, + "step": 5013 + }, + { + "epoch": 0.6900633085604184, + "grad_norm": 1.6347499584680492, + "learning_rate": 4.629092610995084e-06, + "loss": 0.99, + "step": 5014 + }, + { + "epoch": 0.6902009358656758, + "grad_norm": 1.7735717243426252, + "learning_rate": 4.625333198186243e-06, + "loss": 0.9079, + "step": 5015 + }, + { + "epoch": 0.6903385631709331, + "grad_norm": 1.856301730144119, + "learning_rate": 4.621574853250125e-06, + "loss": 0.9557, + "step": 5016 + }, + { + "epoch": 0.6904761904761905, + "grad_norm": 1.749418650221802, + "learning_rate": 4.61781757693346e-06, + "loss": 0.9423, + "step": 5017 + }, + { + "epoch": 0.6906138177814478, + "grad_norm": 1.666314006955715, + "learning_rate": 4.614061369982769e-06, + "loss": 0.9135, + "step": 5018 + }, + { + "epoch": 0.6907514450867052, + "grad_norm": 1.9457679624688526, + "learning_rate": 4.610306233144357e-06, + "loss": 0.9279, + "step": 5019 + }, + { + "epoch": 0.6908890723919625, + "grad_norm": 2.1120479310881137, + "learning_rate": 4.606552167164321e-06, + "loss": 0.9483, + "step": 5020 + }, + { + "epoch": 0.69102669969722, + "grad_norm": 1.8082121175251815, + "learning_rate": 4.60279917278854e-06, + "loss": 0.8935, + "step": 5021 + }, + { + "epoch": 0.6911643270024773, + "grad_norm": 1.627884443410556, + "learning_rate": 4.599047250762685e-06, + "loss": 0.885, + "step": 5022 + }, + { + "epoch": 0.6913019543077347, + "grad_norm": 1.7698937400070622, + "learning_rate": 4.5952964018322055e-06, + "loss": 0.9503, + "step": 5023 + }, + { + "epoch": 
0.691439581612992, + "grad_norm": 1.7996573663514066, + "learning_rate": 4.591546626742357e-06, + "loss": 0.9461, + "step": 5024 + }, + { + "epoch": 0.6915772089182494, + "grad_norm": 2.011102894061317, + "learning_rate": 4.587797926238156e-06, + "loss": 0.926, + "step": 5025 + }, + { + "epoch": 0.6917148362235067, + "grad_norm": 1.7717564800851904, + "learning_rate": 4.584050301064422e-06, + "loss": 0.9995, + "step": 5026 + }, + { + "epoch": 0.6918524635287641, + "grad_norm": 1.71233771187298, + "learning_rate": 4.580303751965757e-06, + "loss": 0.8939, + "step": 5027 + }, + { + "epoch": 0.6919900908340215, + "grad_norm": 1.6034244274634826, + "learning_rate": 4.576558279686549e-06, + "loss": 0.9052, + "step": 5028 + }, + { + "epoch": 0.6921277181392789, + "grad_norm": 1.5799361891399502, + "learning_rate": 4.572813884970971e-06, + "loss": 0.8869, + "step": 5029 + }, + { + "epoch": 0.6922653454445362, + "grad_norm": 1.7128902298303348, + "learning_rate": 4.569070568562982e-06, + "loss": 0.9527, + "step": 5030 + }, + { + "epoch": 0.6924029727497936, + "grad_norm": 1.828669881167549, + "learning_rate": 4.565328331206336e-06, + "loss": 0.8584, + "step": 5031 + }, + { + "epoch": 0.6925406000550509, + "grad_norm": 2.002968598115233, + "learning_rate": 4.56158717364455e-06, + "loss": 0.8489, + "step": 5032 + }, + { + "epoch": 0.6926782273603083, + "grad_norm": 1.7867045368787395, + "learning_rate": 4.5578470966209585e-06, + "loss": 0.9316, + "step": 5033 + }, + { + "epoch": 0.6928158546655656, + "grad_norm": 1.9342150930189863, + "learning_rate": 4.5541081008786445e-06, + "loss": 0.9264, + "step": 5034 + }, + { + "epoch": 0.692953481970823, + "grad_norm": 1.6393870728396391, + "learning_rate": 4.550370187160514e-06, + "loss": 0.9125, + "step": 5035 + }, + { + "epoch": 0.6930911092760804, + "grad_norm": 1.6411814818934647, + "learning_rate": 4.54663335620922e-06, + "loss": 0.9215, + "step": 5036 + }, + { + "epoch": 0.6932287365813378, + "grad_norm": 1.8609114076736357, + "learning_rate": 4.542897608767235e-06, + "loss": 0.954, + "step": 5037 + }, + { + "epoch": 0.6933663638865951, + "grad_norm": 1.9402138704474456, + "learning_rate": 4.539162945576796e-06, + "loss": 0.8912, + "step": 5038 + }, + { + "epoch": 0.6935039911918525, + "grad_norm": 1.7212138435155977, + "learning_rate": 4.5354293673799275e-06, + "loss": 0.9657, + "step": 5039 + }, + { + "epoch": 0.6936416184971098, + "grad_norm": 1.9488583388004803, + "learning_rate": 4.531696874918447e-06, + "loss": 0.9979, + "step": 5040 + }, + { + "epoch": 0.6937792458023672, + "grad_norm": 1.9733601395233151, + "learning_rate": 4.527965468933936e-06, + "loss": 0.9101, + "step": 5041 + }, + { + "epoch": 0.6939168731076245, + "grad_norm": 1.6537495669445832, + "learning_rate": 4.524235150167791e-06, + "loss": 0.9451, + "step": 5042 + }, + { + "epoch": 0.694054500412882, + "grad_norm": 1.9992087087807289, + "learning_rate": 4.520505919361158e-06, + "loss": 0.8664, + "step": 5043 + }, + { + "epoch": 0.6941921277181393, + "grad_norm": 1.707762027041643, + "learning_rate": 4.516777777254997e-06, + "loss": 0.9105, + "step": 5044 + }, + { + "epoch": 0.6943297550233967, + "grad_norm": 1.6470742170830577, + "learning_rate": 4.513050724590034e-06, + "loss": 0.9396, + "step": 5045 + }, + { + "epoch": 0.694467382328654, + "grad_norm": 1.5505674665324045, + "learning_rate": 4.509324762106785e-06, + "loss": 0.9758, + "step": 5046 + }, + { + "epoch": 0.6946050096339114, + "grad_norm": 1.689757768716327, + "learning_rate": 4.505599890545546e-06, + "loss": 0.8687, + 
"step": 5047 + }, + { + "epoch": 0.6947426369391687, + "grad_norm": 1.750502030657462, + "learning_rate": 4.501876110646398e-06, + "loss": 0.9558, + "step": 5048 + }, + { + "epoch": 0.6948802642444261, + "grad_norm": 1.6881035053459295, + "learning_rate": 4.498153423149206e-06, + "loss": 0.8956, + "step": 5049 + }, + { + "epoch": 0.6950178915496834, + "grad_norm": 1.600464187255, + "learning_rate": 4.494431828793617e-06, + "loss": 0.9392, + "step": 5050 + }, + { + "epoch": 0.6951555188549409, + "grad_norm": 1.8454503549916428, + "learning_rate": 4.490711328319061e-06, + "loss": 0.8438, + "step": 5051 + }, + { + "epoch": 0.6952931461601982, + "grad_norm": 1.7924179949631098, + "learning_rate": 4.48699192246475e-06, + "loss": 0.8354, + "step": 5052 + }, + { + "epoch": 0.6954307734654556, + "grad_norm": 2.7429979845297585, + "learning_rate": 4.483273611969678e-06, + "loss": 0.9427, + "step": 5053 + }, + { + "epoch": 0.6955684007707129, + "grad_norm": 1.80973788396394, + "learning_rate": 4.479556397572625e-06, + "loss": 0.9501, + "step": 5054 + }, + { + "epoch": 0.6957060280759703, + "grad_norm": 1.7180215191928057, + "learning_rate": 4.475840280012145e-06, + "loss": 0.8818, + "step": 5055 + }, + { + "epoch": 0.6958436553812276, + "grad_norm": 1.757184681171232, + "learning_rate": 4.4721252600265906e-06, + "loss": 0.8963, + "step": 5056 + }, + { + "epoch": 0.695981282686485, + "grad_norm": 2.0366286461346457, + "learning_rate": 4.468411338354076e-06, + "loss": 0.9522, + "step": 5057 + }, + { + "epoch": 0.6961189099917423, + "grad_norm": 1.6238520625772739, + "learning_rate": 4.464698515732509e-06, + "loss": 0.9386, + "step": 5058 + }, + { + "epoch": 0.6962565372969998, + "grad_norm": 1.6463525888851822, + "learning_rate": 4.460986792899577e-06, + "loss": 0.8367, + "step": 5059 + }, + { + "epoch": 0.6963941646022571, + "grad_norm": 1.8406637374589985, + "learning_rate": 4.457276170592747e-06, + "loss": 0.8345, + "step": 5060 + }, + { + "epoch": 0.6965317919075145, + "grad_norm": 1.8972674833865015, + "learning_rate": 4.453566649549272e-06, + "loss": 0.9294, + "step": 5061 + }, + { + "epoch": 0.6966694192127718, + "grad_norm": 2.0991956045004225, + "learning_rate": 4.4498582305061766e-06, + "loss": 0.864, + "step": 5062 + }, + { + "epoch": 0.6968070465180292, + "grad_norm": 1.770954857217113, + "learning_rate": 4.4461509142002855e-06, + "loss": 0.9226, + "step": 5063 + }, + { + "epoch": 0.6969446738232865, + "grad_norm": 1.6099766852750965, + "learning_rate": 4.442444701368176e-06, + "loss": 0.8943, + "step": 5064 + }, + { + "epoch": 0.6970823011285439, + "grad_norm": 1.8417392691570047, + "learning_rate": 4.4387395927462386e-06, + "loss": 0.9849, + "step": 5065 + }, + { + "epoch": 0.6972199284338013, + "grad_norm": 1.735280765924145, + "learning_rate": 4.43503558907061e-06, + "loss": 1.0523, + "step": 5066 + }, + { + "epoch": 0.6973575557390587, + "grad_norm": 1.7382444912259905, + "learning_rate": 4.431332691077237e-06, + "loss": 0.8983, + "step": 5067 + }, + { + "epoch": 0.697495183044316, + "grad_norm": 1.981834560500647, + "learning_rate": 4.4276308995018324e-06, + "loss": 0.8794, + "step": 5068 + }, + { + "epoch": 0.6976328103495734, + "grad_norm": 1.7853412053778503, + "learning_rate": 4.423930215079891e-06, + "loss": 0.8786, + "step": 5069 + }, + { + "epoch": 0.6977704376548307, + "grad_norm": 1.8210074861693657, + "learning_rate": 4.420230638546686e-06, + "loss": 0.9105, + "step": 5070 + }, + { + "epoch": 0.6979080649600881, + "grad_norm": 1.6171687846001375, + "learning_rate": 
4.416532170637277e-06, + "loss": 0.8906, + "step": 5071 + }, + { + "epoch": 0.6980456922653454, + "grad_norm": 1.5679187883466783, + "learning_rate": 4.412834812086498e-06, + "loss": 0.8864, + "step": 5072 + }, + { + "epoch": 0.6981833195706028, + "grad_norm": 1.7711300429214918, + "learning_rate": 4.409138563628955e-06, + "loss": 0.9302, + "step": 5073 + }, + { + "epoch": 0.6983209468758602, + "grad_norm": 1.6338100080453581, + "learning_rate": 4.405443425999053e-06, + "loss": 0.9209, + "step": 5074 + }, + { + "epoch": 0.6984585741811176, + "grad_norm": 1.8470469128309877, + "learning_rate": 4.401749399930961e-06, + "loss": 0.9292, + "step": 5075 + }, + { + "epoch": 0.6985962014863749, + "grad_norm": 1.7601740652576645, + "learning_rate": 4.398056486158632e-06, + "loss": 0.9023, + "step": 5076 + }, + { + "epoch": 0.6987338287916323, + "grad_norm": 1.9813217669251713, + "learning_rate": 4.394364685415796e-06, + "loss": 0.9543, + "step": 5077 + }, + { + "epoch": 0.6988714560968896, + "grad_norm": 1.7073496724290795, + "learning_rate": 4.3906739984359645e-06, + "loss": 0.8983, + "step": 5078 + }, + { + "epoch": 0.699009083402147, + "grad_norm": 3.2418186032007266, + "learning_rate": 4.386984425952426e-06, + "loss": 0.9684, + "step": 5079 + }, + { + "epoch": 0.6991467107074043, + "grad_norm": 1.5655246795437379, + "learning_rate": 4.383295968698249e-06, + "loss": 0.9298, + "step": 5080 + }, + { + "epoch": 0.6992843380126618, + "grad_norm": 1.6625161097264447, + "learning_rate": 4.379608627406279e-06, + "loss": 0.9324, + "step": 5081 + }, + { + "epoch": 0.6994219653179191, + "grad_norm": 1.6955130966414405, + "learning_rate": 4.375922402809139e-06, + "loss": 0.9216, + "step": 5082 + }, + { + "epoch": 0.6995595926231765, + "grad_norm": 2.004391818743147, + "learning_rate": 4.372237295639233e-06, + "loss": 1.0068, + "step": 5083 + }, + { + "epoch": 0.6996972199284338, + "grad_norm": 1.635449374095777, + "learning_rate": 4.368553306628739e-06, + "loss": 0.8396, + "step": 5084 + }, + { + "epoch": 0.6998348472336912, + "grad_norm": 2.6487348985895145, + "learning_rate": 4.364870436509615e-06, + "loss": 0.8152, + "step": 5085 + }, + { + "epoch": 0.6999724745389485, + "grad_norm": 1.5964588132176627, + "learning_rate": 4.3611886860136056e-06, + "loss": 0.9415, + "step": 5086 + }, + { + "epoch": 0.7001101018442059, + "grad_norm": 1.578979348422104, + "learning_rate": 4.35750805587221e-06, + "loss": 0.992, + "step": 5087 + }, + { + "epoch": 0.7002477291494632, + "grad_norm": 1.982151703279505, + "learning_rate": 4.353828546816733e-06, + "loss": 0.846, + "step": 5088 + }, + { + "epoch": 0.7003853564547207, + "grad_norm": 1.9971555101594234, + "learning_rate": 4.350150159578233e-06, + "loss": 0.9517, + "step": 5089 + }, + { + "epoch": 0.700522983759978, + "grad_norm": 1.8198494209290095, + "learning_rate": 4.346472894887559e-06, + "loss": 0.9937, + "step": 5090 + }, + { + "epoch": 0.7006606110652354, + "grad_norm": 1.776615992516107, + "learning_rate": 4.342796753475333e-06, + "loss": 0.9639, + "step": 5091 + }, + { + "epoch": 0.7007982383704927, + "grad_norm": 1.9055024267091587, + "learning_rate": 4.3391217360719476e-06, + "loss": 0.9868, + "step": 5092 + }, + { + "epoch": 0.7009358656757501, + "grad_norm": 1.8910497045944668, + "learning_rate": 4.335447843407592e-06, + "loss": 1.039, + "step": 5093 + }, + { + "epoch": 0.7010734929810074, + "grad_norm": 2.380984242710699, + "learning_rate": 4.331775076212205e-06, + "loss": 0.9229, + "step": 5094 + }, + { + "epoch": 0.7012111202862648, + "grad_norm": 
1.689714486241484, + "learning_rate": 4.328103435215526e-06, + "loss": 0.9378, + "step": 5095 + }, + { + "epoch": 0.7013487475915221, + "grad_norm": 1.7371153469063594, + "learning_rate": 4.324432921147048e-06, + "loss": 0.9182, + "step": 5096 + }, + { + "epoch": 0.7014863748967796, + "grad_norm": 1.421112508405305, + "learning_rate": 4.320763534736067e-06, + "loss": 0.8385, + "step": 5097 + }, + { + "epoch": 0.7016240022020369, + "grad_norm": 1.7944259310051087, + "learning_rate": 4.317095276711623e-06, + "loss": 1.0481, + "step": 5098 + }, + { + "epoch": 0.7017616295072943, + "grad_norm": 1.594604658905283, + "learning_rate": 4.313428147802561e-06, + "loss": 0.8667, + "step": 5099 + }, + { + "epoch": 0.7018992568125516, + "grad_norm": 1.9431659822182392, + "learning_rate": 4.3097621487374855e-06, + "loss": 0.9655, + "step": 5100 + }, + { + "epoch": 0.702036884117809, + "grad_norm": 1.8322117271678973, + "learning_rate": 4.3060972802447794e-06, + "loss": 0.8521, + "step": 5101 + }, + { + "epoch": 0.7021745114230663, + "grad_norm": 1.6653951450820863, + "learning_rate": 4.302433543052603e-06, + "loss": 0.9038, + "step": 5102 + }, + { + "epoch": 0.7023121387283237, + "grad_norm": 1.6769778864380802, + "learning_rate": 4.298770937888891e-06, + "loss": 0.9502, + "step": 5103 + }, + { + "epoch": 0.7024497660335811, + "grad_norm": 1.7461942484728894, + "learning_rate": 4.295109465481355e-06, + "loss": 0.9513, + "step": 5104 + }, + { + "epoch": 0.7025873933388385, + "grad_norm": 1.7661378265176921, + "learning_rate": 4.291449126557471e-06, + "loss": 0.9389, + "step": 5105 + }, + { + "epoch": 0.7027250206440958, + "grad_norm": 2.195692478834936, + "learning_rate": 4.287789921844506e-06, + "loss": 0.884, + "step": 5106 + }, + { + "epoch": 0.7028626479493532, + "grad_norm": 1.7561965044384864, + "learning_rate": 4.284131852069493e-06, + "loss": 0.9098, + "step": 5107 + }, + { + "epoch": 0.7030002752546105, + "grad_norm": 1.9272378152556409, + "learning_rate": 4.280474917959239e-06, + "loss": 0.9392, + "step": 5108 + }, + { + "epoch": 0.7031379025598679, + "grad_norm": 1.8936805287275464, + "learning_rate": 4.276819120240326e-06, + "loss": 0.8729, + "step": 5109 + }, + { + "epoch": 0.7032755298651252, + "grad_norm": 1.6259954911558936, + "learning_rate": 4.273164459639114e-06, + "loss": 0.875, + "step": 5110 + }, + { + "epoch": 0.7034131571703826, + "grad_norm": 1.616431255779718, + "learning_rate": 4.269510936881731e-06, + "loss": 0.8197, + "step": 5111 + }, + { + "epoch": 0.70355078447564, + "grad_norm": 2.013099888258806, + "learning_rate": 4.265858552694082e-06, + "loss": 0.8914, + "step": 5112 + }, + { + "epoch": 0.7036884117808974, + "grad_norm": 2.0170750350652105, + "learning_rate": 4.262207307801849e-06, + "loss": 0.9402, + "step": 5113 + }, + { + "epoch": 0.7038260390861547, + "grad_norm": 1.7612946796947122, + "learning_rate": 4.25855720293048e-06, + "loss": 1.0239, + "step": 5114 + }, + { + "epoch": 0.7039636663914121, + "grad_norm": 1.7104142569718108, + "learning_rate": 4.254908238805204e-06, + "loss": 0.9417, + "step": 5115 + }, + { + "epoch": 0.7041012936966694, + "grad_norm": 2.0731302610217788, + "learning_rate": 4.251260416151018e-06, + "loss": 0.9292, + "step": 5116 + }, + { + "epoch": 0.7042389210019268, + "grad_norm": 2.029551884520438, + "learning_rate": 4.2476137356926935e-06, + "loss": 0.9174, + "step": 5117 + }, + { + "epoch": 0.7043765483071841, + "grad_norm": 1.6968039093407905, + "learning_rate": 4.2439681981547855e-06, + "loss": 0.9774, + "step": 5118 + }, + { + 
"epoch": 0.7045141756124416, + "grad_norm": 1.8179240033331696, + "learning_rate": 4.240323804261597e-06, + "loss": 0.8942, + "step": 5119 + }, + { + "epoch": 0.7046518029176989, + "grad_norm": 1.821617109513332, + "learning_rate": 4.236680554737235e-06, + "loss": 0.8086, + "step": 5120 + }, + { + "epoch": 0.7047894302229563, + "grad_norm": 1.7937915729278946, + "learning_rate": 4.233038450305551e-06, + "loss": 0.9329, + "step": 5121 + }, + { + "epoch": 0.7049270575282136, + "grad_norm": 1.9325743767502186, + "learning_rate": 4.229397491690187e-06, + "loss": 0.9061, + "step": 5122 + }, + { + "epoch": 0.705064684833471, + "grad_norm": 1.8430262209993065, + "learning_rate": 4.225757679614549e-06, + "loss": 0.9044, + "step": 5123 + }, + { + "epoch": 0.7052023121387283, + "grad_norm": 1.7578583148029037, + "learning_rate": 4.222119014801816e-06, + "loss": 0.9916, + "step": 5124 + }, + { + "epoch": 0.7053399394439857, + "grad_norm": 1.9375013779738766, + "learning_rate": 4.218481497974953e-06, + "loss": 0.8992, + "step": 5125 + }, + { + "epoch": 0.705477566749243, + "grad_norm": 1.579422166887886, + "learning_rate": 4.2148451298566675e-06, + "loss": 0.9285, + "step": 5126 + }, + { + "epoch": 0.7056151940545005, + "grad_norm": 1.709043378152004, + "learning_rate": 4.211209911169472e-06, + "loss": 0.9081, + "step": 5127 + }, + { + "epoch": 0.7057528213597578, + "grad_norm": 1.75980777070148, + "learning_rate": 4.207575842635621e-06, + "loss": 0.9395, + "step": 5128 + }, + { + "epoch": 0.7058904486650152, + "grad_norm": 1.867885763014054, + "learning_rate": 4.203942924977166e-06, + "loss": 0.934, + "step": 5129 + }, + { + "epoch": 0.7060280759702725, + "grad_norm": 1.6065633190259931, + "learning_rate": 4.200311158915906e-06, + "loss": 0.8635, + "step": 5130 + }, + { + "epoch": 0.7061657032755299, + "grad_norm": 1.9430742321776113, + "learning_rate": 4.1966805451734326e-06, + "loss": 0.9337, + "step": 5131 + }, + { + "epoch": 0.7063033305807872, + "grad_norm": 1.899291306153583, + "learning_rate": 4.193051084471097e-06, + "loss": 0.8886, + "step": 5132 + }, + { + "epoch": 0.7064409578860446, + "grad_norm": 1.74447418673423, + "learning_rate": 4.189422777530022e-06, + "loss": 0.9348, + "step": 5133 + }, + { + "epoch": 0.7065785851913019, + "grad_norm": 1.5858299933739237, + "learning_rate": 4.185795625071103e-06, + "loss": 0.9333, + "step": 5134 + }, + { + "epoch": 0.7067162124965594, + "grad_norm": 4.57845572930728, + "learning_rate": 4.182169627815006e-06, + "loss": 0.9476, + "step": 5135 + }, + { + "epoch": 0.7068538398018167, + "grad_norm": 1.7944037692801242, + "learning_rate": 4.178544786482166e-06, + "loss": 0.965, + "step": 5136 + }, + { + "epoch": 0.7069914671070741, + "grad_norm": 2.1741245882885614, + "learning_rate": 4.17492110179279e-06, + "loss": 0.8316, + "step": 5137 + }, + { + "epoch": 0.7071290944123314, + "grad_norm": 1.740339624162474, + "learning_rate": 4.171298574466854e-06, + "loss": 0.839, + "step": 5138 + }, + { + "epoch": 0.7072667217175888, + "grad_norm": 2.118429588149702, + "learning_rate": 4.1676772052241056e-06, + "loss": 0.9695, + "step": 5139 + }, + { + "epoch": 0.7074043490228461, + "grad_norm": 1.9406191468978404, + "learning_rate": 4.16405699478406e-06, + "loss": 0.9821, + "step": 5140 + }, + { + "epoch": 0.7075419763281035, + "grad_norm": 1.7216561690217567, + "learning_rate": 4.160437943866005e-06, + "loss": 0.8829, + "step": 5141 + }, + { + "epoch": 0.7076796036333609, + "grad_norm": 1.7945984824970234, + "learning_rate": 4.1568200531889955e-06, + "loss": 
0.9487, + "step": 5142 + }, + { + "epoch": 0.7078172309386183, + "grad_norm": 1.7062678903133426, + "learning_rate": 4.153203323471857e-06, + "loss": 0.8869, + "step": 5143 + }, + { + "epoch": 0.7079548582438756, + "grad_norm": 1.5643552707532848, + "learning_rate": 4.149587755433184e-06, + "loss": 0.9421, + "step": 5144 + }, + { + "epoch": 0.708092485549133, + "grad_norm": 1.6653434254427788, + "learning_rate": 4.14597334979134e-06, + "loss": 0.8991, + "step": 5145 + }, + { + "epoch": 0.7082301128543903, + "grad_norm": 1.8347264388190192, + "learning_rate": 4.1423601072644595e-06, + "loss": 0.9462, + "step": 5146 + }, + { + "epoch": 0.7083677401596477, + "grad_norm": 1.6634482687510321, + "learning_rate": 4.1387480285704394e-06, + "loss": 0.9437, + "step": 5147 + }, + { + "epoch": 0.708505367464905, + "grad_norm": 1.9346315543403325, + "learning_rate": 4.1351371144269615e-06, + "loss": 0.8925, + "step": 5148 + }, + { + "epoch": 0.7086429947701623, + "grad_norm": 1.5991762264317202, + "learning_rate": 4.131527365551451e-06, + "loss": 1.0165, + "step": 5149 + }, + { + "epoch": 0.7087806220754198, + "grad_norm": 1.8558370226776015, + "learning_rate": 4.127918782661129e-06, + "loss": 0.9249, + "step": 5150 + }, + { + "epoch": 0.7089182493806772, + "grad_norm": 1.780712934907872, + "learning_rate": 4.124311366472957e-06, + "loss": 0.9777, + "step": 5151 + }, + { + "epoch": 0.7090558766859345, + "grad_norm": 1.763501091453496, + "learning_rate": 4.120705117703695e-06, + "loss": 0.8792, + "step": 5152 + }, + { + "epoch": 0.7091935039911919, + "grad_norm": 1.7723132838551234, + "learning_rate": 4.117100037069843e-06, + "loss": 0.9014, + "step": 5153 + }, + { + "epoch": 0.7093311312964492, + "grad_norm": 1.7648997383276515, + "learning_rate": 4.113496125287684e-06, + "loss": 0.8744, + "step": 5154 + }, + { + "epoch": 0.7094687586017066, + "grad_norm": 1.8450302584433758, + "learning_rate": 4.109893383073272e-06, + "loss": 0.9234, + "step": 5155 + }, + { + "epoch": 0.7096063859069639, + "grad_norm": 1.7314708998318946, + "learning_rate": 4.1062918111424125e-06, + "loss": 0.8855, + "step": 5156 + }, + { + "epoch": 0.7097440132122214, + "grad_norm": 2.0483126995367673, + "learning_rate": 4.102691410210701e-06, + "loss": 0.9015, + "step": 5157 + }, + { + "epoch": 0.7098816405174787, + "grad_norm": 2.0716274036129074, + "learning_rate": 4.099092180993473e-06, + "loss": 0.9164, + "step": 5158 + }, + { + "epoch": 0.710019267822736, + "grad_norm": 1.8253615518829598, + "learning_rate": 4.095494124205862e-06, + "loss": 0.9709, + "step": 5159 + }, + { + "epoch": 0.7101568951279934, + "grad_norm": 4.3472928954049115, + "learning_rate": 4.091897240562738e-06, + "loss": 0.8435, + "step": 5160 + }, + { + "epoch": 0.7102945224332508, + "grad_norm": 1.9431924866373664, + "learning_rate": 4.088301530778761e-06, + "loss": 0.9033, + "step": 5161 + }, + { + "epoch": 0.7104321497385081, + "grad_norm": 1.6864649958934186, + "learning_rate": 4.084706995568348e-06, + "loss": 0.83, + "step": 5162 + }, + { + "epoch": 0.7105697770437654, + "grad_norm": 1.5950939216258953, + "learning_rate": 4.081113635645685e-06, + "loss": 0.896, + "step": 5163 + }, + { + "epoch": 0.7107074043490228, + "grad_norm": 1.7827142663105238, + "learning_rate": 4.077521451724721e-06, + "loss": 0.8448, + "step": 5164 + }, + { + "epoch": 0.7108450316542803, + "grad_norm": 1.6387713114811087, + "learning_rate": 4.073930444519174e-06, + "loss": 0.9589, + "step": 5165 + }, + { + "epoch": 0.7109826589595376, + "grad_norm": 1.8345285254965196, + 
"learning_rate": 4.0703406147425285e-06, + "loss": 0.864, + "step": 5166 + }, + { + "epoch": 0.711120286264795, + "grad_norm": 1.9354392742878037, + "learning_rate": 4.066751963108036e-06, + "loss": 0.9052, + "step": 5167 + }, + { + "epoch": 0.7112579135700523, + "grad_norm": 1.8679777348058537, + "learning_rate": 4.063164490328709e-06, + "loss": 0.9281, + "step": 5168 + }, + { + "epoch": 0.7113955408753097, + "grad_norm": 1.7562475436384717, + "learning_rate": 4.059578197117333e-06, + "loss": 0.8879, + "step": 5169 + }, + { + "epoch": 0.711533168180567, + "grad_norm": 2.171488725153095, + "learning_rate": 4.055993084186454e-06, + "loss": 0.896, + "step": 5170 + }, + { + "epoch": 0.7116707954858243, + "grad_norm": 1.7364546113594967, + "learning_rate": 4.052409152248384e-06, + "loss": 0.9404, + "step": 5171 + }, + { + "epoch": 0.7118084227910817, + "grad_norm": 1.7820678815272881, + "learning_rate": 4.048826402015202e-06, + "loss": 0.8297, + "step": 5172 + }, + { + "epoch": 0.7119460500963392, + "grad_norm": 1.891700810845978, + "learning_rate": 4.045244834198751e-06, + "loss": 0.8718, + "step": 5173 + }, + { + "epoch": 0.7120836774015965, + "grad_norm": 2.100898732909389, + "learning_rate": 4.04166444951064e-06, + "loss": 0.8939, + "step": 5174 + }, + { + "epoch": 0.7122213047068539, + "grad_norm": 1.8969501973323752, + "learning_rate": 4.038085248662241e-06, + "loss": 0.9474, + "step": 5175 + }, + { + "epoch": 0.7123589320121112, + "grad_norm": 1.6402772845274616, + "learning_rate": 4.0345072323646935e-06, + "loss": 0.8497, + "step": 5176 + }, + { + "epoch": 0.7124965593173685, + "grad_norm": 1.8318917346709507, + "learning_rate": 4.0309304013289015e-06, + "loss": 0.8924, + "step": 5177 + }, + { + "epoch": 0.7126341866226259, + "grad_norm": 1.6992771362082488, + "learning_rate": 4.027354756265532e-06, + "loss": 0.9864, + "step": 5178 + }, + { + "epoch": 0.7127718139278832, + "grad_norm": 1.7491826815977427, + "learning_rate": 4.023780297885009e-06, + "loss": 0.8895, + "step": 5179 + }, + { + "epoch": 0.7129094412331407, + "grad_norm": 1.69115979570301, + "learning_rate": 4.020207026897545e-06, + "loss": 0.9106, + "step": 5180 + }, + { + "epoch": 0.713047068538398, + "grad_norm": 1.6464588206448079, + "learning_rate": 4.016634944013083e-06, + "loss": 0.9605, + "step": 5181 + }, + { + "epoch": 0.7131846958436554, + "grad_norm": 1.7087088345827548, + "learning_rate": 4.0130640499413595e-06, + "loss": 0.9327, + "step": 5182 + }, + { + "epoch": 0.7133223231489128, + "grad_norm": 1.8971866147173888, + "learning_rate": 4.009494345391852e-06, + "loss": 0.9411, + "step": 5183 + }, + { + "epoch": 0.7134599504541701, + "grad_norm": 1.684924141482463, + "learning_rate": 4.005925831073823e-06, + "loss": 0.9301, + "step": 5184 + }, + { + "epoch": 0.7135975777594274, + "grad_norm": 1.7695544878711318, + "learning_rate": 4.002358507696279e-06, + "loss": 0.8487, + "step": 5185 + }, + { + "epoch": 0.7137352050646848, + "grad_norm": 2.0193482303143755, + "learning_rate": 3.998792375967996e-06, + "loss": 0.9124, + "step": 5186 + }, + { + "epoch": 0.7138728323699421, + "grad_norm": 1.79156740294054, + "learning_rate": 3.995227436597529e-06, + "loss": 0.9038, + "step": 5187 + }, + { + "epoch": 0.7140104596751996, + "grad_norm": 1.8287373740371755, + "learning_rate": 3.9916636902931645e-06, + "loss": 0.9675, + "step": 5188 + }, + { + "epoch": 0.714148086980457, + "grad_norm": 1.6469279548988085, + "learning_rate": 3.988101137762989e-06, + "loss": 0.9761, + "step": 5189 + }, + { + "epoch": 0.7142857142857143, + 
"grad_norm": 1.6399448636270328, + "learning_rate": 3.984539779714815e-06, + "loss": 0.9704, + "step": 5190 + }, + { + "epoch": 0.7144233415909717, + "grad_norm": 1.9456805609355556, + "learning_rate": 3.980979616856251e-06, + "loss": 0.8817, + "step": 5191 + }, + { + "epoch": 0.714560968896229, + "grad_norm": 1.67026520352583, + "learning_rate": 3.977420649894639e-06, + "loss": 0.9282, + "step": 5192 + }, + { + "epoch": 0.7146985962014863, + "grad_norm": 1.759640764178995, + "learning_rate": 3.973862879537107e-06, + "loss": 0.9224, + "step": 5193 + }, + { + "epoch": 0.7148362235067437, + "grad_norm": 2.1250237513389494, + "learning_rate": 3.9703063064905334e-06, + "loss": 0.9761, + "step": 5194 + }, + { + "epoch": 0.7149738508120012, + "grad_norm": 1.754815361304875, + "learning_rate": 3.966750931461558e-06, + "loss": 0.9232, + "step": 5195 + }, + { + "epoch": 0.7151114781172585, + "grad_norm": 1.7770231839810173, + "learning_rate": 3.9631967551565865e-06, + "loss": 0.9118, + "step": 5196 + }, + { + "epoch": 0.7152491054225159, + "grad_norm": 1.669203149221461, + "learning_rate": 3.9596437782817845e-06, + "loss": 0.8883, + "step": 5197 + }, + { + "epoch": 0.7153867327277732, + "grad_norm": 1.9620900659568794, + "learning_rate": 3.956092001543081e-06, + "loss": 0.9039, + "step": 5198 + }, + { + "epoch": 0.7155243600330305, + "grad_norm": 1.716769340671222, + "learning_rate": 3.952541425646165e-06, + "loss": 0.8577, + "step": 5199 + }, + { + "epoch": 0.7156619873382879, + "grad_norm": 1.865992111411231, + "learning_rate": 3.948992051296486e-06, + "loss": 0.9694, + "step": 5200 + }, + { + "epoch": 0.7157996146435452, + "grad_norm": 2.204792558830319, + "learning_rate": 3.945443879199258e-06, + "loss": 0.9082, + "step": 5201 + }, + { + "epoch": 0.7159372419488026, + "grad_norm": 1.5725319299339242, + "learning_rate": 3.941896910059453e-06, + "loss": 0.8553, + "step": 5202 + }, + { + "epoch": 0.71607486925406, + "grad_norm": 2.362840551724885, + "learning_rate": 3.938351144581805e-06, + "loss": 0.906, + "step": 5203 + }, + { + "epoch": 0.7162124965593174, + "grad_norm": 1.66163411165644, + "learning_rate": 3.93480658347081e-06, + "loss": 0.8495, + "step": 5204 + }, + { + "epoch": 0.7163501238645748, + "grad_norm": 2.6247269543727496, + "learning_rate": 3.9312632274307235e-06, + "loss": 0.9239, + "step": 5205 + }, + { + "epoch": 0.7164877511698321, + "grad_norm": 1.7039754157142608, + "learning_rate": 3.9277210771655626e-06, + "loss": 0.9438, + "step": 5206 + }, + { + "epoch": 0.7166253784750894, + "grad_norm": 2.1254439404088887, + "learning_rate": 3.924180133379102e-06, + "loss": 0.9236, + "step": 5207 + }, + { + "epoch": 0.7167630057803468, + "grad_norm": 1.65718300312885, + "learning_rate": 3.920640396774881e-06, + "loss": 0.8456, + "step": 5208 + }, + { + "epoch": 0.7169006330856041, + "grad_norm": 2.017130873129969, + "learning_rate": 3.917101868056191e-06, + "loss": 0.8775, + "step": 5209 + }, + { + "epoch": 0.7170382603908615, + "grad_norm": 1.8569807887756613, + "learning_rate": 3.913564547926102e-06, + "loss": 0.97, + "step": 5210 + }, + { + "epoch": 0.717175887696119, + "grad_norm": 1.7345055043261586, + "learning_rate": 3.910028437087417e-06, + "loss": 0.8458, + "step": 5211 + }, + { + "epoch": 0.7173135150013763, + "grad_norm": 1.6035081867123977, + "learning_rate": 3.906493536242723e-06, + "loss": 0.9072, + "step": 5212 + }, + { + "epoch": 0.7174511423066336, + "grad_norm": 1.757623442915531, + "learning_rate": 3.9029598460943484e-06, + "loss": 0.9535, + "step": 5213 + }, + { + 
"epoch": 0.717588769611891, + "grad_norm": 1.7661737896475227, + "learning_rate": 3.899427367344399e-06, + "loss": 0.9248, + "step": 5214 + }, + { + "epoch": 0.7177263969171483, + "grad_norm": 1.7459796318533067, + "learning_rate": 3.895896100694716e-06, + "loss": 0.9615, + "step": 5215 + }, + { + "epoch": 0.7178640242224057, + "grad_norm": 2.136993783408053, + "learning_rate": 3.892366046846926e-06, + "loss": 0.9191, + "step": 5216 + }, + { + "epoch": 0.718001651527663, + "grad_norm": 2.320579839527249, + "learning_rate": 3.888837206502402e-06, + "loss": 0.8957, + "step": 5217 + }, + { + "epoch": 0.7181392788329205, + "grad_norm": 1.6961020435138954, + "learning_rate": 3.8853095803622645e-06, + "loss": 0.8478, + "step": 5218 + }, + { + "epoch": 0.7182769061381779, + "grad_norm": 1.7318511221437074, + "learning_rate": 3.88178316912742e-06, + "loss": 0.9662, + "step": 5219 + }, + { + "epoch": 0.7184145334434352, + "grad_norm": 1.8465130333206523, + "learning_rate": 3.878257973498504e-06, + "loss": 0.9646, + "step": 5220 + }, + { + "epoch": 0.7185521607486925, + "grad_norm": 1.7602364506001495, + "learning_rate": 3.874733994175936e-06, + "loss": 0.9402, + "step": 5221 + }, + { + "epoch": 0.7186897880539499, + "grad_norm": 1.6146063521295564, + "learning_rate": 3.871211231859872e-06, + "loss": 0.9452, + "step": 5222 + }, + { + "epoch": 0.7188274153592072, + "grad_norm": 2.0963344165175104, + "learning_rate": 3.867689687250246e-06, + "loss": 0.9228, + "step": 5223 + }, + { + "epoch": 0.7189650426644646, + "grad_norm": 1.7265714229237468, + "learning_rate": 3.864169361046738e-06, + "loss": 0.9128, + "step": 5224 + }, + { + "epoch": 0.7191026699697219, + "grad_norm": 1.7187370554733907, + "learning_rate": 3.860650253948785e-06, + "loss": 0.9007, + "step": 5225 + }, + { + "epoch": 0.7192402972749794, + "grad_norm": 1.853105589795603, + "learning_rate": 3.8571323666555906e-06, + "loss": 0.9387, + "step": 5226 + }, + { + "epoch": 0.7193779245802367, + "grad_norm": 1.5260829526036763, + "learning_rate": 3.8536156998661066e-06, + "loss": 0.9076, + "step": 5227 + }, + { + "epoch": 0.7195155518854941, + "grad_norm": 1.6612488991374663, + "learning_rate": 3.850100254279049e-06, + "loss": 0.9173, + "step": 5228 + }, + { + "epoch": 0.7196531791907514, + "grad_norm": 1.658363019234065, + "learning_rate": 3.846586030592887e-06, + "loss": 0.8663, + "step": 5229 + }, + { + "epoch": 0.7197908064960088, + "grad_norm": 1.7004386431691416, + "learning_rate": 3.843073029505851e-06, + "loss": 0.8976, + "step": 5230 + }, + { + "epoch": 0.7199284338012661, + "grad_norm": 1.6639051727664846, + "learning_rate": 3.839561251715924e-06, + "loss": 0.8598, + "step": 5231 + }, + { + "epoch": 0.7200660611065235, + "grad_norm": 6.208591695681176, + "learning_rate": 3.836050697920849e-06, + "loss": 0.9584, + "step": 5232 + }, + { + "epoch": 0.720203688411781, + "grad_norm": 1.8747018687196448, + "learning_rate": 3.832541368818126e-06, + "loss": 0.8802, + "step": 5233 + }, + { + "epoch": 0.7203413157170383, + "grad_norm": 1.6778380549433567, + "learning_rate": 3.8290332651050075e-06, + "loss": 0.9663, + "step": 5234 + }, + { + "epoch": 0.7204789430222956, + "grad_norm": 1.9800282132513551, + "learning_rate": 3.825526387478509e-06, + "loss": 0.934, + "step": 5235 + }, + { + "epoch": 0.720616570327553, + "grad_norm": 1.6718434352999254, + "learning_rate": 3.822020736635399e-06, + "loss": 0.8407, + "step": 5236 + }, + { + "epoch": 0.7207541976328103, + "grad_norm": 1.9797734589933966, + "learning_rate": 3.818516313272202e-06, + 
"loss": 0.9502, + "step": 5237 + }, + { + "epoch": 0.7208918249380677, + "grad_norm": 1.6937231758547295, + "learning_rate": 3.815013118085196e-06, + "loss": 0.9156, + "step": 5238 + }, + { + "epoch": 0.721029452243325, + "grad_norm": 1.8042615207641088, + "learning_rate": 3.8115111517704207e-06, + "loss": 0.9367, + "step": 5239 + }, + { + "epoch": 0.7211670795485824, + "grad_norm": 1.579846358890038, + "learning_rate": 3.8080104150236686e-06, + "loss": 0.9142, + "step": 5240 + }, + { + "epoch": 0.7213047068538398, + "grad_norm": 1.9842851711564975, + "learning_rate": 3.8045109085404853e-06, + "loss": 0.8966, + "step": 5241 + }, + { + "epoch": 0.7214423341590972, + "grad_norm": 1.7983023009526422, + "learning_rate": 3.8010126330161835e-06, + "loss": 0.878, + "step": 5242 + }, + { + "epoch": 0.7215799614643545, + "grad_norm": 1.8152979543153747, + "learning_rate": 3.7975155891458093e-06, + "loss": 0.8809, + "step": 5243 + }, + { + "epoch": 0.7217175887696119, + "grad_norm": 2.0685041369383566, + "learning_rate": 3.7940197776241926e-06, + "loss": 0.9268, + "step": 5244 + }, + { + "epoch": 0.7218552160748692, + "grad_norm": 1.879852493942682, + "learning_rate": 3.790525199145888e-06, + "loss": 0.8955, + "step": 5245 + }, + { + "epoch": 0.7219928433801266, + "grad_norm": 2.0435675705074616, + "learning_rate": 3.7870318544052354e-06, + "loss": 0.9387, + "step": 5246 + }, + { + "epoch": 0.7221304706853839, + "grad_norm": 1.8461888059817466, + "learning_rate": 3.7835397440962996e-06, + "loss": 0.9438, + "step": 5247 + }, + { + "epoch": 0.7222680979906413, + "grad_norm": 1.7121294634244952, + "learning_rate": 3.7800488689129267e-06, + "loss": 0.8898, + "step": 5248 + }, + { + "epoch": 0.7224057252958987, + "grad_norm": 1.8573986553593866, + "learning_rate": 3.7765592295487043e-06, + "loss": 0.9717, + "step": 5249 + }, + { + "epoch": 0.7225433526011561, + "grad_norm": 1.848516250989898, + "learning_rate": 3.7730708266969673e-06, + "loss": 0.9513, + "step": 5250 + }, + { + "epoch": 0.7226809799064134, + "grad_norm": 1.6329369215329173, + "learning_rate": 3.7695836610508264e-06, + "loss": 0.933, + "step": 5251 + }, + { + "epoch": 0.7228186072116708, + "grad_norm": 1.8335791719487562, + "learning_rate": 3.766097733303119e-06, + "loss": 0.9349, + "step": 5252 + }, + { + "epoch": 0.7229562345169281, + "grad_norm": 1.7547533046067223, + "learning_rate": 3.762613044146467e-06, + "loss": 0.8945, + "step": 5253 + }, + { + "epoch": 0.7230938618221855, + "grad_norm": 2.013973372815274, + "learning_rate": 3.7591295942732153e-06, + "loss": 0.9604, + "step": 5254 + }, + { + "epoch": 0.7232314891274428, + "grad_norm": 1.7212157840811677, + "learning_rate": 3.7556473843754893e-06, + "loss": 0.999, + "step": 5255 + }, + { + "epoch": 0.7233691164327003, + "grad_norm": 1.693696731476249, + "learning_rate": 3.752166415145151e-06, + "loss": 0.9312, + "step": 5256 + }, + { + "epoch": 0.7235067437379576, + "grad_norm": 1.8853649645866906, + "learning_rate": 3.7486866872738236e-06, + "loss": 0.8978, + "step": 5257 + }, + { + "epoch": 0.723644371043215, + "grad_norm": 1.8665831157373645, + "learning_rate": 3.745208201452879e-06, + "loss": 0.9871, + "step": 5258 + }, + { + "epoch": 0.7237819983484723, + "grad_norm": 1.6916700185985052, + "learning_rate": 3.7417309583734475e-06, + "loss": 0.8862, + "step": 5259 + }, + { + "epoch": 0.7239196256537297, + "grad_norm": 1.5910627863982651, + "learning_rate": 3.738254958726407e-06, + "loss": 0.9534, + "step": 5260 + }, + { + "epoch": 0.724057252958987, + "grad_norm": 
1.6892806461852596, + "learning_rate": 3.7347802032023927e-06, + "loss": 0.9363, + "step": 5261 + }, + { + "epoch": 0.7241948802642444, + "grad_norm": 1.6945488960301496, + "learning_rate": 3.7313066924917906e-06, + "loss": 0.9018, + "step": 5262 + }, + { + "epoch": 0.7243325075695017, + "grad_norm": 1.7948871348035533, + "learning_rate": 3.72783442728474e-06, + "loss": 0.9148, + "step": 5263 + }, + { + "epoch": 0.7244701348747592, + "grad_norm": 1.7637580360175407, + "learning_rate": 3.724363408271131e-06, + "loss": 0.9596, + "step": 5264 + }, + { + "epoch": 0.7246077621800165, + "grad_norm": 1.5531617042455883, + "learning_rate": 3.7208936361406088e-06, + "loss": 0.9206, + "step": 5265 + }, + { + "epoch": 0.7247453894852739, + "grad_norm": 1.9635847378187228, + "learning_rate": 3.717425111582571e-06, + "loss": 0.9251, + "step": 5266 + }, + { + "epoch": 0.7248830167905312, + "grad_norm": 1.6387599373641766, + "learning_rate": 3.713957835286163e-06, + "loss": 0.9414, + "step": 5267 + }, + { + "epoch": 0.7250206440957886, + "grad_norm": 1.8786223482022815, + "learning_rate": 3.7104918079402874e-06, + "loss": 0.9167, + "step": 5268 + }, + { + "epoch": 0.7251582714010459, + "grad_norm": 1.9298772009633662, + "learning_rate": 3.7070270302335954e-06, + "loss": 0.9694, + "step": 5269 + }, + { + "epoch": 0.7252958987063033, + "grad_norm": 2.0868091426204853, + "learning_rate": 3.70356350285449e-06, + "loss": 0.8991, + "step": 5270 + }, + { + "epoch": 0.7254335260115607, + "grad_norm": 1.6350046170287333, + "learning_rate": 3.700101226491126e-06, + "loss": 0.8414, + "step": 5271 + }, + { + "epoch": 0.7255711533168181, + "grad_norm": 1.75295336139188, + "learning_rate": 3.6966402018314187e-06, + "loss": 0.9623, + "step": 5272 + }, + { + "epoch": 0.7257087806220754, + "grad_norm": 1.8242044394605565, + "learning_rate": 3.6931804295630115e-06, + "loss": 0.8499, + "step": 5273 + }, + { + "epoch": 0.7258464079273328, + "grad_norm": 1.715079109892309, + "learning_rate": 3.6897219103733294e-06, + "loss": 0.8921, + "step": 5274 + }, + { + "epoch": 0.7259840352325901, + "grad_norm": 1.6603639259547738, + "learning_rate": 3.6862646449495187e-06, + "loss": 0.9137, + "step": 5275 + }, + { + "epoch": 0.7261216625378475, + "grad_norm": 1.8731215829523309, + "learning_rate": 3.682808633978505e-06, + "loss": 0.9039, + "step": 5276 + }, + { + "epoch": 0.7262592898431048, + "grad_norm": 1.6633463610622765, + "learning_rate": 3.6793538781469353e-06, + "loss": 0.8874, + "step": 5277 + }, + { + "epoch": 0.7263969171483622, + "grad_norm": 1.8778534024817988, + "learning_rate": 3.6759003781412327e-06, + "loss": 0.9715, + "step": 5278 + }, + { + "epoch": 0.7265345444536196, + "grad_norm": 1.9260690641432139, + "learning_rate": 3.672448134647558e-06, + "loss": 0.8807, + "step": 5279 + }, + { + "epoch": 0.726672171758877, + "grad_norm": 2.0817387566057257, + "learning_rate": 3.668997148351825e-06, + "loss": 0.859, + "step": 5280 + }, + { + "epoch": 0.7268097990641343, + "grad_norm": 2.1115394378825676, + "learning_rate": 3.665547419939699e-06, + "loss": 0.9809, + "step": 5281 + }, + { + "epoch": 0.7269474263693917, + "grad_norm": 1.6655224131489796, + "learning_rate": 3.6620989500965853e-06, + "loss": 0.9162, + "step": 5282 + }, + { + "epoch": 0.727085053674649, + "grad_norm": 1.71990596088117, + "learning_rate": 3.6586517395076605e-06, + "loss": 0.8823, + "step": 5283 + }, + { + "epoch": 0.7272226809799064, + "grad_norm": 1.5776383793454134, + "learning_rate": 3.655205788857824e-06, + "loss": 0.8971, + "step": 5284 + }, + 
{ + "epoch": 0.7273603082851637, + "grad_norm": 1.8201200880919435, + "learning_rate": 3.65176109883175e-06, + "loss": 0.9235, + "step": 5285 + }, + { + "epoch": 0.7274979355904211, + "grad_norm": 1.7898470375281073, + "learning_rate": 3.648317670113848e-06, + "loss": 0.8691, + "step": 5286 + }, + { + "epoch": 0.7276355628956785, + "grad_norm": 2.1412492397768728, + "learning_rate": 3.6448755033882797e-06, + "loss": 0.9068, + "step": 5287 + }, + { + "epoch": 0.7277731902009359, + "grad_norm": 1.6840438736877315, + "learning_rate": 3.641434599338958e-06, + "loss": 0.9903, + "step": 5288 + }, + { + "epoch": 0.7279108175061932, + "grad_norm": 1.460442993483951, + "learning_rate": 3.637994958649541e-06, + "loss": 0.9597, + "step": 5289 + }, + { + "epoch": 0.7280484448114506, + "grad_norm": 2.087061697669228, + "learning_rate": 3.634556582003439e-06, + "loss": 0.9406, + "step": 5290 + }, + { + "epoch": 0.7281860721167079, + "grad_norm": 2.0188184569801573, + "learning_rate": 3.6311194700838113e-06, + "loss": 0.9161, + "step": 5291 + }, + { + "epoch": 0.7283236994219653, + "grad_norm": 1.8125083634099686, + "learning_rate": 3.6276836235735646e-06, + "loss": 0.8558, + "step": 5292 + }, + { + "epoch": 0.7284613267272226, + "grad_norm": 1.6142611852611792, + "learning_rate": 3.6242490431553536e-06, + "loss": 0.9602, + "step": 5293 + }, + { + "epoch": 0.7285989540324801, + "grad_norm": 1.561917549846892, + "learning_rate": 3.620815729511584e-06, + "loss": 0.9175, + "step": 5294 + }, + { + "epoch": 0.7287365813377374, + "grad_norm": 1.9969988879096154, + "learning_rate": 3.617383683324407e-06, + "loss": 0.8478, + "step": 5295 + }, + { + "epoch": 0.7288742086429948, + "grad_norm": 2.2055649871786773, + "learning_rate": 3.613952905275724e-06, + "loss": 0.8514, + "step": 5296 + }, + { + "epoch": 0.7290118359482521, + "grad_norm": 1.712659926677815, + "learning_rate": 3.610523396047183e-06, + "loss": 0.9044, + "step": 5297 + }, + { + "epoch": 0.7291494632535095, + "grad_norm": 1.8692593447289574, + "learning_rate": 3.6070951563201805e-06, + "loss": 0.9659, + "step": 5298 + }, + { + "epoch": 0.7292870905587668, + "grad_norm": 1.82367916255235, + "learning_rate": 3.6036681867758618e-06, + "loss": 0.9261, + "step": 5299 + }, + { + "epoch": 0.7294247178640242, + "grad_norm": 1.7360811856324996, + "learning_rate": 3.600242488095117e-06, + "loss": 0.8825, + "step": 5300 + }, + { + "epoch": 0.7295623451692815, + "grad_norm": 1.7300883478966302, + "learning_rate": 3.596818060958588e-06, + "loss": 0.896, + "step": 5301 + }, + { + "epoch": 0.729699972474539, + "grad_norm": 1.8700646782066308, + "learning_rate": 3.593394906046659e-06, + "loss": 0.7985, + "step": 5302 + }, + { + "epoch": 0.7298375997797963, + "grad_norm": 1.6467434806308108, + "learning_rate": 3.5899730240394628e-06, + "loss": 0.9176, + "step": 5303 + }, + { + "epoch": 0.7299752270850537, + "grad_norm": 1.6630454812127828, + "learning_rate": 3.5865524156168894e-06, + "loss": 0.9313, + "step": 5304 + }, + { + "epoch": 0.730112854390311, + "grad_norm": 1.755643099263133, + "learning_rate": 3.583133081458554e-06, + "loss": 0.8778, + "step": 5305 + }, + { + "epoch": 0.7302504816955684, + "grad_norm": 1.6602541788271965, + "learning_rate": 3.579715022243845e-06, + "loss": 0.9221, + "step": 5306 + }, + { + "epoch": 0.7303881090008257, + "grad_norm": 1.5903015203608266, + "learning_rate": 3.5762982386518695e-06, + "loss": 0.8873, + "step": 5307 + }, + { + "epoch": 0.7305257363060831, + "grad_norm": 1.6925348902802826, + "learning_rate": 
3.57288273136151e-06, + "loss": 0.9088, + "step": 5308 + }, + { + "epoch": 0.7306633636113405, + "grad_norm": 1.855645528680667, + "learning_rate": 3.5694685010513686e-06, + "loss": 0.9022, + "step": 5309 + }, + { + "epoch": 0.7308009909165979, + "grad_norm": 1.726876241284321, + "learning_rate": 3.5660555483998137e-06, + "loss": 0.9479, + "step": 5310 + }, + { + "epoch": 0.7309386182218552, + "grad_norm": 1.7173795202580493, + "learning_rate": 3.56264387408495e-06, + "loss": 0.9005, + "step": 5311 + }, + { + "epoch": 0.7310762455271126, + "grad_norm": 1.722255610882931, + "learning_rate": 3.5592334787846314e-06, + "loss": 0.9018, + "step": 5312 + }, + { + "epoch": 0.7312138728323699, + "grad_norm": 1.6791395519326309, + "learning_rate": 3.55582436317646e-06, + "loss": 0.9206, + "step": 5313 + }, + { + "epoch": 0.7313515001376273, + "grad_norm": 1.7869267242749116, + "learning_rate": 3.552416527937769e-06, + "loss": 0.9804, + "step": 5314 + }, + { + "epoch": 0.7314891274428846, + "grad_norm": 1.649235723816545, + "learning_rate": 3.549009973745663e-06, + "loss": 0.9486, + "step": 5315 + }, + { + "epoch": 0.731626754748142, + "grad_norm": 1.9403884309205994, + "learning_rate": 3.5456047012769632e-06, + "loss": 0.9408, + "step": 5316 + }, + { + "epoch": 0.7317643820533994, + "grad_norm": 1.7170603858133688, + "learning_rate": 3.5422007112082624e-06, + "loss": 0.899, + "step": 5317 + }, + { + "epoch": 0.7319020093586568, + "grad_norm": 1.8249777785884589, + "learning_rate": 3.538798004215883e-06, + "loss": 0.9159, + "step": 5318 + }, + { + "epoch": 0.7320396366639141, + "grad_norm": 2.420433625816806, + "learning_rate": 3.535396580975895e-06, + "loss": 0.9549, + "step": 5319 + }, + { + "epoch": 0.7321772639691715, + "grad_norm": 1.5553825700930723, + "learning_rate": 3.5319964421641163e-06, + "loss": 0.8696, + "step": 5320 + }, + { + "epoch": 0.7323148912744288, + "grad_norm": 1.9392250757763636, + "learning_rate": 3.5285975884561075e-06, + "loss": 0.9615, + "step": 5321 + }, + { + "epoch": 0.7324525185796862, + "grad_norm": 1.8613023387684744, + "learning_rate": 3.525200020527173e-06, + "loss": 0.9537, + "step": 5322 + }, + { + "epoch": 0.7325901458849435, + "grad_norm": 1.7425791223313267, + "learning_rate": 3.5218037390523653e-06, + "loss": 0.8905, + "step": 5323 + }, + { + "epoch": 0.7327277731902009, + "grad_norm": 1.5807277348544768, + "learning_rate": 3.5184087447064763e-06, + "loss": 0.8795, + "step": 5324 + }, + { + "epoch": 0.7328654004954583, + "grad_norm": 1.892101501467385, + "learning_rate": 3.515015038164048e-06, + "loss": 0.9652, + "step": 5325 + }, + { + "epoch": 0.7330030278007157, + "grad_norm": 1.6750855341760533, + "learning_rate": 3.5116226200993606e-06, + "loss": 0.8535, + "step": 5326 + }, + { + "epoch": 0.733140655105973, + "grad_norm": 1.6148431943533017, + "learning_rate": 3.5082314911864422e-06, + "loss": 0.9851, + "step": 5327 + }, + { + "epoch": 0.7332782824112304, + "grad_norm": 1.8507295280267404, + "learning_rate": 3.5048416520990645e-06, + "loss": 0.8968, + "step": 5328 + }, + { + "epoch": 0.7334159097164877, + "grad_norm": 1.7676870106796847, + "learning_rate": 3.5014531035107423e-06, + "loss": 0.9001, + "step": 5329 + }, + { + "epoch": 0.7335535370217451, + "grad_norm": 1.7335978897367443, + "learning_rate": 3.4980658460947326e-06, + "loss": 0.9168, + "step": 5330 + }, + { + "epoch": 0.7336911643270024, + "grad_norm": 1.8490527682830393, + "learning_rate": 3.494679880524037e-06, + "loss": 0.9198, + "step": 5331 + }, + { + "epoch": 0.7338287916322599, + 
"grad_norm": 1.523958992740363, + "learning_rate": 3.4912952074714014e-06, + "loss": 0.87, + "step": 5332 + }, + { + "epoch": 0.7339664189375172, + "grad_norm": 1.6361506164946327, + "learning_rate": 3.4879118276093126e-06, + "loss": 0.933, + "step": 5333 + }, + { + "epoch": 0.7341040462427746, + "grad_norm": 1.764890623843533, + "learning_rate": 3.4845297416100044e-06, + "loss": 0.8634, + "step": 5334 + }, + { + "epoch": 0.7342416735480319, + "grad_norm": 1.6735032683532114, + "learning_rate": 3.4811489501454444e-06, + "loss": 0.9088, + "step": 5335 + }, + { + "epoch": 0.7343793008532893, + "grad_norm": 1.9644629983346653, + "learning_rate": 3.477769453887363e-06, + "loss": 0.9836, + "step": 5336 + }, + { + "epoch": 0.7345169281585466, + "grad_norm": 1.8158415007835387, + "learning_rate": 3.4743912535072042e-06, + "loss": 0.8275, + "step": 5337 + }, + { + "epoch": 0.734654555463804, + "grad_norm": 1.8794224657216105, + "learning_rate": 3.471014349676184e-06, + "loss": 0.9107, + "step": 5338 + }, + { + "epoch": 0.7347921827690613, + "grad_norm": 1.843173646543842, + "learning_rate": 3.4676387430652335e-06, + "loss": 0.9727, + "step": 5339 + }, + { + "epoch": 0.7349298100743188, + "grad_norm": 1.6410817311317818, + "learning_rate": 3.4642644343450503e-06, + "loss": 0.9845, + "step": 5340 + }, + { + "epoch": 0.7350674373795761, + "grad_norm": 1.757701676761118, + "learning_rate": 3.4608914241860593e-06, + "loss": 0.8836, + "step": 5341 + }, + { + "epoch": 0.7352050646848335, + "grad_norm": 1.760059814249015, + "learning_rate": 3.4575197132584316e-06, + "loss": 0.9001, + "step": 5342 + }, + { + "epoch": 0.7353426919900908, + "grad_norm": 1.7351843979098676, + "learning_rate": 3.4541493022320803e-06, + "loss": 0.8596, + "step": 5343 + }, + { + "epoch": 0.7354803192953482, + "grad_norm": 1.9743375169592805, + "learning_rate": 3.4507801917766605e-06, + "loss": 0.938, + "step": 5344 + }, + { + "epoch": 0.7356179466006055, + "grad_norm": 1.8370298988349438, + "learning_rate": 3.44741238256157e-06, + "loss": 0.9139, + "step": 5345 + }, + { + "epoch": 0.7357555739058629, + "grad_norm": 2.065942388534253, + "learning_rate": 3.444045875255937e-06, + "loss": 0.9235, + "step": 5346 + }, + { + "epoch": 0.7358932012111203, + "grad_norm": 1.8479359650419447, + "learning_rate": 3.440680670528651e-06, + "loss": 0.8183, + "step": 5347 + }, + { + "epoch": 0.7360308285163777, + "grad_norm": 1.8722118740478073, + "learning_rate": 3.437316769048329e-06, + "loss": 0.9297, + "step": 5348 + }, + { + "epoch": 0.736168455821635, + "grad_norm": 1.9216855865907785, + "learning_rate": 3.43395417148333e-06, + "loss": 0.938, + "step": 5349 + }, + { + "epoch": 0.7363060831268924, + "grad_norm": 1.6383505618484056, + "learning_rate": 3.4305928785017595e-06, + "loss": 0.9385, + "step": 5350 + }, + { + "epoch": 0.7364437104321497, + "grad_norm": 1.8605079699724014, + "learning_rate": 3.427232890771458e-06, + "loss": 0.9587, + "step": 5351 + }, + { + "epoch": 0.7365813377374071, + "grad_norm": 1.9585010423187021, + "learning_rate": 3.4238742089600098e-06, + "loss": 0.8927, + "step": 5352 + }, + { + "epoch": 0.7367189650426644, + "grad_norm": 1.9797679417380907, + "learning_rate": 3.4205168337347393e-06, + "loss": 0.9388, + "step": 5353 + }, + { + "epoch": 0.7368565923479218, + "grad_norm": 1.7583741814972624, + "learning_rate": 3.417160765762709e-06, + "loss": 0.9243, + "step": 5354 + }, + { + "epoch": 0.7369942196531792, + "grad_norm": 1.7405355366294266, + "learning_rate": 3.413806005710727e-06, + "loss": 0.9315, + "step": 
5355 + }, + { + "epoch": 0.7371318469584366, + "grad_norm": 1.597786272578749, + "learning_rate": 3.410452554245336e-06, + "loss": 0.9375, + "step": 5356 + }, + { + "epoch": 0.7372694742636939, + "grad_norm": 1.6616529422224957, + "learning_rate": 3.4071004120328223e-06, + "loss": 0.9527, + "step": 5357 + }, + { + "epoch": 0.7374071015689513, + "grad_norm": 1.8896497108057202, + "learning_rate": 3.403749579739205e-06, + "loss": 0.842, + "step": 5358 + }, + { + "epoch": 0.7375447288742086, + "grad_norm": 1.7703938245966926, + "learning_rate": 3.4004000580302608e-06, + "loss": 0.9347, + "step": 5359 + }, + { + "epoch": 0.737682356179466, + "grad_norm": 1.7263967035827457, + "learning_rate": 3.397051847571483e-06, + "loss": 0.8795, + "step": 5360 + }, + { + "epoch": 0.7378199834847233, + "grad_norm": 1.9059037793469038, + "learning_rate": 3.3937049490281184e-06, + "loss": 0.9569, + "step": 5361 + }, + { + "epoch": 0.7379576107899807, + "grad_norm": 1.9953453096096039, + "learning_rate": 3.39035936306515e-06, + "loss": 0.8431, + "step": 5362 + }, + { + "epoch": 0.7380952380952381, + "grad_norm": 1.6740389680721992, + "learning_rate": 3.3870150903473e-06, + "loss": 0.8983, + "step": 5363 + }, + { + "epoch": 0.7382328654004955, + "grad_norm": 1.8230905771735781, + "learning_rate": 3.38367213153903e-06, + "loss": 0.8949, + "step": 5364 + }, + { + "epoch": 0.7383704927057528, + "grad_norm": 1.7597530190851718, + "learning_rate": 3.380330487304536e-06, + "loss": 0.8838, + "step": 5365 + }, + { + "epoch": 0.7385081200110102, + "grad_norm": 1.8585719908041678, + "learning_rate": 3.376990158307768e-06, + "loss": 0.887, + "step": 5366 + }, + { + "epoch": 0.7386457473162675, + "grad_norm": 1.7260618173731745, + "learning_rate": 3.373651145212389e-06, + "loss": 0.9083, + "step": 5367 + }, + { + "epoch": 0.7387833746215249, + "grad_norm": 1.6602032472903947, + "learning_rate": 3.37031344868183e-06, + "loss": 0.9326, + "step": 5368 + }, + { + "epoch": 0.7389210019267822, + "grad_norm": 1.6617601151026693, + "learning_rate": 3.3669770693792313e-06, + "loss": 0.9389, + "step": 5369 + }, + { + "epoch": 0.7390586292320397, + "grad_norm": 1.7039987133921115, + "learning_rate": 3.363642007967499e-06, + "loss": 0.9255, + "step": 5370 + }, + { + "epoch": 0.739196256537297, + "grad_norm": 1.8051054749125792, + "learning_rate": 3.360308265109252e-06, + "loss": 0.8974, + "step": 5371 + }, + { + "epoch": 0.7393338838425544, + "grad_norm": 1.9555580519007822, + "learning_rate": 3.3569758414668686e-06, + "loss": 0.8786, + "step": 5372 + }, + { + "epoch": 0.7394715111478117, + "grad_norm": 1.7647423184782687, + "learning_rate": 3.3536447377024516e-06, + "loss": 0.9408, + "step": 5373 + }, + { + "epoch": 0.7396091384530691, + "grad_norm": 1.8573980690437624, + "learning_rate": 3.350314954477847e-06, + "loss": 0.8428, + "step": 5374 + }, + { + "epoch": 0.7397467657583264, + "grad_norm": 1.6184287065613328, + "learning_rate": 3.346986492454637e-06, + "loss": 0.9617, + "step": 5375 + }, + { + "epoch": 0.7398843930635838, + "grad_norm": 1.773773314211498, + "learning_rate": 3.343659352294141e-06, + "loss": 0.915, + "step": 5376 + }, + { + "epoch": 0.7400220203688411, + "grad_norm": 1.6272666452804558, + "learning_rate": 3.3403335346574186e-06, + "loss": 0.9132, + "step": 5377 + }, + { + "epoch": 0.7401596476740986, + "grad_norm": 2.037348616917277, + "learning_rate": 3.3370090402052557e-06, + "loss": 0.9709, + "step": 5378 + }, + { + "epoch": 0.7402972749793559, + "grad_norm": 1.8313727575610053, + "learning_rate": 
3.3336858695981944e-06, + "loss": 0.949, + "step": 5379 + }, + { + "epoch": 0.7404349022846133, + "grad_norm": 1.8379224312058455, + "learning_rate": 3.3303640234964985e-06, + "loss": 0.8437, + "step": 5380 + }, + { + "epoch": 0.7405725295898706, + "grad_norm": 1.6492160972115861, + "learning_rate": 3.327043502560173e-06, + "loss": 0.9047, + "step": 5381 + }, + { + "epoch": 0.740710156895128, + "grad_norm": 1.6887732087357337, + "learning_rate": 3.323724307448961e-06, + "loss": 0.9511, + "step": 5382 + }, + { + "epoch": 0.7408477842003853, + "grad_norm": 1.814184828178591, + "learning_rate": 3.320406438822341e-06, + "loss": 0.9093, + "step": 5383 + }, + { + "epoch": 0.7409854115056427, + "grad_norm": 1.6067990511749097, + "learning_rate": 3.317089897339528e-06, + "loss": 0.8933, + "step": 5384 + }, + { + "epoch": 0.7411230388109001, + "grad_norm": 1.8594203825447861, + "learning_rate": 3.313774683659474e-06, + "loss": 0.9051, + "step": 5385 + }, + { + "epoch": 0.7412606661161575, + "grad_norm": 1.6909211227845824, + "learning_rate": 3.3104607984408656e-06, + "loss": 0.868, + "step": 5386 + }, + { + "epoch": 0.7413982934214148, + "grad_norm": 1.8876855767360883, + "learning_rate": 3.3071482423421263e-06, + "loss": 0.9713, + "step": 5387 + }, + { + "epoch": 0.7415359207266722, + "grad_norm": 1.5619637083852707, + "learning_rate": 3.303837016021416e-06, + "loss": 0.9227, + "step": 5388 + }, + { + "epoch": 0.7416735480319295, + "grad_norm": 1.8841475413765907, + "learning_rate": 3.3005271201366308e-06, + "loss": 0.818, + "step": 5389 + }, + { + "epoch": 0.7418111753371869, + "grad_norm": 1.8620515112203149, + "learning_rate": 3.297218555345397e-06, + "loss": 0.8327, + "step": 5390 + }, + { + "epoch": 0.7419488026424442, + "grad_norm": 1.6206115198537818, + "learning_rate": 3.293911322305092e-06, + "loss": 0.9443, + "step": 5391 + }, + { + "epoch": 0.7420864299477016, + "grad_norm": 1.8720287500960495, + "learning_rate": 3.290605421672808e-06, + "loss": 0.9364, + "step": 5392 + }, + { + "epoch": 0.742224057252959, + "grad_norm": 1.8095489134616263, + "learning_rate": 3.287300854105385e-06, + "loss": 0.9045, + "step": 5393 + }, + { + "epoch": 0.7423616845582164, + "grad_norm": 1.5721489510668316, + "learning_rate": 3.283997620259395e-06, + "loss": 0.903, + "step": 5394 + }, + { + "epoch": 0.7424993118634737, + "grad_norm": 1.5783819784498287, + "learning_rate": 3.280695720791146e-06, + "loss": 0.879, + "step": 5395 + }, + { + "epoch": 0.7426369391687311, + "grad_norm": 1.6398086761186235, + "learning_rate": 3.277395156356681e-06, + "loss": 0.9216, + "step": 5396 + }, + { + "epoch": 0.7427745664739884, + "grad_norm": 1.8808392007775487, + "learning_rate": 3.274095927611771e-06, + "loss": 0.9967, + "step": 5397 + }, + { + "epoch": 0.7429121937792458, + "grad_norm": 2.019374211119897, + "learning_rate": 3.2707980352119406e-06, + "loss": 0.8459, + "step": 5398 + }, + { + "epoch": 0.7430498210845031, + "grad_norm": 1.7630190873828178, + "learning_rate": 3.2675014798124207e-06, + "loss": 0.9641, + "step": 5399 + }, + { + "epoch": 0.7431874483897605, + "grad_norm": 1.6530262546829535, + "learning_rate": 3.264206262068205e-06, + "loss": 0.8669, + "step": 5400 + }, + { + "epoch": 0.7433250756950179, + "grad_norm": 1.9922344982339548, + "learning_rate": 3.2609123826339963e-06, + "loss": 0.8798, + "step": 5401 + }, + { + "epoch": 0.7434627030002753, + "grad_norm": 1.8617786728056946, + "learning_rate": 3.2576198421642546e-06, + "loss": 0.9115, + "step": 5402 + }, + { + "epoch": 0.7436003303055326, + 
"grad_norm": 1.8934102227703686, + "learning_rate": 3.2543286413131514e-06, + "loss": 0.9282, + "step": 5403 + }, + { + "epoch": 0.74373795761079, + "grad_norm": 2.122019153131676, + "learning_rate": 3.251038780734612e-06, + "loss": 0.9306, + "step": 5404 + }, + { + "epoch": 0.7438755849160473, + "grad_norm": 1.905915958054961, + "learning_rate": 3.247750261082284e-06, + "loss": 0.8845, + "step": 5405 + }, + { + "epoch": 0.7440132122213047, + "grad_norm": 1.7314551044514395, + "learning_rate": 3.244463083009551e-06, + "loss": 0.8916, + "step": 5406 + }, + { + "epoch": 0.744150839526562, + "grad_norm": 2.073314583404827, + "learning_rate": 3.2411772471695304e-06, + "loss": 0.8556, + "step": 5407 + }, + { + "epoch": 0.7442884668318195, + "grad_norm": 2.126073146155783, + "learning_rate": 3.237892754215072e-06, + "loss": 0.9129, + "step": 5408 + }, + { + "epoch": 0.7444260941370768, + "grad_norm": 2.1715746779703746, + "learning_rate": 3.2346096047987597e-06, + "loss": 0.9109, + "step": 5409 + }, + { + "epoch": 0.7445637214423342, + "grad_norm": 1.9741336782657246, + "learning_rate": 3.231327799572912e-06, + "loss": 1.0158, + "step": 5410 + }, + { + "epoch": 0.7447013487475915, + "grad_norm": 1.8554458166182735, + "learning_rate": 3.2280473391895763e-06, + "loss": 0.9327, + "step": 5411 + }, + { + "epoch": 0.7448389760528489, + "grad_norm": 1.7460568193649428, + "learning_rate": 3.2247682243005374e-06, + "loss": 1.014, + "step": 5412 + }, + { + "epoch": 0.7449766033581062, + "grad_norm": 1.9326093311009171, + "learning_rate": 3.221490455557309e-06, + "loss": 0.92, + "step": 5413 + }, + { + "epoch": 0.7451142306633636, + "grad_norm": 2.2598726718785245, + "learning_rate": 3.218214033611139e-06, + "loss": 0.9249, + "step": 5414 + }, + { + "epoch": 0.7452518579686209, + "grad_norm": 1.6448314284586654, + "learning_rate": 3.2149389591130085e-06, + "loss": 0.9373, + "step": 5415 + }, + { + "epoch": 0.7453894852738784, + "grad_norm": 1.6574154005581918, + "learning_rate": 3.21166523271363e-06, + "loss": 0.8915, + "step": 5416 + }, + { + "epoch": 0.7455271125791357, + "grad_norm": 1.9045173034951033, + "learning_rate": 3.208392855063447e-06, + "loss": 0.9118, + "step": 5417 + }, + { + "epoch": 0.7456647398843931, + "grad_norm": 1.9639611833758877, + "learning_rate": 3.205121826812636e-06, + "loss": 0.8621, + "step": 5418 + }, + { + "epoch": 0.7458023671896504, + "grad_norm": 1.7553652473411705, + "learning_rate": 3.2018521486111074e-06, + "loss": 0.8826, + "step": 5419 + }, + { + "epoch": 0.7459399944949078, + "grad_norm": 1.8359439343973158, + "learning_rate": 3.1985838211084963e-06, + "loss": 0.9303, + "step": 5420 + }, + { + "epoch": 0.7460776218001651, + "grad_norm": 2.0571710726519603, + "learning_rate": 3.195316844954186e-06, + "loss": 0.8348, + "step": 5421 + }, + { + "epoch": 0.7462152491054225, + "grad_norm": 1.9130877046369414, + "learning_rate": 3.1920512207972655e-06, + "loss": 0.9857, + "step": 5422 + }, + { + "epoch": 0.7463528764106799, + "grad_norm": 1.89377256644532, + "learning_rate": 3.188786949286583e-06, + "loss": 0.9464, + "step": 5423 + }, + { + "epoch": 0.7464905037159373, + "grad_norm": 1.9778426903734947, + "learning_rate": 3.1855240310706958e-06, + "loss": 0.8644, + "step": 5424 + }, + { + "epoch": 0.7466281310211946, + "grad_norm": 1.8355999065289066, + "learning_rate": 3.182262466797904e-06, + "loss": 0.8305, + "step": 5425 + }, + { + "epoch": 0.746765758326452, + "grad_norm": 1.698401098014406, + "learning_rate": 3.179002257116235e-06, + "loss": 0.9272, + "step": 5426 + 
}, + { + "epoch": 0.7469033856317093, + "grad_norm": 1.7610010664913602, + "learning_rate": 3.1757434026734446e-06, + "loss": 0.9721, + "step": 5427 + }, + { + "epoch": 0.7470410129369667, + "grad_norm": 1.9070882551386175, + "learning_rate": 3.172485904117033e-06, + "loss": 0.9008, + "step": 5428 + }, + { + "epoch": 0.747178640242224, + "grad_norm": 1.966270733438746, + "learning_rate": 3.169229762094207e-06, + "loss": 0.9327, + "step": 5429 + }, + { + "epoch": 0.7473162675474814, + "grad_norm": 1.8998540306605942, + "learning_rate": 3.165974977251931e-06, + "loss": 0.9322, + "step": 5430 + }, + { + "epoch": 0.7474538948527388, + "grad_norm": 1.6680477255976567, + "learning_rate": 3.1627215502368726e-06, + "loss": 0.8748, + "step": 5431 + }, + { + "epoch": 0.7475915221579962, + "grad_norm": 1.7629237799269026, + "learning_rate": 3.159469481695456e-06, + "loss": 0.9392, + "step": 5432 + }, + { + "epoch": 0.7477291494632535, + "grad_norm": 1.782760631063199, + "learning_rate": 3.1562187722738113e-06, + "loss": 0.9774, + "step": 5433 + }, + { + "epoch": 0.7478667767685109, + "grad_norm": 1.9147972160786704, + "learning_rate": 3.1529694226178186e-06, + "loss": 0.9148, + "step": 5434 + }, + { + "epoch": 0.7480044040737682, + "grad_norm": 2.038299124603252, + "learning_rate": 3.149721433373075e-06, + "loss": 0.892, + "step": 5435 + }, + { + "epoch": 0.7481420313790256, + "grad_norm": 1.850721578082473, + "learning_rate": 3.1464748051849135e-06, + "loss": 0.8826, + "step": 5436 + }, + { + "epoch": 0.7482796586842829, + "grad_norm": 2.2890755680903503, + "learning_rate": 3.1432295386983935e-06, + "loss": 0.8822, + "step": 5437 + }, + { + "epoch": 0.7484172859895403, + "grad_norm": 1.8356698759222332, + "learning_rate": 3.1399856345583046e-06, + "loss": 0.9434, + "step": 5438 + }, + { + "epoch": 0.7485549132947977, + "grad_norm": 1.6864293688590377, + "learning_rate": 3.136743093409168e-06, + "loss": 0.8998, + "step": 5439 + }, + { + "epoch": 0.7486925406000551, + "grad_norm": 1.8881792360705087, + "learning_rate": 3.1335019158952296e-06, + "loss": 0.8823, + "step": 5440 + }, + { + "epoch": 0.7488301679053124, + "grad_norm": 1.7727556113488412, + "learning_rate": 3.1302621026604696e-06, + "loss": 0.9498, + "step": 5441 + }, + { + "epoch": 0.7489677952105698, + "grad_norm": 1.5928286656453894, + "learning_rate": 3.127023654348592e-06, + "loss": 0.8553, + "step": 5442 + }, + { + "epoch": 0.7491054225158271, + "grad_norm": 1.7012005203422216, + "learning_rate": 3.1237865716030347e-06, + "loss": 0.957, + "step": 5443 + }, + { + "epoch": 0.7492430498210845, + "grad_norm": 1.7533626663946758, + "learning_rate": 3.1205508550669596e-06, + "loss": 0.915, + "step": 5444 + }, + { + "epoch": 0.7493806771263418, + "grad_norm": 1.8168089720860214, + "learning_rate": 3.117316505383261e-06, + "loss": 0.8651, + "step": 5445 + }, + { + "epoch": 0.7495183044315993, + "grad_norm": 2.1551005442819045, + "learning_rate": 3.114083523194559e-06, + "loss": 0.9812, + "step": 5446 + }, + { + "epoch": 0.7496559317368566, + "grad_norm": 1.7441247420169146, + "learning_rate": 3.110851909143202e-06, + "loss": 0.8795, + "step": 5447 + }, + { + "epoch": 0.749793559042114, + "grad_norm": 1.5585055988851828, + "learning_rate": 3.1076216638712696e-06, + "loss": 0.8621, + "step": 5448 + }, + { + "epoch": 0.7499311863473713, + "grad_norm": 1.692233338774258, + "learning_rate": 3.104392788020566e-06, + "loss": 0.9136, + "step": 5449 + }, + { + "epoch": 0.7500688136526287, + "grad_norm": 1.7578892913906397, + "learning_rate": 
3.101165282232624e-06, + "loss": 0.8977, + "step": 5450 + }, + { + "epoch": 0.750206440957886, + "grad_norm": 1.899213855859315, + "learning_rate": 3.097939147148706e-06, + "loss": 0.96, + "step": 5451 + }, + { + "epoch": 0.7503440682631434, + "grad_norm": 1.6731229211911267, + "learning_rate": 3.0947143834097983e-06, + "loss": 0.9947, + "step": 5452 + }, + { + "epoch": 0.7504816955684007, + "grad_norm": 2.065107727898624, + "learning_rate": 3.091490991656625e-06, + "loss": 0.9305, + "step": 5453 + }, + { + "epoch": 0.7506193228736582, + "grad_norm": 1.79581842507103, + "learning_rate": 3.088268972529619e-06, + "loss": 0.9765, + "step": 5454 + }, + { + "epoch": 0.7507569501789155, + "grad_norm": 1.8675997290600785, + "learning_rate": 3.0850483266689623e-06, + "loss": 0.907, + "step": 5455 + }, + { + "epoch": 0.7508945774841729, + "grad_norm": 1.8161503562128376, + "learning_rate": 3.0818290547145447e-06, + "loss": 0.9117, + "step": 5456 + }, + { + "epoch": 0.7510322047894302, + "grad_norm": 1.7219725302622113, + "learning_rate": 3.078611157305995e-06, + "loss": 0.7934, + "step": 5457 + }, + { + "epoch": 0.7511698320946876, + "grad_norm": 1.7410702982127422, + "learning_rate": 3.075394635082665e-06, + "loss": 0.8775, + "step": 5458 + }, + { + "epoch": 0.7513074593999449, + "grad_norm": 1.7522922381402224, + "learning_rate": 3.07217948868363e-06, + "loss": 0.8636, + "step": 5459 + }, + { + "epoch": 0.7514450867052023, + "grad_norm": 1.9211295230424688, + "learning_rate": 3.068965718747705e-06, + "loss": 0.8494, + "step": 5460 + }, + { + "epoch": 0.7515827140104597, + "grad_norm": 1.9091904274798164, + "learning_rate": 3.0657533259134098e-06, + "loss": 0.9094, + "step": 5461 + }, + { + "epoch": 0.7517203413157171, + "grad_norm": 1.8109550401946803, + "learning_rate": 3.0625423108190155e-06, + "loss": 0.9752, + "step": 5462 + }, + { + "epoch": 0.7518579686209744, + "grad_norm": 1.944562176496586, + "learning_rate": 3.0593326741024955e-06, + "loss": 0.9199, + "step": 5463 + }, + { + "epoch": 0.7519955959262318, + "grad_norm": 2.0765669766029458, + "learning_rate": 3.0561244164015714e-06, + "loss": 0.8715, + "step": 5464 + }, + { + "epoch": 0.7521332232314891, + "grad_norm": 1.7798393655381646, + "learning_rate": 3.052917538353667e-06, + "loss": 0.9554, + "step": 5465 + }, + { + "epoch": 0.7522708505367465, + "grad_norm": 1.779900375297692, + "learning_rate": 3.049712040595957e-06, + "loss": 0.9382, + "step": 5466 + }, + { + "epoch": 0.7524084778420038, + "grad_norm": 1.6912992199611379, + "learning_rate": 3.046507923765325e-06, + "loss": 0.9452, + "step": 5467 + }, + { + "epoch": 0.7525461051472612, + "grad_norm": 1.6794501772391903, + "learning_rate": 3.0433051884983856e-06, + "loss": 0.9039, + "step": 5468 + }, + { + "epoch": 0.7526837324525186, + "grad_norm": 1.9748418667695296, + "learning_rate": 3.0401038354314794e-06, + "loss": 0.8883, + "step": 5469 + }, + { + "epoch": 0.752821359757776, + "grad_norm": 1.5999923957778552, + "learning_rate": 3.036903865200669e-06, + "loss": 0.952, + "step": 5470 + }, + { + "epoch": 0.7529589870630333, + "grad_norm": 1.7495976309053185, + "learning_rate": 3.0337052784417477e-06, + "loss": 0.8686, + "step": 5471 + }, + { + "epoch": 0.7530966143682907, + "grad_norm": 1.8197347386555367, + "learning_rate": 3.030508075790228e-06, + "loss": 0.874, + "step": 5472 + }, + { + "epoch": 0.753234241673548, + "grad_norm": 1.5545893909023443, + "learning_rate": 3.027312257881353e-06, + "loss": 0.9594, + "step": 5473 + }, + { + "epoch": 0.7533718689788054, + 
"grad_norm": 1.6522512550449608, + "learning_rate": 3.0241178253500857e-06, + "loss": 0.8892, + "step": 5474 + }, + { + "epoch": 0.7535094962840627, + "grad_norm": 1.7011374341843077, + "learning_rate": 3.020924778831117e-06, + "loss": 0.8872, + "step": 5475 + }, + { + "epoch": 0.7536471235893201, + "grad_norm": 1.7006397961277306, + "learning_rate": 3.017733118958862e-06, + "loss": 0.8801, + "step": 5476 + }, + { + "epoch": 0.7537847508945775, + "grad_norm": 1.6647266524058915, + "learning_rate": 3.0145428463674597e-06, + "loss": 0.9, + "step": 5477 + }, + { + "epoch": 0.7539223781998349, + "grad_norm": 1.7982351484736916, + "learning_rate": 3.0113539616907726e-06, + "loss": 0.9467, + "step": 5478 + }, + { + "epoch": 0.7540600055050922, + "grad_norm": 1.8227822116820565, + "learning_rate": 3.0081664655623897e-06, + "loss": 0.9322, + "step": 5479 + }, + { + "epoch": 0.7541976328103496, + "grad_norm": 1.6843341248558537, + "learning_rate": 3.0049803586156224e-06, + "loss": 0.9248, + "step": 5480 + }, + { + "epoch": 0.7543352601156069, + "grad_norm": 1.6785061662391683, + "learning_rate": 3.001795641483506e-06, + "loss": 0.9072, + "step": 5481 + }, + { + "epoch": 0.7544728874208643, + "grad_norm": 1.6926379270842984, + "learning_rate": 2.9986123147987978e-06, + "loss": 0.9043, + "step": 5482 + }, + { + "epoch": 0.7546105147261216, + "grad_norm": 1.8821567394872063, + "learning_rate": 2.995430379193991e-06, + "loss": 0.8038, + "step": 5483 + }, + { + "epoch": 0.7547481420313791, + "grad_norm": 1.8331406712016125, + "learning_rate": 2.9922498353012786e-06, + "loss": 0.9074, + "step": 5484 + }, + { + "epoch": 0.7548857693366364, + "grad_norm": 2.015963975382853, + "learning_rate": 2.9890706837526052e-06, + "loss": 0.8145, + "step": 5485 + }, + { + "epoch": 0.7550233966418938, + "grad_norm": 1.792575701786076, + "learning_rate": 2.985892925179611e-06, + "loss": 0.9103, + "step": 5486 + }, + { + "epoch": 0.7551610239471511, + "grad_norm": 1.7821774333884204, + "learning_rate": 2.9827165602136854e-06, + "loss": 0.9331, + "step": 5487 + }, + { + "epoch": 0.7552986512524085, + "grad_norm": 1.6431258839444023, + "learning_rate": 2.9795415894859216e-06, + "loss": 0.8412, + "step": 5488 + }, + { + "epoch": 0.7554362785576658, + "grad_norm": 1.5925118471929725, + "learning_rate": 2.97636801362714e-06, + "loss": 0.8684, + "step": 5489 + }, + { + "epoch": 0.7555739058629232, + "grad_norm": 1.7743398979774374, + "learning_rate": 2.9731958332678977e-06, + "loss": 0.9485, + "step": 5490 + }, + { + "epoch": 0.7557115331681805, + "grad_norm": 1.886578798633562, + "learning_rate": 2.9700250490384507e-06, + "loss": 0.8733, + "step": 5491 + }, + { + "epoch": 0.755849160473438, + "grad_norm": 1.6380299942802359, + "learning_rate": 2.9668556615688037e-06, + "loss": 0.9024, + "step": 5492 + }, + { + "epoch": 0.7559867877786953, + "grad_norm": 1.6773900852635475, + "learning_rate": 2.9636876714886562e-06, + "loss": 0.8915, + "step": 5493 + }, + { + "epoch": 0.7561244150839527, + "grad_norm": 1.6217614547621944, + "learning_rate": 2.9605210794274588e-06, + "loss": 0.9215, + "step": 5494 + }, + { + "epoch": 0.75626204238921, + "grad_norm": 1.7554982483425272, + "learning_rate": 2.957355886014356e-06, + "loss": 0.8722, + "step": 5495 + }, + { + "epoch": 0.7563996696944674, + "grad_norm": 1.7928245987801514, + "learning_rate": 2.9541920918782375e-06, + "loss": 0.9093, + "step": 5496 + }, + { + "epoch": 0.7565372969997247, + "grad_norm": 1.7615719020511682, + "learning_rate": 2.9510296976477047e-06, + "loss": 1.0024, + 
"step": 5497 + }, + { + "epoch": 0.7566749243049821, + "grad_norm": 1.5613812349936373, + "learning_rate": 2.9478687039510823e-06, + "loss": 0.9034, + "step": 5498 + }, + { + "epoch": 0.7568125516102395, + "grad_norm": 1.8099573500175263, + "learning_rate": 2.9447091114164137e-06, + "loss": 0.9357, + "step": 5499 + }, + { + "epoch": 0.7569501789154969, + "grad_norm": 1.691815261150095, + "learning_rate": 2.9415509206714686e-06, + "loss": 0.8604, + "step": 5500 + }, + { + "epoch": 0.7570878062207542, + "grad_norm": 1.6306825811415881, + "learning_rate": 2.938394132343736e-06, + "loss": 0.9175, + "step": 5501 + }, + { + "epoch": 0.7572254335260116, + "grad_norm": 1.5867109916951256, + "learning_rate": 2.9352387470604272e-06, + "loss": 0.8968, + "step": 5502 + }, + { + "epoch": 0.7573630608312689, + "grad_norm": 1.615124336960378, + "learning_rate": 2.9320847654484717e-06, + "loss": 0.9449, + "step": 5503 + }, + { + "epoch": 0.7575006881365263, + "grad_norm": 1.6235135069323288, + "learning_rate": 2.9289321881345257e-06, + "loss": 0.9055, + "step": 5504 + }, + { + "epoch": 0.7576383154417836, + "grad_norm": 1.744416354215708, + "learning_rate": 2.9257810157449607e-06, + "loss": 0.9511, + "step": 5505 + }, + { + "epoch": 0.757775942747041, + "grad_norm": 1.7654497908380193, + "learning_rate": 2.922631248905873e-06, + "loss": 0.9349, + "step": 5506 + }, + { + "epoch": 0.7579135700522984, + "grad_norm": 1.8887775225031076, + "learning_rate": 2.919482888243077e-06, + "loss": 0.8787, + "step": 5507 + }, + { + "epoch": 0.7580511973575558, + "grad_norm": 2.0751444106240404, + "learning_rate": 2.9163359343821097e-06, + "loss": 0.9322, + "step": 5508 + }, + { + "epoch": 0.7581888246628131, + "grad_norm": 2.0425270185351634, + "learning_rate": 2.913190387948227e-06, + "loss": 0.906, + "step": 5509 + }, + { + "epoch": 0.7583264519680705, + "grad_norm": 1.8045879966559795, + "learning_rate": 2.9100462495664063e-06, + "loss": 0.8724, + "step": 5510 + }, + { + "epoch": 0.7584640792733278, + "grad_norm": 1.5413003269395515, + "learning_rate": 2.906903519861345e-06, + "loss": 0.9475, + "step": 5511 + }, + { + "epoch": 0.7586017065785852, + "grad_norm": 1.9239341678929622, + "learning_rate": 2.9037621994574607e-06, + "loss": 1.0228, + "step": 5512 + }, + { + "epoch": 0.7587393338838425, + "grad_norm": 1.6944041848823597, + "learning_rate": 2.9006222889788903e-06, + "loss": 0.9037, + "step": 5513 + }, + { + "epoch": 0.7588769611890999, + "grad_norm": 1.9024791369340794, + "learning_rate": 2.8974837890494876e-06, + "loss": 0.9412, + "step": 5514 + }, + { + "epoch": 0.7590145884943573, + "grad_norm": 1.9203269161579444, + "learning_rate": 2.8943467002928404e-06, + "loss": 0.9163, + "step": 5515 + }, + { + "epoch": 0.7591522157996147, + "grad_norm": 1.8710500457059325, + "learning_rate": 2.891211023332231e-06, + "loss": 0.8501, + "step": 5516 + }, + { + "epoch": 0.759289843104872, + "grad_norm": 1.7651689388637386, + "learning_rate": 2.88807675879069e-06, + "loss": 0.9424, + "step": 5517 + }, + { + "epoch": 0.7594274704101294, + "grad_norm": 1.6679558549842892, + "learning_rate": 2.884943907290938e-06, + "loss": 0.8733, + "step": 5518 + }, + { + "epoch": 0.7595650977153867, + "grad_norm": 1.8705669217832368, + "learning_rate": 2.881812469455444e-06, + "loss": 0.8998, + "step": 5519 + }, + { + "epoch": 0.7597027250206441, + "grad_norm": 1.892494155693648, + "learning_rate": 2.878682445906368e-06, + "loss": 0.8899, + "step": 5520 + }, + { + "epoch": 0.7598403523259014, + "grad_norm": 1.5493013241351175, + 
"learning_rate": 2.8755538372656132e-06, + "loss": 0.8785, + "step": 5521 + }, + { + "epoch": 0.7599779796311589, + "grad_norm": 1.8177794558789961, + "learning_rate": 2.872426644154791e-06, + "loss": 0.9468, + "step": 5522 + }, + { + "epoch": 0.7601156069364162, + "grad_norm": 1.5859483503571836, + "learning_rate": 2.8693008671952225e-06, + "loss": 0.9438, + "step": 5523 + }, + { + "epoch": 0.7602532342416736, + "grad_norm": 1.5978646805733026, + "learning_rate": 2.8661765070079695e-06, + "loss": 0.8968, + "step": 5524 + }, + { + "epoch": 0.7603908615469309, + "grad_norm": 1.744257849747236, + "learning_rate": 2.8630535642137867e-06, + "loss": 0.9645, + "step": 5525 + }, + { + "epoch": 0.7605284888521883, + "grad_norm": 1.6489470983330505, + "learning_rate": 2.8599320394331733e-06, + "loss": 0.8865, + "step": 5526 + }, + { + "epoch": 0.7606661161574456, + "grad_norm": 1.6862950081011878, + "learning_rate": 2.85681193328632e-06, + "loss": 0.9298, + "step": 5527 + }, + { + "epoch": 0.760803743462703, + "grad_norm": 1.8536580323135852, + "learning_rate": 2.853693246393159e-06, + "loss": 0.9197, + "step": 5528 + }, + { + "epoch": 0.7609413707679603, + "grad_norm": 1.802102131842383, + "learning_rate": 2.8505759793733277e-06, + "loss": 0.9562, + "step": 5529 + }, + { + "epoch": 0.7610789980732178, + "grad_norm": 7.4980599994822015, + "learning_rate": 2.8474601328461847e-06, + "loss": 0.9761, + "step": 5530 + }, + { + "epoch": 0.7612166253784751, + "grad_norm": 1.7240928427906432, + "learning_rate": 2.8443457074308055e-06, + "loss": 0.9187, + "step": 5531 + }, + { + "epoch": 0.7613542526837325, + "grad_norm": 1.6609117529759112, + "learning_rate": 2.841232703745984e-06, + "loss": 0.9496, + "step": 5532 + }, + { + "epoch": 0.7614918799889898, + "grad_norm": 1.7544038310263668, + "learning_rate": 2.8381211224102313e-06, + "loss": 0.9298, + "step": 5533 + }, + { + "epoch": 0.7616295072942472, + "grad_norm": 1.5965033029381335, + "learning_rate": 2.835010964041777e-06, + "loss": 0.9409, + "step": 5534 + }, + { + "epoch": 0.7617671345995045, + "grad_norm": 1.5423369374412463, + "learning_rate": 2.831902229258565e-06, + "loss": 0.8482, + "step": 5535 + }, + { + "epoch": 0.7619047619047619, + "grad_norm": 2.0701862558609245, + "learning_rate": 2.8287949186782594e-06, + "loss": 0.8841, + "step": 5536 + }, + { + "epoch": 0.7620423892100193, + "grad_norm": 1.9792824414926642, + "learning_rate": 2.8256890329182417e-06, + "loss": 0.8336, + "step": 5537 + }, + { + "epoch": 0.7621800165152767, + "grad_norm": 1.6662758550941312, + "learning_rate": 2.822584572595606e-06, + "loss": 0.9393, + "step": 5538 + }, + { + "epoch": 0.762317643820534, + "grad_norm": 1.7330159306255433, + "learning_rate": 2.819481538327169e-06, + "loss": 0.8795, + "step": 5539 + }, + { + "epoch": 0.7624552711257914, + "grad_norm": 1.6559720243117015, + "learning_rate": 2.816379930729458e-06, + "loss": 0.8982, + "step": 5540 + }, + { + "epoch": 0.7625928984310487, + "grad_norm": 1.7976406610939581, + "learning_rate": 2.813279750418724e-06, + "loss": 0.991, + "step": 5541 + }, + { + "epoch": 0.7627305257363061, + "grad_norm": 1.6581970044420524, + "learning_rate": 2.8101809980109264e-06, + "loss": 0.9047, + "step": 5542 + }, + { + "epoch": 0.7628681530415634, + "grad_norm": 1.9072540086696796, + "learning_rate": 2.8070836741217466e-06, + "loss": 0.8754, + "step": 5543 + }, + { + "epoch": 0.7630057803468208, + "grad_norm": 1.8397962915543937, + "learning_rate": 2.803987779366578e-06, + "loss": 0.8954, + "step": 5544 + }, + { + "epoch": 
0.7631434076520782, + "grad_norm": 1.7882342981135908, + "learning_rate": 2.800893314360541e-06, + "loss": 0.846, + "step": 5545 + }, + { + "epoch": 0.7632810349573356, + "grad_norm": 1.6480205598859798, + "learning_rate": 2.797800279718451e-06, + "loss": 0.8966, + "step": 5546 + }, + { + "epoch": 0.7634186622625929, + "grad_norm": 1.8147891241535186, + "learning_rate": 2.7947086760548647e-06, + "loss": 0.9242, + "step": 5547 + }, + { + "epoch": 0.7635562895678503, + "grad_norm": 1.4550626352429987, + "learning_rate": 2.7916185039840282e-06, + "loss": 0.9313, + "step": 5548 + }, + { + "epoch": 0.7636939168731076, + "grad_norm": 1.741547953639759, + "learning_rate": 2.7885297641199293e-06, + "loss": 0.9002, + "step": 5549 + }, + { + "epoch": 0.763831544178365, + "grad_norm": 2.057413370539198, + "learning_rate": 2.7854424570762463e-06, + "loss": 0.8646, + "step": 5550 + }, + { + "epoch": 0.7639691714836223, + "grad_norm": 1.8737370235337334, + "learning_rate": 2.7823565834663937e-06, + "loss": 0.894, + "step": 5551 + }, + { + "epoch": 0.7641067987888797, + "grad_norm": 1.633193244391118, + "learning_rate": 2.7792721439034873e-06, + "loss": 0.8694, + "step": 5552 + }, + { + "epoch": 0.7642444260941371, + "grad_norm": 1.7475690868797231, + "learning_rate": 2.776189139000367e-06, + "loss": 0.9291, + "step": 5553 + }, + { + "epoch": 0.7643820533993945, + "grad_norm": 2.215703890484703, + "learning_rate": 2.773107569369584e-06, + "loss": 0.9099, + "step": 5554 + }, + { + "epoch": 0.7645196807046518, + "grad_norm": 1.5391569738851028, + "learning_rate": 2.7700274356233958e-06, + "loss": 0.945, + "step": 5555 + }, + { + "epoch": 0.7646573080099092, + "grad_norm": 1.5810782493414817, + "learning_rate": 2.7669487383737936e-06, + "loss": 0.8103, + "step": 5556 + }, + { + "epoch": 0.7647949353151665, + "grad_norm": 1.8017350757286585, + "learning_rate": 2.7638714782324627e-06, + "loss": 0.8304, + "step": 5557 + }, + { + "epoch": 0.7649325626204239, + "grad_norm": 1.646079182649469, + "learning_rate": 2.7607956558108197e-06, + "loss": 0.935, + "step": 5558 + }, + { + "epoch": 0.7650701899256812, + "grad_norm": 1.8350396285951502, + "learning_rate": 2.7577212717199853e-06, + "loss": 0.8771, + "step": 5559 + }, + { + "epoch": 0.7652078172309387, + "grad_norm": 1.8036222782929265, + "learning_rate": 2.754648326570799e-06, + "loss": 0.8798, + "step": 5560 + }, + { + "epoch": 0.765345444536196, + "grad_norm": 1.798053074340803, + "learning_rate": 2.7515768209738116e-06, + "loss": 0.8635, + "step": 5561 + }, + { + "epoch": 0.7654830718414534, + "grad_norm": 1.8278432340871467, + "learning_rate": 2.7485067555392907e-06, + "loss": 0.9069, + "step": 5562 + }, + { + "epoch": 0.7656206991467107, + "grad_norm": 1.6124224736592274, + "learning_rate": 2.745438130877215e-06, + "loss": 0.9207, + "step": 5563 + }, + { + "epoch": 0.7657583264519681, + "grad_norm": 1.659774624739692, + "learning_rate": 2.742370947597278e-06, + "loss": 0.9167, + "step": 5564 + }, + { + "epoch": 0.7658959537572254, + "grad_norm": 1.741125298830129, + "learning_rate": 2.7393052063088877e-06, + "loss": 0.9615, + "step": 5565 + }, + { + "epoch": 0.7660335810624828, + "grad_norm": 2.021968986307654, + "learning_rate": 2.736240907621165e-06, + "loss": 0.9185, + "step": 5566 + }, + { + "epoch": 0.7661712083677401, + "grad_norm": 1.7137102649838716, + "learning_rate": 2.733178052142944e-06, + "loss": 0.9211, + "step": 5567 + }, + { + "epoch": 0.7663088356729976, + "grad_norm": 1.6547548816984932, + "learning_rate": 2.7301166404827716e-06, + 
"loss": 0.851, + "step": 5568 + }, + { + "epoch": 0.7664464629782549, + "grad_norm": 1.8962841242426112, + "learning_rate": 2.72705667324891e-06, + "loss": 0.8603, + "step": 5569 + }, + { + "epoch": 0.7665840902835123, + "grad_norm": 1.6615353939241628, + "learning_rate": 2.7239981510493306e-06, + "loss": 0.8274, + "step": 5570 + }, + { + "epoch": 0.7667217175887696, + "grad_norm": 2.0019135520893783, + "learning_rate": 2.720941074491722e-06, + "loss": 0.9123, + "step": 5571 + }, + { + "epoch": 0.766859344894027, + "grad_norm": 1.7858963227401488, + "learning_rate": 2.7178854441834813e-06, + "loss": 0.9391, + "step": 5572 + }, + { + "epoch": 0.7669969721992843, + "grad_norm": 1.8425243572647436, + "learning_rate": 2.7148312607317227e-06, + "loss": 0.9804, + "step": 5573 + }, + { + "epoch": 0.7671345995045417, + "grad_norm": 1.6712628732838954, + "learning_rate": 2.711778524743268e-06, + "loss": 0.8998, + "step": 5574 + }, + { + "epoch": 0.7672722268097991, + "grad_norm": 1.9097480668124736, + "learning_rate": 2.7087272368246574e-06, + "loss": 0.9437, + "step": 5575 + }, + { + "epoch": 0.7674098541150565, + "grad_norm": 1.8137934509041271, + "learning_rate": 2.705677397582134e-06, + "loss": 0.909, + "step": 5576 + }, + { + "epoch": 0.7675474814203138, + "grad_norm": 1.6433406686753138, + "learning_rate": 2.7026290076216697e-06, + "loss": 0.9014, + "step": 5577 + }, + { + "epoch": 0.7676851087255712, + "grad_norm": 1.695455110697961, + "learning_rate": 2.699582067548925e-06, + "loss": 0.9432, + "step": 5578 + }, + { + "epoch": 0.7678227360308285, + "grad_norm": 1.9173339554630964, + "learning_rate": 2.6965365779692987e-06, + "loss": 0.8568, + "step": 5579 + }, + { + "epoch": 0.7679603633360859, + "grad_norm": 1.7213110531528557, + "learning_rate": 2.6934925394878743e-06, + "loss": 0.9077, + "step": 5580 + }, + { + "epoch": 0.7680979906413432, + "grad_norm": 1.6161078213263744, + "learning_rate": 2.690449952709472e-06, + "loss": 0.9079, + "step": 5581 + }, + { + "epoch": 0.7682356179466006, + "grad_norm": 1.7350256739527512, + "learning_rate": 2.6874088182386027e-06, + "loss": 0.8484, + "step": 5582 + }, + { + "epoch": 0.768373245251858, + "grad_norm": 1.7351729364416828, + "learning_rate": 2.6843691366795054e-06, + "loss": 0.8732, + "step": 5583 + }, + { + "epoch": 0.7685108725571154, + "grad_norm": 1.829546330306816, + "learning_rate": 2.6813309086361196e-06, + "loss": 0.9107, + "step": 5584 + }, + { + "epoch": 0.7686484998623727, + "grad_norm": 1.8368923817370513, + "learning_rate": 2.678294134712102e-06, + "loss": 0.8851, + "step": 5585 + }, + { + "epoch": 0.7687861271676301, + "grad_norm": 1.820002017620623, + "learning_rate": 2.6752588155108183e-06, + "loss": 0.9457, + "step": 5586 + }, + { + "epoch": 0.7689237544728874, + "grad_norm": 1.8044278849154478, + "learning_rate": 2.6722249516353382e-06, + "loss": 0.859, + "step": 5587 + }, + { + "epoch": 0.7690613817781448, + "grad_norm": 1.7890049334154676, + "learning_rate": 2.6691925436884603e-06, + "loss": 0.9252, + "step": 5588 + }, + { + "epoch": 0.7691990090834021, + "grad_norm": 1.724298974855598, + "learning_rate": 2.6661615922726692e-06, + "loss": 0.8093, + "step": 5589 + }, + { + "epoch": 0.7693366363886595, + "grad_norm": 1.8361263640653, + "learning_rate": 2.663132097990183e-06, + "loss": 0.8658, + "step": 5590 + }, + { + "epoch": 0.7694742636939169, + "grad_norm": 1.7118805276650495, + "learning_rate": 2.6601040614429186e-06, + "loss": 0.8975, + "step": 5591 + }, + { + "epoch": 0.7696118909991743, + "grad_norm": 
2.1599420951068535, + "learning_rate": 2.657077483232505e-06, + "loss": 0.9081, + "step": 5592 + }, + { + "epoch": 0.7697495183044316, + "grad_norm": 1.72506789788939, + "learning_rate": 2.6540523639602823e-06, + "loss": 0.8892, + "step": 5593 + }, + { + "epoch": 0.769887145609689, + "grad_norm": 1.6583440723975187, + "learning_rate": 2.6510287042272996e-06, + "loss": 0.9345, + "step": 5594 + }, + { + "epoch": 0.7700247729149463, + "grad_norm": 1.9851081427458022, + "learning_rate": 2.6480065046343173e-06, + "loss": 0.8677, + "step": 5595 + }, + { + "epoch": 0.7701624002202037, + "grad_norm": 1.8524054530379794, + "learning_rate": 2.6449857657818034e-06, + "loss": 0.9382, + "step": 5596 + }, + { + "epoch": 0.770300027525461, + "grad_norm": 1.7945422263820077, + "learning_rate": 2.6419664882699404e-06, + "loss": 0.9493, + "step": 5597 + }, + { + "epoch": 0.7704376548307185, + "grad_norm": 1.7363904107031078, + "learning_rate": 2.6389486726986148e-06, + "loss": 0.8726, + "step": 5598 + }, + { + "epoch": 0.7705752821359758, + "grad_norm": 1.7928677871559875, + "learning_rate": 2.6359323196674256e-06, + "loss": 0.8477, + "step": 5599 + }, + { + "epoch": 0.7707129094412332, + "grad_norm": 1.7928181227030888, + "learning_rate": 2.632917429775682e-06, + "loss": 0.9066, + "step": 5600 + }, + { + "epoch": 0.7708505367464905, + "grad_norm": 1.7588482953144533, + "learning_rate": 2.629904003622401e-06, + "loss": 0.9483, + "step": 5601 + }, + { + "epoch": 0.7709881640517479, + "grad_norm": 1.6116037731665678, + "learning_rate": 2.626892041806308e-06, + "loss": 0.805, + "step": 5602 + }, + { + "epoch": 0.7711257913570052, + "grad_norm": 1.8703430124872893, + "learning_rate": 2.6238815449258404e-06, + "loss": 0.8838, + "step": 5603 + }, + { + "epoch": 0.7712634186622626, + "grad_norm": 1.7650914118866157, + "learning_rate": 2.62087251357914e-06, + "loss": 0.8391, + "step": 5604 + }, + { + "epoch": 0.7714010459675199, + "grad_norm": 1.8101223184832043, + "learning_rate": 2.617864948364064e-06, + "loss": 0.9124, + "step": 5605 + }, + { + "epoch": 0.7715386732727774, + "grad_norm": 1.8190085255924182, + "learning_rate": 2.614858849878168e-06, + "loss": 0.9157, + "step": 5606 + }, + { + "epoch": 0.7716763005780347, + "grad_norm": 1.7057193708720535, + "learning_rate": 2.6118542187187334e-06, + "loss": 0.9527, + "step": 5607 + }, + { + "epoch": 0.7718139278832921, + "grad_norm": 1.7819112297806747, + "learning_rate": 2.608851055482727e-06, + "loss": 0.8622, + "step": 5608 + }, + { + "epoch": 0.7719515551885494, + "grad_norm": 1.7284774725171599, + "learning_rate": 2.605849360766849e-06, + "loss": 0.9234, + "step": 5609 + }, + { + "epoch": 0.7720891824938068, + "grad_norm": 1.6394413171149156, + "learning_rate": 2.60284913516748e-06, + "loss": 0.9267, + "step": 5610 + }, + { + "epoch": 0.7722268097990641, + "grad_norm": 2.025148633733877, + "learning_rate": 2.5998503792807393e-06, + "loss": 0.8786, + "step": 5611 + }, + { + "epoch": 0.7723644371043215, + "grad_norm": 1.7945584103420753, + "learning_rate": 2.596853093702425e-06, + "loss": 0.8855, + "step": 5612 + }, + { + "epoch": 0.7725020644095789, + "grad_norm": 1.8653908296322677, + "learning_rate": 2.593857279028066e-06, + "loss": 0.9461, + "step": 5613 + }, + { + "epoch": 0.7726396917148363, + "grad_norm": 1.9124334605461848, + "learning_rate": 2.5908629358528867e-06, + "loss": 0.902, + "step": 5614 + }, + { + "epoch": 0.7727773190200936, + "grad_norm": 1.7076409722048267, + "learning_rate": 2.587870064771821e-06, + "loss": 0.9507, + "step": 5615 + }, + 
{ + "epoch": 0.772914946325351, + "grad_norm": 1.6930001798786676, + "learning_rate": 2.5848786663795135e-06, + "loss": 0.8912, + "step": 5616 + }, + { + "epoch": 0.7730525736306083, + "grad_norm": 1.7914430724307597, + "learning_rate": 2.581888741270313e-06, + "loss": 0.977, + "step": 5617 + }, + { + "epoch": 0.7731902009358657, + "grad_norm": 1.5723486108659253, + "learning_rate": 2.57890029003828e-06, + "loss": 0.8891, + "step": 5618 + }, + { + "epoch": 0.773327828241123, + "grad_norm": 1.6821338842556035, + "learning_rate": 2.5759133132771683e-06, + "loss": 0.8841, + "step": 5619 + }, + { + "epoch": 0.7734654555463804, + "grad_norm": 1.6942857003889098, + "learning_rate": 2.57292781158046e-06, + "loss": 0.9376, + "step": 5620 + }, + { + "epoch": 0.7736030828516378, + "grad_norm": 1.810474348173228, + "learning_rate": 2.569943785541331e-06, + "loss": 0.8849, + "step": 5621 + }, + { + "epoch": 0.7737407101568952, + "grad_norm": 1.7717786010031422, + "learning_rate": 2.5669612357526653e-06, + "loss": 0.8849, + "step": 5622 + }, + { + "epoch": 0.7738783374621525, + "grad_norm": 1.8893991687967917, + "learning_rate": 2.5639801628070536e-06, + "loss": 0.89, + "step": 5623 + }, + { + "epoch": 0.7740159647674099, + "grad_norm": 1.863082107768605, + "learning_rate": 2.5610005672967963e-06, + "loss": 0.9423, + "step": 5624 + }, + { + "epoch": 0.7741535920726672, + "grad_norm": 1.8509632780334762, + "learning_rate": 2.5580224498138974e-06, + "loss": 0.9954, + "step": 5625 + }, + { + "epoch": 0.7742912193779246, + "grad_norm": 1.9855437890213528, + "learning_rate": 2.5550458109500686e-06, + "loss": 0.9401, + "step": 5626 + }, + { + "epoch": 0.7744288466831819, + "grad_norm": 1.9998381035141575, + "learning_rate": 2.5520706512967263e-06, + "loss": 0.8521, + "step": 5627 + }, + { + "epoch": 0.7745664739884393, + "grad_norm": 1.5360365863602614, + "learning_rate": 2.5490969714449943e-06, + "loss": 0.8472, + "step": 5628 + }, + { + "epoch": 0.7747041012936967, + "grad_norm": 1.7083724134298932, + "learning_rate": 2.546124771985703e-06, + "loss": 0.8776, + "step": 5629 + }, + { + "epoch": 0.7748417285989541, + "grad_norm": 1.6137406059567772, + "learning_rate": 2.5431540535093867e-06, + "loss": 0.9242, + "step": 5630 + }, + { + "epoch": 0.7749793559042114, + "grad_norm": 1.6145806971785712, + "learning_rate": 2.5401848166062836e-06, + "loss": 0.8878, + "step": 5631 + }, + { + "epoch": 0.7751169832094688, + "grad_norm": 1.9040232827042867, + "learning_rate": 2.5372170618663504e-06, + "loss": 0.8714, + "step": 5632 + }, + { + "epoch": 0.7752546105147261, + "grad_norm": 2.0084052315653587, + "learning_rate": 2.534250789879229e-06, + "loss": 0.8892, + "step": 5633 + }, + { + "epoch": 0.7753922378199835, + "grad_norm": 1.624318677920536, + "learning_rate": 2.531286001234281e-06, + "loss": 0.9416, + "step": 5634 + }, + { + "epoch": 0.7755298651252408, + "grad_norm": 1.6700854469476591, + "learning_rate": 2.528322696520569e-06, + "loss": 0.9023, + "step": 5635 + }, + { + "epoch": 0.7756674924304983, + "grad_norm": 1.7348497042133397, + "learning_rate": 2.525360876326861e-06, + "loss": 0.8352, + "step": 5636 + }, + { + "epoch": 0.7758051197357556, + "grad_norm": 1.5838852729537705, + "learning_rate": 2.522400541241631e-06, + "loss": 0.9079, + "step": 5637 + }, + { + "epoch": 0.775942747041013, + "grad_norm": 1.8364188128816892, + "learning_rate": 2.5194416918530517e-06, + "loss": 0.9438, + "step": 5638 + }, + { + "epoch": 0.7760803743462703, + "grad_norm": 1.7938104679276492, + "learning_rate": 
2.516484328749017e-06, + "loss": 0.8966, + "step": 5639 + }, + { + "epoch": 0.7762180016515277, + "grad_norm": 1.9577092266120515, + "learning_rate": 2.5135284525171035e-06, + "loss": 0.9792, + "step": 5640 + }, + { + "epoch": 0.776355628956785, + "grad_norm": 1.812112909079803, + "learning_rate": 2.510574063744612e-06, + "loss": 0.9432, + "step": 5641 + }, + { + "epoch": 0.7764932562620424, + "grad_norm": 1.9984890838769855, + "learning_rate": 2.50762116301853e-06, + "loss": 0.854, + "step": 5642 + }, + { + "epoch": 0.7766308835672997, + "grad_norm": 1.5635363944828542, + "learning_rate": 2.504669750925569e-06, + "loss": 0.904, + "step": 5643 + }, + { + "epoch": 0.7767685108725572, + "grad_norm": 1.8614043696546663, + "learning_rate": 2.5017198280521226e-06, + "loss": 0.8146, + "step": 5644 + }, + { + "epoch": 0.7769061381778145, + "grad_norm": 1.7536775280405275, + "learning_rate": 2.4987713949843094e-06, + "loss": 0.9122, + "step": 5645 + }, + { + "epoch": 0.7770437654830719, + "grad_norm": 1.7410977633888929, + "learning_rate": 2.495824452307939e-06, + "loss": 0.9236, + "step": 5646 + }, + { + "epoch": 0.7771813927883292, + "grad_norm": 1.6980131592890475, + "learning_rate": 2.492879000608529e-06, + "loss": 0.8699, + "step": 5647 + }, + { + "epoch": 0.7773190200935866, + "grad_norm": 1.664811907289621, + "learning_rate": 2.4899350404713007e-06, + "loss": 0.8821, + "step": 5648 + }, + { + "epoch": 0.7774566473988439, + "grad_norm": 1.8417078142949366, + "learning_rate": 2.4869925724811785e-06, + "loss": 0.8031, + "step": 5649 + }, + { + "epoch": 0.7775942747041013, + "grad_norm": 1.705066495538552, + "learning_rate": 2.484051597222792e-06, + "loss": 0.8165, + "step": 5650 + }, + { + "epoch": 0.7777319020093587, + "grad_norm": 1.5617473498109369, + "learning_rate": 2.4811121152804664e-06, + "loss": 0.9289, + "step": 5651 + }, + { + "epoch": 0.7778695293146161, + "grad_norm": 1.816195503709991, + "learning_rate": 2.478174127238243e-06, + "loss": 0.8948, + "step": 5652 + }, + { + "epoch": 0.7780071566198734, + "grad_norm": 1.7917560323364952, + "learning_rate": 2.4752376336798588e-06, + "loss": 0.8446, + "step": 5653 + }, + { + "epoch": 0.7781447839251308, + "grad_norm": 2.1766044274448446, + "learning_rate": 2.4723026351887536e-06, + "loss": 0.9575, + "step": 5654 + }, + { + "epoch": 0.7782824112303881, + "grad_norm": 1.6319135129739313, + "learning_rate": 2.4693691323480715e-06, + "loss": 0.8698, + "step": 5655 + }, + { + "epoch": 0.7784200385356455, + "grad_norm": 2.076508732634166, + "learning_rate": 2.4664371257406605e-06, + "loss": 0.8961, + "step": 5656 + }, + { + "epoch": 0.7785576658409028, + "grad_norm": 1.8780679334400128, + "learning_rate": 2.4635066159490697e-06, + "loss": 0.8931, + "step": 5657 + }, + { + "epoch": 0.7786952931461602, + "grad_norm": 1.7022418941069022, + "learning_rate": 2.4605776035555494e-06, + "loss": 0.89, + "step": 5658 + }, + { + "epoch": 0.7788329204514176, + "grad_norm": 3.167284517220072, + "learning_rate": 2.4576500891420575e-06, + "loss": 0.8092, + "step": 5659 + }, + { + "epoch": 0.778970547756675, + "grad_norm": 1.7055413307815244, + "learning_rate": 2.4547240732902476e-06, + "loss": 0.8636, + "step": 5660 + }, + { + "epoch": 0.7791081750619323, + "grad_norm": 1.7916968231017, + "learning_rate": 2.4517995565814822e-06, + "loss": 0.8575, + "step": 5661 + }, + { + "epoch": 0.7792458023671897, + "grad_norm": 1.584960798913686, + "learning_rate": 2.4488765395968208e-06, + "loss": 0.8497, + "step": 5662 + }, + { + "epoch": 0.779383429672447, + 
"grad_norm": 1.8444840039101915, + "learning_rate": 2.4459550229170246e-06, + "loss": 0.9777, + "step": 5663 + }, + { + "epoch": 0.7795210569777044, + "grad_norm": 1.8697744934647558, + "learning_rate": 2.443035007122567e-06, + "loss": 0.9333, + "step": 5664 + }, + { + "epoch": 0.7796586842829617, + "grad_norm": 2.0038320970223555, + "learning_rate": 2.4401164927936083e-06, + "loss": 0.9492, + "step": 5665 + }, + { + "epoch": 0.779796311588219, + "grad_norm": 1.7320728861443688, + "learning_rate": 2.4371994805100175e-06, + "loss": 0.8968, + "step": 5666 + }, + { + "epoch": 0.7799339388934765, + "grad_norm": 1.744886514912419, + "learning_rate": 2.434283970851368e-06, + "loss": 0.8675, + "step": 5667 + }, + { + "epoch": 0.7800715661987339, + "grad_norm": 1.7311785654617562, + "learning_rate": 2.431369964396928e-06, + "loss": 0.9157, + "step": 5668 + }, + { + "epoch": 0.7802091935039912, + "grad_norm": 1.9055684895788865, + "learning_rate": 2.428457461725674e-06, + "loss": 0.8588, + "step": 5669 + }, + { + "epoch": 0.7803468208092486, + "grad_norm": 2.0419914824490495, + "learning_rate": 2.4255464634162773e-06, + "loss": 0.9294, + "step": 5670 + }, + { + "epoch": 0.7804844481145059, + "grad_norm": 1.7147177156197309, + "learning_rate": 2.4226369700471196e-06, + "loss": 0.9188, + "step": 5671 + }, + { + "epoch": 0.7806220754197633, + "grad_norm": 1.6998529697999107, + "learning_rate": 2.419728982196268e-06, + "loss": 0.9129, + "step": 5672 + }, + { + "epoch": 0.7807597027250206, + "grad_norm": 2.0237679585053563, + "learning_rate": 2.4168225004415115e-06, + "loss": 0.9253, + "step": 5673 + }, + { + "epoch": 0.7808973300302781, + "grad_norm": 1.9113046309646293, + "learning_rate": 2.413917525360314e-06, + "loss": 0.8852, + "step": 5674 + }, + { + "epoch": 0.7810349573355354, + "grad_norm": 1.9674009097248133, + "learning_rate": 2.411014057529867e-06, + "loss": 0.8477, + "step": 5675 + }, + { + "epoch": 0.7811725846407928, + "grad_norm": 1.516258302483341, + "learning_rate": 2.4081120975270442e-06, + "loss": 0.9133, + "step": 5676 + }, + { + "epoch": 0.7813102119460501, + "grad_norm": 2.0046964125621414, + "learning_rate": 2.4052116459284257e-06, + "loss": 0.9429, + "step": 5677 + }, + { + "epoch": 0.7814478392513075, + "grad_norm": 1.6817476824072486, + "learning_rate": 2.402312703310292e-06, + "loss": 0.9645, + "step": 5678 + }, + { + "epoch": 0.7815854665565648, + "grad_norm": 1.9041409398463784, + "learning_rate": 2.3994152702486228e-06, + "loss": 0.9038, + "step": 5679 + }, + { + "epoch": 0.7817230938618221, + "grad_norm": 1.6123951924332274, + "learning_rate": 2.3965193473190996e-06, + "loss": 0.8399, + "step": 5680 + }, + { + "epoch": 0.7818607211670795, + "grad_norm": 1.5387291868194872, + "learning_rate": 2.3936249350971007e-06, + "loss": 0.9179, + "step": 5681 + }, + { + "epoch": 0.781998348472337, + "grad_norm": 1.7478429694282993, + "learning_rate": 2.390732034157708e-06, + "loss": 0.8842, + "step": 5682 + }, + { + "epoch": 0.7821359757775943, + "grad_norm": 1.7128381559419894, + "learning_rate": 2.387840645075701e-06, + "loss": 0.8614, + "step": 5683 + }, + { + "epoch": 0.7822736030828517, + "grad_norm": 1.6044023200434703, + "learning_rate": 2.384950768425558e-06, + "loss": 0.9315, + "step": 5684 + }, + { + "epoch": 0.782411230388109, + "grad_norm": 1.785939029748251, + "learning_rate": 2.382062404781459e-06, + "loss": 0.8939, + "step": 5685 + }, + { + "epoch": 0.7825488576933664, + "grad_norm": 1.955873870699468, + "learning_rate": 2.379175554717281e-06, + "loss": 0.8861, + 
"step": 5686 + }, + { + "epoch": 0.7826864849986237, + "grad_norm": 1.6821006302263313, + "learning_rate": 2.376290218806604e-06, + "loss": 0.92, + "step": 5687 + }, + { + "epoch": 0.782824112303881, + "grad_norm": 1.702860491628254, + "learning_rate": 2.373406397622703e-06, + "loss": 0.9125, + "step": 5688 + }, + { + "epoch": 0.7829617396091385, + "grad_norm": 1.7771988857000252, + "learning_rate": 2.370524091738553e-06, + "loss": 0.892, + "step": 5689 + }, + { + "epoch": 0.7830993669143959, + "grad_norm": 1.7576332417122207, + "learning_rate": 2.3676433017268308e-06, + "loss": 0.9052, + "step": 5690 + }, + { + "epoch": 0.7832369942196532, + "grad_norm": 1.804647223195512, + "learning_rate": 2.364764028159908e-06, + "loss": 0.9309, + "step": 5691 + }, + { + "epoch": 0.7833746215249106, + "grad_norm": 1.674640801124432, + "learning_rate": 2.361886271609859e-06, + "loss": 0.9089, + "step": 5692 + }, + { + "epoch": 0.7835122488301679, + "grad_norm": 1.7165826302480307, + "learning_rate": 2.35901003264845e-06, + "loss": 0.8386, + "step": 5693 + }, + { + "epoch": 0.7836498761354252, + "grad_norm": 1.7513371863292337, + "learning_rate": 2.3561353118471596e-06, + "loss": 0.8476, + "step": 5694 + }, + { + "epoch": 0.7837875034406826, + "grad_norm": 1.8624768326433874, + "learning_rate": 2.3532621097771448e-06, + "loss": 0.9475, + "step": 5695 + }, + { + "epoch": 0.78392513074594, + "grad_norm": 1.6691853346499397, + "learning_rate": 2.35039042700928e-06, + "loss": 0.9967, + "step": 5696 + }, + { + "epoch": 0.7840627580511974, + "grad_norm": 1.8612420685936584, + "learning_rate": 2.3475202641141248e-06, + "loss": 0.8986, + "step": 5697 + }, + { + "epoch": 0.7842003853564548, + "grad_norm": 1.896230289758656, + "learning_rate": 2.344651621661941e-06, + "loss": 0.9309, + "step": 5698 + }, + { + "epoch": 0.7843380126617121, + "grad_norm": 1.6803406896865396, + "learning_rate": 2.3417845002226903e-06, + "loss": 0.9493, + "step": 5699 + }, + { + "epoch": 0.7844756399669695, + "grad_norm": 1.778858753550019, + "learning_rate": 2.3389189003660264e-06, + "loss": 0.9163, + "step": 5700 + }, + { + "epoch": 0.7846132672722268, + "grad_norm": 1.748730321807752, + "learning_rate": 2.3360548226613144e-06, + "loss": 0.9292, + "step": 5701 + }, + { + "epoch": 0.7847508945774841, + "grad_norm": 1.5734463538477559, + "learning_rate": 2.333192267677594e-06, + "loss": 0.9007, + "step": 5702 + }, + { + "epoch": 0.7848885218827415, + "grad_norm": 1.7386047483261822, + "learning_rate": 2.3303312359836304e-06, + "loss": 0.9398, + "step": 5703 + }, + { + "epoch": 0.7850261491879988, + "grad_norm": 1.7657455631158028, + "learning_rate": 2.3274717281478577e-06, + "loss": 0.9041, + "step": 5704 + }, + { + "epoch": 0.7851637764932563, + "grad_norm": 1.6641602036958703, + "learning_rate": 2.324613744738432e-06, + "loss": 0.853, + "step": 5705 + }, + { + "epoch": 0.7853014037985137, + "grad_norm": 1.8917856918876181, + "learning_rate": 2.3217572863231865e-06, + "loss": 0.8645, + "step": 5706 + }, + { + "epoch": 0.785439031103771, + "grad_norm": 2.269500278468905, + "learning_rate": 2.3189023534696665e-06, + "loss": 0.9252, + "step": 5707 + }, + { + "epoch": 0.7855766584090283, + "grad_norm": 1.855431700378933, + "learning_rate": 2.3160489467451055e-06, + "loss": 0.8976, + "step": 5708 + }, + { + "epoch": 0.7857142857142857, + "grad_norm": 1.8463924273169978, + "learning_rate": 2.313197066716438e-06, + "loss": 0.9456, + "step": 5709 + }, + { + "epoch": 0.785851913019543, + "grad_norm": 1.7355428628575433, + "learning_rate": 
2.3103467139502923e-06, + "loss": 0.889, + "step": 5710 + }, + { + "epoch": 0.7859895403248004, + "grad_norm": 1.7168671542186673, + "learning_rate": 2.307497889012994e-06, + "loss": 0.9068, + "step": 5711 + }, + { + "epoch": 0.7861271676300579, + "grad_norm": 1.6843731596242044, + "learning_rate": 2.3046505924705663e-06, + "loss": 0.8975, + "step": 5712 + }, + { + "epoch": 0.7862647949353152, + "grad_norm": 1.7727656423788418, + "learning_rate": 2.301804824888728e-06, + "loss": 0.8913, + "step": 5713 + }, + { + "epoch": 0.7864024222405726, + "grad_norm": 1.7043768190739303, + "learning_rate": 2.2989605868328946e-06, + "loss": 0.8717, + "step": 5714 + }, + { + "epoch": 0.7865400495458299, + "grad_norm": 1.7368669364710854, + "learning_rate": 2.296117878868177e-06, + "loss": 0.8674, + "step": 5715 + }, + { + "epoch": 0.7866776768510872, + "grad_norm": 1.6279000351259212, + "learning_rate": 2.2932767015593825e-06, + "loss": 0.8503, + "step": 5716 + }, + { + "epoch": 0.7868153041563446, + "grad_norm": 1.7677759697512399, + "learning_rate": 2.290437055471013e-06, + "loss": 0.8804, + "step": 5717 + }, + { + "epoch": 0.786952931461602, + "grad_norm": 1.8136116789332974, + "learning_rate": 2.2875989411672685e-06, + "loss": 0.9024, + "step": 5718 + }, + { + "epoch": 0.7870905587668593, + "grad_norm": 1.8059062505541048, + "learning_rate": 2.2847623592120438e-06, + "loss": 0.8758, + "step": 5719 + }, + { + "epoch": 0.7872281860721168, + "grad_norm": 1.620611231830067, + "learning_rate": 2.2819273101689275e-06, + "loss": 0.9234, + "step": 5720 + }, + { + "epoch": 0.7873658133773741, + "grad_norm": 1.8366504984543195, + "learning_rate": 2.279093794601206e-06, + "loss": 0.9294, + "step": 5721 + }, + { + "epoch": 0.7875034406826315, + "grad_norm": 1.6037415523283522, + "learning_rate": 2.2762618130718593e-06, + "loss": 0.9001, + "step": 5722 + }, + { + "epoch": 0.7876410679878888, + "grad_norm": 1.7321544975857024, + "learning_rate": 2.273431366143564e-06, + "loss": 0.9166, + "step": 5723 + }, + { + "epoch": 0.7877786952931461, + "grad_norm": 1.565691253138757, + "learning_rate": 2.270602454378692e-06, + "loss": 0.8748, + "step": 5724 + }, + { + "epoch": 0.7879163225984035, + "grad_norm": 1.7957358237971197, + "learning_rate": 2.267775078339305e-06, + "loss": 0.9041, + "step": 5725 + }, + { + "epoch": 0.7880539499036608, + "grad_norm": 1.814772134891799, + "learning_rate": 2.264949238587172e-06, + "loss": 0.9158, + "step": 5726 + }, + { + "epoch": 0.7881915772089183, + "grad_norm": 1.7379802261885697, + "learning_rate": 2.2621249356837392e-06, + "loss": 0.9487, + "step": 5727 + }, + { + "epoch": 0.7883292045141757, + "grad_norm": 1.7314051157602728, + "learning_rate": 2.2593021701901675e-06, + "loss": 0.9234, + "step": 5728 + }, + { + "epoch": 0.788466831819433, + "grad_norm": 1.7160552476975768, + "learning_rate": 2.256480942667293e-06, + "loss": 0.8495, + "step": 5729 + }, + { + "epoch": 0.7886044591246903, + "grad_norm": 1.9693532279873003, + "learning_rate": 2.2536612536756575e-06, + "loss": 0.833, + "step": 5730 + }, + { + "epoch": 0.7887420864299477, + "grad_norm": 1.9405339215326936, + "learning_rate": 2.2508431037754962e-06, + "loss": 0.8606, + "step": 5731 + }, + { + "epoch": 0.788879713735205, + "grad_norm": 1.6147178471871133, + "learning_rate": 2.2480264935267327e-06, + "loss": 0.9222, + "step": 5732 + }, + { + "epoch": 0.7890173410404624, + "grad_norm": 1.8512429648104651, + "learning_rate": 2.2452114234889965e-06, + "loss": 0.9067, + "step": 5733 + }, + { + "epoch": 0.7891549683457197, + 
"grad_norm": 1.730654512654597, + "learning_rate": 2.242397894221594e-06, + "loss": 0.9085, + "step": 5734 + }, + { + "epoch": 0.7892925956509772, + "grad_norm": 1.6025425105305533, + "learning_rate": 2.2395859062835456e-06, + "loss": 0.939, + "step": 5735 + }, + { + "epoch": 0.7894302229562346, + "grad_norm": 1.876534461362236, + "learning_rate": 2.2367754602335433e-06, + "loss": 0.8725, + "step": 5736 + }, + { + "epoch": 0.7895678502614919, + "grad_norm": 2.1448669559243254, + "learning_rate": 2.2339665566299964e-06, + "loss": 0.9618, + "step": 5737 + }, + { + "epoch": 0.7897054775667492, + "grad_norm": 1.6015259154823496, + "learning_rate": 2.231159196030983e-06, + "loss": 0.9259, + "step": 5738 + }, + { + "epoch": 0.7898431048720066, + "grad_norm": 1.9376382373618881, + "learning_rate": 2.228353378994297e-06, + "loss": 0.969, + "step": 5739 + }, + { + "epoch": 0.789980732177264, + "grad_norm": 1.9299867463568943, + "learning_rate": 2.2255491060774114e-06, + "loss": 0.9422, + "step": 5740 + }, + { + "epoch": 0.7901183594825213, + "grad_norm": 1.8277545734372316, + "learning_rate": 2.2227463778374982e-06, + "loss": 0.8798, + "step": 5741 + }, + { + "epoch": 0.7902559867877786, + "grad_norm": 1.6821103502960886, + "learning_rate": 2.219945194831421e-06, + "loss": 0.8928, + "step": 5742 + }, + { + "epoch": 0.7903936140930361, + "grad_norm": 1.7839520223074594, + "learning_rate": 2.2171455576157352e-06, + "loss": 0.9468, + "step": 5743 + }, + { + "epoch": 0.7905312413982934, + "grad_norm": 1.7569475291331316, + "learning_rate": 2.2143474667466923e-06, + "loss": 0.7968, + "step": 5744 + }, + { + "epoch": 0.7906688687035508, + "grad_norm": 1.6136561774721396, + "learning_rate": 2.2115509227802344e-06, + "loss": 0.9441, + "step": 5745 + }, + { + "epoch": 0.7908064960088081, + "grad_norm": 1.6332189271382906, + "learning_rate": 2.208755926271995e-06, + "loss": 0.8015, + "step": 5746 + }, + { + "epoch": 0.7909441233140655, + "grad_norm": 1.7630987948270362, + "learning_rate": 2.2059624777773025e-06, + "loss": 0.9036, + "step": 5747 + }, + { + "epoch": 0.7910817506193228, + "grad_norm": 1.556219579450756, + "learning_rate": 2.203170577851177e-06, + "loss": 0.8694, + "step": 5748 + }, + { + "epoch": 0.7912193779245802, + "grad_norm": 1.643646723029824, + "learning_rate": 2.200380227048331e-06, + "loss": 0.9136, + "step": 5749 + }, + { + "epoch": 0.7913570052298377, + "grad_norm": 1.7429030273120034, + "learning_rate": 2.197591425923169e-06, + "loss": 0.9515, + "step": 5750 + }, + { + "epoch": 0.791494632535095, + "grad_norm": 1.7933806071766738, + "learning_rate": 2.1948041750297865e-06, + "loss": 0.8431, + "step": 5751 + }, + { + "epoch": 0.7916322598403523, + "grad_norm": 2.023977653860513, + "learning_rate": 2.192018474921973e-06, + "loss": 0.8813, + "step": 5752 + }, + { + "epoch": 0.7917698871456097, + "grad_norm": 1.8091641908881209, + "learning_rate": 2.18923432615321e-06, + "loss": 0.8759, + "step": 5753 + }, + { + "epoch": 0.791907514450867, + "grad_norm": 1.6464042893643078, + "learning_rate": 2.1864517292766675e-06, + "loss": 0.971, + "step": 5754 + }, + { + "epoch": 0.7920451417561244, + "grad_norm": 1.8331215150744646, + "learning_rate": 2.1836706848452083e-06, + "loss": 0.8271, + "step": 5755 + }, + { + "epoch": 0.7921827690613817, + "grad_norm": 1.538778870034622, + "learning_rate": 2.1808911934113953e-06, + "loss": 0.9176, + "step": 5756 + }, + { + "epoch": 0.7923203963666391, + "grad_norm": 1.8075699031383436, + "learning_rate": 2.1781132555274655e-06, + "loss": 0.8947, + "step": 
5757 + }, + { + "epoch": 0.7924580236718965, + "grad_norm": 1.5949101611696803, + "learning_rate": 2.175336871745367e-06, + "loss": 0.8874, + "step": 5758 + }, + { + "epoch": 0.7925956509771539, + "grad_norm": 1.7462093208357699, + "learning_rate": 2.1725620426167182e-06, + "loss": 0.9444, + "step": 5759 + }, + { + "epoch": 0.7927332782824112, + "grad_norm": 1.943647042430855, + "learning_rate": 2.169788768692852e-06, + "loss": 0.9316, + "step": 5760 + }, + { + "epoch": 0.7928709055876686, + "grad_norm": 1.6185710884205202, + "learning_rate": 2.16701705052477e-06, + "loss": 0.904, + "step": 5761 + }, + { + "epoch": 0.7930085328929259, + "grad_norm": 1.6501847676662038, + "learning_rate": 2.164246888663175e-06, + "loss": 0.8872, + "step": 5762 + }, + { + "epoch": 0.7931461601981833, + "grad_norm": 1.642174776231618, + "learning_rate": 2.1614782836584704e-06, + "loss": 0.9344, + "step": 5763 + }, + { + "epoch": 0.7932837875034406, + "grad_norm": 1.7579063549168723, + "learning_rate": 2.1587112360607266e-06, + "loss": 0.8939, + "step": 5764 + }, + { + "epoch": 0.7934214148086981, + "grad_norm": 1.7195236278333423, + "learning_rate": 2.15594574641973e-06, + "loss": 0.8999, + "step": 5765 + }, + { + "epoch": 0.7935590421139554, + "grad_norm": 1.8298942607841038, + "learning_rate": 2.153181815284935e-06, + "loss": 0.8715, + "step": 5766 + }, + { + "epoch": 0.7936966694192128, + "grad_norm": 1.6106279118175921, + "learning_rate": 2.150419443205508e-06, + "loss": 0.9171, + "step": 5767 + }, + { + "epoch": 0.7938342967244701, + "grad_norm": 1.6477527748350782, + "learning_rate": 2.147658630730283e-06, + "loss": 0.8812, + "step": 5768 + }, + { + "epoch": 0.7939719240297275, + "grad_norm": 1.6849315639765694, + "learning_rate": 2.1448993784078032e-06, + "loss": 0.9084, + "step": 5769 + }, + { + "epoch": 0.7941095513349848, + "grad_norm": 1.8182103203060942, + "learning_rate": 2.142141686786293e-06, + "loss": 0.9511, + "step": 5770 + }, + { + "epoch": 0.7942471786402422, + "grad_norm": 1.6362614548676786, + "learning_rate": 2.1393855564136667e-06, + "loss": 0.8674, + "step": 5771 + }, + { + "epoch": 0.7943848059454995, + "grad_norm": 1.6535075823372474, + "learning_rate": 2.1366309878375312e-06, + "loss": 0.9471, + "step": 5772 + }, + { + "epoch": 0.794522433250757, + "grad_norm": 1.7744839212539176, + "learning_rate": 2.13387798160518e-06, + "loss": 0.922, + "step": 5773 + }, + { + "epoch": 0.7946600605560143, + "grad_norm": 1.812703148405666, + "learning_rate": 2.131126538263598e-06, + "loss": 0.9517, + "step": 5774 + }, + { + "epoch": 0.7947976878612717, + "grad_norm": 1.593813810566864, + "learning_rate": 2.1283766583594614e-06, + "loss": 0.8723, + "step": 5775 + }, + { + "epoch": 0.794935315166529, + "grad_norm": 1.5812688147205307, + "learning_rate": 2.12562834243913e-06, + "loss": 0.9825, + "step": 5776 + }, + { + "epoch": 0.7950729424717864, + "grad_norm": 1.8100084926795617, + "learning_rate": 2.1228815910486598e-06, + "loss": 0.922, + "step": 5777 + }, + { + "epoch": 0.7952105697770437, + "grad_norm": 1.736572879235143, + "learning_rate": 2.1201364047337903e-06, + "loss": 0.8666, + "step": 5778 + }, + { + "epoch": 0.7953481970823011, + "grad_norm": 1.7188091775967367, + "learning_rate": 2.1173927840399545e-06, + "loss": 0.9618, + "step": 5779 + }, + { + "epoch": 0.7954858243875584, + "grad_norm": 1.6622362083282916, + "learning_rate": 2.114650729512271e-06, + "loss": 0.9194, + "step": 5780 + }, + { + "epoch": 0.7956234516928159, + "grad_norm": 1.663692058937918, + "learning_rate": 
2.1119102416955494e-06, + "loss": 0.9283, + "step": 5781 + }, + { + "epoch": 0.7957610789980732, + "grad_norm": 2.008943598092258, + "learning_rate": 2.1091713211342858e-06, + "loss": 0.8421, + "step": 5782 + }, + { + "epoch": 0.7958987063033306, + "grad_norm": 1.9618857803223995, + "learning_rate": 2.1064339683726676e-06, + "loss": 0.8748, + "step": 5783 + }, + { + "epoch": 0.7960363336085879, + "grad_norm": 1.692099976881477, + "learning_rate": 2.103698183954569e-06, + "loss": 0.8417, + "step": 5784 + }, + { + "epoch": 0.7961739609138453, + "grad_norm": 2.0726841784474432, + "learning_rate": 2.1009639684235517e-06, + "loss": 0.9339, + "step": 5785 + }, + { + "epoch": 0.7963115882191026, + "grad_norm": 1.7564590888513487, + "learning_rate": 2.098231322322869e-06, + "loss": 0.9047, + "step": 5786 + }, + { + "epoch": 0.79644921552436, + "grad_norm": 1.7459695049479844, + "learning_rate": 2.0955002461954553e-06, + "loss": 0.9444, + "step": 5787 + }, + { + "epoch": 0.7965868428296174, + "grad_norm": 1.9404370933779793, + "learning_rate": 2.092770740583948e-06, + "loss": 0.9186, + "step": 5788 + }, + { + "epoch": 0.7967244701348748, + "grad_norm": 2.1050569627942957, + "learning_rate": 2.090042806030651e-06, + "loss": 0.9406, + "step": 5789 + }, + { + "epoch": 0.7968620974401321, + "grad_norm": 1.9276813082150905, + "learning_rate": 2.0873164430775784e-06, + "loss": 0.868, + "step": 5790 + }, + { + "epoch": 0.7969997247453895, + "grad_norm": 1.8577950037527218, + "learning_rate": 2.08459165226641e-06, + "loss": 0.8581, + "step": 5791 + }, + { + "epoch": 0.7971373520506468, + "grad_norm": 1.931541308844158, + "learning_rate": 2.081868434138535e-06, + "loss": 0.8859, + "step": 5792 + }, + { + "epoch": 0.7972749793559042, + "grad_norm": 1.7838710933451485, + "learning_rate": 2.0791467892350126e-06, + "loss": 0.8966, + "step": 5793 + }, + { + "epoch": 0.7974126066611615, + "grad_norm": 1.8312082785085597, + "learning_rate": 2.076426718096596e-06, + "loss": 0.8958, + "step": 5794 + }, + { + "epoch": 0.7975502339664189, + "grad_norm": 1.7330071478684497, + "learning_rate": 2.0737082212637337e-06, + "loss": 0.839, + "step": 5795 + }, + { + "epoch": 0.7976878612716763, + "grad_norm": 1.7158596860386541, + "learning_rate": 2.0709912992765436e-06, + "loss": 0.8682, + "step": 5796 + }, + { + "epoch": 0.7978254885769337, + "grad_norm": 1.7433704637448035, + "learning_rate": 2.068275952674851e-06, + "loss": 0.8584, + "step": 5797 + }, + { + "epoch": 0.797963115882191, + "grad_norm": 1.6101770404296827, + "learning_rate": 2.065562181998149e-06, + "loss": 0.8529, + "step": 5798 + }, + { + "epoch": 0.7981007431874484, + "grad_norm": 1.5318458639270005, + "learning_rate": 2.0628499877856346e-06, + "loss": 0.9195, + "step": 5799 + }, + { + "epoch": 0.7982383704927057, + "grad_norm": 1.6417307829788035, + "learning_rate": 2.0601393705761754e-06, + "loss": 0.8414, + "step": 5800 + }, + { + "epoch": 0.7983759977979631, + "grad_norm": 1.7609775755954107, + "learning_rate": 2.057430330908341e-06, + "loss": 0.9198, + "step": 5801 + }, + { + "epoch": 0.7985136251032204, + "grad_norm": 1.979612608927621, + "learning_rate": 2.054722869320377e-06, + "loss": 0.9494, + "step": 5802 + }, + { + "epoch": 0.7986512524084779, + "grad_norm": 1.7960735342312453, + "learning_rate": 2.0520169863502214e-06, + "loss": 0.8674, + "step": 5803 + }, + { + "epoch": 0.7987888797137352, + "grad_norm": 1.732292598579743, + "learning_rate": 2.0493126825354934e-06, + "loss": 0.9355, + "step": 5804 + }, + { + "epoch": 0.7989265070189926, + 
"grad_norm": 1.580893857829072, + "learning_rate": 2.0466099584135024e-06, + "loss": 0.9332, + "step": 5805 + }, + { + "epoch": 0.7990641343242499, + "grad_norm": 1.6447649439953038, + "learning_rate": 2.0439088145212438e-06, + "loss": 0.8763, + "step": 5806 + }, + { + "epoch": 0.7992017616295073, + "grad_norm": 2.0027457305309535, + "learning_rate": 2.0412092513953952e-06, + "loss": 0.8681, + "step": 5807 + }, + { + "epoch": 0.7993393889347646, + "grad_norm": 1.8660975859850022, + "learning_rate": 2.038511269572325e-06, + "loss": 0.9252, + "step": 5808 + }, + { + "epoch": 0.799477016240022, + "grad_norm": 1.8150541563347637, + "learning_rate": 2.0358148695880853e-06, + "loss": 0.8893, + "step": 5809 + }, + { + "epoch": 0.7996146435452793, + "grad_norm": 1.6236366564260942, + "learning_rate": 2.033120051978412e-06, + "loss": 0.8592, + "step": 5810 + }, + { + "epoch": 0.7997522708505368, + "grad_norm": 1.6418653559040328, + "learning_rate": 2.0304268172787313e-06, + "loss": 0.8279, + "step": 5811 + }, + { + "epoch": 0.7998898981557941, + "grad_norm": 1.9315930460308892, + "learning_rate": 2.0277351660241484e-06, + "loss": 0.9168, + "step": 5812 + }, + { + "epoch": 0.8000275254610515, + "grad_norm": 2.0745937668202985, + "learning_rate": 2.0250450987494607e-06, + "loss": 0.8631, + "step": 5813 + }, + { + "epoch": 0.8001651527663088, + "grad_norm": 2.1185366023113925, + "learning_rate": 2.022356615989147e-06, + "loss": 0.8707, + "step": 5814 + }, + { + "epoch": 0.8003027800715662, + "grad_norm": 1.9653393164715884, + "learning_rate": 2.0196697182773706e-06, + "loss": 0.9311, + "step": 5815 + }, + { + "epoch": 0.8004404073768235, + "grad_norm": 1.8463436268007716, + "learning_rate": 2.0169844061479837e-06, + "loss": 0.8803, + "step": 5816 + }, + { + "epoch": 0.8005780346820809, + "grad_norm": 1.778360640051144, + "learning_rate": 2.0143006801345167e-06, + "loss": 0.9151, + "step": 5817 + }, + { + "epoch": 0.8007156619873382, + "grad_norm": 1.7444193636759626, + "learning_rate": 2.011618540770197e-06, + "loss": 0.9078, + "step": 5818 + }, + { + "epoch": 0.8008532892925957, + "grad_norm": 1.7519083598862955, + "learning_rate": 2.008937988587919e-06, + "loss": 0.9222, + "step": 5819 + }, + { + "epoch": 0.800990916597853, + "grad_norm": 1.8951360656669713, + "learning_rate": 2.006259024120282e-06, + "loss": 0.885, + "step": 5820 + }, + { + "epoch": 0.8011285439031104, + "grad_norm": 1.7861673865907457, + "learning_rate": 2.0035816478995496e-06, + "loss": 0.9535, + "step": 5821 + }, + { + "epoch": 0.8012661712083677, + "grad_norm": 1.6623013456331268, + "learning_rate": 2.0009058604576903e-06, + "loss": 0.9634, + "step": 5822 + }, + { + "epoch": 0.8014037985136251, + "grad_norm": 1.7096603913023378, + "learning_rate": 1.998231662326335e-06, + "loss": 0.9222, + "step": 5823 + }, + { + "epoch": 0.8015414258188824, + "grad_norm": 1.7711963173911813, + "learning_rate": 1.9955590540368187e-06, + "loss": 0.9009, + "step": 5824 + }, + { + "epoch": 0.8016790531241398, + "grad_norm": 1.7230338572149915, + "learning_rate": 1.992888036120153e-06, + "loss": 0.8874, + "step": 5825 + }, + { + "epoch": 0.8018166804293972, + "grad_norm": 1.956773094566117, + "learning_rate": 1.990218609107023e-06, + "loss": 0.9341, + "step": 5826 + }, + { + "epoch": 0.8019543077346546, + "grad_norm": 1.7187761383556588, + "learning_rate": 1.9875507735278187e-06, + "loss": 1.0304, + "step": 5827 + }, + { + "epoch": 0.8020919350399119, + "grad_norm": 2.0995296221517843, + "learning_rate": 1.984884529912593e-06, + "loss": 0.9649, + 
"step": 5828 + }, + { + "epoch": 0.8022295623451693, + "grad_norm": 1.621479112429921, + "learning_rate": 1.9822198787911006e-06, + "loss": 0.9002, + "step": 5829 + }, + { + "epoch": 0.8023671896504266, + "grad_norm": 2.026368435735778, + "learning_rate": 1.9795568206927617e-06, + "loss": 0.8656, + "step": 5830 + }, + { + "epoch": 0.802504816955684, + "grad_norm": 1.6145256606961436, + "learning_rate": 1.9768953561466974e-06, + "loss": 0.9355, + "step": 5831 + }, + { + "epoch": 0.8026424442609413, + "grad_norm": 1.6326892756475844, + "learning_rate": 1.9742354856817014e-06, + "loss": 0.9161, + "step": 5832 + }, + { + "epoch": 0.8027800715661987, + "grad_norm": 1.6287183983640545, + "learning_rate": 1.9715772098262532e-06, + "loss": 0.8841, + "step": 5833 + }, + { + "epoch": 0.8029176988714561, + "grad_norm": 1.6764543506373148, + "learning_rate": 1.968920529108517e-06, + "loss": 0.9763, + "step": 5834 + }, + { + "epoch": 0.8030553261767135, + "grad_norm": 1.9399447585245437, + "learning_rate": 1.966265444056338e-06, + "loss": 0.9007, + "step": 5835 + }, + { + "epoch": 0.8031929534819708, + "grad_norm": 1.7121274564987772, + "learning_rate": 1.9636119551972456e-06, + "loss": 0.873, + "step": 5836 + }, + { + "epoch": 0.8033305807872282, + "grad_norm": 1.6913946390734245, + "learning_rate": 1.9609600630584512e-06, + "loss": 0.907, + "step": 5837 + }, + { + "epoch": 0.8034682080924855, + "grad_norm": 1.5623650560277835, + "learning_rate": 1.9583097681668496e-06, + "loss": 0.9258, + "step": 5838 + }, + { + "epoch": 0.8036058353977429, + "grad_norm": 1.8447460407470182, + "learning_rate": 1.9556610710490187e-06, + "loss": 0.9115, + "step": 5839 + }, + { + "epoch": 0.8037434627030002, + "grad_norm": 1.6222939145439987, + "learning_rate": 1.953013972231218e-06, + "loss": 0.8439, + "step": 5840 + }, + { + "epoch": 0.8038810900082577, + "grad_norm": 1.6872103806423533, + "learning_rate": 1.950368472239389e-06, + "loss": 0.8592, + "step": 5841 + }, + { + "epoch": 0.804018717313515, + "grad_norm": 1.828137062107912, + "learning_rate": 1.9477245715991575e-06, + "loss": 0.824, + "step": 5842 + }, + { + "epoch": 0.8041563446187724, + "grad_norm": 1.9217331968027502, + "learning_rate": 1.9450822708358306e-06, + "loss": 0.8858, + "step": 5843 + }, + { + "epoch": 0.8042939719240297, + "grad_norm": 1.914723633195201, + "learning_rate": 1.942441570474396e-06, + "loss": 0.9277, + "step": 5844 + }, + { + "epoch": 0.8044315992292871, + "grad_norm": 1.8003670722393237, + "learning_rate": 1.9398024710395257e-06, + "loss": 0.829, + "step": 5845 + }, + { + "epoch": 0.8045692265345444, + "grad_norm": 1.7710641832129357, + "learning_rate": 1.937164973055573e-06, + "loss": 0.9154, + "step": 5846 + }, + { + "epoch": 0.8047068538398018, + "grad_norm": 1.8695000149820071, + "learning_rate": 1.934529077046571e-06, + "loss": 0.9421, + "step": 5847 + }, + { + "epoch": 0.8048444811450591, + "grad_norm": 1.8075071508449876, + "learning_rate": 1.931894783536238e-06, + "loss": 0.8905, + "step": 5848 + }, + { + "epoch": 0.8049821084503166, + "grad_norm": 1.7352542004283655, + "learning_rate": 1.929262093047968e-06, + "loss": 0.9094, + "step": 5849 + }, + { + "epoch": 0.8051197357555739, + "grad_norm": 2.190709142167543, + "learning_rate": 1.9266310061048486e-06, + "loss": 0.9659, + "step": 5850 + }, + { + "epoch": 0.8052573630608313, + "grad_norm": 1.8656533491277172, + "learning_rate": 1.9240015232296305e-06, + "loss": 0.8352, + "step": 5851 + }, + { + "epoch": 0.8053949903660886, + "grad_norm": 1.7607209554900634, + 
"learning_rate": 1.9213736449447675e-06, + "loss": 0.9439, + "step": 5852 + }, + { + "epoch": 0.805532617671346, + "grad_norm": 1.7056651613337617, + "learning_rate": 1.9187473717723716e-06, + "loss": 0.8869, + "step": 5853 + }, + { + "epoch": 0.8056702449766033, + "grad_norm": 1.9177048920950863, + "learning_rate": 1.9161227042342566e-06, + "loss": 0.8952, + "step": 5854 + }, + { + "epoch": 0.8058078722818607, + "grad_norm": 1.831669068943791, + "learning_rate": 1.9134996428519006e-06, + "loss": 0.8489, + "step": 5855 + }, + { + "epoch": 0.805945499587118, + "grad_norm": 1.8837379302674335, + "learning_rate": 1.9108781881464756e-06, + "loss": 0.88, + "step": 5856 + }, + { + "epoch": 0.8060831268923755, + "grad_norm": 1.7456635796187767, + "learning_rate": 1.9082583406388277e-06, + "loss": 0.8467, + "step": 5857 + }, + { + "epoch": 0.8062207541976328, + "grad_norm": 1.7199221601296177, + "learning_rate": 1.9056401008494807e-06, + "loss": 0.9648, + "step": 5858 + }, + { + "epoch": 0.8063583815028902, + "grad_norm": 1.740004351770258, + "learning_rate": 1.9030234692986494e-06, + "loss": 0.995, + "step": 5859 + }, + { + "epoch": 0.8064960088081475, + "grad_norm": 1.6498204229552145, + "learning_rate": 1.9004084465062144e-06, + "loss": 0.917, + "step": 5860 + }, + { + "epoch": 0.8066336361134049, + "grad_norm": 1.9696647073405886, + "learning_rate": 1.897795032991756e-06, + "loss": 0.9082, + "step": 5861 + }, + { + "epoch": 0.8067712634186622, + "grad_norm": 1.7585075112999122, + "learning_rate": 1.895183229274511e-06, + "loss": 0.9068, + "step": 5862 + }, + { + "epoch": 0.8069088907239196, + "grad_norm": 1.7347279731697578, + "learning_rate": 1.8925730358734185e-06, + "loss": 0.8912, + "step": 5863 + }, + { + "epoch": 0.807046518029177, + "grad_norm": 2.208274573462775, + "learning_rate": 1.8899644533070848e-06, + "loss": 0.9535, + "step": 5864 + }, + { + "epoch": 0.8071841453344344, + "grad_norm": 2.3093315498095044, + "learning_rate": 1.8873574820938011e-06, + "loss": 0.9285, + "step": 5865 + }, + { + "epoch": 0.8073217726396917, + "grad_norm": 2.0805032960040535, + "learning_rate": 1.8847521227515354e-06, + "loss": 0.9191, + "step": 5866 + }, + { + "epoch": 0.8074593999449491, + "grad_norm": 1.6375995549097349, + "learning_rate": 1.8821483757979364e-06, + "loss": 0.9138, + "step": 5867 + }, + { + "epoch": 0.8075970272502064, + "grad_norm": 1.779761412176646, + "learning_rate": 1.879546241750334e-06, + "loss": 0.8976, + "step": 5868 + }, + { + "epoch": 0.8077346545554638, + "grad_norm": 1.9148868428212196, + "learning_rate": 1.8769457211257369e-06, + "loss": 0.9278, + "step": 5869 + }, + { + "epoch": 0.8078722818607211, + "grad_norm": 2.9562099808664204, + "learning_rate": 1.8743468144408328e-06, + "loss": 0.8945, + "step": 5870 + }, + { + "epoch": 0.8080099091659785, + "grad_norm": 1.6912473718378254, + "learning_rate": 1.8717495222119874e-06, + "loss": 0.9068, + "step": 5871 + }, + { + "epoch": 0.8081475364712359, + "grad_norm": 1.8941069544078148, + "learning_rate": 1.8691538449552493e-06, + "loss": 0.9548, + "step": 5872 + }, + { + "epoch": 0.8082851637764933, + "grad_norm": 1.7007340997479172, + "learning_rate": 1.8665597831863436e-06, + "loss": 0.9117, + "step": 5873 + }, + { + "epoch": 0.8084227910817506, + "grad_norm": 1.8550940728303436, + "learning_rate": 1.8639673374206735e-06, + "loss": 0.9315, + "step": 5874 + }, + { + "epoch": 0.808560418387008, + "grad_norm": 1.9560189926664586, + "learning_rate": 1.8613765081733226e-06, + "loss": 0.9103, + "step": 5875 + }, + { + "epoch": 
0.8086980456922653, + "grad_norm": 1.857538935711028, + "learning_rate": 1.8587872959590548e-06, + "loss": 0.8306, + "step": 5876 + }, + { + "epoch": 0.8088356729975227, + "grad_norm": 1.7458577703745441, + "learning_rate": 1.8561997012923106e-06, + "loss": 0.8367, + "step": 5877 + }, + { + "epoch": 0.80897330030278, + "grad_norm": 1.6971000184472496, + "learning_rate": 1.8536137246872088e-06, + "loss": 0.9307, + "step": 5878 + }, + { + "epoch": 0.8091109276080375, + "grad_norm": 1.9779008163308227, + "learning_rate": 1.8510293666575453e-06, + "loss": 0.8744, + "step": 5879 + }, + { + "epoch": 0.8092485549132948, + "grad_norm": 1.649304634151418, + "learning_rate": 1.8484466277168046e-06, + "loss": 0.8972, + "step": 5880 + }, + { + "epoch": 0.8093861822185522, + "grad_norm": 1.7134239657030648, + "learning_rate": 1.8458655083781318e-06, + "loss": 0.9076, + "step": 5881 + }, + { + "epoch": 0.8095238095238095, + "grad_norm": 1.6434251866855878, + "learning_rate": 1.8432860091543692e-06, + "loss": 0.9323, + "step": 5882 + }, + { + "epoch": 0.8096614368290669, + "grad_norm": 1.7727123101705797, + "learning_rate": 1.8407081305580178e-06, + "loss": 0.9605, + "step": 5883 + }, + { + "epoch": 0.8097990641343242, + "grad_norm": 2.139855608006018, + "learning_rate": 1.8381318731012775e-06, + "loss": 0.9704, + "step": 5884 + }, + { + "epoch": 0.8099366914395816, + "grad_norm": 1.823633681444163, + "learning_rate": 1.8355572372960063e-06, + "loss": 0.892, + "step": 5885 + }, + { + "epoch": 0.8100743187448389, + "grad_norm": 1.824059112826107, + "learning_rate": 1.8329842236537543e-06, + "loss": 0.9221, + "step": 5886 + }, + { + "epoch": 0.8102119460500964, + "grad_norm": 1.9610617145350893, + "learning_rate": 1.8304128326857429e-06, + "loss": 0.8569, + "step": 5887 + }, + { + "epoch": 0.8103495733553537, + "grad_norm": 1.8453968549571853, + "learning_rate": 1.8278430649028734e-06, + "loss": 0.8999, + "step": 5888 + }, + { + "epoch": 0.8104872006606111, + "grad_norm": 2.063010382519787, + "learning_rate": 1.8252749208157238e-06, + "loss": 0.9103, + "step": 5889 + }, + { + "epoch": 0.8106248279658684, + "grad_norm": 1.5989919388871696, + "learning_rate": 1.822708400934542e-06, + "loss": 0.8899, + "step": 5890 + }, + { + "epoch": 0.8107624552711258, + "grad_norm": 1.725104448888569, + "learning_rate": 1.8201435057692718e-06, + "loss": 0.9651, + "step": 5891 + }, + { + "epoch": 0.8109000825763831, + "grad_norm": 1.8584264095018412, + "learning_rate": 1.8175802358295124e-06, + "loss": 0.8897, + "step": 5892 + }, + { + "epoch": 0.8110377098816405, + "grad_norm": 1.8391357835036597, + "learning_rate": 1.8150185916245565e-06, + "loss": 0.9717, + "step": 5893 + }, + { + "epoch": 0.8111753371868978, + "grad_norm": 1.8237961678674943, + "learning_rate": 1.8124585736633672e-06, + "loss": 0.8039, + "step": 5894 + }, + { + "epoch": 0.8113129644921553, + "grad_norm": 1.577296907577867, + "learning_rate": 1.8099001824545836e-06, + "loss": 0.9147, + "step": 5895 + }, + { + "epoch": 0.8114505917974126, + "grad_norm": 1.7997354480075671, + "learning_rate": 1.8073434185065232e-06, + "loss": 0.9461, + "step": 5896 + }, + { + "epoch": 0.81158821910267, + "grad_norm": 2.075012562113395, + "learning_rate": 1.8047882823271812e-06, + "loss": 0.9117, + "step": 5897 + }, + { + "epoch": 0.8117258464079273, + "grad_norm": 1.689439209609592, + "learning_rate": 1.8022347744242263e-06, + "loss": 0.8715, + "step": 5898 + }, + { + "epoch": 0.8118634737131847, + "grad_norm": 2.138677009035833, + "learning_rate": 1.7996828953050072e-06, 
+ "loss": 0.9193, + "step": 5899 + }, + { + "epoch": 0.812001101018442, + "grad_norm": 1.6341215845690602, + "learning_rate": 1.797132645476547e-06, + "loss": 0.9192, + "step": 5900 + }, + { + "epoch": 0.8121387283236994, + "grad_norm": 1.7348965883010306, + "learning_rate": 1.7945840254455437e-06, + "loss": 0.894, + "step": 5901 + }, + { + "epoch": 0.8122763556289568, + "grad_norm": 1.6379400600057166, + "learning_rate": 1.7920370357183758e-06, + "loss": 0.8892, + "step": 5902 + }, + { + "epoch": 0.8124139829342142, + "grad_norm": 2.3067046533979063, + "learning_rate": 1.789491676801094e-06, + "loss": 0.8574, + "step": 5903 + }, + { + "epoch": 0.8125516102394715, + "grad_norm": 2.254981002341476, + "learning_rate": 1.7869479491994235e-06, + "loss": 0.9178, + "step": 5904 + }, + { + "epoch": 0.8126892375447289, + "grad_norm": 1.9036845189469558, + "learning_rate": 1.7844058534187769e-06, + "loss": 0.9231, + "step": 5905 + }, + { + "epoch": 0.8128268648499862, + "grad_norm": 1.7285400978119807, + "learning_rate": 1.7818653899642246e-06, + "loss": 0.8978, + "step": 5906 + }, + { + "epoch": 0.8129644921552436, + "grad_norm": 1.893218424713791, + "learning_rate": 1.7793265593405252e-06, + "loss": 0.8463, + "step": 5907 + }, + { + "epoch": 0.8131021194605009, + "grad_norm": 1.6897049268843178, + "learning_rate": 1.7767893620521103e-06, + "loss": 0.8288, + "step": 5908 + }, + { + "epoch": 0.8132397467657583, + "grad_norm": 1.7966756169892946, + "learning_rate": 1.7742537986030862e-06, + "loss": 0.9233, + "step": 5909 + }, + { + "epoch": 0.8133773740710157, + "grad_norm": 1.9169846712803489, + "learning_rate": 1.7717198694972326e-06, + "loss": 0.9985, + "step": 5910 + }, + { + "epoch": 0.8135150013762731, + "grad_norm": 1.7605628440732102, + "learning_rate": 1.7691875752380072e-06, + "loss": 0.8552, + "step": 5911 + }, + { + "epoch": 0.8136526286815304, + "grad_norm": 1.7612211027070188, + "learning_rate": 1.7666569163285475e-06, + "loss": 0.973, + "step": 5912 + }, + { + "epoch": 0.8137902559867878, + "grad_norm": 1.7426999594083206, + "learning_rate": 1.7641278932716499e-06, + "loss": 0.9729, + "step": 5913 + }, + { + "epoch": 0.8139278832920451, + "grad_norm": 1.4962393307390964, + "learning_rate": 1.7616005065698094e-06, + "loss": 0.8938, + "step": 5914 + }, + { + "epoch": 0.8140655105973025, + "grad_norm": 1.5539095638620708, + "learning_rate": 1.759074756725172e-06, + "loss": 0.899, + "step": 5915 + }, + { + "epoch": 0.8142031379025598, + "grad_norm": 1.6308362081607573, + "learning_rate": 1.7565506442395796e-06, + "loss": 0.8921, + "step": 5916 + }, + { + "epoch": 0.8143407652078173, + "grad_norm": 1.8335473936310165, + "learning_rate": 1.7540281696145278e-06, + "loss": 0.9336, + "step": 5917 + }, + { + "epoch": 0.8144783925130746, + "grad_norm": 1.8768696655085544, + "learning_rate": 1.7515073333512067e-06, + "loss": 0.926, + "step": 5918 + }, + { + "epoch": 0.814616019818332, + "grad_norm": 1.7483237710960509, + "learning_rate": 1.748988135950469e-06, + "loss": 0.8921, + "step": 5919 + }, + { + "epoch": 0.8147536471235893, + "grad_norm": 1.833444161646326, + "learning_rate": 1.7464705779128443e-06, + "loss": 0.8545, + "step": 5920 + }, + { + "epoch": 0.8148912744288467, + "grad_norm": 1.7478611116745095, + "learning_rate": 1.7439546597385404e-06, + "loss": 0.9046, + "step": 5921 + }, + { + "epoch": 0.815028901734104, + "grad_norm": 1.533946563444394, + "learning_rate": 1.7414403819274272e-06, + "loss": 0.9288, + "step": 5922 + }, + { + "epoch": 0.8151665290393614, + "grad_norm": 
1.7523463045459553, + "learning_rate": 1.7389277449790676e-06, + "loss": 0.891, + "step": 5923 + }, + { + "epoch": 0.8153041563446187, + "grad_norm": 1.9146751650421423, + "learning_rate": 1.7364167493926786e-06, + "loss": 0.9489, + "step": 5924 + }, + { + "epoch": 0.8154417836498762, + "grad_norm": 1.7896290834517008, + "learning_rate": 1.733907395667167e-06, + "loss": 0.907, + "step": 5925 + }, + { + "epoch": 0.8155794109551335, + "grad_norm": 1.7487091959974983, + "learning_rate": 1.731399684301106e-06, + "loss": 0.9179, + "step": 5926 + }, + { + "epoch": 0.8157170382603909, + "grad_norm": 2.004564687643618, + "learning_rate": 1.7288936157927437e-06, + "loss": 0.8611, + "step": 5927 + }, + { + "epoch": 0.8158546655656482, + "grad_norm": 1.799723074431501, + "learning_rate": 1.72638919064e-06, + "loss": 0.911, + "step": 5928 + }, + { + "epoch": 0.8159922928709056, + "grad_norm": 1.7672423214087225, + "learning_rate": 1.7238864093404707e-06, + "loss": 0.8981, + "step": 5929 + }, + { + "epoch": 0.8161299201761629, + "grad_norm": 1.5531389445030734, + "learning_rate": 1.7213852723914248e-06, + "loss": 0.9748, + "step": 5930 + }, + { + "epoch": 0.8162675474814203, + "grad_norm": 1.9908244840292904, + "learning_rate": 1.7188857802898028e-06, + "loss": 0.9678, + "step": 5931 + }, + { + "epoch": 0.8164051747866776, + "grad_norm": 1.7794265490857712, + "learning_rate": 1.7163879335322198e-06, + "loss": 0.9183, + "step": 5932 + }, + { + "epoch": 0.8165428020919351, + "grad_norm": 1.9747284266630423, + "learning_rate": 1.713891732614965e-06, + "loss": 0.897, + "step": 5933 + }, + { + "epoch": 0.8166804293971924, + "grad_norm": 1.8136426900793992, + "learning_rate": 1.7113971780339978e-06, + "loss": 1.0084, + "step": 5934 + }, + { + "epoch": 0.8168180567024498, + "grad_norm": 1.7901329394675152, + "learning_rate": 1.7089042702849534e-06, + "loss": 0.9364, + "step": 5935 + }, + { + "epoch": 0.8169556840077071, + "grad_norm": 1.9183301483959305, + "learning_rate": 1.7064130098631348e-06, + "loss": 0.9155, + "step": 5936 + }, + { + "epoch": 0.8170933113129645, + "grad_norm": 1.6221334619433319, + "learning_rate": 1.7039233972635295e-06, + "loss": 0.8827, + "step": 5937 + }, + { + "epoch": 0.8172309386182218, + "grad_norm": 1.9957651473760132, + "learning_rate": 1.7014354329807815e-06, + "loss": 0.8503, + "step": 5938 + }, + { + "epoch": 0.8173685659234792, + "grad_norm": 2.1080582046945886, + "learning_rate": 1.6989491175092188e-06, + "loss": 0.8928, + "step": 5939 + }, + { + "epoch": 0.8175061932287366, + "grad_norm": 1.7370528409030321, + "learning_rate": 1.6964644513428363e-06, + "loss": 0.8731, + "step": 5940 + }, + { + "epoch": 0.817643820533994, + "grad_norm": 1.7421271030140655, + "learning_rate": 1.6939814349753049e-06, + "loss": 0.9438, + "step": 5941 + }, + { + "epoch": 0.8177814478392513, + "grad_norm": 1.681224383759432, + "learning_rate": 1.691500068899965e-06, + "loss": 0.8653, + "step": 5942 + }, + { + "epoch": 0.8179190751445087, + "grad_norm": 1.5965450184686145, + "learning_rate": 1.6890203536098282e-06, + "loss": 0.8535, + "step": 5943 + }, + { + "epoch": 0.818056702449766, + "grad_norm": 1.7645943608027552, + "learning_rate": 1.686542289597588e-06, + "loss": 0.9454, + "step": 5944 + }, + { + "epoch": 0.8181943297550234, + "grad_norm": 1.7627548580864085, + "learning_rate": 1.6840658773555896e-06, + "loss": 0.9023, + "step": 5945 + }, + { + "epoch": 0.8183319570602807, + "grad_norm": 1.8482797006101055, + "learning_rate": 1.6815911173758747e-06, + "loss": 0.9745, + "step": 5946 + }, 
+ { + "epoch": 0.8184695843655381, + "grad_norm": 2.067469542196188, + "learning_rate": 1.6791180101501326e-06, + "loss": 0.857, + "step": 5947 + }, + { + "epoch": 0.8186072116707955, + "grad_norm": 2.0289724217410696, + "learning_rate": 1.6766465561697442e-06, + "loss": 0.9052, + "step": 5948 + }, + { + "epoch": 0.8187448389760529, + "grad_norm": 1.8160111192932156, + "learning_rate": 1.6741767559257505e-06, + "loss": 0.9242, + "step": 5949 + }, + { + "epoch": 0.8188824662813102, + "grad_norm": 1.9798753211157085, + "learning_rate": 1.6717086099088674e-06, + "loss": 0.9133, + "step": 5950 + }, + { + "epoch": 0.8190200935865676, + "grad_norm": 1.8904100062757685, + "learning_rate": 1.669242118609482e-06, + "loss": 0.8457, + "step": 5951 + }, + { + "epoch": 0.8191577208918249, + "grad_norm": 1.6353768802108426, + "learning_rate": 1.666777282517652e-06, + "loss": 0.8949, + "step": 5952 + }, + { + "epoch": 0.8192953481970823, + "grad_norm": 1.5649281220013609, + "learning_rate": 1.664314102123109e-06, + "loss": 0.8889, + "step": 5953 + }, + { + "epoch": 0.8194329755023396, + "grad_norm": 1.7785820148166518, + "learning_rate": 1.661852577915245e-06, + "loss": 0.9097, + "step": 5954 + }, + { + "epoch": 0.8195706028075971, + "grad_norm": 1.7502633223072503, + "learning_rate": 1.6593927103831399e-06, + "loss": 0.8888, + "step": 5955 + }, + { + "epoch": 0.8197082301128544, + "grad_norm": 1.66500490336568, + "learning_rate": 1.6569345000155335e-06, + "loss": 0.873, + "step": 5956 + }, + { + "epoch": 0.8198458574181118, + "grad_norm": 1.807777808988565, + "learning_rate": 1.6544779473008366e-06, + "loss": 0.9016, + "step": 5957 + }, + { + "epoch": 0.8199834847233691, + "grad_norm": 1.6601340473930815, + "learning_rate": 1.6520230527271352e-06, + "loss": 0.8785, + "step": 5958 + }, + { + "epoch": 0.8201211120286265, + "grad_norm": 2.059366986441786, + "learning_rate": 1.6495698167821805e-06, + "loss": 0.8933, + "step": 5959 + }, + { + "epoch": 0.8202587393338838, + "grad_norm": 1.7186924963206016, + "learning_rate": 1.6471182399533992e-06, + "loss": 0.8322, + "step": 5960 + }, + { + "epoch": 0.8203963666391412, + "grad_norm": 1.6962549222278447, + "learning_rate": 1.6446683227278848e-06, + "loss": 0.9211, + "step": 5961 + }, + { + "epoch": 0.8205339939443985, + "grad_norm": 1.7273322721931756, + "learning_rate": 1.6422200655924025e-06, + "loss": 0.8409, + "step": 5962 + }, + { + "epoch": 0.820671621249656, + "grad_norm": 1.8030875496431475, + "learning_rate": 1.6397734690333877e-06, + "loss": 0.9094, + "step": 5963 + }, + { + "epoch": 0.8208092485549133, + "grad_norm": 2.007487638080426, + "learning_rate": 1.637328533536946e-06, + "loss": 0.8514, + "step": 5964 + }, + { + "epoch": 0.8209468758601707, + "grad_norm": 1.7724447770390186, + "learning_rate": 1.634885259588852e-06, + "loss": 0.8523, + "step": 5965 + }, + { + "epoch": 0.821084503165428, + "grad_norm": 1.9355178811153453, + "learning_rate": 1.6324436476745487e-06, + "loss": 1.006, + "step": 5966 + }, + { + "epoch": 0.8212221304706854, + "grad_norm": 1.8887905745903646, + "learning_rate": 1.6300036982791578e-06, + "loss": 0.8572, + "step": 5967 + }, + { + "epoch": 0.8213597577759427, + "grad_norm": 1.978904505309744, + "learning_rate": 1.627565411887454e-06, + "loss": 0.882, + "step": 5968 + }, + { + "epoch": 0.8214973850812001, + "grad_norm": 1.8071202759003901, + "learning_rate": 1.6251287889839018e-06, + "loss": 0.889, + "step": 5969 + }, + { + "epoch": 0.8216350123864574, + "grad_norm": 1.5956031022846122, + "learning_rate": 
1.6226938300526175e-06, + "loss": 0.8888, + "step": 5970 + }, + { + "epoch": 0.8217726396917149, + "grad_norm": 1.8192853400275606, + "learning_rate": 1.620260535577397e-06, + "loss": 0.8642, + "step": 5971 + }, + { + "epoch": 0.8219102669969722, + "grad_norm": 1.8444418380144318, + "learning_rate": 1.6178289060417007e-06, + "loss": 0.9, + "step": 5972 + }, + { + "epoch": 0.8220478943022296, + "grad_norm": 1.6739141534784143, + "learning_rate": 1.6153989419286597e-06, + "loss": 0.9323, + "step": 5973 + }, + { + "epoch": 0.8221855216074869, + "grad_norm": 1.620761602087883, + "learning_rate": 1.6129706437210812e-06, + "loss": 0.8267, + "step": 5974 + }, + { + "epoch": 0.8223231489127443, + "grad_norm": 1.875641359089364, + "learning_rate": 1.610544011901425e-06, + "loss": 0.9271, + "step": 5975 + }, + { + "epoch": 0.8224607762180016, + "grad_norm": 2.9390131118366716, + "learning_rate": 1.6081190469518393e-06, + "loss": 0.811, + "step": 5976 + }, + { + "epoch": 0.822598403523259, + "grad_norm": 1.8841210802653132, + "learning_rate": 1.6056957493541215e-06, + "loss": 0.8674, + "step": 5977 + }, + { + "epoch": 0.8227360308285164, + "grad_norm": 1.867670969832213, + "learning_rate": 1.6032741195897573e-06, + "loss": 0.9491, + "step": 5978 + }, + { + "epoch": 0.8228736581337738, + "grad_norm": 1.6588959027424228, + "learning_rate": 1.600854158139883e-06, + "loss": 0.9383, + "step": 5979 + }, + { + "epoch": 0.8230112854390311, + "grad_norm": 1.8549617387155988, + "learning_rate": 1.598435865485317e-06, + "loss": 0.8519, + "step": 5980 + }, + { + "epoch": 0.8231489127442885, + "grad_norm": 2.004669836985817, + "learning_rate": 1.5960192421065402e-06, + "loss": 0.9206, + "step": 5981 + }, + { + "epoch": 0.8232865400495458, + "grad_norm": 1.5988718518457394, + "learning_rate": 1.5936042884837022e-06, + "loss": 0.874, + "step": 5982 + }, + { + "epoch": 0.8234241673548032, + "grad_norm": 1.907700241593544, + "learning_rate": 1.5911910050966217e-06, + "loss": 0.8892, + "step": 5983 + }, + { + "epoch": 0.8235617946600605, + "grad_norm": 1.9557221289862043, + "learning_rate": 1.588779392424783e-06, + "loss": 0.8482, + "step": 5984 + }, + { + "epoch": 0.8236994219653179, + "grad_norm": 1.6339694592024638, + "learning_rate": 1.5863694509473448e-06, + "loss": 0.9825, + "step": 5985 + }, + { + "epoch": 0.8238370492705753, + "grad_norm": 1.602763118539975, + "learning_rate": 1.5839611811431222e-06, + "loss": 0.9375, + "step": 5986 + }, + { + "epoch": 0.8239746765758327, + "grad_norm": 1.8541876072039603, + "learning_rate": 1.581554583490612e-06, + "loss": 0.8617, + "step": 5987 + }, + { + "epoch": 0.82411230388109, + "grad_norm": 1.772090469561385, + "learning_rate": 1.579149658467971e-06, + "loss": 0.9335, + "step": 5988 + }, + { + "epoch": 0.8242499311863474, + "grad_norm": 1.8086505504839603, + "learning_rate": 1.5767464065530236e-06, + "loss": 0.9184, + "step": 5989 + }, + { + "epoch": 0.8243875584916047, + "grad_norm": 1.6400995105942466, + "learning_rate": 1.5743448282232632e-06, + "loss": 0.9227, + "step": 5990 + }, + { + "epoch": 0.8245251857968621, + "grad_norm": 1.961234479397404, + "learning_rate": 1.5719449239558504e-06, + "loss": 0.9106, + "step": 5991 + }, + { + "epoch": 0.8246628131021194, + "grad_norm": 1.7962021711435665, + "learning_rate": 1.5695466942276138e-06, + "loss": 0.8809, + "step": 5992 + }, + { + "epoch": 0.8248004404073769, + "grad_norm": 1.7425725890646862, + "learning_rate": 1.567150139515049e-06, + "loss": 0.9053, + "step": 5993 + }, + { + "epoch": 0.8249380677126342, + 
"grad_norm": 1.6613110819641825, + "learning_rate": 1.5647552602943183e-06, + "loss": 0.8914, + "step": 5994 + }, + { + "epoch": 0.8250756950178916, + "grad_norm": 1.7537593163092753, + "learning_rate": 1.5623620570412513e-06, + "loss": 0.894, + "step": 5995 + }, + { + "epoch": 0.8252133223231489, + "grad_norm": 1.7421062791146587, + "learning_rate": 1.559970530231345e-06, + "loss": 0.9178, + "step": 5996 + }, + { + "epoch": 0.8253509496284063, + "grad_norm": 1.785794115724862, + "learning_rate": 1.557580680339763e-06, + "loss": 0.8947, + "step": 5997 + }, + { + "epoch": 0.8254885769336636, + "grad_norm": 1.678322573360989, + "learning_rate": 1.5551925078413332e-06, + "loss": 0.9137, + "step": 5998 + }, + { + "epoch": 0.825626204238921, + "grad_norm": 1.8642530493755975, + "learning_rate": 1.5528060132105604e-06, + "loss": 0.9334, + "step": 5999 + }, + { + "epoch": 0.8257638315441783, + "grad_norm": 1.693387704353986, + "learning_rate": 1.5504211969215977e-06, + "loss": 0.8468, + "step": 6000 + }, + { + "epoch": 0.8259014588494358, + "grad_norm": 1.7635861715293604, + "learning_rate": 1.5480380594482868e-06, + "loss": 0.8664, + "step": 6001 + }, + { + "epoch": 0.8260390861546931, + "grad_norm": 1.647204212512501, + "learning_rate": 1.5456566012641162e-06, + "loss": 0.9177, + "step": 6002 + }, + { + "epoch": 0.8261767134599505, + "grad_norm": 1.8065204008396376, + "learning_rate": 1.5432768228422511e-06, + "loss": 0.8888, + "step": 6003 + }, + { + "epoch": 0.8263143407652078, + "grad_norm": 1.9338149004553775, + "learning_rate": 1.5408987246555207e-06, + "loss": 0.8264, + "step": 6004 + }, + { + "epoch": 0.8264519680704652, + "grad_norm": 1.6972476159166154, + "learning_rate": 1.53852230717642e-06, + "loss": 0.8449, + "step": 6005 + }, + { + "epoch": 0.8265895953757225, + "grad_norm": 1.7424058280356483, + "learning_rate": 1.5361475708771168e-06, + "loss": 0.8679, + "step": 6006 + }, + { + "epoch": 0.8267272226809799, + "grad_norm": 2.0524376846678276, + "learning_rate": 1.5337745162294281e-06, + "loss": 0.9167, + "step": 6007 + }, + { + "epoch": 0.8268648499862372, + "grad_norm": 1.6417447328674104, + "learning_rate": 1.5314031437048582e-06, + "loss": 0.881, + "step": 6008 + }, + { + "epoch": 0.8270024772914947, + "grad_norm": 1.5945088400528238, + "learning_rate": 1.5290334537745566e-06, + "loss": 0.8915, + "step": 6009 + }, + { + "epoch": 0.827140104596752, + "grad_norm": 1.6429579140843023, + "learning_rate": 1.5266654469093578e-06, + "loss": 0.9514, + "step": 6010 + }, + { + "epoch": 0.8272777319020094, + "grad_norm": 1.670821312578671, + "learning_rate": 1.5242991235797422e-06, + "loss": 0.8994, + "step": 6011 + }, + { + "epoch": 0.8274153592072667, + "grad_norm": 1.6570818406474768, + "learning_rate": 1.5219344842558735e-06, + "loss": 0.8667, + "step": 6012 + }, + { + "epoch": 0.8275529865125241, + "grad_norm": 1.6398390722913012, + "learning_rate": 1.5195715294075708e-06, + "loss": 0.8544, + "step": 6013 + }, + { + "epoch": 0.8276906138177814, + "grad_norm": 1.7930821626327464, + "learning_rate": 1.5172102595043204e-06, + "loss": 0.8751, + "step": 6014 + }, + { + "epoch": 0.8278282411230388, + "grad_norm": 1.87218638970103, + "learning_rate": 1.5148506750152748e-06, + "loss": 0.8796, + "step": 6015 + }, + { + "epoch": 0.8279658684282962, + "grad_norm": 1.7969397927169122, + "learning_rate": 1.5124927764092511e-06, + "loss": 0.744, + "step": 6016 + }, + { + "epoch": 0.8281034957335536, + "grad_norm": 1.8003270595762026, + "learning_rate": 1.5101365641547316e-06, + "loss": 0.9347, + 
"step": 6017 + }, + { + "epoch": 0.8282411230388109, + "grad_norm": 1.8119493444352799, + "learning_rate": 1.5077820387198627e-06, + "loss": 0.8865, + "step": 6018 + }, + { + "epoch": 0.8283787503440683, + "grad_norm": 1.7844322655588314, + "learning_rate": 1.505429200572458e-06, + "loss": 0.8559, + "step": 6019 + }, + { + "epoch": 0.8285163776493256, + "grad_norm": 1.8231631209107024, + "learning_rate": 1.5030780501799924e-06, + "loss": 0.9199, + "step": 6020 + }, + { + "epoch": 0.828654004954583, + "grad_norm": 1.7676342485546395, + "learning_rate": 1.500728588009608e-06, + "loss": 0.9254, + "step": 6021 + }, + { + "epoch": 0.8287916322598403, + "grad_norm": 1.5435549378672047, + "learning_rate": 1.4983808145281108e-06, + "loss": 0.8798, + "step": 6022 + }, + { + "epoch": 0.8289292595650977, + "grad_norm": 1.8045666932103723, + "learning_rate": 1.4960347302019718e-06, + "loss": 0.9228, + "step": 6023 + }, + { + "epoch": 0.8290668868703551, + "grad_norm": 1.7196811600975628, + "learning_rate": 1.4936903354973241e-06, + "loss": 0.9199, + "step": 6024 + }, + { + "epoch": 0.8292045141756125, + "grad_norm": 2.1128260571064947, + "learning_rate": 1.491347630879969e-06, + "loss": 0.8513, + "step": 6025 + }, + { + "epoch": 0.8293421414808698, + "grad_norm": 2.0650919825857206, + "learning_rate": 1.4890066168153695e-06, + "loss": 0.9354, + "step": 6026 + }, + { + "epoch": 0.8294797687861272, + "grad_norm": 1.8347582469123436, + "learning_rate": 1.486667293768651e-06, + "loss": 0.8592, + "step": 6027 + }, + { + "epoch": 0.8296173960913845, + "grad_norm": 1.8612623018733891, + "learning_rate": 1.4843296622046054e-06, + "loss": 0.9146, + "step": 6028 + }, + { + "epoch": 0.8297550233966419, + "grad_norm": 1.9485307231467388, + "learning_rate": 1.4819937225876935e-06, + "loss": 0.8136, + "step": 6029 + }, + { + "epoch": 0.8298926507018992, + "grad_norm": 1.530649891089945, + "learning_rate": 1.4796594753820248e-06, + "loss": 0.9591, + "step": 6030 + }, + { + "epoch": 0.8300302780071567, + "grad_norm": 1.7403344872108133, + "learning_rate": 1.4773269210513918e-06, + "loss": 0.9159, + "step": 6031 + }, + { + "epoch": 0.830167905312414, + "grad_norm": 2.0053602143707283, + "learning_rate": 1.4749960600592338e-06, + "loss": 0.9292, + "step": 6032 + }, + { + "epoch": 0.8303055326176714, + "grad_norm": 1.71662542031467, + "learning_rate": 1.4726668928686684e-06, + "loss": 0.8267, + "step": 6033 + }, + { + "epoch": 0.8304431599229287, + "grad_norm": 1.8465188823283023, + "learning_rate": 1.4703394199424615e-06, + "loss": 0.8344, + "step": 6034 + }, + { + "epoch": 0.8305807872281861, + "grad_norm": 1.9201293857925044, + "learning_rate": 1.4680136417430512e-06, + "loss": 0.9252, + "step": 6035 + }, + { + "epoch": 0.8307184145334434, + "grad_norm": 1.7147575710165381, + "learning_rate": 1.4656895587325459e-06, + "loss": 0.9064, + "step": 6036 + }, + { + "epoch": 0.8308560418387008, + "grad_norm": 1.7885429425135193, + "learning_rate": 1.4633671713726982e-06, + "loss": 0.931, + "step": 6037 + }, + { + "epoch": 0.8309936691439581, + "grad_norm": 1.8622770504777373, + "learning_rate": 1.4610464801249447e-06, + "loss": 0.9527, + "step": 6038 + }, + { + "epoch": 0.8311312964492156, + "grad_norm": 1.6068313407525372, + "learning_rate": 1.4587274854503653e-06, + "loss": 0.8744, + "step": 6039 + }, + { + "epoch": 0.8312689237544729, + "grad_norm": 2.1796897521838394, + "learning_rate": 1.4564101878097215e-06, + "loss": 0.9031, + "step": 6040 + }, + { + "epoch": 0.8314065510597303, + "grad_norm": 1.6578027929159391, + 
"learning_rate": 1.4540945876634194e-06, + "loss": 0.9256, + "step": 6041 + }, + { + "epoch": 0.8315441783649876, + "grad_norm": 1.5499973392117492, + "learning_rate": 1.4517806854715455e-06, + "loss": 0.8583, + "step": 6042 + }, + { + "epoch": 0.831681805670245, + "grad_norm": 1.716440207379264, + "learning_rate": 1.4494684816938364e-06, + "loss": 0.98, + "step": 6043 + }, + { + "epoch": 0.8318194329755023, + "grad_norm": 1.5520433952958468, + "learning_rate": 1.447157976789696e-06, + "loss": 0.9156, + "step": 6044 + }, + { + "epoch": 0.8319570602807597, + "grad_norm": 1.9394338443434653, + "learning_rate": 1.44484917121819e-06, + "loss": 0.9672, + "step": 6045 + }, + { + "epoch": 0.832094687586017, + "grad_norm": 1.5651069802077147, + "learning_rate": 1.4425420654380462e-06, + "loss": 0.9553, + "step": 6046 + }, + { + "epoch": 0.8322323148912745, + "grad_norm": 1.9230074071191414, + "learning_rate": 1.4402366599076546e-06, + "loss": 0.8945, + "step": 6047 + }, + { + "epoch": 0.8323699421965318, + "grad_norm": 2.379219331798779, + "learning_rate": 1.4379329550850697e-06, + "loss": 0.9326, + "step": 6048 + }, + { + "epoch": 0.8325075695017892, + "grad_norm": 1.6517296222712392, + "learning_rate": 1.4356309514280043e-06, + "loss": 0.8663, + "step": 6049 + }, + { + "epoch": 0.8326451968070465, + "grad_norm": 1.908762496700301, + "learning_rate": 1.4333306493938348e-06, + "loss": 0.8939, + "step": 6050 + }, + { + "epoch": 0.8327828241123039, + "grad_norm": 2.1409701002812995, + "learning_rate": 1.4310320494396002e-06, + "loss": 0.9459, + "step": 6051 + }, + { + "epoch": 0.8329204514175612, + "grad_norm": 1.8902406544057195, + "learning_rate": 1.4287351520220017e-06, + "loss": 0.9596, + "step": 6052 + }, + { + "epoch": 0.8330580787228186, + "grad_norm": 1.724051508928052, + "learning_rate": 1.426439957597401e-06, + "loss": 1.0188, + "step": 6053 + }, + { + "epoch": 0.833195706028076, + "grad_norm": 1.7958514674938604, + "learning_rate": 1.4241464666218207e-06, + "loss": 0.8938, + "step": 6054 + }, + { + "epoch": 0.8333333333333334, + "grad_norm": 1.9717124752361717, + "learning_rate": 1.421854679550947e-06, + "loss": 0.9124, + "step": 6055 + }, + { + "epoch": 0.8334709606385907, + "grad_norm": 1.7486500132746488, + "learning_rate": 1.419564596840126e-06, + "loss": 0.9308, + "step": 6056 + }, + { + "epoch": 0.8336085879438481, + "grad_norm": 1.8338332104468305, + "learning_rate": 1.4172762189443667e-06, + "loss": 0.8521, + "step": 6057 + }, + { + "epoch": 0.8337462152491054, + "grad_norm": 1.701032519197085, + "learning_rate": 1.4149895463183383e-06, + "loss": 0.8813, + "step": 6058 + }, + { + "epoch": 0.8338838425543628, + "grad_norm": 2.063316959175455, + "learning_rate": 1.4127045794163707e-06, + "loss": 0.8448, + "step": 6059 + }, + { + "epoch": 0.8340214698596201, + "grad_norm": 1.7972524092644135, + "learning_rate": 1.4104213186924542e-06, + "loss": 0.8999, + "step": 6060 + }, + { + "epoch": 0.8341590971648775, + "grad_norm": 1.794507399074356, + "learning_rate": 1.408139764600247e-06, + "loss": 0.9212, + "step": 6061 + }, + { + "epoch": 0.8342967244701349, + "grad_norm": 1.797278647567924, + "learning_rate": 1.4058599175930543e-06, + "loss": 0.8429, + "step": 6062 + }, + { + "epoch": 0.8344343517753923, + "grad_norm": 1.7912975770087685, + "learning_rate": 1.4035817781238603e-06, + "loss": 0.8991, + "step": 6063 + }, + { + "epoch": 0.8345719790806496, + "grad_norm": 1.900206946922228, + "learning_rate": 1.4013053466452898e-06, + "loss": 0.8712, + "step": 6064 + }, + { + "epoch": 
0.834709606385907, + "grad_norm": 1.7026499974789298, + "learning_rate": 1.3990306236096474e-06, + "loss": 0.8511, + "step": 6065 + }, + { + "epoch": 0.8348472336911643, + "grad_norm": 1.6923000389535787, + "learning_rate": 1.396757609468884e-06, + "loss": 0.8273, + "step": 6066 + }, + { + "epoch": 0.8349848609964217, + "grad_norm": 1.5726020452464728, + "learning_rate": 1.394486304674616e-06, + "loss": 0.8615, + "step": 6067 + }, + { + "epoch": 0.835122488301679, + "grad_norm": 2.1781695674388177, + "learning_rate": 1.3922167096781269e-06, + "loss": 0.8825, + "step": 6068 + }, + { + "epoch": 0.8352601156069365, + "grad_norm": 1.8931196512359085, + "learning_rate": 1.3899488249303462e-06, + "loss": 0.8829, + "step": 6069 + }, + { + "epoch": 0.8353977429121938, + "grad_norm": 2.039192427420529, + "learning_rate": 1.3876826508818786e-06, + "loss": 0.9224, + "step": 6070 + }, + { + "epoch": 0.8355353702174512, + "grad_norm": 1.886055577250591, + "learning_rate": 1.3854181879829742e-06, + "loss": 0.9655, + "step": 6071 + }, + { + "epoch": 0.8356729975227085, + "grad_norm": 1.928488602302475, + "learning_rate": 1.3831554366835597e-06, + "loss": 0.8999, + "step": 6072 + }, + { + "epoch": 0.8358106248279659, + "grad_norm": 1.6971425320750428, + "learning_rate": 1.3808943974332045e-06, + "loss": 0.9563, + "step": 6073 + }, + { + "epoch": 0.8359482521332232, + "grad_norm": 1.7435712621514061, + "learning_rate": 1.3786350706811513e-06, + "loss": 0.9165, + "step": 6074 + }, + { + "epoch": 0.8360858794384806, + "grad_norm": 1.7310109956387163, + "learning_rate": 1.3763774568762956e-06, + "loss": 0.9168, + "step": 6075 + }, + { + "epoch": 0.8362235067437379, + "grad_norm": 1.5888331512621912, + "learning_rate": 1.374121556467195e-06, + "loss": 0.8352, + "step": 6076 + }, + { + "epoch": 0.8363611340489954, + "grad_norm": 1.9466270644967998, + "learning_rate": 1.3718673699020658e-06, + "loss": 0.9282, + "step": 6077 + }, + { + "epoch": 0.8364987613542527, + "grad_norm": 1.7857997636426537, + "learning_rate": 1.3696148976287838e-06, + "loss": 0.8978, + "step": 6078 + }, + { + "epoch": 0.8366363886595101, + "grad_norm": 2.2364335341499486, + "learning_rate": 1.3673641400948857e-06, + "loss": 0.8995, + "step": 6079 + }, + { + "epoch": 0.8367740159647674, + "grad_norm": 1.8051287150346342, + "learning_rate": 1.3651150977475636e-06, + "loss": 0.8421, + "step": 6080 + }, + { + "epoch": 0.8369116432700248, + "grad_norm": 1.6573557746162544, + "learning_rate": 1.3628677710336736e-06, + "loss": 0.9032, + "step": 6081 + }, + { + "epoch": 0.8370492705752821, + "grad_norm": 1.8074645952618298, + "learning_rate": 1.3606221603997272e-06, + "loss": 0.9146, + "step": 6082 + }, + { + "epoch": 0.8371868978805395, + "grad_norm": 1.7274708386333526, + "learning_rate": 1.3583782662918976e-06, + "loss": 0.8668, + "step": 6083 + }, + { + "epoch": 0.8373245251857968, + "grad_norm": 1.6330928082409644, + "learning_rate": 1.356136089156016e-06, + "loss": 0.9581, + "step": 6084 + }, + { + "epoch": 0.8374621524910543, + "grad_norm": 1.7054009634098362, + "learning_rate": 1.3538956294375715e-06, + "loss": 0.877, + "step": 6085 + }, + { + "epoch": 0.8375997797963116, + "grad_norm": 1.7463593761632161, + "learning_rate": 1.3516568875817138e-06, + "loss": 0.8783, + "step": 6086 + }, + { + "epoch": 0.837737407101569, + "grad_norm": 1.8605929065771427, + "learning_rate": 1.349419864033249e-06, + "loss": 0.8513, + "step": 6087 + }, + { + "epoch": 0.8378750344068263, + "grad_norm": 1.7162030312797463, + "learning_rate": 
1.3471845592366438e-06, + "loss": 0.8657, + "step": 6088 + }, + { + "epoch": 0.8380126617120837, + "grad_norm": 1.8079296078158014, + "learning_rate": 1.3449509736360232e-06, + "loss": 0.9372, + "step": 6089 + }, + { + "epoch": 0.838150289017341, + "grad_norm": 1.6700336382168768, + "learning_rate": 1.3427191076751666e-06, + "loss": 0.8228, + "step": 6090 + }, + { + "epoch": 0.8382879163225984, + "grad_norm": 1.764755199441113, + "learning_rate": 1.3404889617975226e-06, + "loss": 0.8647, + "step": 6091 + }, + { + "epoch": 0.8384255436278558, + "grad_norm": 1.677207916115392, + "learning_rate": 1.338260536446182e-06, + "loss": 0.9021, + "step": 6092 + }, + { + "epoch": 0.8385631709331132, + "grad_norm": 1.8650096152289894, + "learning_rate": 1.3360338320639098e-06, + "loss": 0.9853, + "step": 6093 + }, + { + "epoch": 0.8387007982383705, + "grad_norm": 1.9181375753132912, + "learning_rate": 1.3338088490931144e-06, + "loss": 0.8916, + "step": 6094 + }, + { + "epoch": 0.8388384255436279, + "grad_norm": 1.553251292780892, + "learning_rate": 1.3315855879758787e-06, + "loss": 0.8212, + "step": 6095 + }, + { + "epoch": 0.8389760528488852, + "grad_norm": 1.610467421678477, + "learning_rate": 1.329364049153924e-06, + "loss": 0.9053, + "step": 6096 + }, + { + "epoch": 0.8391136801541426, + "grad_norm": 1.6114612195107656, + "learning_rate": 1.3271442330686467e-06, + "loss": 0.8993, + "step": 6097 + }, + { + "epoch": 0.8392513074593999, + "grad_norm": 1.6734658341730906, + "learning_rate": 1.3249261401610946e-06, + "loss": 0.8655, + "step": 6098 + }, + { + "epoch": 0.8393889347646573, + "grad_norm": 1.8339477443188514, + "learning_rate": 1.3227097708719649e-06, + "loss": 0.9351, + "step": 6099 + }, + { + "epoch": 0.8395265620699147, + "grad_norm": 2.072308235141602, + "learning_rate": 1.3204951256416288e-06, + "loss": 0.8766, + "step": 6100 + }, + { + "epoch": 0.8396641893751721, + "grad_norm": 1.6660579208588797, + "learning_rate": 1.3182822049100975e-06, + "loss": 0.8651, + "step": 6101 + }, + { + "epoch": 0.8398018166804294, + "grad_norm": 1.9730982348603476, + "learning_rate": 1.316071009117057e-06, + "loss": 0.8848, + "step": 6102 + }, + { + "epoch": 0.8399394439856868, + "grad_norm": 2.1103217808056507, + "learning_rate": 1.3138615387018327e-06, + "loss": 0.9022, + "step": 6103 + }, + { + "epoch": 0.8400770712909441, + "grad_norm": 1.7643874347913482, + "learning_rate": 1.3116537941034225e-06, + "loss": 0.9259, + "step": 6104 + }, + { + "epoch": 0.8402146985962015, + "grad_norm": 1.82608810256724, + "learning_rate": 1.3094477757604718e-06, + "loss": 0.9408, + "step": 6105 + }, + { + "epoch": 0.8403523259014588, + "grad_norm": 1.909491737390539, + "learning_rate": 1.307243484111288e-06, + "loss": 0.9644, + "step": 6106 + }, + { + "epoch": 0.8404899532067163, + "grad_norm": 1.9100606181776114, + "learning_rate": 1.3050409195938341e-06, + "loss": 0.8551, + "step": 6107 + }, + { + "epoch": 0.8406275805119736, + "grad_norm": 1.590136761381432, + "learning_rate": 1.3028400826457266e-06, + "loss": 0.8477, + "step": 6108 + }, + { + "epoch": 0.840765207817231, + "grad_norm": 1.8166123583462286, + "learning_rate": 1.3006409737042448e-06, + "loss": 0.8122, + "step": 6109 + }, + { + "epoch": 0.8409028351224883, + "grad_norm": 1.858342832182508, + "learning_rate": 1.29844359320632e-06, + "loss": 0.8927, + "step": 6110 + }, + { + "epoch": 0.8410404624277457, + "grad_norm": 1.8384373899354556, + "learning_rate": 1.2962479415885421e-06, + "loss": 0.8855, + "step": 6111 + }, + { + "epoch": 0.841178089733003, + 
"grad_norm": 1.9770414133472942, + "learning_rate": 1.2940540192871564e-06, + "loss": 0.857, + "step": 6112 + }, + { + "epoch": 0.8413157170382604, + "grad_norm": 1.6709901002887881, + "learning_rate": 1.2918618267380656e-06, + "loss": 0.8825, + "step": 6113 + }, + { + "epoch": 0.8414533443435177, + "grad_norm": 1.628858499422417, + "learning_rate": 1.2896713643768289e-06, + "loss": 0.8414, + "step": 6114 + }, + { + "epoch": 0.8415909716487752, + "grad_norm": 2.0096845119187727, + "learning_rate": 1.2874826326386601e-06, + "loss": 0.9519, + "step": 6115 + }, + { + "epoch": 0.8417285989540325, + "grad_norm": 1.9845448935818484, + "learning_rate": 1.285295631958432e-06, + "loss": 0.915, + "step": 6116 + }, + { + "epoch": 0.8418662262592899, + "grad_norm": 1.6497359152365212, + "learning_rate": 1.28311036277067e-06, + "loss": 0.8665, + "step": 6117 + }, + { + "epoch": 0.8420038535645472, + "grad_norm": 1.7352456052419811, + "learning_rate": 1.2809268255095565e-06, + "loss": 0.9668, + "step": 6118 + }, + { + "epoch": 0.8421414808698046, + "grad_norm": 1.7857166890053737, + "learning_rate": 1.2787450206089324e-06, + "loss": 0.8985, + "step": 6119 + }, + { + "epoch": 0.8422791081750619, + "grad_norm": 1.8961421306037025, + "learning_rate": 1.2765649485022923e-06, + "loss": 0.8759, + "step": 6120 + }, + { + "epoch": 0.8424167354803193, + "grad_norm": 1.8095611152354723, + "learning_rate": 1.2743866096227863e-06, + "loss": 0.8921, + "step": 6121 + }, + { + "epoch": 0.8425543627855766, + "grad_norm": 1.6210589012402394, + "learning_rate": 1.272210004403217e-06, + "loss": 0.9177, + "step": 6122 + }, + { + "epoch": 0.8426919900908341, + "grad_norm": 1.732450653086319, + "learning_rate": 1.2700351332760542e-06, + "loss": 0.9457, + "step": 6123 + }, + { + "epoch": 0.8428296173960914, + "grad_norm": 1.596930517048839, + "learning_rate": 1.2678619966734062e-06, + "loss": 0.8522, + "step": 6124 + }, + { + "epoch": 0.8429672447013488, + "grad_norm": 1.7366085271603748, + "learning_rate": 1.2656905950270537e-06, + "loss": 0.8737, + "step": 6125 + }, + { + "epoch": 0.8431048720066061, + "grad_norm": 1.6927379001521008, + "learning_rate": 1.2635209287684157e-06, + "loss": 0.8915, + "step": 6126 + }, + { + "epoch": 0.8432424993118635, + "grad_norm": 1.7387728033511043, + "learning_rate": 1.2613529983285843e-06, + "loss": 0.9575, + "step": 6127 + }, + { + "epoch": 0.8433801266171208, + "grad_norm": 1.8614872756380283, + "learning_rate": 1.2591868041382893e-06, + "loss": 0.924, + "step": 6128 + }, + { + "epoch": 0.8435177539223782, + "grad_norm": 2.034200146748662, + "learning_rate": 1.2570223466279296e-06, + "loss": 0.8841, + "step": 6129 + }, + { + "epoch": 0.8436553812276356, + "grad_norm": 1.632094586098541, + "learning_rate": 1.2548596262275526e-06, + "loss": 0.9254, + "step": 6130 + }, + { + "epoch": 0.843793008532893, + "grad_norm": 1.9064335207681462, + "learning_rate": 1.2526986433668565e-06, + "loss": 0.9127, + "step": 6131 + }, + { + "epoch": 0.8439306358381503, + "grad_norm": 2.125998740610062, + "learning_rate": 1.2505393984752068e-06, + "loss": 0.8841, + "step": 6132 + }, + { + "epoch": 0.8440682631434077, + "grad_norm": 1.6465219842572694, + "learning_rate": 1.248381891981606e-06, + "loss": 0.8884, + "step": 6133 + }, + { + "epoch": 0.844205890448665, + "grad_norm": 1.6813455796972567, + "learning_rate": 1.2462261243147322e-06, + "loss": 0.9313, + "step": 6134 + }, + { + "epoch": 0.8443435177539224, + "grad_norm": 1.7858511445267358, + "learning_rate": 1.2440720959028973e-06, + "loss": 0.8454, + 
"step": 6135 + }, + { + "epoch": 0.8444811450591797, + "grad_norm": 1.819605417144252, + "learning_rate": 1.2419198071740823e-06, + "loss": 0.8788, + "step": 6136 + }, + { + "epoch": 0.844618772364437, + "grad_norm": 1.6528858617096494, + "learning_rate": 1.2397692585559173e-06, + "loss": 0.8892, + "step": 6137 + }, + { + "epoch": 0.8447563996696945, + "grad_norm": 1.776742051552983, + "learning_rate": 1.2376204504756862e-06, + "loss": 0.8726, + "step": 6138 + }, + { + "epoch": 0.8448940269749519, + "grad_norm": 1.6741325880591398, + "learning_rate": 1.2354733833603272e-06, + "loss": 0.8917, + "step": 6139 + }, + { + "epoch": 0.8450316542802092, + "grad_norm": 1.9028593690749667, + "learning_rate": 1.2333280576364349e-06, + "loss": 0.839, + "step": 6140 + }, + { + "epoch": 0.8451692815854666, + "grad_norm": 1.7678687103518214, + "learning_rate": 1.2311844737302536e-06, + "loss": 0.9343, + "step": 6141 + }, + { + "epoch": 0.8453069088907239, + "grad_norm": 1.8638353498324214, + "learning_rate": 1.2290426320676852e-06, + "loss": 0.9142, + "step": 6142 + }, + { + "epoch": 0.8454445361959813, + "grad_norm": 1.7962682859738455, + "learning_rate": 1.2269025330742857e-06, + "loss": 0.9592, + "step": 6143 + }, + { + "epoch": 0.8455821635012386, + "grad_norm": 1.6672492017970242, + "learning_rate": 1.2247641771752616e-06, + "loss": 0.9329, + "step": 6144 + }, + { + "epoch": 0.8457197908064961, + "grad_norm": 1.8513922229830846, + "learning_rate": 1.2226275647954755e-06, + "loss": 0.9621, + "step": 6145 + }, + { + "epoch": 0.8458574181117534, + "grad_norm": 1.8075126899538192, + "learning_rate": 1.2204926963594433e-06, + "loss": 0.8541, + "step": 6146 + }, + { + "epoch": 0.8459950454170108, + "grad_norm": 1.8642184446683254, + "learning_rate": 1.2183595722913333e-06, + "loss": 0.9001, + "step": 6147 + }, + { + "epoch": 0.8461326727222681, + "grad_norm": 1.8629039170002117, + "learning_rate": 1.21622819301497e-06, + "loss": 0.9096, + "step": 6148 + }, + { + "epoch": 0.8462703000275255, + "grad_norm": 1.7683356366398046, + "learning_rate": 1.2140985589538269e-06, + "loss": 0.9099, + "step": 6149 + }, + { + "epoch": 0.8464079273327828, + "grad_norm": 1.9537421270030741, + "learning_rate": 1.211970670531034e-06, + "loss": 0.8904, + "step": 6150 + }, + { + "epoch": 0.8465455546380402, + "grad_norm": 1.705587175663193, + "learning_rate": 1.2098445281693748e-06, + "loss": 0.8336, + "step": 6151 + }, + { + "epoch": 0.8466831819432975, + "grad_norm": 1.9411358524930473, + "learning_rate": 1.2077201322912814e-06, + "loss": 0.875, + "step": 6152 + }, + { + "epoch": 0.846820809248555, + "grad_norm": 2.0164169072124647, + "learning_rate": 1.2055974833188488e-06, + "loss": 0.9274, + "step": 6153 + }, + { + "epoch": 0.8469584365538123, + "grad_norm": 1.792438573818221, + "learning_rate": 1.2034765816738103e-06, + "loss": 0.9657, + "step": 6154 + }, + { + "epoch": 0.8470960638590697, + "grad_norm": 1.8858913854586463, + "learning_rate": 1.2013574277775674e-06, + "loss": 0.8902, + "step": 6155 + }, + { + "epoch": 0.847233691164327, + "grad_norm": 2.336056892384564, + "learning_rate": 1.1992400220511591e-06, + "loss": 0.9771, + "step": 6156 + }, + { + "epoch": 0.8473713184695844, + "grad_norm": 1.7673569050817732, + "learning_rate": 1.197124364915293e-06, + "loss": 0.834, + "step": 6157 + }, + { + "epoch": 0.8475089457748417, + "grad_norm": 1.7033321142648965, + "learning_rate": 1.195010456790313e-06, + "loss": 0.8742, + "step": 6158 + }, + { + "epoch": 0.847646573080099, + "grad_norm": 1.7457197264061113, + 
"learning_rate": 1.1928982980962312e-06, + "loss": 0.862, + "step": 6159 + }, + { + "epoch": 0.8477842003853564, + "grad_norm": 1.808992090745284, + "learning_rate": 1.1907878892527003e-06, + "loss": 0.9053, + "step": 6160 + }, + { + "epoch": 0.8479218276906139, + "grad_norm": 1.8024166637962096, + "learning_rate": 1.1886792306790318e-06, + "loss": 0.8705, + "step": 6161 + }, + { + "epoch": 0.8480594549958712, + "grad_norm": 1.6904549719558852, + "learning_rate": 1.1865723227941882e-06, + "loss": 0.8306, + "step": 6162 + }, + { + "epoch": 0.8481970823011286, + "grad_norm": 2.0832332030525125, + "learning_rate": 1.1844671660167773e-06, + "loss": 0.9243, + "step": 6163 + }, + { + "epoch": 0.8483347096063859, + "grad_norm": 1.8260186390936586, + "learning_rate": 1.182363760765073e-06, + "loss": 0.8628, + "step": 6164 + }, + { + "epoch": 0.8484723369116433, + "grad_norm": 1.4997195278129782, + "learning_rate": 1.1802621074569843e-06, + "loss": 0.9305, + "step": 6165 + }, + { + "epoch": 0.8486099642169006, + "grad_norm": 1.7116906091568327, + "learning_rate": 1.1781622065100884e-06, + "loss": 0.8911, + "step": 6166 + }, + { + "epoch": 0.848747591522158, + "grad_norm": 1.721290941788685, + "learning_rate": 1.176064058341605e-06, + "loss": 0.9882, + "step": 6167 + }, + { + "epoch": 0.8488852188274154, + "grad_norm": 1.5974017197861239, + "learning_rate": 1.173967663368406e-06, + "loss": 0.9148, + "step": 6168 + }, + { + "epoch": 0.8490228461326728, + "grad_norm": 1.7101521735939351, + "learning_rate": 1.1718730220070174e-06, + "loss": 0.8843, + "step": 6169 + }, + { + "epoch": 0.8491604734379301, + "grad_norm": 1.7142188658696245, + "learning_rate": 1.1697801346736147e-06, + "loss": 0.917, + "step": 6170 + }, + { + "epoch": 0.8492981007431875, + "grad_norm": 1.596463143195057, + "learning_rate": 1.167689001784027e-06, + "loss": 0.9414, + "step": 6171 + }, + { + "epoch": 0.8494357280484448, + "grad_norm": 1.5805159736046763, + "learning_rate": 1.1655996237537337e-06, + "loss": 0.8933, + "step": 6172 + }, + { + "epoch": 0.8495733553537022, + "grad_norm": 2.0666283665033847, + "learning_rate": 1.163512000997865e-06, + "loss": 0.9125, + "step": 6173 + }, + { + "epoch": 0.8497109826589595, + "grad_norm": 1.993870261625524, + "learning_rate": 1.161426133931204e-06, + "loss": 0.8562, + "step": 6174 + }, + { + "epoch": 0.8498486099642169, + "grad_norm": 1.8049165722007277, + "learning_rate": 1.1593420229681828e-06, + "loss": 0.951, + "step": 6175 + }, + { + "epoch": 0.8499862372694743, + "grad_norm": 2.0571880605830795, + "learning_rate": 1.1572596685228865e-06, + "loss": 0.9509, + "step": 6176 + }, + { + "epoch": 0.8501238645747317, + "grad_norm": 1.7372973052614344, + "learning_rate": 1.1551790710090505e-06, + "loss": 0.8661, + "step": 6177 + }, + { + "epoch": 0.850261491879989, + "grad_norm": 1.7437380714988833, + "learning_rate": 1.1531002308400595e-06, + "loss": 0.8775, + "step": 6178 + }, + { + "epoch": 0.8503991191852464, + "grad_norm": 1.622455604839031, + "learning_rate": 1.1510231484289525e-06, + "loss": 0.9552, + "step": 6179 + }, + { + "epoch": 0.8505367464905037, + "grad_norm": 1.795308289686882, + "learning_rate": 1.1489478241884167e-06, + "loss": 0.8687, + "step": 6180 + }, + { + "epoch": 0.850674373795761, + "grad_norm": 1.6196311730173674, + "learning_rate": 1.1468742585307913e-06, + "loss": 0.9117, + "step": 6181 + }, + { + "epoch": 0.8508120011010184, + "grad_norm": 1.7395749033639012, + "learning_rate": 1.1448024518680634e-06, + "loss": 0.9115, + "step": 6182 + }, + { + "epoch": 
0.8509496284062759, + "grad_norm": 1.579889317628041, + "learning_rate": 1.1427324046118738e-06, + "loss": 0.9077, + "step": 6183 + }, + { + "epoch": 0.8510872557115332, + "grad_norm": 1.7478329843529872, + "learning_rate": 1.1406641171735112e-06, + "loss": 0.877, + "step": 6184 + }, + { + "epoch": 0.8512248830167906, + "grad_norm": 1.597017759813138, + "learning_rate": 1.1385975899639212e-06, + "loss": 0.9397, + "step": 6185 + }, + { + "epoch": 0.8513625103220479, + "grad_norm": 1.823189759823978, + "learning_rate": 1.1365328233936867e-06, + "loss": 0.8332, + "step": 6186 + }, + { + "epoch": 0.8515001376273053, + "grad_norm": 1.749493111741998, + "learning_rate": 1.1344698178730574e-06, + "loss": 0.9116, + "step": 6187 + }, + { + "epoch": 0.8516377649325626, + "grad_norm": 1.96166600476659, + "learning_rate": 1.132408573811914e-06, + "loss": 0.9301, + "step": 6188 + }, + { + "epoch": 0.85177539223782, + "grad_norm": 1.8515830106667426, + "learning_rate": 1.130349091619808e-06, + "loss": 0.9521, + "step": 6189 + }, + { + "epoch": 0.8519130195430773, + "grad_norm": 1.8463081993784243, + "learning_rate": 1.1282913717059206e-06, + "loss": 0.8828, + "step": 6190 + }, + { + "epoch": 0.8520506468483348, + "grad_norm": 2.0521941944871753, + "learning_rate": 1.1262354144790999e-06, + "loss": 0.9125, + "step": 6191 + }, + { + "epoch": 0.8521882741535921, + "grad_norm": 1.8238912532626685, + "learning_rate": 1.124181220347833e-06, + "loss": 0.9179, + "step": 6192 + }, + { + "epoch": 0.8523259014588495, + "grad_norm": 1.94010405763025, + "learning_rate": 1.1221287897202604e-06, + "loss": 0.8766, + "step": 6193 + }, + { + "epoch": 0.8524635287641068, + "grad_norm": 1.8408612021602413, + "learning_rate": 1.1200781230041757e-06, + "loss": 0.9017, + "step": 6194 + }, + { + "epoch": 0.8526011560693642, + "grad_norm": 1.5533309872353143, + "learning_rate": 1.118029220607011e-06, + "loss": 0.8861, + "step": 6195 + }, + { + "epoch": 0.8527387833746215, + "grad_norm": 1.8491282524239647, + "learning_rate": 1.1159820829358626e-06, + "loss": 0.8449, + "step": 6196 + }, + { + "epoch": 0.8528764106798788, + "grad_norm": 1.5890951668967603, + "learning_rate": 1.1139367103974607e-06, + "loss": 0.9049, + "step": 6197 + }, + { + "epoch": 0.8530140379851362, + "grad_norm": 1.8682495051675239, + "learning_rate": 1.1118931033982005e-06, + "loss": 0.9028, + "step": 6198 + }, + { + "epoch": 0.8531516652903937, + "grad_norm": 1.6849700445351836, + "learning_rate": 1.109851262344116e-06, + "loss": 0.8539, + "step": 6199 + }, + { + "epoch": 0.853289292595651, + "grad_norm": 1.7085091748241745, + "learning_rate": 1.1078111876408925e-06, + "loss": 0.896, + "step": 6200 + }, + { + "epoch": 0.8534269199009084, + "grad_norm": 1.7627489964715688, + "learning_rate": 1.1057728796938649e-06, + "loss": 0.9103, + "step": 6201 + }, + { + "epoch": 0.8535645472061657, + "grad_norm": 1.7732265115369894, + "learning_rate": 1.103736338908017e-06, + "loss": 0.9877, + "step": 6202 + }, + { + "epoch": 0.853702174511423, + "grad_norm": 2.3317087875938443, + "learning_rate": 1.1017015656879826e-06, + "loss": 0.8577, + "step": 6203 + }, + { + "epoch": 0.8538398018166804, + "grad_norm": 1.8906089410673719, + "learning_rate": 1.099668560438043e-06, + "loss": 0.9098, + "step": 6204 + }, + { + "epoch": 0.8539774291219377, + "grad_norm": 1.7741615332785652, + "learning_rate": 1.0976373235621273e-06, + "loss": 0.9202, + "step": 6205 + }, + { + "epoch": 0.8541150564271952, + "grad_norm": 1.7795041831959177, + "learning_rate": 1.0956078554638161e-06, + 
"loss": 0.9812, + "step": 6206 + }, + { + "epoch": 0.8542526837324526, + "grad_norm": 1.9280939130133263, + "learning_rate": 1.0935801565463356e-06, + "loss": 0.8748, + "step": 6207 + }, + { + "epoch": 0.8543903110377099, + "grad_norm": 1.6985953902383173, + "learning_rate": 1.0915542272125635e-06, + "loss": 0.8844, + "step": 6208 + }, + { + "epoch": 0.8545279383429673, + "grad_norm": 1.7656054135347574, + "learning_rate": 1.0895300678650223e-06, + "loss": 0.8981, + "step": 6209 + }, + { + "epoch": 0.8546655656482246, + "grad_norm": 1.882449130548782, + "learning_rate": 1.0875076789058857e-06, + "loss": 0.8684, + "step": 6210 + }, + { + "epoch": 0.854803192953482, + "grad_norm": 2.0828732529177283, + "learning_rate": 1.0854870607369738e-06, + "loss": 0.8886, + "step": 6211 + }, + { + "epoch": 0.8549408202587393, + "grad_norm": 1.5762031784650956, + "learning_rate": 1.0834682137597575e-06, + "loss": 0.9016, + "step": 6212 + }, + { + "epoch": 0.8550784475639966, + "grad_norm": 1.9997176451775578, + "learning_rate": 1.0814511383753524e-06, + "loss": 0.9091, + "step": 6213 + }, + { + "epoch": 0.8552160748692541, + "grad_norm": 1.6008286782788077, + "learning_rate": 1.0794358349845258e-06, + "loss": 0.901, + "step": 6214 + }, + { + "epoch": 0.8553537021745115, + "grad_norm": 1.747513288586546, + "learning_rate": 1.0774223039876884e-06, + "loss": 0.933, + "step": 6215 + }, + { + "epoch": 0.8554913294797688, + "grad_norm": 1.691661423142964, + "learning_rate": 1.0754105457849007e-06, + "loss": 0.8802, + "step": 6216 + }, + { + "epoch": 0.8556289567850262, + "grad_norm": 2.0459210262909413, + "learning_rate": 1.0734005607758768e-06, + "loss": 0.8389, + "step": 6217 + }, + { + "epoch": 0.8557665840902835, + "grad_norm": 1.7241595170062474, + "learning_rate": 1.0713923493599665e-06, + "loss": 0.8852, + "step": 6218 + }, + { + "epoch": 0.8559042113955408, + "grad_norm": 1.760415045388538, + "learning_rate": 1.0693859119361805e-06, + "loss": 0.945, + "step": 6219 + }, + { + "epoch": 0.8560418387007982, + "grad_norm": 1.8553260892920427, + "learning_rate": 1.0673812489031632e-06, + "loss": 0.8616, + "step": 6220 + }, + { + "epoch": 0.8561794660060557, + "grad_norm": 1.8733594274032301, + "learning_rate": 1.0653783606592193e-06, + "loss": 0.9112, + "step": 6221 + }, + { + "epoch": 0.856317093311313, + "grad_norm": 1.661550675841988, + "learning_rate": 1.063377247602294e-06, + "loss": 0.9323, + "step": 6222 + }, + { + "epoch": 0.8564547206165704, + "grad_norm": 2.0148655361835837, + "learning_rate": 1.0613779101299794e-06, + "loss": 0.8318, + "step": 6223 + }, + { + "epoch": 0.8565923479218277, + "grad_norm": 1.7556268197937792, + "learning_rate": 1.0593803486395192e-06, + "loss": 0.8494, + "step": 6224 + }, + { + "epoch": 0.856729975227085, + "grad_norm": 1.824161035374066, + "learning_rate": 1.057384563527799e-06, + "loss": 0.8797, + "step": 6225 + }, + { + "epoch": 0.8568676025323424, + "grad_norm": 1.7644634000553918, + "learning_rate": 1.0553905551913568e-06, + "loss": 0.8914, + "step": 6226 + }, + { + "epoch": 0.8570052298375997, + "grad_norm": 1.8137094918409802, + "learning_rate": 1.05339832402637e-06, + "loss": 0.9026, + "step": 6227 + }, + { + "epoch": 0.8571428571428571, + "grad_norm": 1.5935991393795679, + "learning_rate": 1.0514078704286713e-06, + "loss": 0.8273, + "step": 6228 + }, + { + "epoch": 0.8572804844481146, + "grad_norm": 1.8627668337735328, + "learning_rate": 1.049419194793737e-06, + "loss": 0.8461, + "step": 6229 + }, + { + "epoch": 0.8574181117533719, + "grad_norm": 
1.8243143285536125, + "learning_rate": 1.0474322975166872e-06, + "loss": 0.8747, + "step": 6230 + }, + { + "epoch": 0.8575557390586293, + "grad_norm": 1.607035697618476, + "learning_rate": 1.0454471789922938e-06, + "loss": 0.9447, + "step": 6231 + }, + { + "epoch": 0.8576933663638866, + "grad_norm": 1.8249112881971736, + "learning_rate": 1.043463839614971e-06, + "loss": 0.8677, + "step": 6232 + }, + { + "epoch": 0.857830993669144, + "grad_norm": 1.834360652593746, + "learning_rate": 1.0414822797787827e-06, + "loss": 0.8972, + "step": 6233 + }, + { + "epoch": 0.8579686209744013, + "grad_norm": 1.734085765353281, + "learning_rate": 1.039502499877435e-06, + "loss": 0.8738, + "step": 6234 + }, + { + "epoch": 0.8581062482796586, + "grad_norm": 1.7648585493823916, + "learning_rate": 1.0375245003042855e-06, + "loss": 0.9147, + "step": 6235 + }, + { + "epoch": 0.858243875584916, + "grad_norm": 1.6533946327914746, + "learning_rate": 1.0355482814523355e-06, + "loss": 0.8877, + "step": 6236 + }, + { + "epoch": 0.8583815028901735, + "grad_norm": 1.62368507915153, + "learning_rate": 1.0335738437142307e-06, + "loss": 0.8561, + "step": 6237 + }, + { + "epoch": 0.8585191301954308, + "grad_norm": 1.9459208143022695, + "learning_rate": 1.0316011874822673e-06, + "loss": 0.8548, + "step": 6238 + }, + { + "epoch": 0.8586567575006882, + "grad_norm": 1.5434341527162416, + "learning_rate": 1.029630313148381e-06, + "loss": 0.9445, + "step": 6239 + }, + { + "epoch": 0.8587943848059455, + "grad_norm": 1.6948168797812844, + "learning_rate": 1.0276612211041647e-06, + "loss": 0.8671, + "step": 6240 + }, + { + "epoch": 0.8589320121112028, + "grad_norm": 1.655727692137088, + "learning_rate": 1.025693911740845e-06, + "loss": 0.8963, + "step": 6241 + }, + { + "epoch": 0.8590696394164602, + "grad_norm": 1.9142942378624233, + "learning_rate": 1.0237283854492984e-06, + "loss": 0.7426, + "step": 6242 + }, + { + "epoch": 0.8592072667217175, + "grad_norm": 1.7050248295069106, + "learning_rate": 1.0217646426200512e-06, + "loss": 0.8821, + "step": 6243 + }, + { + "epoch": 0.859344894026975, + "grad_norm": 1.846644966719409, + "learning_rate": 1.0198026836432705e-06, + "loss": 0.9452, + "step": 6244 + }, + { + "epoch": 0.8594825213322324, + "grad_norm": 1.589539889630898, + "learning_rate": 1.0178425089087707e-06, + "loss": 0.9316, + "step": 6245 + }, + { + "epoch": 0.8596201486374897, + "grad_norm": 1.8671977305075909, + "learning_rate": 1.0158841188060097e-06, + "loss": 0.9304, + "step": 6246 + }, + { + "epoch": 0.859757775942747, + "grad_norm": 1.6998972779955002, + "learning_rate": 1.0139275137241e-06, + "loss": 0.8841, + "step": 6247 + }, + { + "epoch": 0.8598954032480044, + "grad_norm": 1.6411868750384548, + "learning_rate": 1.0119726940517826e-06, + "loss": 0.8831, + "step": 6248 + }, + { + "epoch": 0.8600330305532617, + "grad_norm": 1.8325711136668632, + "learning_rate": 1.0100196601774636e-06, + "loss": 0.8513, + "step": 6249 + }, + { + "epoch": 0.8601706578585191, + "grad_norm": 1.673958912207221, + "learning_rate": 1.0080684124891737e-06, + "loss": 0.8326, + "step": 6250 + }, + { + "epoch": 0.8603082851637764, + "grad_norm": 1.6930924073611227, + "learning_rate": 1.0061189513746084e-06, + "loss": 0.8934, + "step": 6251 + }, + { + "epoch": 0.8604459124690339, + "grad_norm": 1.6526857250595426, + "learning_rate": 1.00417127722109e-06, + "loss": 0.8588, + "step": 6252 + }, + { + "epoch": 0.8605835397742913, + "grad_norm": 1.6392924387162284, + "learning_rate": 1.0022253904156021e-06, + "loss": 0.9076, + "step": 6253 + }, + 
{ + "epoch": 0.8607211670795486, + "grad_norm": 1.9012759811366886, + "learning_rate": 1.0002812913447623e-06, + "loss": 0.9007, + "step": 6254 + }, + { + "epoch": 0.860858794384806, + "grad_norm": 1.6634464242122433, + "learning_rate": 9.983389803948374e-07, + "loss": 0.9173, + "step": 6255 + }, + { + "epoch": 0.8609964216900633, + "grad_norm": 1.5711386034534134, + "learning_rate": 9.963984579517382e-07, + "loss": 0.8745, + "step": 6256 + }, + { + "epoch": 0.8611340489953206, + "grad_norm": 1.8215760569065331, + "learning_rate": 9.94459724401019e-07, + "loss": 0.8029, + "step": 6257 + }, + { + "epoch": 0.861271676300578, + "grad_norm": 2.254793095089383, + "learning_rate": 9.925227801278826e-07, + "loss": 0.9907, + "step": 6258 + }, + { + "epoch": 0.8614093036058355, + "grad_norm": 1.6885256395454302, + "learning_rate": 9.905876255171665e-07, + "loss": 0.8028, + "step": 6259 + }, + { + "epoch": 0.8615469309110928, + "grad_norm": 1.9243112534056797, + "learning_rate": 9.886542609533668e-07, + "loss": 0.8267, + "step": 6260 + }, + { + "epoch": 0.8616845582163501, + "grad_norm": 1.9010430236512281, + "learning_rate": 9.867226868206127e-07, + "loss": 0.8641, + "step": 6261 + }, + { + "epoch": 0.8618221855216075, + "grad_norm": 1.9111698643083475, + "learning_rate": 9.847929035026826e-07, + "loss": 0.9461, + "step": 6262 + }, + { + "epoch": 0.8619598128268648, + "grad_norm": 2.8209566271193287, + "learning_rate": 9.82864911382997e-07, + "loss": 0.9151, + "step": 6263 + }, + { + "epoch": 0.8620974401321222, + "grad_norm": 1.7380561692099294, + "learning_rate": 9.809387108446234e-07, + "loss": 0.8923, + "step": 6264 + }, + { + "epoch": 0.8622350674373795, + "grad_norm": 1.7854983960326407, + "learning_rate": 9.790143022702703e-07, + "loss": 0.9246, + "step": 6265 + }, + { + "epoch": 0.8623726947426369, + "grad_norm": 1.8401999725636915, + "learning_rate": 9.770916860422907e-07, + "loss": 0.9202, + "step": 6266 + }, + { + "epoch": 0.8625103220478944, + "grad_norm": 1.9592330923428067, + "learning_rate": 9.751708625426836e-07, + "loss": 0.9526, + "step": 6267 + }, + { + "epoch": 0.8626479493531517, + "grad_norm": 1.8987182144016828, + "learning_rate": 9.732518321530904e-07, + "loss": 0.9473, + "step": 6268 + }, + { + "epoch": 0.862785576658409, + "grad_norm": 2.079129585369098, + "learning_rate": 9.713345952547949e-07, + "loss": 0.9041, + "step": 6269 + }, + { + "epoch": 0.8629232039636664, + "grad_norm": 1.9200819556924664, + "learning_rate": 9.694191522287256e-07, + "loss": 0.938, + "step": 6270 + }, + { + "epoch": 0.8630608312689237, + "grad_norm": 1.8636943318134989, + "learning_rate": 9.675055034554547e-07, + "loss": 0.9252, + "step": 6271 + }, + { + "epoch": 0.8631984585741811, + "grad_norm": 1.5840490516828283, + "learning_rate": 9.655936493152018e-07, + "loss": 0.7854, + "step": 6272 + }, + { + "epoch": 0.8633360858794384, + "grad_norm": 1.9078694551675215, + "learning_rate": 9.636835901878217e-07, + "loss": 0.8825, + "step": 6273 + }, + { + "epoch": 0.8634737131846958, + "grad_norm": 1.9859282786300518, + "learning_rate": 9.617753264528184e-07, + "loss": 1.0039, + "step": 6274 + }, + { + "epoch": 0.8636113404899532, + "grad_norm": 1.8353425757594175, + "learning_rate": 9.59868858489338e-07, + "loss": 0.8419, + "step": 6275 + }, + { + "epoch": 0.8637489677952106, + "grad_norm": 1.6029197278096232, + "learning_rate": 9.579641866761702e-07, + "loss": 0.9292, + "step": 6276 + }, + { + "epoch": 0.863886595100468, + "grad_norm": 1.7337445203655124, + "learning_rate": 9.560613113917462e-07, + 
"loss": 0.9725, + "step": 6277 + }, + { + "epoch": 0.8640242224057253, + "grad_norm": 1.5568334729503015, + "learning_rate": 9.541602330141386e-07, + "loss": 0.8627, + "step": 6278 + }, + { + "epoch": 0.8641618497109826, + "grad_norm": 1.8046165228091906, + "learning_rate": 9.522609519210735e-07, + "loss": 0.8959, + "step": 6279 + }, + { + "epoch": 0.86429947701624, + "grad_norm": 1.839020720605846, + "learning_rate": 9.503634684899033e-07, + "loss": 0.8946, + "step": 6280 + }, + { + "epoch": 0.8644371043214973, + "grad_norm": 1.744491745807985, + "learning_rate": 9.484677830976396e-07, + "loss": 0.8513, + "step": 6281 + }, + { + "epoch": 0.8645747316267548, + "grad_norm": 1.7600007823660464, + "learning_rate": 9.465738961209214e-07, + "loss": 0.8734, + "step": 6282 + }, + { + "epoch": 0.8647123589320121, + "grad_norm": 2.097317385804689, + "learning_rate": 9.446818079360465e-07, + "loss": 0.8637, + "step": 6283 + }, + { + "epoch": 0.8648499862372695, + "grad_norm": 1.792349322173358, + "learning_rate": 9.427915189189385e-07, + "loss": 0.92, + "step": 6284 + }, + { + "epoch": 0.8649876135425268, + "grad_norm": 1.6838078052423404, + "learning_rate": 9.409030294451771e-07, + "loss": 0.8759, + "step": 6285 + }, + { + "epoch": 0.8651252408477842, + "grad_norm": 1.8965747909653592, + "learning_rate": 9.390163398899788e-07, + "loss": 0.8594, + "step": 6286 + }, + { + "epoch": 0.8652628681530415, + "grad_norm": 1.7187042335812959, + "learning_rate": 9.371314506282026e-07, + "loss": 0.8658, + "step": 6287 + }, + { + "epoch": 0.8654004954582989, + "grad_norm": 1.736646926580443, + "learning_rate": 9.3524836203435e-07, + "loss": 0.903, + "step": 6288 + }, + { + "epoch": 0.8655381227635562, + "grad_norm": 1.6405004807193584, + "learning_rate": 9.333670744825652e-07, + "loss": 0.8413, + "step": 6289 + }, + { + "epoch": 0.8656757500688137, + "grad_norm": 1.9928999627402626, + "learning_rate": 9.314875883466335e-07, + "loss": 0.9367, + "step": 6290 + }, + { + "epoch": 0.865813377374071, + "grad_norm": 2.015746288041542, + "learning_rate": 9.29609903999984e-07, + "loss": 0.9, + "step": 6291 + }, + { + "epoch": 0.8659510046793284, + "grad_norm": 1.7682184098481524, + "learning_rate": 9.27734021815686e-07, + "loss": 0.8982, + "step": 6292 + }, + { + "epoch": 0.8660886319845857, + "grad_norm": 1.9331544655860518, + "learning_rate": 9.258599421664527e-07, + "loss": 0.8793, + "step": 6293 + }, + { + "epoch": 0.8662262592898431, + "grad_norm": 1.8157555149059545, + "learning_rate": 9.239876654246371e-07, + "loss": 0.9145, + "step": 6294 + }, + { + "epoch": 0.8663638865951004, + "grad_norm": 1.8900435670286593, + "learning_rate": 9.221171919622352e-07, + "loss": 0.8649, + "step": 6295 + }, + { + "epoch": 0.8665015139003578, + "grad_norm": 2.109691307902643, + "learning_rate": 9.20248522150885e-07, + "loss": 0.8854, + "step": 6296 + }, + { + "epoch": 0.8666391412056152, + "grad_norm": 1.8004300274607514, + "learning_rate": 9.183816563618641e-07, + "loss": 0.932, + "step": 6297 + }, + { + "epoch": 0.8667767685108726, + "grad_norm": 1.7325947887059283, + "learning_rate": 9.165165949660959e-07, + "loss": 0.9331, + "step": 6298 + }, + { + "epoch": 0.86691439581613, + "grad_norm": 1.634354875671112, + "learning_rate": 9.146533383341394e-07, + "loss": 0.9443, + "step": 6299 + }, + { + "epoch": 0.8670520231213873, + "grad_norm": 1.7494753398255085, + "learning_rate": 9.127918868362006e-07, + "loss": 0.8638, + "step": 6300 + }, + { + "epoch": 0.8671896504266446, + "grad_norm": 1.733450030302839, + "learning_rate": 
9.109322408421217e-07, + "loss": 0.8891, + "step": 6301 + }, + { + "epoch": 0.867327277731902, + "grad_norm": 1.584905250717093, + "learning_rate": 9.090744007213948e-07, + "loss": 0.8938, + "step": 6302 + }, + { + "epoch": 0.8674649050371593, + "grad_norm": 1.8141678255413258, + "learning_rate": 9.072183668431389e-07, + "loss": 0.8805, + "step": 6303 + }, + { + "epoch": 0.8676025323424167, + "grad_norm": 1.9020457554564945, + "learning_rate": 9.053641395761303e-07, + "loss": 0.9193, + "step": 6304 + }, + { + "epoch": 0.8677401596476741, + "grad_norm": 1.6497987739186861, + "learning_rate": 9.035117192887743e-07, + "loss": 0.8922, + "step": 6305 + }, + { + "epoch": 0.8678777869529315, + "grad_norm": 1.6889959614906276, + "learning_rate": 9.01661106349121e-07, + "loss": 0.9396, + "step": 6306 + }, + { + "epoch": 0.8680154142581888, + "grad_norm": 1.8666693448828282, + "learning_rate": 8.998123011248639e-07, + "loss": 0.8792, + "step": 6307 + }, + { + "epoch": 0.8681530415634462, + "grad_norm": 1.6628947515863657, + "learning_rate": 8.979653039833336e-07, + "loss": 0.9292, + "step": 6308 + }, + { + "epoch": 0.8682906688687035, + "grad_norm": 1.7178438640142257, + "learning_rate": 8.961201152915067e-07, + "loss": 0.9303, + "step": 6309 + }, + { + "epoch": 0.8684282961739609, + "grad_norm": 2.1109822306351296, + "learning_rate": 8.94276735415991e-07, + "loss": 0.9046, + "step": 6310 + }, + { + "epoch": 0.8685659234792182, + "grad_norm": 1.8667267425314287, + "learning_rate": 8.924351647230479e-07, + "loss": 0.8646, + "step": 6311 + }, + { + "epoch": 0.8687035507844756, + "grad_norm": 1.789102127393617, + "learning_rate": 8.905954035785646e-07, + "loss": 0.8941, + "step": 6312 + }, + { + "epoch": 0.868841178089733, + "grad_norm": 1.7309625894204395, + "learning_rate": 8.887574523480847e-07, + "loss": 0.8685, + "step": 6313 + }, + { + "epoch": 0.8689788053949904, + "grad_norm": 1.810638738665834, + "learning_rate": 8.869213113967756e-07, + "loss": 0.855, + "step": 6314 + }, + { + "epoch": 0.8691164327002477, + "grad_norm": 1.8431368190404611, + "learning_rate": 8.850869810894591e-07, + "loss": 0.8699, + "step": 6315 + }, + { + "epoch": 0.8692540600055051, + "grad_norm": 1.7187531972697576, + "learning_rate": 8.832544617905902e-07, + "loss": 0.8786, + "step": 6316 + }, + { + "epoch": 0.8693916873107624, + "grad_norm": 1.6521170750378698, + "learning_rate": 8.814237538642656e-07, + "loss": 0.8979, + "step": 6317 + }, + { + "epoch": 0.8695293146160198, + "grad_norm": 1.645183724165715, + "learning_rate": 8.795948576742209e-07, + "loss": 0.916, + "step": 6318 + }, + { + "epoch": 0.8696669419212771, + "grad_norm": 1.76123362687622, + "learning_rate": 8.77767773583833e-07, + "loss": 0.9087, + "step": 6319 + }, + { + "epoch": 0.8698045692265346, + "grad_norm": 1.9093273651390985, + "learning_rate": 8.759425019561196e-07, + "loss": 0.9791, + "step": 6320 + }, + { + "epoch": 0.869942196531792, + "grad_norm": 1.780088933989605, + "learning_rate": 8.74119043153735e-07, + "loss": 0.9689, + "step": 6321 + }, + { + "epoch": 0.8700798238370493, + "grad_norm": 1.7879835357412397, + "learning_rate": 8.722973975389782e-07, + "loss": 0.9233, + "step": 6322 + }, + { + "epoch": 0.8702174511423066, + "grad_norm": 1.5389503308348593, + "learning_rate": 8.704775654737829e-07, + "loss": 0.8581, + "step": 6323 + }, + { + "epoch": 0.870355078447564, + "grad_norm": 1.753843554425785, + "learning_rate": 8.686595473197257e-07, + "loss": 0.9103, + "step": 6324 + }, + { + "epoch": 0.8704927057528213, + "grad_norm": 
1.6995973295819178, + "learning_rate": 8.66843343438023e-07, + "loss": 0.8744, + "step": 6325 + }, + { + "epoch": 0.8706303330580787, + "grad_norm": 1.8521250832114422, + "learning_rate": 8.650289541895285e-07, + "loss": 0.9923, + "step": 6326 + }, + { + "epoch": 0.870767960363336, + "grad_norm": 1.6785191208659946, + "learning_rate": 8.63216379934736e-07, + "loss": 0.9399, + "step": 6327 + }, + { + "epoch": 0.8709055876685935, + "grad_norm": 1.7242708645722309, + "learning_rate": 8.614056210337795e-07, + "loss": 0.9523, + "step": 6328 + }, + { + "epoch": 0.8710432149738508, + "grad_norm": 1.5861174351691378, + "learning_rate": 8.595966778464338e-07, + "loss": 0.7835, + "step": 6329 + }, + { + "epoch": 0.8711808422791082, + "grad_norm": 1.6840028909608171, + "learning_rate": 8.577895507321088e-07, + "loss": 0.9035, + "step": 6330 + }, + { + "epoch": 0.8713184695843655, + "grad_norm": 1.7191411712837283, + "learning_rate": 8.559842400498564e-07, + "loss": 0.879, + "step": 6331 + }, + { + "epoch": 0.8714560968896229, + "grad_norm": 1.7825350412061591, + "learning_rate": 8.541807461583684e-07, + "loss": 0.8704, + "step": 6332 + }, + { + "epoch": 0.8715937241948802, + "grad_norm": 1.5307914260203452, + "learning_rate": 8.523790694159717e-07, + "loss": 0.8741, + "step": 6333 + }, + { + "epoch": 0.8717313515001376, + "grad_norm": 1.5895739537259068, + "learning_rate": 8.505792101806399e-07, + "loss": 0.9052, + "step": 6334 + }, + { + "epoch": 0.871868978805395, + "grad_norm": 1.6085163080113656, + "learning_rate": 8.487811688099734e-07, + "loss": 0.8694, + "step": 6335 + }, + { + "epoch": 0.8720066061106524, + "grad_norm": 1.8569943216146498, + "learning_rate": 8.469849456612267e-07, + "loss": 0.8521, + "step": 6336 + }, + { + "epoch": 0.8721442334159097, + "grad_norm": 1.6871735656310338, + "learning_rate": 8.451905410912775e-07, + "loss": 0.8608, + "step": 6337 + }, + { + "epoch": 0.8722818607211671, + "grad_norm": 1.5621967849273881, + "learning_rate": 8.433979554566518e-07, + "loss": 0.9175, + "step": 6338 + }, + { + "epoch": 0.8724194880264244, + "grad_norm": 1.991136048183134, + "learning_rate": 8.416071891135124e-07, + "loss": 0.8564, + "step": 6339 + }, + { + "epoch": 0.8725571153316818, + "grad_norm": 1.7755791811167518, + "learning_rate": 8.398182424176582e-07, + "loss": 0.938, + "step": 6340 + }, + { + "epoch": 0.8726947426369391, + "grad_norm": 1.717335521906552, + "learning_rate": 8.380311157245335e-07, + "loss": 0.8686, + "step": 6341 + }, + { + "epoch": 0.8728323699421965, + "grad_norm": 1.9847187413732654, + "learning_rate": 8.36245809389209e-07, + "loss": 0.9475, + "step": 6342 + }, + { + "epoch": 0.8729699972474539, + "grad_norm": 1.6617495762541499, + "learning_rate": 8.344623237664073e-07, + "loss": 0.9122, + "step": 6343 + }, + { + "epoch": 0.8731076245527113, + "grad_norm": 1.7830617518430338, + "learning_rate": 8.326806592104764e-07, + "loss": 0.9082, + "step": 6344 + }, + { + "epoch": 0.8732452518579686, + "grad_norm": 1.7477154403518669, + "learning_rate": 8.309008160754151e-07, + "loss": 0.887, + "step": 6345 + }, + { + "epoch": 0.873382879163226, + "grad_norm": 2.0417345218428284, + "learning_rate": 8.291227947148462e-07, + "loss": 0.9705, + "step": 6346 + }, + { + "epoch": 0.8735205064684833, + "grad_norm": 1.7391080974950275, + "learning_rate": 8.273465954820436e-07, + "loss": 0.889, + "step": 6347 + }, + { + "epoch": 0.8736581337737407, + "grad_norm": 2.014714504012215, + "learning_rate": 8.255722187299142e-07, + "loss": 0.9744, + "step": 6348 + }, + { + "epoch": 
0.873795761078998, + "grad_norm": 1.7514079570638046, + "learning_rate": 8.237996648109991e-07, + "loss": 0.9581, + "step": 6349 + }, + { + "epoch": 0.8739333883842554, + "grad_norm": 1.8316563545438742, + "learning_rate": 8.220289340774835e-07, + "loss": 0.8527, + "step": 6350 + }, + { + "epoch": 0.8740710156895128, + "grad_norm": 1.988474765344503, + "learning_rate": 8.202600268811844e-07, + "loss": 0.9276, + "step": 6351 + }, + { + "epoch": 0.8742086429947702, + "grad_norm": 1.8268774279820574, + "learning_rate": 8.184929435735622e-07, + "loss": 0.8775, + "step": 6352 + }, + { + "epoch": 0.8743462703000275, + "grad_norm": 1.891187046561271, + "learning_rate": 8.167276845057104e-07, + "loss": 0.9322, + "step": 6353 + }, + { + "epoch": 0.8744838976052849, + "grad_norm": 1.8819324364304035, + "learning_rate": 8.149642500283627e-07, + "loss": 0.9324, + "step": 6354 + }, + { + "epoch": 0.8746215249105422, + "grad_norm": 1.9641177679425152, + "learning_rate": 8.132026404918891e-07, + "loss": 0.9039, + "step": 6355 + }, + { + "epoch": 0.8747591522157996, + "grad_norm": 1.6889952085972064, + "learning_rate": 8.114428562462973e-07, + "loss": 0.9126, + "step": 6356 + }, + { + "epoch": 0.8748967795210569, + "grad_norm": 1.7317647726110619, + "learning_rate": 8.096848976412319e-07, + "loss": 0.8739, + "step": 6357 + }, + { + "epoch": 0.8750344068263144, + "grad_norm": 1.761934660674444, + "learning_rate": 8.079287650259759e-07, + "loss": 0.9663, + "step": 6358 + }, + { + "epoch": 0.8751720341315717, + "grad_norm": 1.983540032809982, + "learning_rate": 8.061744587494491e-07, + "loss": 0.8636, + "step": 6359 + }, + { + "epoch": 0.8753096614368291, + "grad_norm": 1.875032675908984, + "learning_rate": 8.04421979160207e-07, + "loss": 0.9254, + "step": 6360 + }, + { + "epoch": 0.8754472887420864, + "grad_norm": 1.794436931006903, + "learning_rate": 8.026713266064434e-07, + "loss": 0.8982, + "step": 6361 + }, + { + "epoch": 0.8755849160473438, + "grad_norm": 2.0245174167597124, + "learning_rate": 8.0092250143599e-07, + "loss": 0.9641, + "step": 6362 + }, + { + "epoch": 0.8757225433526011, + "grad_norm": 1.664289405988689, + "learning_rate": 7.991755039963134e-07, + "loss": 0.8719, + "step": 6363 + }, + { + "epoch": 0.8758601706578585, + "grad_norm": 1.584552844868047, + "learning_rate": 7.974303346345202e-07, + "loss": 0.9307, + "step": 6364 + }, + { + "epoch": 0.8759977979631158, + "grad_norm": 1.991380471171234, + "learning_rate": 7.956869936973477e-07, + "loss": 0.9301, + "step": 6365 + }, + { + "epoch": 0.8761354252683733, + "grad_norm": 2.214259244239434, + "learning_rate": 7.939454815311787e-07, + "loss": 0.9309, + "step": 6366 + }, + { + "epoch": 0.8762730525736306, + "grad_norm": 1.89229127387758, + "learning_rate": 7.92205798482023e-07, + "loss": 0.9328, + "step": 6367 + }, + { + "epoch": 0.876410679878888, + "grad_norm": 1.7676159587421614, + "learning_rate": 7.904679448955366e-07, + "loss": 0.8914, + "step": 6368 + }, + { + "epoch": 0.8765483071841453, + "grad_norm": 1.8367416268357815, + "learning_rate": 7.887319211170042e-07, + "loss": 0.9715, + "step": 6369 + }, + { + "epoch": 0.8766859344894027, + "grad_norm": 1.6900339129382613, + "learning_rate": 7.869977274913476e-07, + "loss": 0.8871, + "step": 6370 + }, + { + "epoch": 0.87682356179466, + "grad_norm": 1.649013007781255, + "learning_rate": 7.852653643631347e-07, + "loss": 0.8966, + "step": 6371 + }, + { + "epoch": 0.8769611890999174, + "grad_norm": 1.7585566679889797, + "learning_rate": 7.835348320765546e-07, + "loss": 0.8506, + "step": 
6372 + }, + { + "epoch": 0.8770988164051748, + "grad_norm": 1.902847572757343, + "learning_rate": 7.818061309754465e-07, + "loss": 0.8945, + "step": 6373 + }, + { + "epoch": 0.8772364437104322, + "grad_norm": 1.7229573270950396, + "learning_rate": 7.800792614032738e-07, + "loss": 0.9047, + "step": 6374 + }, + { + "epoch": 0.8773740710156895, + "grad_norm": 1.7390330253904451, + "learning_rate": 7.783542237031483e-07, + "loss": 0.8453, + "step": 6375 + }, + { + "epoch": 0.8775116983209469, + "grad_norm": 1.771730135659942, + "learning_rate": 7.766310182178038e-07, + "loss": 0.9133, + "step": 6376 + }, + { + "epoch": 0.8776493256262042, + "grad_norm": 1.8284534533947143, + "learning_rate": 7.749096452896243e-07, + "loss": 0.8712, + "step": 6377 + }, + { + "epoch": 0.8777869529314616, + "grad_norm": 1.6582122102078258, + "learning_rate": 7.731901052606206e-07, + "loss": 0.8298, + "step": 6378 + }, + { + "epoch": 0.8779245802367189, + "grad_norm": 1.7878471537398035, + "learning_rate": 7.714723984724415e-07, + "loss": 0.8937, + "step": 6379 + }, + { + "epoch": 0.8780622075419763, + "grad_norm": 1.6685627449282436, + "learning_rate": 7.697565252663719e-07, + "loss": 0.8694, + "step": 6380 + }, + { + "epoch": 0.8781998348472337, + "grad_norm": 1.807688598708972, + "learning_rate": 7.680424859833325e-07, + "loss": 1.0326, + "step": 6381 + }, + { + "epoch": 0.8783374621524911, + "grad_norm": 1.8627588549183183, + "learning_rate": 7.663302809638795e-07, + "loss": 0.9078, + "step": 6382 + }, + { + "epoch": 0.8784750894577484, + "grad_norm": 1.610877059058267, + "learning_rate": 7.646199105482043e-07, + "loss": 0.8238, + "step": 6383 + }, + { + "epoch": 0.8786127167630058, + "grad_norm": 1.85234750961446, + "learning_rate": 7.629113750761352e-07, + "loss": 0.9746, + "step": 6384 + }, + { + "epoch": 0.8787503440682631, + "grad_norm": 1.7961471682033971, + "learning_rate": 7.612046748871327e-07, + "loss": 0.9441, + "step": 6385 + }, + { + "epoch": 0.8788879713735205, + "grad_norm": 1.7439640481186351, + "learning_rate": 7.594998103202967e-07, + "loss": 0.9372, + "step": 6386 + }, + { + "epoch": 0.8790255986787778, + "grad_norm": 1.666755666368937, + "learning_rate": 7.577967817143595e-07, + "loss": 0.8755, + "step": 6387 + }, + { + "epoch": 0.8791632259840352, + "grad_norm": 1.8588723254955457, + "learning_rate": 7.560955894076894e-07, + "loss": 0.9137, + "step": 6388 + }, + { + "epoch": 0.8793008532892926, + "grad_norm": 1.6512079211921402, + "learning_rate": 7.543962337382915e-07, + "loss": 0.8697, + "step": 6389 + }, + { + "epoch": 0.87943848059455, + "grad_norm": 1.5663438714845923, + "learning_rate": 7.526987150438025e-07, + "loss": 0.9217, + "step": 6390 + }, + { + "epoch": 0.8795761078998073, + "grad_norm": 1.8061798966086877, + "learning_rate": 7.51003033661497e-07, + "loss": 0.8471, + "step": 6391 + }, + { + "epoch": 0.8797137352050647, + "grad_norm": 1.5708147991688186, + "learning_rate": 7.493091899282845e-07, + "loss": 0.9186, + "step": 6392 + }, + { + "epoch": 0.879851362510322, + "grad_norm": 1.6590333758088234, + "learning_rate": 7.476171841807078e-07, + "loss": 0.9595, + "step": 6393 + }, + { + "epoch": 0.8799889898155794, + "grad_norm": 1.694780532764805, + "learning_rate": 7.459270167549448e-07, + "loss": 0.9009, + "step": 6394 + }, + { + "epoch": 0.8801266171208367, + "grad_norm": 1.625017323761249, + "learning_rate": 7.442386879868069e-07, + "loss": 0.9063, + "step": 6395 + }, + { + "epoch": 0.8802642444260942, + "grad_norm": 1.9261840238073933, + "learning_rate": 
7.425521982117489e-07, + "loss": 0.8558, + "step": 6396 + }, + { + "epoch": 0.8804018717313515, + "grad_norm": 2.299072595871546, + "learning_rate": 7.408675477648442e-07, + "loss": 0.9506, + "step": 6397 + }, + { + "epoch": 0.8805394990366089, + "grad_norm": 1.6093723065269614, + "learning_rate": 7.39184736980817e-07, + "loss": 0.9176, + "step": 6398 + }, + { + "epoch": 0.8806771263418662, + "grad_norm": 1.7452446315608874, + "learning_rate": 7.375037661940132e-07, + "loss": 0.911, + "step": 6399 + }, + { + "epoch": 0.8808147536471236, + "grad_norm": 1.611112393288828, + "learning_rate": 7.358246357384247e-07, + "loss": 0.9607, + "step": 6400 + }, + { + "epoch": 0.8809523809523809, + "grad_norm": 1.644251578042853, + "learning_rate": 7.341473459476645e-07, + "loss": 0.9208, + "step": 6401 + }, + { + "epoch": 0.8810900082576383, + "grad_norm": 1.8027601113899416, + "learning_rate": 7.324718971549926e-07, + "loss": 0.887, + "step": 6402 + }, + { + "epoch": 0.8812276355628956, + "grad_norm": 1.6692914484855061, + "learning_rate": 7.307982896932986e-07, + "loss": 0.8873, + "step": 6403 + }, + { + "epoch": 0.8813652628681531, + "grad_norm": 1.770575001284231, + "learning_rate": 7.291265238950984e-07, + "loss": 0.9359, + "step": 6404 + }, + { + "epoch": 0.8815028901734104, + "grad_norm": 1.685006217012473, + "learning_rate": 7.274566000925575e-07, + "loss": 0.9147, + "step": 6405 + }, + { + "epoch": 0.8816405174786678, + "grad_norm": 1.8131165110999612, + "learning_rate": 7.257885186174596e-07, + "loss": 0.887, + "step": 6406 + }, + { + "epoch": 0.8817781447839251, + "grad_norm": 2.2749024770192783, + "learning_rate": 7.241222798012359e-07, + "loss": 0.8377, + "step": 6407 + }, + { + "epoch": 0.8819157720891825, + "grad_norm": 2.2328916164174273, + "learning_rate": 7.224578839749386e-07, + "loss": 0.9509, + "step": 6408 + }, + { + "epoch": 0.8820533993944398, + "grad_norm": 1.769521910442314, + "learning_rate": 7.207953314692673e-07, + "loss": 0.866, + "step": 6409 + }, + { + "epoch": 0.8821910266996972, + "grad_norm": 1.8185570386594185, + "learning_rate": 7.191346226145435e-07, + "loss": 0.8848, + "step": 6410 + }, + { + "epoch": 0.8823286540049546, + "grad_norm": 1.705985114440559, + "learning_rate": 7.174757577407298e-07, + "loss": 0.9407, + "step": 6411 + }, + { + "epoch": 0.882466281310212, + "grad_norm": 1.5383726606934731, + "learning_rate": 7.158187371774183e-07, + "loss": 0.8851, + "step": 6412 + }, + { + "epoch": 0.8826039086154693, + "grad_norm": 1.5539039197212894, + "learning_rate": 7.141635612538378e-07, + "loss": 0.8647, + "step": 6413 + }, + { + "epoch": 0.8827415359207267, + "grad_norm": 1.5523027222060135, + "learning_rate": 7.125102302988485e-07, + "loss": 0.9209, + "step": 6414 + }, + { + "epoch": 0.882879163225984, + "grad_norm": 2.015737098231876, + "learning_rate": 7.108587446409443e-07, + "loss": 0.9087, + "step": 6415 + }, + { + "epoch": 0.8830167905312414, + "grad_norm": 1.8150814702452889, + "learning_rate": 7.092091046082538e-07, + "loss": 0.8421, + "step": 6416 + }, + { + "epoch": 0.8831544178364987, + "grad_norm": 1.848789992413101, + "learning_rate": 7.075613105285361e-07, + "loss": 0.9596, + "step": 6417 + }, + { + "epoch": 0.8832920451417561, + "grad_norm": 1.9201010090851653, + "learning_rate": 7.059153627291871e-07, + "loss": 0.883, + "step": 6418 + }, + { + "epoch": 0.8834296724470135, + "grad_norm": 2.005900188495301, + "learning_rate": 7.042712615372327e-07, + "loss": 0.9078, + "step": 6419 + }, + { + "epoch": 0.8835672997522709, + "grad_norm": 
2.048393098564577, + "learning_rate": 7.02629007279334e-07, + "loss": 0.9436, + "step": 6420 + }, + { + "epoch": 0.8837049270575282, + "grad_norm": 1.9853746294734698, + "learning_rate": 7.009886002817856e-07, + "loss": 0.8772, + "step": 6421 + }, + { + "epoch": 0.8838425543627856, + "grad_norm": 1.7725021607751894, + "learning_rate": 6.993500408705112e-07, + "loss": 0.9129, + "step": 6422 + }, + { + "epoch": 0.8839801816680429, + "grad_norm": 1.620304211930674, + "learning_rate": 6.977133293710725e-07, + "loss": 0.8819, + "step": 6423 + }, + { + "epoch": 0.8841178089733003, + "grad_norm": 1.9642300616043322, + "learning_rate": 6.960784661086606e-07, + "loss": 0.8956, + "step": 6424 + }, + { + "epoch": 0.8842554362785576, + "grad_norm": 1.6405778621070912, + "learning_rate": 6.94445451408099e-07, + "loss": 0.8629, + "step": 6425 + }, + { + "epoch": 0.884393063583815, + "grad_norm": 1.6084542656993244, + "learning_rate": 6.928142855938513e-07, + "loss": 0.8574, + "step": 6426 + }, + { + "epoch": 0.8845306908890724, + "grad_norm": 1.6392540744553863, + "learning_rate": 6.911849689899985e-07, + "loss": 0.8935, + "step": 6427 + }, + { + "epoch": 0.8846683181943298, + "grad_norm": 1.8897373748705015, + "learning_rate": 6.895575019202727e-07, + "loss": 0.8816, + "step": 6428 + }, + { + "epoch": 0.8848059454995871, + "grad_norm": 1.940786978969368, + "learning_rate": 6.879318847080229e-07, + "loss": 0.935, + "step": 6429 + }, + { + "epoch": 0.8849435728048445, + "grad_norm": 1.8321974051191918, + "learning_rate": 6.863081176762409e-07, + "loss": 0.8916, + "step": 6430 + }, + { + "epoch": 0.8850812001101018, + "grad_norm": 2.0176252624408, + "learning_rate": 6.84686201147543e-07, + "loss": 0.8429, + "step": 6431 + }, + { + "epoch": 0.8852188274153592, + "grad_norm": 1.7645600873921015, + "learning_rate": 6.830661354441858e-07, + "loss": 0.897, + "step": 6432 + }, + { + "epoch": 0.8853564547206165, + "grad_norm": 1.6067030412374046, + "learning_rate": 6.81447920888052e-07, + "loss": 0.9046, + "step": 6433 + }, + { + "epoch": 0.885494082025874, + "grad_norm": 1.641955020927479, + "learning_rate": 6.798315578006597e-07, + "loss": 0.8355, + "step": 6434 + }, + { + "epoch": 0.8856317093311313, + "grad_norm": 1.9703812498182551, + "learning_rate": 6.782170465031601e-07, + "loss": 1.0185, + "step": 6435 + }, + { + "epoch": 0.8857693366363887, + "grad_norm": 1.8242606371538628, + "learning_rate": 6.766043873163275e-07, + "loss": 0.8507, + "step": 6436 + }, + { + "epoch": 0.885906963941646, + "grad_norm": 2.01352940475627, + "learning_rate": 6.749935805605833e-07, + "loss": 0.8617, + "step": 6437 + }, + { + "epoch": 0.8860445912469034, + "grad_norm": 2.049467317992396, + "learning_rate": 6.733846265559663e-07, + "loss": 0.8898, + "step": 6438 + }, + { + "epoch": 0.8861822185521607, + "grad_norm": 1.58098666551709, + "learning_rate": 6.717775256221582e-07, + "loss": 0.8349, + "step": 6439 + }, + { + "epoch": 0.8863198458574181, + "grad_norm": 1.8406874726848967, + "learning_rate": 6.701722780784658e-07, + "loss": 0.9221, + "step": 6440 + }, + { + "epoch": 0.8864574731626754, + "grad_norm": 2.3589597139645315, + "learning_rate": 6.68568884243831e-07, + "loss": 0.8771, + "step": 6441 + }, + { + "epoch": 0.8865951004679329, + "grad_norm": 1.5908031430634777, + "learning_rate": 6.669673444368252e-07, + "loss": 0.8677, + "step": 6442 + }, + { + "epoch": 0.8867327277731902, + "grad_norm": 2.0651342130525876, + "learning_rate": 6.653676589756519e-07, + "loss": 0.9312, + "step": 6443 + }, + { + "epoch": 
0.8868703550784476, + "grad_norm": 1.6486704535154544, + "learning_rate": 6.637698281781491e-07, + "loss": 0.9149, + "step": 6444 + }, + { + "epoch": 0.8870079823837049, + "grad_norm": 2.449555071958951, + "learning_rate": 6.621738523617815e-07, + "loss": 0.9483, + "step": 6445 + }, + { + "epoch": 0.8871456096889623, + "grad_norm": 1.7431737535788985, + "learning_rate": 6.605797318436491e-07, + "loss": 0.9202, + "step": 6446 + }, + { + "epoch": 0.8872832369942196, + "grad_norm": 2.734494514258936, + "learning_rate": 6.589874669404805e-07, + "loss": 0.8957, + "step": 6447 + }, + { + "epoch": 0.887420864299477, + "grad_norm": 1.8748408386995095, + "learning_rate": 6.573970579686384e-07, + "loss": 0.9096, + "step": 6448 + }, + { + "epoch": 0.8875584916047344, + "grad_norm": 1.6687726414456006, + "learning_rate": 6.558085052441132e-07, + "loss": 0.8552, + "step": 6449 + }, + { + "epoch": 0.8876961189099918, + "grad_norm": 1.7932994132205653, + "learning_rate": 6.542218090825314e-07, + "loss": 0.8306, + "step": 6450 + }, + { + "epoch": 0.8878337462152491, + "grad_norm": 1.7570804728183498, + "learning_rate": 6.526369697991463e-07, + "loss": 0.8671, + "step": 6451 + }, + { + "epoch": 0.8879713735205065, + "grad_norm": 1.7647289643909725, + "learning_rate": 6.510539877088429e-07, + "loss": 0.9161, + "step": 6452 + }, + { + "epoch": 0.8881090008257638, + "grad_norm": 1.936996257939106, + "learning_rate": 6.494728631261405e-07, + "loss": 0.9673, + "step": 6453 + }, + { + "epoch": 0.8882466281310212, + "grad_norm": 1.8594045279895188, + "learning_rate": 6.478935963651844e-07, + "loss": 0.8677, + "step": 6454 + }, + { + "epoch": 0.8883842554362785, + "grad_norm": 1.8227155626760876, + "learning_rate": 6.463161877397539e-07, + "loss": 0.9431, + "step": 6455 + }, + { + "epoch": 0.8885218827415359, + "grad_norm": 1.877559899528615, + "learning_rate": 6.447406375632592e-07, + "loss": 0.8641, + "step": 6456 + }, + { + "epoch": 0.8886595100467933, + "grad_norm": 1.9494281376945986, + "learning_rate": 6.431669461487389e-07, + "loss": 0.939, + "step": 6457 + }, + { + "epoch": 0.8887971373520507, + "grad_norm": 1.7092941486354354, + "learning_rate": 6.415951138088694e-07, + "loss": 0.94, + "step": 6458 + }, + { + "epoch": 0.888934764657308, + "grad_norm": 1.8321943127031395, + "learning_rate": 6.400251408559443e-07, + "loss": 0.8427, + "step": 6459 + }, + { + "epoch": 0.8890723919625654, + "grad_norm": 1.9229410203008541, + "learning_rate": 6.38457027601902e-07, + "loss": 0.895, + "step": 6460 + }, + { + "epoch": 0.8892100192678227, + "grad_norm": 1.445636448719149, + "learning_rate": 6.368907743583008e-07, + "loss": 0.8834, + "step": 6461 + }, + { + "epoch": 0.8893476465730801, + "grad_norm": 1.663931414796164, + "learning_rate": 6.353263814363375e-07, + "loss": 0.8976, + "step": 6462 + }, + { + "epoch": 0.8894852738783374, + "grad_norm": 4.914740749767287, + "learning_rate": 6.337638491468323e-07, + "loss": 0.8938, + "step": 6463 + }, + { + "epoch": 0.8896229011835948, + "grad_norm": 1.6062424724252355, + "learning_rate": 6.322031778002413e-07, + "loss": 0.8563, + "step": 6464 + }, + { + "epoch": 0.8897605284888522, + "grad_norm": 1.8240505572592254, + "learning_rate": 6.306443677066476e-07, + "loss": 0.8081, + "step": 6465 + }, + { + "epoch": 0.8898981557941096, + "grad_norm": 1.6403812662549373, + "learning_rate": 6.290874191757656e-07, + "loss": 0.9013, + "step": 6466 + }, + { + "epoch": 0.8900357830993669, + "grad_norm": 2.1965582163033024, + "learning_rate": 6.275323325169403e-07, + "loss": 0.9684, + 
"step": 6467 + }, + { + "epoch": 0.8901734104046243, + "grad_norm": 1.636816816862865, + "learning_rate": 6.259791080391431e-07, + "loss": 0.9072, + "step": 6468 + }, + { + "epoch": 0.8903110377098816, + "grad_norm": 1.9809936772996666, + "learning_rate": 6.24427746050984e-07, + "loss": 0.9171, + "step": 6469 + }, + { + "epoch": 0.890448665015139, + "grad_norm": 1.861002482375235, + "learning_rate": 6.228782468606909e-07, + "loss": 0.9036, + "step": 6470 + }, + { + "epoch": 0.8905862923203963, + "grad_norm": 1.5489148953570382, + "learning_rate": 6.21330610776133e-07, + "loss": 0.8569, + "step": 6471 + }, + { + "epoch": 0.8907239196256538, + "grad_norm": 1.9892753746223564, + "learning_rate": 6.197848381048033e-07, + "loss": 0.8269, + "step": 6472 + }, + { + "epoch": 0.8908615469309111, + "grad_norm": 1.8385620898065118, + "learning_rate": 6.182409291538238e-07, + "loss": 0.8437, + "step": 6473 + }, + { + "epoch": 0.8909991742361685, + "grad_norm": 1.755218217609776, + "learning_rate": 6.166988842299504e-07, + "loss": 0.9044, + "step": 6474 + }, + { + "epoch": 0.8911368015414258, + "grad_norm": 1.7390119752546944, + "learning_rate": 6.151587036395656e-07, + "loss": 0.94, + "step": 6475 + }, + { + "epoch": 0.8912744288466832, + "grad_norm": 1.8919811094019443, + "learning_rate": 6.136203876886803e-07, + "loss": 0.8448, + "step": 6476 + }, + { + "epoch": 0.8914120561519405, + "grad_norm": 1.62869687066339, + "learning_rate": 6.120839366829401e-07, + "loss": 0.8602, + "step": 6477 + }, + { + "epoch": 0.8915496834571979, + "grad_norm": 1.7149783406023593, + "learning_rate": 6.105493509276139e-07, + "loss": 0.9188, + "step": 6478 + }, + { + "epoch": 0.8916873107624552, + "grad_norm": 2.2772423936115986, + "learning_rate": 6.090166307276047e-07, + "loss": 0.8516, + "step": 6479 + }, + { + "epoch": 0.8918249380677127, + "grad_norm": 2.082448804519124, + "learning_rate": 6.074857763874431e-07, + "loss": 0.838, + "step": 6480 + }, + { + "epoch": 0.89196256537297, + "grad_norm": 1.7899594499955493, + "learning_rate": 6.059567882112871e-07, + "loss": 0.8448, + "step": 6481 + }, + { + "epoch": 0.8921001926782274, + "grad_norm": 1.8206715252367143, + "learning_rate": 6.04429666502927e-07, + "loss": 0.8179, + "step": 6482 + }, + { + "epoch": 0.8922378199834847, + "grad_norm": 1.8154475150126275, + "learning_rate": 6.029044115657812e-07, + "loss": 0.9081, + "step": 6483 + }, + { + "epoch": 0.8923754472887421, + "grad_norm": 2.051830224255475, + "learning_rate": 6.013810237028972e-07, + "loss": 0.8667, + "step": 6484 + }, + { + "epoch": 0.8925130745939994, + "grad_norm": 1.9637870769196732, + "learning_rate": 5.998595032169496e-07, + "loss": 0.861, + "step": 6485 + }, + { + "epoch": 0.8926507018992568, + "grad_norm": 1.816180860691923, + "learning_rate": 5.983398504102456e-07, + "loss": 0.7809, + "step": 6486 + }, + { + "epoch": 0.8927883292045142, + "grad_norm": 1.530421870800164, + "learning_rate": 5.968220655847167e-07, + "loss": 0.8821, + "step": 6487 + }, + { + "epoch": 0.8929259565097716, + "grad_norm": 1.7413773067831742, + "learning_rate": 5.95306149041931e-07, + "loss": 0.8828, + "step": 6488 + }, + { + "epoch": 0.8930635838150289, + "grad_norm": 1.7877887698130557, + "learning_rate": 5.937921010830738e-07, + "loss": 0.8731, + "step": 6489 + }, + { + "epoch": 0.8932012111202863, + "grad_norm": 1.9499295015221687, + "learning_rate": 5.922799220089726e-07, + "loss": 0.9063, + "step": 6490 + }, + { + "epoch": 0.8933388384255436, + "grad_norm": 1.8620475071039337, + "learning_rate": 
5.907696121200701e-07, + "loss": 0.8926, + "step": 6491 + }, + { + "epoch": 0.893476465730801, + "grad_norm": 1.771179601397548, + "learning_rate": 5.892611717164498e-07, + "loss": 0.9292, + "step": 6492 + }, + { + "epoch": 0.8936140930360583, + "grad_norm": 1.4933871262496432, + "learning_rate": 5.877546010978142e-07, + "loss": 0.9897, + "step": 6493 + }, + { + "epoch": 0.8937517203413157, + "grad_norm": 1.8174475111573583, + "learning_rate": 5.862499005635003e-07, + "loss": 0.9226, + "step": 6494 + }, + { + "epoch": 0.8938893476465731, + "grad_norm": 1.8019261540055285, + "learning_rate": 5.847470704124703e-07, + "loss": 0.9148, + "step": 6495 + }, + { + "epoch": 0.8940269749518305, + "grad_norm": 1.8400245643491624, + "learning_rate": 5.832461109433174e-07, + "loss": 0.9034, + "step": 6496 + }, + { + "epoch": 0.8941646022570878, + "grad_norm": 1.7588803585853499, + "learning_rate": 5.81747022454261e-07, + "loss": 0.9313, + "step": 6497 + }, + { + "epoch": 0.8943022295623452, + "grad_norm": 1.8443099176665407, + "learning_rate": 5.802498052431493e-07, + "loss": 0.8383, + "step": 6498 + }, + { + "epoch": 0.8944398568676025, + "grad_norm": 1.751664613122743, + "learning_rate": 5.787544596074613e-07, + "loss": 0.9357, + "step": 6499 + }, + { + "epoch": 0.8945774841728599, + "grad_norm": 1.8561395911336072, + "learning_rate": 5.772609858442946e-07, + "loss": 0.8961, + "step": 6500 + }, + { + "epoch": 0.8947151114781172, + "grad_norm": 1.5885039219810266, + "learning_rate": 5.757693842503898e-07, + "loss": 0.9413, + "step": 6501 + }, + { + "epoch": 0.8948527387833746, + "grad_norm": 1.7939590814932405, + "learning_rate": 5.742796551221042e-07, + "loss": 0.8626, + "step": 6502 + }, + { + "epoch": 0.894990366088632, + "grad_norm": 1.9411485752127589, + "learning_rate": 5.727917987554266e-07, + "loss": 0.9683, + "step": 6503 + }, + { + "epoch": 0.8951279933938894, + "grad_norm": 1.8222550125872903, + "learning_rate": 5.71305815445975e-07, + "loss": 0.8859, + "step": 6504 + }, + { + "epoch": 0.8952656206991467, + "grad_norm": 1.54451157826374, + "learning_rate": 5.698217054889921e-07, + "loss": 0.8742, + "step": 6505 + }, + { + "epoch": 0.8954032480044041, + "grad_norm": 1.8822293308043638, + "learning_rate": 5.68339469179352e-07, + "loss": 0.9062, + "step": 6506 + }, + { + "epoch": 0.8955408753096614, + "grad_norm": 1.79554558514194, + "learning_rate": 5.668591068115536e-07, + "loss": 0.9112, + "step": 6507 + }, + { + "epoch": 0.8956785026149188, + "grad_norm": 1.948484488289092, + "learning_rate": 5.653806186797262e-07, + "loss": 0.9484, + "step": 6508 + }, + { + "epoch": 0.8958161299201761, + "grad_norm": 1.8407065427371514, + "learning_rate": 5.639040050776223e-07, + "loss": 0.9034, + "step": 6509 + }, + { + "epoch": 0.8959537572254336, + "grad_norm": 1.7359488427788166, + "learning_rate": 5.624292662986275e-07, + "loss": 1.0032, + "step": 6510 + }, + { + "epoch": 0.8960913845306909, + "grad_norm": 1.6428633158115507, + "learning_rate": 5.609564026357517e-07, + "loss": 0.9344, + "step": 6511 + }, + { + "epoch": 0.8962290118359483, + "grad_norm": 1.6470189540858218, + "learning_rate": 5.594854143816298e-07, + "loss": 0.8314, + "step": 6512 + }, + { + "epoch": 0.8963666391412056, + "grad_norm": 1.8032430998251854, + "learning_rate": 5.580163018285323e-07, + "loss": 0.8015, + "step": 6513 + }, + { + "epoch": 0.896504266446463, + "grad_norm": 1.6037953052878011, + "learning_rate": 5.565490652683481e-07, + "loss": 0.8968, + "step": 6514 + }, + { + "epoch": 0.8966418937517203, + "grad_norm": 
1.8551423989590532, + "learning_rate": 5.550837049925984e-07, + "loss": 0.9818, + "step": 6515 + }, + { + "epoch": 0.8967795210569777, + "grad_norm": 1.8045038102060473, + "learning_rate": 5.536202212924291e-07, + "loss": 0.9151, + "step": 6516 + }, + { + "epoch": 0.896917148362235, + "grad_norm": 1.8950969321366096, + "learning_rate": 5.521586144586144e-07, + "loss": 0.8661, + "step": 6517 + }, + { + "epoch": 0.8970547756674925, + "grad_norm": 1.7504246122810019, + "learning_rate": 5.506988847815564e-07, + "loss": 0.8152, + "step": 6518 + }, + { + "epoch": 0.8971924029727498, + "grad_norm": 1.6141600866720605, + "learning_rate": 5.49241032551282e-07, + "loss": 0.8249, + "step": 6519 + }, + { + "epoch": 0.8973300302780072, + "grad_norm": 1.7202165545868682, + "learning_rate": 5.477850580574506e-07, + "loss": 0.903, + "step": 6520 + }, + { + "epoch": 0.8974676575832645, + "grad_norm": 1.6087666737187465, + "learning_rate": 5.463309615893387e-07, + "loss": 0.9334, + "step": 6521 + }, + { + "epoch": 0.8976052848885219, + "grad_norm": 1.7519328044161546, + "learning_rate": 5.448787434358604e-07, + "loss": 0.8884, + "step": 6522 + }, + { + "epoch": 0.8977429121937792, + "grad_norm": 1.6882940409760934, + "learning_rate": 5.434284038855475e-07, + "loss": 0.864, + "step": 6523 + }, + { + "epoch": 0.8978805394990366, + "grad_norm": 1.7296911630054215, + "learning_rate": 5.41979943226566e-07, + "loss": 0.8739, + "step": 6524 + }, + { + "epoch": 0.898018166804294, + "grad_norm": 1.8246021185485055, + "learning_rate": 5.405333617467012e-07, + "loss": 0.9371, + "step": 6525 + }, + { + "epoch": 0.8981557941095514, + "grad_norm": 1.6346340198394016, + "learning_rate": 5.390886597333733e-07, + "loss": 0.9056, + "step": 6526 + }, + { + "epoch": 0.8982934214148087, + "grad_norm": 1.771731218858257, + "learning_rate": 5.376458374736227e-07, + "loss": 0.8966, + "step": 6527 + }, + { + "epoch": 0.8984310487200661, + "grad_norm": 2.039338060693509, + "learning_rate": 5.362048952541176e-07, + "loss": 0.9419, + "step": 6528 + }, + { + "epoch": 0.8985686760253234, + "grad_norm": 1.649239631302988, + "learning_rate": 5.347658333611561e-07, + "loss": 0.8356, + "step": 6529 + }, + { + "epoch": 0.8987063033305808, + "grad_norm": 2.2454122330319044, + "learning_rate": 5.33328652080658e-07, + "loss": 0.8263, + "step": 6530 + }, + { + "epoch": 0.8988439306358381, + "grad_norm": 1.690810632231821, + "learning_rate": 5.318933516981728e-07, + "loss": 0.877, + "step": 6531 + }, + { + "epoch": 0.8989815579410955, + "grad_norm": 1.6766251105945962, + "learning_rate": 5.304599324988724e-07, + "loss": 0.893, + "step": 6532 + }, + { + "epoch": 0.8991191852463529, + "grad_norm": 1.7273685942227646, + "learning_rate": 5.290283947675612e-07, + "loss": 0.7914, + "step": 6533 + }, + { + "epoch": 0.8992568125516103, + "grad_norm": 1.8820816369764046, + "learning_rate": 5.275987387886638e-07, + "loss": 0.8305, + "step": 6534 + }, + { + "epoch": 0.8993944398568676, + "grad_norm": 1.6419479409456463, + "learning_rate": 5.261709648462344e-07, + "loss": 0.8891, + "step": 6535 + }, + { + "epoch": 0.899532067162125, + "grad_norm": 1.6177035326997828, + "learning_rate": 5.247450732239512e-07, + "loss": 0.9873, + "step": 6536 + }, + { + "epoch": 0.8996696944673823, + "grad_norm": 2.0802440436101244, + "learning_rate": 5.2332106420512e-07, + "loss": 0.8938, + "step": 6537 + }, + { + "epoch": 0.8998073217726397, + "grad_norm": 1.9184231674953178, + "learning_rate": 5.218989380726725e-07, + "loss": 0.9194, + "step": 6538 + }, + { + "epoch": 
0.899944949077897, + "grad_norm": 1.741997863632587, + "learning_rate": 5.204786951091645e-07, + "loss": 0.9334, + "step": 6539 + }, + { + "epoch": 0.9000825763831544, + "grad_norm": 1.6562565353807313, + "learning_rate": 5.190603355967793e-07, + "loss": 0.938, + "step": 6540 + }, + { + "epoch": 0.9002202036884118, + "grad_norm": 1.6806233156899084, + "learning_rate": 5.176438598173251e-07, + "loss": 0.9268, + "step": 6541 + }, + { + "epoch": 0.9003578309936692, + "grad_norm": 1.6122806132307825, + "learning_rate": 5.162292680522363e-07, + "loss": 0.8818, + "step": 6542 + }, + { + "epoch": 0.9004954582989265, + "grad_norm": 1.7468473576364618, + "learning_rate": 5.148165605825739e-07, + "loss": 0.9561, + "step": 6543 + }, + { + "epoch": 0.9006330856041839, + "grad_norm": 1.8286724928426037, + "learning_rate": 5.134057376890222e-07, + "loss": 0.8924, + "step": 6544 + }, + { + "epoch": 0.9007707129094412, + "grad_norm": 1.54532617431928, + "learning_rate": 5.119967996518948e-07, + "loss": 0.9292, + "step": 6545 + }, + { + "epoch": 0.9009083402146986, + "grad_norm": 1.6685094442604222, + "learning_rate": 5.105897467511256e-07, + "loss": 0.9587, + "step": 6546 + }, + { + "epoch": 0.9010459675199559, + "grad_norm": 1.7635740070767605, + "learning_rate": 5.091845792662775e-07, + "loss": 0.8965, + "step": 6547 + }, + { + "epoch": 0.9011835948252134, + "grad_norm": 1.6007294737907005, + "learning_rate": 5.077812974765395e-07, + "loss": 0.8552, + "step": 6548 + }, + { + "epoch": 0.9013212221304707, + "grad_norm": 1.533965175058837, + "learning_rate": 5.063799016607218e-07, + "loss": 0.879, + "step": 6549 + }, + { + "epoch": 0.9014588494357281, + "grad_norm": 1.8042562024634408, + "learning_rate": 5.049803920972652e-07, + "loss": 0.8919, + "step": 6550 + }, + { + "epoch": 0.9015964767409854, + "grad_norm": 1.8070898356916127, + "learning_rate": 5.035827690642303e-07, + "loss": 0.9123, + "step": 6551 + }, + { + "epoch": 0.9017341040462428, + "grad_norm": 1.8737460169104463, + "learning_rate": 5.021870328393108e-07, + "loss": 0.8766, + "step": 6552 + }, + { + "epoch": 0.9018717313515001, + "grad_norm": 1.6068687612614978, + "learning_rate": 5.007931836998136e-07, + "loss": 0.9058, + "step": 6553 + }, + { + "epoch": 0.9020093586567575, + "grad_norm": 1.5647655450688793, + "learning_rate": 4.99401221922684e-07, + "loss": 0.9407, + "step": 6554 + }, + { + "epoch": 0.9021469859620148, + "grad_norm": 1.7292845833284105, + "learning_rate": 4.980111477844796e-07, + "loss": 0.9001, + "step": 6555 + }, + { + "epoch": 0.9022846132672723, + "grad_norm": 1.7460313749169862, + "learning_rate": 4.966229615613927e-07, + "loss": 0.9419, + "step": 6556 + }, + { + "epoch": 0.9024222405725296, + "grad_norm": 1.7949317549858668, + "learning_rate": 4.952366635292371e-07, + "loss": 0.9027, + "step": 6557 + }, + { + "epoch": 0.902559867877787, + "grad_norm": 1.753305123783024, + "learning_rate": 4.938522539634505e-07, + "loss": 0.8999, + "step": 6558 + }, + { + "epoch": 0.9026974951830443, + "grad_norm": 2.290788703166616, + "learning_rate": 4.92469733139096e-07, + "loss": 0.8811, + "step": 6559 + }, + { + "epoch": 0.9028351224883017, + "grad_norm": 1.8858108772066675, + "learning_rate": 4.910891013308617e-07, + "loss": 0.8626, + "step": 6560 + }, + { + "epoch": 0.902972749793559, + "grad_norm": 2.0487260009049444, + "learning_rate": 4.897103588130591e-07, + "loss": 0.8643, + "step": 6561 + }, + { + "epoch": 0.9031103770988164, + "grad_norm": 1.8731328089032253, + "learning_rate": 4.883335058596284e-07, + "loss": 0.9016, + 
"step": 6562 + }, + { + "epoch": 0.9032480044040738, + "grad_norm": 1.7335271641120151, + "learning_rate": 4.869585427441281e-07, + "loss": 0.9036, + "step": 6563 + }, + { + "epoch": 0.9033856317093312, + "grad_norm": 1.8864747923351008, + "learning_rate": 4.855854697397467e-07, + "loss": 0.8174, + "step": 6564 + }, + { + "epoch": 0.9035232590145885, + "grad_norm": 1.8963899210084483, + "learning_rate": 4.842142871192934e-07, + "loss": 0.8826, + "step": 6565 + }, + { + "epoch": 0.9036608863198459, + "grad_norm": 1.646440538421722, + "learning_rate": 4.82844995155205e-07, + "loss": 0.9325, + "step": 6566 + }, + { + "epoch": 0.9037985136251032, + "grad_norm": 1.977717039678017, + "learning_rate": 4.814775941195404e-07, + "loss": 0.8555, + "step": 6567 + }, + { + "epoch": 0.9039361409303606, + "grad_norm": 1.9414915727587263, + "learning_rate": 4.801120842839834e-07, + "loss": 0.8718, + "step": 6568 + }, + { + "epoch": 0.9040737682356179, + "grad_norm": 1.882564153265852, + "learning_rate": 4.787484659198427e-07, + "loss": 0.9298, + "step": 6569 + }, + { + "epoch": 0.9042113955408753, + "grad_norm": 1.8674386775015053, + "learning_rate": 4.773867392980491e-07, + "loss": 0.8205, + "step": 6570 + }, + { + "epoch": 0.9043490228461327, + "grad_norm": 1.6384417469931487, + "learning_rate": 4.760269046891608e-07, + "loss": 0.8777, + "step": 6571 + }, + { + "epoch": 0.9044866501513901, + "grad_norm": 1.8653101773206573, + "learning_rate": 4.7466896236335823e-07, + "loss": 0.9435, + "step": 6572 + }, + { + "epoch": 0.9046242774566474, + "grad_norm": 1.6540119127835107, + "learning_rate": 4.7331291259044345e-07, + "loss": 0.8822, + "step": 6573 + }, + { + "epoch": 0.9047619047619048, + "grad_norm": 1.6567127234929107, + "learning_rate": 4.719587556398464e-07, + "loss": 0.8459, + "step": 6574 + }, + { + "epoch": 0.9048995320671621, + "grad_norm": 1.724847266600291, + "learning_rate": 4.70606491780623e-07, + "loss": 0.8452, + "step": 6575 + }, + { + "epoch": 0.9050371593724195, + "grad_norm": 1.9040297228230547, + "learning_rate": 4.692561212814439e-07, + "loss": 0.8167, + "step": 6576 + }, + { + "epoch": 0.9051747866776768, + "grad_norm": 1.6101297233610008, + "learning_rate": 4.6790764441061345e-07, + "loss": 0.9209, + "step": 6577 + }, + { + "epoch": 0.9053124139829342, + "grad_norm": 1.839525351460534, + "learning_rate": 4.665610614360527e-07, + "loss": 0.7991, + "step": 6578 + }, + { + "epoch": 0.9054500412881916, + "grad_norm": 1.6563624770820888, + "learning_rate": 4.6521637262531117e-07, + "loss": 0.9147, + "step": 6579 + }, + { + "epoch": 0.905587668593449, + "grad_norm": 1.8234873772590174, + "learning_rate": 4.6387357824555945e-07, + "loss": 0.7843, + "step": 6580 + }, + { + "epoch": 0.9057252958987063, + "grad_norm": 2.0193544014437013, + "learning_rate": 4.6253267856359087e-07, + "loss": 0.8302, + "step": 6581 + }, + { + "epoch": 0.9058629232039637, + "grad_norm": 2.014872590370536, + "learning_rate": 4.6119367384582783e-07, + "loss": 0.9115, + "step": 6582 + }, + { + "epoch": 0.906000550509221, + "grad_norm": 2.11212818691487, + "learning_rate": 4.598565643583075e-07, + "loss": 0.9088, + "step": 6583 + }, + { + "epoch": 0.9061381778144784, + "grad_norm": 2.108754784364996, + "learning_rate": 4.5852135036669965e-07, + "loss": 0.8706, + "step": 6584 + }, + { + "epoch": 0.9062758051197357, + "grad_norm": 1.6985852446316199, + "learning_rate": 4.571880321362887e-07, + "loss": 0.8444, + "step": 6585 + }, + { + "epoch": 0.9064134324249932, + "grad_norm": 1.8210663379800185, + "learning_rate": 
4.5585660993199166e-07, + "loss": 0.8954, + "step": 6586 + }, + { + "epoch": 0.9065510597302505, + "grad_norm": 1.6887238460973126, + "learning_rate": 4.54527084018338e-07, + "loss": 0.8618, + "step": 6587 + }, + { + "epoch": 0.9066886870355079, + "grad_norm": 1.4555231728404738, + "learning_rate": 4.531994546594909e-07, + "loss": 0.8832, + "step": 6588 + }, + { + "epoch": 0.9068263143407652, + "grad_norm": 1.7671927289371971, + "learning_rate": 4.518737221192304e-07, + "loss": 0.9437, + "step": 6589 + }, + { + "epoch": 0.9069639416460226, + "grad_norm": 1.9954060623643808, + "learning_rate": 4.5054988666096255e-07, + "loss": 0.8603, + "step": 6590 + }, + { + "epoch": 0.9071015689512799, + "grad_norm": 1.7106584993534777, + "learning_rate": 4.4922794854771355e-07, + "loss": 0.8496, + "step": 6591 + }, + { + "epoch": 0.9072391962565373, + "grad_norm": 1.81272496832236, + "learning_rate": 4.479079080421356e-07, + "loss": 0.9054, + "step": 6592 + }, + { + "epoch": 0.9073768235617946, + "grad_norm": 2.0798587406957223, + "learning_rate": 4.465897654065032e-07, + "loss": 0.8819, + "step": 6593 + }, + { + "epoch": 0.9075144508670521, + "grad_norm": 1.9205973601469513, + "learning_rate": 4.4527352090271147e-07, + "loss": 0.9219, + "step": 6594 + }, + { + "epoch": 0.9076520781723094, + "grad_norm": 1.8955465109042444, + "learning_rate": 4.4395917479228225e-07, + "loss": 0.8912, + "step": 6595 + }, + { + "epoch": 0.9077897054775668, + "grad_norm": 1.8527383773608623, + "learning_rate": 4.426467273363566e-07, + "loss": 0.8703, + "step": 6596 + }, + { + "epoch": 0.9079273327828241, + "grad_norm": 1.7415456547412604, + "learning_rate": 4.413361787956993e-07, + "loss": 0.8579, + "step": 6597 + }, + { + "epoch": 0.9080649600880815, + "grad_norm": 1.7758426039912696, + "learning_rate": 4.4002752943070105e-07, + "loss": 0.8935, + "step": 6598 + }, + { + "epoch": 0.9082025873933388, + "grad_norm": 1.8530171865645382, + "learning_rate": 4.3872077950136925e-07, + "loss": 0.8698, + "step": 6599 + }, + { + "epoch": 0.9083402146985962, + "grad_norm": 1.596771878669106, + "learning_rate": 4.3741592926734076e-07, + "loss": 0.9345, + "step": 6600 + }, + { + "epoch": 0.9084778420038536, + "grad_norm": 1.6112671912522478, + "learning_rate": 4.3611297898786817e-07, + "loss": 0.8113, + "step": 6601 + }, + { + "epoch": 0.908615469309111, + "grad_norm": 1.9419853892114927, + "learning_rate": 4.348119289218322e-07, + "loss": 0.7748, + "step": 6602 + }, + { + "epoch": 0.9087530966143683, + "grad_norm": 1.801264363217741, + "learning_rate": 4.335127793277316e-07, + "loss": 0.977, + "step": 6603 + }, + { + "epoch": 0.9088907239196257, + "grad_norm": 1.9890863003779633, + "learning_rate": 4.322155304636921e-07, + "loss": 0.887, + "step": 6604 + }, + { + "epoch": 0.909028351224883, + "grad_norm": 1.717762295220119, + "learning_rate": 4.309201825874576e-07, + "loss": 0.8402, + "step": 6605 + }, + { + "epoch": 0.9091659785301404, + "grad_norm": 1.8232544832949633, + "learning_rate": 4.2962673595639435e-07, + "loss": 0.8873, + "step": 6606 + }, + { + "epoch": 0.9093036058353977, + "grad_norm": 1.6639896450540121, + "learning_rate": 4.28335190827498e-07, + "loss": 0.8065, + "step": 6607 + }, + { + "epoch": 0.9094412331406551, + "grad_norm": 1.8037209367726625, + "learning_rate": 4.2704554745737534e-07, + "loss": 0.907, + "step": 6608 + }, + { + "epoch": 0.9095788604459125, + "grad_norm": 1.8621254840145887, + "learning_rate": 4.2575780610226494e-07, + "loss": 0.9288, + "step": 6609 + }, + { + "epoch": 0.9097164877511699, + 
"grad_norm": 1.574332402170311, + "learning_rate": 4.2447196701801976e-07, + "loss": 0.9324, + "step": 6610 + }, + { + "epoch": 0.9098541150564272, + "grad_norm": 1.7674899905187675, + "learning_rate": 4.231880304601199e-07, + "loss": 0.9331, + "step": 6611 + }, + { + "epoch": 0.9099917423616846, + "grad_norm": 1.706511890907948, + "learning_rate": 4.2190599668366584e-07, + "loss": 0.8878, + "step": 6612 + }, + { + "epoch": 0.9101293696669419, + "grad_norm": 1.833179119890394, + "learning_rate": 4.206258659433804e-07, + "loss": 0.8856, + "step": 6613 + }, + { + "epoch": 0.9102669969721993, + "grad_norm": 1.5151215350581662, + "learning_rate": 4.1934763849361016e-07, + "loss": 0.9007, + "step": 6614 + }, + { + "epoch": 0.9104046242774566, + "grad_norm": 1.7594633829797883, + "learning_rate": 4.180713145883164e-07, + "loss": 0.8985, + "step": 6615 + }, + { + "epoch": 0.910542251582714, + "grad_norm": 1.975519087189147, + "learning_rate": 4.1679689448109294e-07, + "loss": 0.8899, + "step": 6616 + }, + { + "epoch": 0.9106798788879714, + "grad_norm": 1.9699918505680831, + "learning_rate": 4.1552437842514395e-07, + "loss": 0.8378, + "step": 6617 + }, + { + "epoch": 0.9108175061932288, + "grad_norm": 1.9591865936143695, + "learning_rate": 4.1425376667330596e-07, + "loss": 0.9208, + "step": 6618 + }, + { + "epoch": 0.9109551334984861, + "grad_norm": 1.6895012658487327, + "learning_rate": 4.1298505947802825e-07, + "loss": 0.8609, + "step": 6619 + }, + { + "epoch": 0.9110927608037435, + "grad_norm": 1.6723245518917729, + "learning_rate": 4.1171825709138804e-07, + "loss": 0.9109, + "step": 6620 + }, + { + "epoch": 0.9112303881090008, + "grad_norm": 1.7093450591013928, + "learning_rate": 4.104533597650817e-07, + "loss": 0.8829, + "step": 6621 + }, + { + "epoch": 0.9113680154142582, + "grad_norm": 1.7379208229181309, + "learning_rate": 4.0919036775042496e-07, + "loss": 0.9415, + "step": 6622 + }, + { + "epoch": 0.9115056427195155, + "grad_norm": 1.9339672998004858, + "learning_rate": 4.0792928129835927e-07, + "loss": 0.9521, + "step": 6623 + }, + { + "epoch": 0.911643270024773, + "grad_norm": 1.6733579771648264, + "learning_rate": 4.0667010065944423e-07, + "loss": 0.8637, + "step": 6624 + }, + { + "epoch": 0.9117808973300303, + "grad_norm": 1.7416226244790092, + "learning_rate": 4.0541282608386077e-07, + "loss": 0.863, + "step": 6625 + }, + { + "epoch": 0.9119185246352877, + "grad_norm": 1.8401733008096999, + "learning_rate": 4.041574578214147e-07, + "loss": 0.8833, + "step": 6626 + }, + { + "epoch": 0.912056151940545, + "grad_norm": 1.7730339800730637, + "learning_rate": 4.0290399612152755e-07, + "loss": 0.9953, + "step": 6627 + }, + { + "epoch": 0.9121937792458024, + "grad_norm": 1.6269898459706014, + "learning_rate": 4.0165244123324674e-07, + "loss": 0.8787, + "step": 6628 + }, + { + "epoch": 0.9123314065510597, + "grad_norm": 1.750878474384059, + "learning_rate": 4.00402793405239e-07, + "loss": 0.8903, + "step": 6629 + }, + { + "epoch": 0.9124690338563171, + "grad_norm": 1.6926773769276244, + "learning_rate": 3.9915505288579236e-07, + "loss": 0.8295, + "step": 6630 + }, + { + "epoch": 0.9126066611615744, + "grad_norm": 1.780408957544683, + "learning_rate": 3.9790921992281515e-07, + "loss": 0.9826, + "step": 6631 + }, + { + "epoch": 0.9127442884668319, + "grad_norm": 1.6870287344489507, + "learning_rate": 3.966652947638383e-07, + "loss": 0.9341, + "step": 6632 + }, + { + "epoch": 0.9128819157720892, + "grad_norm": 1.6532776105631557, + "learning_rate": 3.9542327765601183e-07, + "loss": 0.9941, + 
"step": 6633 + }, + { + "epoch": 0.9130195430773466, + "grad_norm": 1.8311052463862865, + "learning_rate": 3.941831688461073e-07, + "loss": 0.8675, + "step": 6634 + }, + { + "epoch": 0.9131571703826039, + "grad_norm": 1.7801054480401524, + "learning_rate": 3.9294496858051757e-07, + "loss": 0.9045, + "step": 6635 + }, + { + "epoch": 0.9132947976878613, + "grad_norm": 1.7328620085983233, + "learning_rate": 3.9170867710525583e-07, + "loss": 0.9292, + "step": 6636 + }, + { + "epoch": 0.9134324249931186, + "grad_norm": 1.8854547764240481, + "learning_rate": 3.90474294665959e-07, + "loss": 0.8593, + "step": 6637 + }, + { + "epoch": 0.913570052298376, + "grad_norm": 1.6695318283187075, + "learning_rate": 3.8924182150787635e-07, + "loss": 0.8711, + "step": 6638 + }, + { + "epoch": 0.9137076796036334, + "grad_norm": 1.714850573071997, + "learning_rate": 3.88011257875891e-07, + "loss": 0.9459, + "step": 6639 + }, + { + "epoch": 0.9138453069088908, + "grad_norm": 1.7400816330670235, + "learning_rate": 3.867826040144906e-07, + "loss": 0.8727, + "step": 6640 + }, + { + "epoch": 0.9139829342141481, + "grad_norm": 2.0030781093942145, + "learning_rate": 3.855558601678e-07, + "loss": 0.9388, + "step": 6641 + }, + { + "epoch": 0.9141205615194055, + "grad_norm": 1.7510381620771789, + "learning_rate": 3.843310265795508e-07, + "loss": 0.8248, + "step": 6642 + }, + { + "epoch": 0.9142581888246628, + "grad_norm": 1.9172835490329863, + "learning_rate": 3.8310810349310165e-07, + "loss": 0.8942, + "step": 6643 + }, + { + "epoch": 0.9143958161299202, + "grad_norm": 2.0032379166871634, + "learning_rate": 3.8188709115143384e-07, + "loss": 0.8625, + "step": 6644 + }, + { + "epoch": 0.9145334434351775, + "grad_norm": 1.9548096070771217, + "learning_rate": 3.8066798979714106e-07, + "loss": 0.9453, + "step": 6645 + }, + { + "epoch": 0.9146710707404349, + "grad_norm": 1.9540109225545668, + "learning_rate": 3.7945079967244613e-07, + "loss": 0.9001, + "step": 6646 + }, + { + "epoch": 0.9148086980456923, + "grad_norm": 1.7875528064076698, + "learning_rate": 3.782355210191846e-07, + "loss": 0.9243, + "step": 6647 + }, + { + "epoch": 0.9149463253509497, + "grad_norm": 1.740506784024848, + "learning_rate": 3.770221540788188e-07, + "loss": 0.9466, + "step": 6648 + }, + { + "epoch": 0.915083952656207, + "grad_norm": 1.9094809839627789, + "learning_rate": 3.75810699092426e-07, + "loss": 0.8493, + "step": 6649 + }, + { + "epoch": 0.9152215799614644, + "grad_norm": 1.6571270209215923, + "learning_rate": 3.746011563007057e-07, + "loss": 0.9009, + "step": 6650 + }, + { + "epoch": 0.9153592072667217, + "grad_norm": 1.879873722723251, + "learning_rate": 3.733935259439803e-07, + "loss": 0.8319, + "step": 6651 + }, + { + "epoch": 0.9154968345719791, + "grad_norm": 1.7502571279791121, + "learning_rate": 3.721878082621866e-07, + "loss": 0.926, + "step": 6652 + }, + { + "epoch": 0.9156344618772364, + "grad_norm": 1.5251944349329831, + "learning_rate": 3.7098400349488415e-07, + "loss": 0.8703, + "step": 6653 + }, + { + "epoch": 0.9157720891824938, + "grad_norm": 1.8625417040164118, + "learning_rate": 3.6978211188125504e-07, + "loss": 0.8786, + "step": 6654 + }, + { + "epoch": 0.9159097164877512, + "grad_norm": 1.6554450799381786, + "learning_rate": 3.685821336600959e-07, + "loss": 0.8419, + "step": 6655 + }, + { + "epoch": 0.9160473437930086, + "grad_norm": 1.910404918622632, + "learning_rate": 3.6738406906982716e-07, + "loss": 0.867, + "step": 6656 + }, + { + "epoch": 0.9161849710982659, + "grad_norm": 1.7115333544832927, + "learning_rate": 
3.6618791834848734e-07, + "loss": 0.8915, + "step": 6657 + }, + { + "epoch": 0.9163225984035233, + "grad_norm": 1.853647120727828, + "learning_rate": 3.649936817337363e-07, + "loss": 0.8883, + "step": 6658 + }, + { + "epoch": 0.9164602257087806, + "grad_norm": 1.6383060504556373, + "learning_rate": 3.63801359462852e-07, + "loss": 0.8606, + "step": 6659 + }, + { + "epoch": 0.916597853014038, + "grad_norm": 1.7642863664843762, + "learning_rate": 3.6261095177273164e-07, + "loss": 0.8689, + "step": 6660 + }, + { + "epoch": 0.9167354803192953, + "grad_norm": 1.9386432165065688, + "learning_rate": 3.614224588998938e-07, + "loss": 0.8728, + "step": 6661 + }, + { + "epoch": 0.9168731076245528, + "grad_norm": 1.6334616354537612, + "learning_rate": 3.6023588108047627e-07, + "loss": 0.8911, + "step": 6662 + }, + { + "epoch": 0.9170107349298101, + "grad_norm": 1.812030793003262, + "learning_rate": 3.590512185502348e-07, + "loss": 0.8758, + "step": 6663 + }, + { + "epoch": 0.9171483622350675, + "grad_norm": 1.6046301523042072, + "learning_rate": 3.5786847154454576e-07, + "loss": 0.8928, + "step": 6664 + }, + { + "epoch": 0.9172859895403248, + "grad_norm": 1.8652323603229102, + "learning_rate": 3.5668764029840543e-07, + "loss": 0.9667, + "step": 6665 + }, + { + "epoch": 0.9174236168455822, + "grad_norm": 1.6119432651216818, + "learning_rate": 3.5550872504642844e-07, + "loss": 0.9323, + "step": 6666 + }, + { + "epoch": 0.9175612441508395, + "grad_norm": 1.6850927496028545, + "learning_rate": 3.543317260228485e-07, + "loss": 0.9707, + "step": 6667 + }, + { + "epoch": 0.9176988714560969, + "grad_norm": 1.6147412596159219, + "learning_rate": 3.5315664346151857e-07, + "loss": 0.9153, + "step": 6668 + }, + { + "epoch": 0.9178364987613542, + "grad_norm": 1.8546617511959536, + "learning_rate": 3.5198347759591634e-07, + "loss": 0.873, + "step": 6669 + }, + { + "epoch": 0.9179741260666117, + "grad_norm": 1.924301868998822, + "learning_rate": 3.5081222865912645e-07, + "loss": 0.9002, + "step": 6670 + }, + { + "epoch": 0.918111753371869, + "grad_norm": 1.7908707947828946, + "learning_rate": 3.496428968838661e-07, + "loss": 0.9398, + "step": 6671 + }, + { + "epoch": 0.9182493806771264, + "grad_norm": 1.7311379218446268, + "learning_rate": 3.4847548250246167e-07, + "loss": 0.9166, + "step": 6672 + }, + { + "epoch": 0.9183870079823837, + "grad_norm": 1.738987796179036, + "learning_rate": 3.473099857468665e-07, + "loss": 0.8442, + "step": 6673 + }, + { + "epoch": 0.9185246352876411, + "grad_norm": 1.5105149515565026, + "learning_rate": 3.461464068486442e-07, + "loss": 0.8594, + "step": 6674 + }, + { + "epoch": 0.9186622625928984, + "grad_norm": 1.7604936033848666, + "learning_rate": 3.4498474603898323e-07, + "loss": 0.8679, + "step": 6675 + }, + { + "epoch": 0.9187998898981558, + "grad_norm": 1.7241475854773618, + "learning_rate": 3.438250035486923e-07, + "loss": 0.8838, + "step": 6676 + }, + { + "epoch": 0.9189375172034132, + "grad_norm": 1.8471638937604435, + "learning_rate": 3.426671796081926e-07, + "loss": 0.8193, + "step": 6677 + }, + { + "epoch": 0.9190751445086706, + "grad_norm": 1.7662535974844213, + "learning_rate": 3.415112744475313e-07, + "loss": 0.8909, + "step": 6678 + }, + { + "epoch": 0.9192127718139279, + "grad_norm": 1.4666376711190297, + "learning_rate": 3.4035728829636796e-07, + "loss": 0.9573, + "step": 6679 + }, + { + "epoch": 0.9193503991191853, + "grad_norm": 1.6793110481500233, + "learning_rate": 3.3920522138398805e-07, + "loss": 0.8592, + "step": 6680 + }, + { + "epoch": 0.9194880264244426, + 
"grad_norm": 1.63743169968516, + "learning_rate": 3.380550739392863e-07, + "loss": 0.8868, + "step": 6681 + }, + { + "epoch": 0.9196256537297, + "grad_norm": 1.8810930160267187, + "learning_rate": 3.369068461907843e-07, + "loss": 0.87, + "step": 6682 + }, + { + "epoch": 0.9197632810349573, + "grad_norm": 1.8177284947069905, + "learning_rate": 3.357605383666185e-07, + "loss": 0.9294, + "step": 6683 + }, + { + "epoch": 0.9199009083402147, + "grad_norm": 1.8911852417396828, + "learning_rate": 3.3461615069454555e-07, + "loss": 0.9342, + "step": 6684 + }, + { + "epoch": 0.9200385356454721, + "grad_norm": 1.64706066456111, + "learning_rate": 3.334736834019381e-07, + "loss": 0.9067, + "step": 6685 + }, + { + "epoch": 0.9201761629507295, + "grad_norm": 1.5695035191908084, + "learning_rate": 3.3233313671579e-07, + "loss": 0.877, + "step": 6686 + }, + { + "epoch": 0.9203137902559868, + "grad_norm": 1.7427359490335155, + "learning_rate": 3.3119451086271125e-07, + "loss": 0.9123, + "step": 6687 + }, + { + "epoch": 0.9204514175612442, + "grad_norm": 1.5918768816296056, + "learning_rate": 3.3005780606893187e-07, + "loss": 0.8355, + "step": 6688 + }, + { + "epoch": 0.9205890448665015, + "grad_norm": 1.8127723040383803, + "learning_rate": 3.2892302256029907e-07, + "loss": 0.9059, + "step": 6689 + }, + { + "epoch": 0.9207266721717589, + "grad_norm": 1.7057368540740396, + "learning_rate": 3.277901605622802e-07, + "loss": 0.8904, + "step": 6690 + }, + { + "epoch": 0.9208642994770162, + "grad_norm": 2.253848317789013, + "learning_rate": 3.2665922029995635e-07, + "loss": 0.8578, + "step": 6691 + }, + { + "epoch": 0.9210019267822736, + "grad_norm": 1.7667600616031471, + "learning_rate": 3.255302019980333e-07, + "loss": 0.8533, + "step": 6692 + }, + { + "epoch": 0.921139554087531, + "grad_norm": 1.6974616079507912, + "learning_rate": 3.2440310588082946e-07, + "loss": 0.9021, + "step": 6693 + }, + { + "epoch": 0.9212771813927884, + "grad_norm": 2.084911902778498, + "learning_rate": 3.232779321722845e-07, + "loss": 0.848, + "step": 6694 + }, + { + "epoch": 0.9214148086980457, + "grad_norm": 1.9454637788301041, + "learning_rate": 3.2215468109595416e-07, + "loss": 0.9145, + "step": 6695 + }, + { + "epoch": 0.921552436003303, + "grad_norm": 1.7208332923753595, + "learning_rate": 3.2103335287501314e-07, + "loss": 0.8818, + "step": 6696 + }, + { + "epoch": 0.9216900633085604, + "grad_norm": 1.835177135267487, + "learning_rate": 3.1991394773225435e-07, + "loss": 0.8931, + "step": 6697 + }, + { + "epoch": 0.9218276906138178, + "grad_norm": 1.8319827998143843, + "learning_rate": 3.1879646589008775e-07, + "loss": 0.8974, + "step": 6698 + }, + { + "epoch": 0.9219653179190751, + "grad_norm": 2.004390978715643, + "learning_rate": 3.176809075705445e-07, + "loss": 0.9169, + "step": 6699 + }, + { + "epoch": 0.9221029452243326, + "grad_norm": 1.8122984673102467, + "learning_rate": 3.1656727299526513e-07, + "loss": 0.8577, + "step": 6700 + }, + { + "epoch": 0.9222405725295899, + "grad_norm": 1.7913896494943586, + "learning_rate": 3.154555623855193e-07, + "loss": 0.9805, + "step": 6701 + }, + { + "epoch": 0.9223781998348473, + "grad_norm": 1.7884600144595337, + "learning_rate": 3.1434577596218483e-07, + "loss": 0.9343, + "step": 6702 + }, + { + "epoch": 0.9225158271401046, + "grad_norm": 1.7956755109017832, + "learning_rate": 3.132379139457642e-07, + "loss": 0.8523, + "step": 6703 + }, + { + "epoch": 0.922653454445362, + "grad_norm": 1.733126742744374, + "learning_rate": 3.121319765563702e-07, + "loss": 0.9207, + "step": 6704 + }, 
+ { + "epoch": 0.9227910817506193, + "grad_norm": 1.8734521486985727, + "learning_rate": 3.110279640137415e-07, + "loss": 0.8499, + "step": 6705 + }, + { + "epoch": 0.9229287090558767, + "grad_norm": 1.8803686463013634, + "learning_rate": 3.099258765372304e-07, + "loss": 0.9017, + "step": 6706 + }, + { + "epoch": 0.923066336361134, + "grad_norm": 1.8920744331639814, + "learning_rate": 3.088257143458018e-07, + "loss": 0.8457, + "step": 6707 + }, + { + "epoch": 0.9232039636663915, + "grad_norm": 1.5580227608560706, + "learning_rate": 3.0772747765804854e-07, + "loss": 0.9205, + "step": 6708 + }, + { + "epoch": 0.9233415909716488, + "grad_norm": 1.9287959286744265, + "learning_rate": 3.0663116669217064e-07, + "loss": 0.9555, + "step": 6709 + }, + { + "epoch": 0.9234792182769062, + "grad_norm": 1.7840050128312877, + "learning_rate": 3.055367816659949e-07, + "loss": 0.9155, + "step": 6710 + }, + { + "epoch": 0.9236168455821635, + "grad_norm": 1.6611551553663357, + "learning_rate": 3.0444432279695514e-07, + "loss": 0.9608, + "step": 6711 + }, + { + "epoch": 0.9237544728874209, + "grad_norm": 1.8760277839256523, + "learning_rate": 3.033537903021111e-07, + "loss": 0.8878, + "step": 6712 + }, + { + "epoch": 0.9238921001926782, + "grad_norm": 1.9739036425922312, + "learning_rate": 3.022651843981372e-07, + "loss": 0.9726, + "step": 6713 + }, + { + "epoch": 0.9240297274979355, + "grad_norm": 2.187773574199657, + "learning_rate": 3.0117850530132385e-07, + "loss": 0.8506, + "step": 6714 + }, + { + "epoch": 0.924167354803193, + "grad_norm": 1.784754402597568, + "learning_rate": 3.0009375322757826e-07, + "loss": 0.8557, + "step": 6715 + }, + { + "epoch": 0.9243049821084504, + "grad_norm": 1.8168110741645787, + "learning_rate": 2.99010928392427e-07, + "loss": 0.8784, + "step": 6716 + }, + { + "epoch": 0.9244426094137077, + "grad_norm": 2.0265989117451118, + "learning_rate": 2.9793003101101225e-07, + "loss": 0.9467, + "step": 6717 + }, + { + "epoch": 0.924580236718965, + "grad_norm": 1.6499394612165763, + "learning_rate": 2.968510612980935e-07, + "loss": 0.8682, + "step": 6718 + }, + { + "epoch": 0.9247178640242224, + "grad_norm": 1.7920370365755391, + "learning_rate": 2.957740194680481e-07, + "loss": 0.9218, + "step": 6719 + }, + { + "epoch": 0.9248554913294798, + "grad_norm": 1.7402510327042713, + "learning_rate": 2.9469890573486705e-07, + "loss": 0.8526, + "step": 6720 + }, + { + "epoch": 0.9249931186347371, + "grad_norm": 1.8590331059899194, + "learning_rate": 2.93625720312164e-07, + "loss": 0.8838, + "step": 6721 + }, + { + "epoch": 0.9251307459399944, + "grad_norm": 2.057331600625596, + "learning_rate": 2.925544634131638e-07, + "loss": 0.9378, + "step": 6722 + }, + { + "epoch": 0.9252683732452519, + "grad_norm": 2.0154618278172243, + "learning_rate": 2.9148513525071085e-07, + "loss": 0.8736, + "step": 6723 + }, + { + "epoch": 0.9254060005505093, + "grad_norm": 1.7521081650127621, + "learning_rate": 2.904177360372662e-07, + "loss": 0.9283, + "step": 6724 + }, + { + "epoch": 0.9255436278557666, + "grad_norm": 2.037714302100558, + "learning_rate": 2.8935226598490904e-07, + "loss": 0.8737, + "step": 6725 + }, + { + "epoch": 0.925681255161024, + "grad_norm": 2.4180617062426237, + "learning_rate": 2.8828872530533127e-07, + "loss": 0.8403, + "step": 6726 + }, + { + "epoch": 0.9258188824662813, + "grad_norm": 1.7423897982131689, + "learning_rate": 2.872271142098448e-07, + "loss": 0.925, + "step": 6727 + }, + { + "epoch": 0.9259565097715386, + "grad_norm": 1.761866097084584, + "learning_rate": 
2.861674329093778e-07, + "loss": 0.8629, + "step": 6728 + }, + { + "epoch": 0.926094137076796, + "grad_norm": 2.1137081289410853, + "learning_rate": 2.8510968161447386e-07, + "loss": 0.9382, + "step": 6729 + }, + { + "epoch": 0.9262317643820533, + "grad_norm": 1.8521920591215564, + "learning_rate": 2.840538605352927e-07, + "loss": 0.9803, + "step": 6730 + }, + { + "epoch": 0.9263693916873108, + "grad_norm": 1.8039333554448262, + "learning_rate": 2.829999698816155e-07, + "loss": 0.9215, + "step": 6731 + }, + { + "epoch": 0.9265070189925682, + "grad_norm": 1.8825457669069858, + "learning_rate": 2.819480098628302e-07, + "loss": 0.8621, + "step": 6732 + }, + { + "epoch": 0.9266446462978255, + "grad_norm": 1.7522793000762182, + "learning_rate": 2.808979806879519e-07, + "loss": 0.8051, + "step": 6733 + }, + { + "epoch": 0.9267822736030829, + "grad_norm": 1.620167762810065, + "learning_rate": 2.7984988256560265e-07, + "loss": 0.9609, + "step": 6734 + }, + { + "epoch": 0.9269199009083402, + "grad_norm": 4.753660088830378, + "learning_rate": 2.788037157040302e-07, + "loss": 0.8456, + "step": 6735 + }, + { + "epoch": 0.9270575282135975, + "grad_norm": 1.734463601719378, + "learning_rate": 2.7775948031108835e-07, + "loss": 0.8798, + "step": 6736 + }, + { + "epoch": 0.9271951555188549, + "grad_norm": 2.0480650194414745, + "learning_rate": 2.767171765942556e-07, + "loss": 0.9232, + "step": 6737 + }, + { + "epoch": 0.9273327828241124, + "grad_norm": 1.6693304190490281, + "learning_rate": 2.7567680476062287e-07, + "loss": 0.9126, + "step": 6738 + }, + { + "epoch": 0.9274704101293697, + "grad_norm": 1.7766777369505644, + "learning_rate": 2.746383650168971e-07, + "loss": 0.8125, + "step": 6739 + }, + { + "epoch": 0.927608037434627, + "grad_norm": 1.6354296490214693, + "learning_rate": 2.7360185756940326e-07, + "loss": 0.9499, + "step": 6740 + }, + { + "epoch": 0.9277456647398844, + "grad_norm": 2.1053599947431305, + "learning_rate": 2.7256728262407884e-07, + "loss": 0.7553, + "step": 6741 + }, + { + "epoch": 0.9278832920451417, + "grad_norm": 1.7647441018626433, + "learning_rate": 2.715346403864838e-07, + "loss": 0.8812, + "step": 6742 + }, + { + "epoch": 0.9280209193503991, + "grad_norm": 1.67035945811495, + "learning_rate": 2.705039310617852e-07, + "loss": 0.9077, + "step": 6743 + }, + { + "epoch": 0.9281585466556564, + "grad_norm": 1.70150900897486, + "learning_rate": 2.694751548547736e-07, + "loss": 0.9253, + "step": 6744 + }, + { + "epoch": 0.9282961739609138, + "grad_norm": 1.6407886142683044, + "learning_rate": 2.684483119698533e-07, + "loss": 0.8626, + "step": 6745 + }, + { + "epoch": 0.9284338012661713, + "grad_norm": 1.6808680263612912, + "learning_rate": 2.6742340261104226e-07, + "loss": 0.9122, + "step": 6746 + }, + { + "epoch": 0.9285714285714286, + "grad_norm": 3.153135252003833, + "learning_rate": 2.6640042698197754e-07, + "loss": 0.9082, + "step": 6747 + }, + { + "epoch": 0.928709055876686, + "grad_norm": 1.7994744391412743, + "learning_rate": 2.653793852859088e-07, + "loss": 0.9158, + "step": 6748 + }, + { + "epoch": 0.9288466831819433, + "grad_norm": 1.7590305572227787, + "learning_rate": 2.643602777257037e-07, + "loss": 0.8904, + "step": 6749 + }, + { + "epoch": 0.9289843104872006, + "grad_norm": 1.6574442553348636, + "learning_rate": 2.633431045038448e-07, + "loss": 0.9064, + "step": 6750 + }, + { + "epoch": 0.929121937792458, + "grad_norm": 1.8289226024863323, + "learning_rate": 2.6232786582243154e-07, + "loss": 0.8852, + "step": 6751 + }, + { + "epoch": 0.9292595650977153, + 
"grad_norm": 1.9218483386695762, + "learning_rate": 2.6131456188317693e-07, + "loss": 0.9728, + "step": 6752 + }, + { + "epoch": 0.9293971924029728, + "grad_norm": 1.806320292935636, + "learning_rate": 2.6030319288741e-07, + "loss": 0.8817, + "step": 6753 + }, + { + "epoch": 0.9295348197082302, + "grad_norm": 1.7984514680359818, + "learning_rate": 2.592937590360778e-07, + "loss": 0.8991, + "step": 6754 + }, + { + "epoch": 0.9296724470134875, + "grad_norm": 1.8492724272917933, + "learning_rate": 2.582862605297398e-07, + "loss": 0.8819, + "step": 6755 + }, + { + "epoch": 0.9298100743187449, + "grad_norm": 1.9723252453530116, + "learning_rate": 2.572806975685715e-07, + "loss": 0.8688, + "step": 6756 + }, + { + "epoch": 0.9299477016240022, + "grad_norm": 1.8624228459278565, + "learning_rate": 2.562770703523665e-07, + "loss": 0.9136, + "step": 6757 + }, + { + "epoch": 0.9300853289292595, + "grad_norm": 1.9245564314205306, + "learning_rate": 2.5527537908052955e-07, + "loss": 0.9283, + "step": 6758 + }, + { + "epoch": 0.9302229562345169, + "grad_norm": 1.6357061165864268, + "learning_rate": 2.542756239520838e-07, + "loss": 0.9027, + "step": 6759 + }, + { + "epoch": 0.9303605835397742, + "grad_norm": 1.8474170957099505, + "learning_rate": 2.53277805165667e-07, + "loss": 0.9135, + "step": 6760 + }, + { + "epoch": 0.9304982108450317, + "grad_norm": 1.7750893802721934, + "learning_rate": 2.522819229195339e-07, + "loss": 0.8972, + "step": 6761 + }, + { + "epoch": 0.930635838150289, + "grad_norm": 1.9631761810096797, + "learning_rate": 2.5128797741154953e-07, + "loss": 0.8484, + "step": 6762 + }, + { + "epoch": 0.9307734654555464, + "grad_norm": 1.7482420845792697, + "learning_rate": 2.502959688392004e-07, + "loss": 0.8023, + "step": 6763 + }, + { + "epoch": 0.9309110927608037, + "grad_norm": 1.940705987395037, + "learning_rate": 2.49305897399581e-07, + "loss": 0.9226, + "step": 6764 + }, + { + "epoch": 0.9310487200660611, + "grad_norm": 1.9222557913282416, + "learning_rate": 2.483177632894096e-07, + "loss": 0.7704, + "step": 6765 + }, + { + "epoch": 0.9311863473713184, + "grad_norm": 1.8166968851934966, + "learning_rate": 2.4733156670501024e-07, + "loss": 0.9132, + "step": 6766 + }, + { + "epoch": 0.9313239746765758, + "grad_norm": 1.7524205401489148, + "learning_rate": 2.4634730784233065e-07, + "loss": 0.8915, + "step": 6767 + }, + { + "epoch": 0.9314616019818331, + "grad_norm": 1.9535002696157346, + "learning_rate": 2.453649868969277e-07, + "loss": 0.9533, + "step": 6768 + }, + { + "epoch": 0.9315992292870906, + "grad_norm": 1.5964397472559173, + "learning_rate": 2.4438460406397523e-07, + "loss": 0.8872, + "step": 6769 + }, + { + "epoch": 0.931736856592348, + "grad_norm": 2.533761718768302, + "learning_rate": 2.434061595382631e-07, + "loss": 0.9668, + "step": 6770 + }, + { + "epoch": 0.9318744838976053, + "grad_norm": 1.8085988263727473, + "learning_rate": 2.4242965351419235e-07, + "loss": 0.9081, + "step": 6771 + }, + { + "epoch": 0.9320121112028626, + "grad_norm": 1.7762197657678571, + "learning_rate": 2.414550861857845e-07, + "loss": 0.9095, + "step": 6772 + }, + { + "epoch": 0.93214973850812, + "grad_norm": 1.6881265047663423, + "learning_rate": 2.4048245774667025e-07, + "loss": 0.9605, + "step": 6773 + }, + { + "epoch": 0.9322873658133773, + "grad_norm": 1.7050383224414745, + "learning_rate": 2.395117683900983e-07, + "loss": 0.8747, + "step": 6774 + }, + { + "epoch": 0.9324249931186347, + "grad_norm": 1.8098885347307578, + "learning_rate": 2.3854301830893103e-07, + "loss": 0.9256, + "step": 
6775 + }, + { + "epoch": 0.9325626204238922, + "grad_norm": 1.6812604516262217, + "learning_rate": 2.375762076956467e-07, + "loss": 0.8757, + "step": 6776 + }, + { + "epoch": 0.9327002477291495, + "grad_norm": 1.6718316606730066, + "learning_rate": 2.3661133674233727e-07, + "loss": 0.8363, + "step": 6777 + }, + { + "epoch": 0.9328378750344068, + "grad_norm": 1.830815890317496, + "learning_rate": 2.3564840564070933e-07, + "loss": 0.9076, + "step": 6778 + }, + { + "epoch": 0.9329755023396642, + "grad_norm": 2.1090006586241468, + "learning_rate": 2.3468741458208322e-07, + "loss": 0.9122, + "step": 6779 + }, + { + "epoch": 0.9331131296449215, + "grad_norm": 1.8956518754324183, + "learning_rate": 2.3372836375739506e-07, + "loss": 0.8679, + "step": 6780 + }, + { + "epoch": 0.9332507569501789, + "grad_norm": 2.0224689082816236, + "learning_rate": 2.3277125335719576e-07, + "loss": 0.8495, + "step": 6781 + }, + { + "epoch": 0.9333883842554362, + "grad_norm": 1.6435425807621866, + "learning_rate": 2.3181608357164874e-07, + "loss": 0.8987, + "step": 6782 + }, + { + "epoch": 0.9335260115606936, + "grad_norm": 1.8556749691602903, + "learning_rate": 2.3086285459053448e-07, + "loss": 0.9048, + "step": 6783 + }, + { + "epoch": 0.933663638865951, + "grad_norm": 1.661868194075282, + "learning_rate": 2.2991156660324698e-07, + "loss": 0.8208, + "step": 6784 + }, + { + "epoch": 0.9338012661712084, + "grad_norm": 2.059331357189913, + "learning_rate": 2.2896221979879173e-07, + "loss": 0.9161, + "step": 6785 + }, + { + "epoch": 0.9339388934764657, + "grad_norm": 2.171894164108577, + "learning_rate": 2.2801481436579344e-07, + "loss": 0.9523, + "step": 6786 + }, + { + "epoch": 0.9340765207817231, + "grad_norm": 2.1251543423482584, + "learning_rate": 2.2706935049248812e-07, + "loss": 0.8402, + "step": 6787 + }, + { + "epoch": 0.9342141480869804, + "grad_norm": 1.8816532150290932, + "learning_rate": 2.261258283667256e-07, + "loss": 0.9228, + "step": 6788 + }, + { + "epoch": 0.9343517753922378, + "grad_norm": 2.0154914803720088, + "learning_rate": 2.251842481759703e-07, + "loss": 0.9016, + "step": 6789 + }, + { + "epoch": 0.9344894026974951, + "grad_norm": 1.8329452619946158, + "learning_rate": 2.242446101073037e-07, + "loss": 0.8955, + "step": 6790 + }, + { + "epoch": 0.9346270300027526, + "grad_norm": 1.6874954454685946, + "learning_rate": 2.2330691434741647e-07, + "loss": 0.8578, + "step": 6791 + }, + { + "epoch": 0.93476465730801, + "grad_norm": 1.611040839581695, + "learning_rate": 2.2237116108261626e-07, + "loss": 0.8631, + "step": 6792 + }, + { + "epoch": 0.9349022846132673, + "grad_norm": 1.9036183638110515, + "learning_rate": 2.2143735049882765e-07, + "loss": 0.98, + "step": 6793 + }, + { + "epoch": 0.9350399119185246, + "grad_norm": 1.6988433964126208, + "learning_rate": 2.2050548278158113e-07, + "loss": 0.8362, + "step": 6794 + }, + { + "epoch": 0.935177539223782, + "grad_norm": 1.618138428940883, + "learning_rate": 2.1957555811603083e-07, + "loss": 0.9037, + "step": 6795 + }, + { + "epoch": 0.9353151665290393, + "grad_norm": 1.760559718276881, + "learning_rate": 2.186475766869356e-07, + "loss": 0.9204, + "step": 6796 + }, + { + "epoch": 0.9354527938342967, + "grad_norm": 1.7973875944856716, + "learning_rate": 2.1772153867867574e-07, + "loss": 0.8035, + "step": 6797 + }, + { + "epoch": 0.935590421139554, + "grad_norm": 1.6015590836416564, + "learning_rate": 2.1679744427524074e-07, + "loss": 0.8744, + "step": 6798 + }, + { + "epoch": 0.9357280484448115, + "grad_norm": 1.5994055198518262, + "learning_rate": 
2.1587529366023597e-07, + "loss": 0.8914, + "step": 6799 + }, + { + "epoch": 0.9358656757500688, + "grad_norm": 1.719984044174885, + "learning_rate": 2.149550870168815e-07, + "loss": 0.8953, + "step": 6800 + }, + { + "epoch": 0.9360033030553262, + "grad_norm": 1.9498908135327602, + "learning_rate": 2.1403682452800778e-07, + "loss": 0.9363, + "step": 6801 + }, + { + "epoch": 0.9361409303605835, + "grad_norm": 1.9873876841070839, + "learning_rate": 2.131205063760633e-07, + "loss": 0.885, + "step": 6802 + }, + { + "epoch": 0.9362785576658409, + "grad_norm": 2.095703520387624, + "learning_rate": 2.1220613274310353e-07, + "loss": 0.8993, + "step": 6803 + }, + { + "epoch": 0.9364161849710982, + "grad_norm": 1.8325264793310525, + "learning_rate": 2.112937038108076e-07, + "loss": 0.9272, + "step": 6804 + }, + { + "epoch": 0.9365538122763556, + "grad_norm": 1.6836455140490378, + "learning_rate": 2.1038321976045828e-07, + "loss": 0.8428, + "step": 6805 + }, + { + "epoch": 0.9366914395816129, + "grad_norm": 1.5674543537096617, + "learning_rate": 2.0947468077295864e-07, + "loss": 0.8383, + "step": 6806 + }, + { + "epoch": 0.9368290668868704, + "grad_norm": 1.6639053428604236, + "learning_rate": 2.0856808702882203e-07, + "loss": 0.8581, + "step": 6807 + }, + { + "epoch": 0.9369666941921277, + "grad_norm": 1.8406505240881303, + "learning_rate": 2.076634387081766e-07, + "loss": 0.8853, + "step": 6808 + }, + { + "epoch": 0.9371043214973851, + "grad_norm": 1.6226107877185898, + "learning_rate": 2.0676073599076308e-07, + "loss": 0.9263, + "step": 6809 + }, + { + "epoch": 0.9372419488026424, + "grad_norm": 1.884552336688101, + "learning_rate": 2.058599790559368e-07, + "loss": 0.8819, + "step": 6810 + }, + { + "epoch": 0.9373795761078998, + "grad_norm": 1.6703037581392963, + "learning_rate": 2.0496116808266465e-07, + "loss": 0.821, + "step": 6811 + }, + { + "epoch": 0.9375172034131571, + "grad_norm": 1.6695697513911147, + "learning_rate": 2.040643032495304e-07, + "loss": 0.9441, + "step": 6812 + }, + { + "epoch": 0.9376548307184145, + "grad_norm": 1.965974359909565, + "learning_rate": 2.0316938473472601e-07, + "loss": 0.7742, + "step": 6813 + }, + { + "epoch": 0.937792458023672, + "grad_norm": 1.8621729865719097, + "learning_rate": 2.0227641271606147e-07, + "loss": 0.9026, + "step": 6814 + }, + { + "epoch": 0.9379300853289293, + "grad_norm": 2.252289326331772, + "learning_rate": 2.0138538737095813e-07, + "loss": 0.9264, + "step": 6815 + }, + { + "epoch": 0.9380677126341866, + "grad_norm": 1.7380675462760236, + "learning_rate": 2.004963088764489e-07, + "loss": 0.8842, + "step": 6816 + }, + { + "epoch": 0.938205339939444, + "grad_norm": 1.8169649370835967, + "learning_rate": 1.996091774091824e-07, + "loss": 0.8819, + "step": 6817 + }, + { + "epoch": 0.9383429672447013, + "grad_norm": 1.906652865819821, + "learning_rate": 1.98723993145421e-07, + "loss": 0.9148, + "step": 6818 + }, + { + "epoch": 0.9384805945499587, + "grad_norm": 1.6423592066571813, + "learning_rate": 1.9784075626103628e-07, + "loss": 0.8795, + "step": 6819 + }, + { + "epoch": 0.938618221855216, + "grad_norm": 1.7795390507722146, + "learning_rate": 1.9695946693151668e-07, + "loss": 0.8855, + "step": 6820 + }, + { + "epoch": 0.9387558491604734, + "grad_norm": 1.725285942699275, + "learning_rate": 1.9608012533196108e-07, + "loss": 0.9265, + "step": 6821 + }, + { + "epoch": 0.9388934764657308, + "grad_norm": 1.694721896140251, + "learning_rate": 1.9520273163708414e-07, + "loss": 0.9468, + "step": 6822 + }, + { + "epoch": 0.9390311037709882, + 
"grad_norm": 1.8458326033733117, + "learning_rate": 1.9432728602121198e-07, + "loss": 0.9037, + "step": 6823 + }, + { + "epoch": 0.9391687310762455, + "grad_norm": 1.802677502222908, + "learning_rate": 1.9345378865828212e-07, + "loss": 0.9096, + "step": 6824 + }, + { + "epoch": 0.9393063583815029, + "grad_norm": 1.6893783687251545, + "learning_rate": 1.9258223972184798e-07, + "loss": 0.9373, + "step": 6825 + }, + { + "epoch": 0.9394439856867602, + "grad_norm": 1.6661542399146276, + "learning_rate": 1.9171263938507323e-07, + "loss": 0.8329, + "step": 6826 + }, + { + "epoch": 0.9395816129920176, + "grad_norm": 1.87368604806992, + "learning_rate": 1.9084498782073746e-07, + "loss": 0.8011, + "step": 6827 + }, + { + "epoch": 0.9397192402972749, + "grad_norm": 2.4287459631488155, + "learning_rate": 1.8997928520122833e-07, + "loss": 0.8895, + "step": 6828 + }, + { + "epoch": 0.9398568676025324, + "grad_norm": 2.11521151978369, + "learning_rate": 1.8911553169855268e-07, + "loss": 0.928, + "step": 6829 + }, + { + "epoch": 0.9399944949077897, + "grad_norm": 1.8997163815764124, + "learning_rate": 1.8825372748432325e-07, + "loss": 0.8996, + "step": 6830 + }, + { + "epoch": 0.9401321222130471, + "grad_norm": 1.5883085310461664, + "learning_rate": 1.873938727297697e-07, + "loss": 0.9394, + "step": 6831 + }, + { + "epoch": 0.9402697495183044, + "grad_norm": 1.6952534693097505, + "learning_rate": 1.8653596760573434e-07, + "loss": 0.9016, + "step": 6832 + }, + { + "epoch": 0.9404073768235618, + "grad_norm": 1.5906014988442498, + "learning_rate": 1.8568001228267075e-07, + "loss": 0.9547, + "step": 6833 + }, + { + "epoch": 0.9405450041288191, + "grad_norm": 1.7242234500542897, + "learning_rate": 1.8482600693064512e-07, + "loss": 0.941, + "step": 6834 + }, + { + "epoch": 0.9406826314340765, + "grad_norm": 1.930076913120841, + "learning_rate": 1.8397395171933618e-07, + "loss": 0.9152, + "step": 6835 + }, + { + "epoch": 0.9408202587393338, + "grad_norm": 1.780982631877666, + "learning_rate": 1.831238468180363e-07, + "loss": 0.9401, + "step": 6836 + }, + { + "epoch": 0.9409578860445913, + "grad_norm": 1.7441465485561063, + "learning_rate": 1.8227569239565034e-07, + "loss": 0.8982, + "step": 6837 + }, + { + "epoch": 0.9410955133498486, + "grad_norm": 1.9075067610843672, + "learning_rate": 1.8142948862069465e-07, + "loss": 0.9356, + "step": 6838 + }, + { + "epoch": 0.941233140655106, + "grad_norm": 1.888594453459353, + "learning_rate": 1.8058523566129694e-07, + "loss": 0.7884, + "step": 6839 + }, + { + "epoch": 0.9413707679603633, + "grad_norm": 1.9188049500760371, + "learning_rate": 1.7974293368520078e-07, + "loss": 0.8901, + "step": 6840 + }, + { + "epoch": 0.9415083952656207, + "grad_norm": 2.0468617212023146, + "learning_rate": 1.789025828597579e-07, + "loss": 0.8479, + "step": 6841 + }, + { + "epoch": 0.941646022570878, + "grad_norm": 1.7100144286067072, + "learning_rate": 1.7806418335193698e-07, + "loss": 0.9088, + "step": 6842 + }, + { + "epoch": 0.9417836498761354, + "grad_norm": 1.7944488655465505, + "learning_rate": 1.7722773532831362e-07, + "loss": 0.8843, + "step": 6843 + }, + { + "epoch": 0.9419212771813927, + "grad_norm": 1.6462022125060438, + "learning_rate": 1.7639323895508153e-07, + "loss": 0.7841, + "step": 6844 + }, + { + "epoch": 0.9420589044866502, + "grad_norm": 1.6377588720319618, + "learning_rate": 1.7556069439804148e-07, + "loss": 0.9321, + "step": 6845 + }, + { + "epoch": 0.9421965317919075, + "grad_norm": 1.651221842504661, + "learning_rate": 1.7473010182260997e-07, + "loss": 0.9496, + 
"step": 6846 + }, + { + "epoch": 0.9423341590971649, + "grad_norm": 1.6424145879414012, + "learning_rate": 1.739014613938128e-07, + "loss": 0.9082, + "step": 6847 + }, + { + "epoch": 0.9424717864024222, + "grad_norm": 1.8134316529454835, + "learning_rate": 1.7307477327629273e-07, + "loss": 0.8738, + "step": 6848 + }, + { + "epoch": 0.9426094137076796, + "grad_norm": 1.6032671795878075, + "learning_rate": 1.7225003763429726e-07, + "loss": 0.8873, + "step": 6849 + }, + { + "epoch": 0.9427470410129369, + "grad_norm": 1.8897536107345827, + "learning_rate": 1.7142725463169307e-07, + "loss": 0.8788, + "step": 6850 + }, + { + "epoch": 0.9428846683181943, + "grad_norm": 1.6484570428276837, + "learning_rate": 1.7060642443195498e-07, + "loss": 0.871, + "step": 6851 + }, + { + "epoch": 0.9430222956234517, + "grad_norm": 2.713572682890576, + "learning_rate": 1.6978754719817025e-07, + "loss": 0.8783, + "step": 6852 + }, + { + "epoch": 0.9431599229287091, + "grad_norm": 1.8591786908805175, + "learning_rate": 1.6897062309303992e-07, + "loss": 0.8542, + "step": 6853 + }, + { + "epoch": 0.9432975502339664, + "grad_norm": 1.7822794633437862, + "learning_rate": 1.68155652278873e-07, + "loss": 0.8841, + "step": 6854 + }, + { + "epoch": 0.9434351775392238, + "grad_norm": 1.7357737931361794, + "learning_rate": 1.673426349175955e-07, + "loss": 0.9237, + "step": 6855 + }, + { + "epoch": 0.9435728048444811, + "grad_norm": 1.8219701820555858, + "learning_rate": 1.665315711707416e-07, + "loss": 0.9221, + "step": 6856 + }, + { + "epoch": 0.9437104321497385, + "grad_norm": 1.4587533711022453, + "learning_rate": 1.6572246119946012e-07, + "loss": 0.913, + "step": 6857 + }, + { + "epoch": 0.9438480594549958, + "grad_norm": 1.9511745537913943, + "learning_rate": 1.6491530516450805e-07, + "loss": 0.8174, + "step": 6858 + }, + { + "epoch": 0.9439856867602532, + "grad_norm": 1.93786226890944, + "learning_rate": 1.641101032262582e-07, + "loss": 0.884, + "step": 6859 + }, + { + "epoch": 0.9441233140655106, + "grad_norm": 1.7090203449006098, + "learning_rate": 1.6330685554469038e-07, + "loss": 0.8366, + "step": 6860 + }, + { + "epoch": 0.944260941370768, + "grad_norm": 1.9480260550299469, + "learning_rate": 1.625055622794014e-07, + "loss": 0.9221, + "step": 6861 + }, + { + "epoch": 0.9443985686760253, + "grad_norm": 1.7514886934050018, + "learning_rate": 1.617062235895972e-07, + "loss": 0.9019, + "step": 6862 + }, + { + "epoch": 0.9445361959812827, + "grad_norm": 1.9031940664476543, + "learning_rate": 1.6090883963409522e-07, + "loss": 0.9087, + "step": 6863 + }, + { + "epoch": 0.94467382328654, + "grad_norm": 1.599433700658534, + "learning_rate": 1.6011341057132312e-07, + "loss": 0.8505, + "step": 6864 + }, + { + "epoch": 0.9448114505917974, + "grad_norm": 1.6609440991956699, + "learning_rate": 1.5931993655932342e-07, + "loss": 0.9288, + "step": 6865 + }, + { + "epoch": 0.9449490778970547, + "grad_norm": 1.985269901145207, + "learning_rate": 1.585284177557489e-07, + "loss": 0.8587, + "step": 6866 + }, + { + "epoch": 0.9450867052023122, + "grad_norm": 1.74941215028353, + "learning_rate": 1.577388543178615e-07, + "loss": 0.9051, + "step": 6867 + }, + { + "epoch": 0.9452243325075695, + "grad_norm": 1.6527080430527137, + "learning_rate": 1.5695124640253801e-07, + "loss": 0.9933, + "step": 6868 + }, + { + "epoch": 0.9453619598128269, + "grad_norm": 1.7057586260970554, + "learning_rate": 1.5616559416626654e-07, + "loss": 0.863, + "step": 6869 + }, + { + "epoch": 0.9454995871180842, + "grad_norm": 2.021323598905727, + "learning_rate": 
1.5538189776514223e-07, + "loss": 0.9071, + "step": 6870 + }, + { + "epoch": 0.9456372144233416, + "grad_norm": 1.8610819063118123, + "learning_rate": 1.5460015735487833e-07, + "loss": 0.9208, + "step": 6871 + }, + { + "epoch": 0.9457748417285989, + "grad_norm": 1.7209252707659366, + "learning_rate": 1.5382037309079279e-07, + "loss": 0.8907, + "step": 6872 + }, + { + "epoch": 0.9459124690338563, + "grad_norm": 1.7049072112952166, + "learning_rate": 1.5304254512782057e-07, + "loss": 0.8763, + "step": 6873 + }, + { + "epoch": 0.9460500963391136, + "grad_norm": 1.8114946610084695, + "learning_rate": 1.5226667362050473e-07, + "loss": 0.9097, + "step": 6874 + }, + { + "epoch": 0.9461877236443711, + "grad_norm": 1.8290428067948414, + "learning_rate": 1.5149275872299973e-07, + "loss": 0.8354, + "step": 6875 + }, + { + "epoch": 0.9463253509496284, + "grad_norm": 1.8362736926075185, + "learning_rate": 1.5072080058907145e-07, + "loss": 0.9482, + "step": 6876 + }, + { + "epoch": 0.9464629782548858, + "grad_norm": 1.7598012381585473, + "learning_rate": 1.4995079937209943e-07, + "loss": 0.8973, + "step": 6877 + }, + { + "epoch": 0.9466006055601431, + "grad_norm": 1.9346401065048422, + "learning_rate": 1.4918275522507018e-07, + "loss": 0.9604, + "step": 6878 + }, + { + "epoch": 0.9467382328654005, + "grad_norm": 1.7481026930345087, + "learning_rate": 1.4841666830058387e-07, + "loss": 0.8927, + "step": 6879 + }, + { + "epoch": 0.9468758601706578, + "grad_norm": 1.703252507385437, + "learning_rate": 1.4765253875085429e-07, + "loss": 0.8628, + "step": 6880 + }, + { + "epoch": 0.9470134874759152, + "grad_norm": 1.6467941019533234, + "learning_rate": 1.4689036672769886e-07, + "loss": 0.8061, + "step": 6881 + }, + { + "epoch": 0.9471511147811725, + "grad_norm": 1.742991072709727, + "learning_rate": 1.4613015238255536e-07, + "loss": 0.9639, + "step": 6882 + }, + { + "epoch": 0.94728874208643, + "grad_norm": 1.604378255148692, + "learning_rate": 1.4537189586646516e-07, + "loss": 0.9046, + "step": 6883 + }, + { + "epoch": 0.9474263693916873, + "grad_norm": 1.8039167060632009, + "learning_rate": 1.4461559733008334e-07, + "loss": 0.8897, + "step": 6884 + }, + { + "epoch": 0.9475639966969447, + "grad_norm": 1.5982083816414294, + "learning_rate": 1.4386125692367746e-07, + "loss": 0.9095, + "step": 6885 + }, + { + "epoch": 0.947701624002202, + "grad_norm": 1.7404152677773783, + "learning_rate": 1.431088747971232e-07, + "loss": 0.9093, + "step": 6886 + }, + { + "epoch": 0.9478392513074594, + "grad_norm": 1.6271280808373305, + "learning_rate": 1.4235845109990987e-07, + "loss": 0.8674, + "step": 6887 + }, + { + "epoch": 0.9479768786127167, + "grad_norm": 1.5477127685447425, + "learning_rate": 1.4160998598113595e-07, + "loss": 0.9662, + "step": 6888 + }, + { + "epoch": 0.9481145059179741, + "grad_norm": 1.611945638519736, + "learning_rate": 1.408634795895114e-07, + "loss": 0.9062, + "step": 6889 + }, + { + "epoch": 0.9482521332232315, + "grad_norm": 1.6921620485760902, + "learning_rate": 1.4011893207335536e-07, + "loss": 0.8839, + "step": 6890 + }, + { + "epoch": 0.9483897605284889, + "grad_norm": 1.6752551530284163, + "learning_rate": 1.393763435806028e-07, + "loss": 0.9184, + "step": 6891 + }, + { + "epoch": 0.9485273878337462, + "grad_norm": 1.6260459481826721, + "learning_rate": 1.386357142587913e-07, + "loss": 0.8516, + "step": 6892 + }, + { + "epoch": 0.9486650151390036, + "grad_norm": 1.635264907323393, + "learning_rate": 1.3789704425507644e-07, + "loss": 0.9061, + "step": 6893 + }, + { + "epoch": 
0.9488026424442609, + "grad_norm": 1.7348693046302868, + "learning_rate": 1.3716033371622195e-07, + "loss": 0.8708, + "step": 6894 + }, + { + "epoch": 0.9489402697495183, + "grad_norm": 1.838946065465115, + "learning_rate": 1.3642558278860186e-07, + "loss": 0.9347, + "step": 6895 + }, + { + "epoch": 0.9490778970547756, + "grad_norm": 1.9035343401570166, + "learning_rate": 1.356927916182005e-07, + "loss": 0.9378, + "step": 6896 + }, + { + "epoch": 0.949215524360033, + "grad_norm": 1.7097838268781598, + "learning_rate": 1.349619603506136e-07, + "loss": 0.8581, + "step": 6897 + }, + { + "epoch": 0.9493531516652904, + "grad_norm": 1.9035869275975081, + "learning_rate": 1.3423308913104726e-07, + "loss": 0.8088, + "step": 6898 + }, + { + "epoch": 0.9494907789705478, + "grad_norm": 1.768536041605859, + "learning_rate": 1.3350617810431898e-07, + "loss": 0.8914, + "step": 6899 + }, + { + "epoch": 0.9496284062758051, + "grad_norm": 1.5628857172004424, + "learning_rate": 1.3278122741485544e-07, + "loss": 0.9246, + "step": 6900 + }, + { + "epoch": 0.9497660335810625, + "grad_norm": 1.7206375989574216, + "learning_rate": 1.3205823720669474e-07, + "loss": 0.8669, + "step": 6901 + }, + { + "epoch": 0.9499036608863198, + "grad_norm": 1.9548474546284382, + "learning_rate": 1.3133720762348535e-07, + "loss": 0.8804, + "step": 6902 + }, + { + "epoch": 0.9500412881915772, + "grad_norm": 1.799202762265101, + "learning_rate": 1.3061813880848483e-07, + "loss": 0.8902, + "step": 6903 + }, + { + "epoch": 0.9501789154968345, + "grad_norm": 1.957096562528851, + "learning_rate": 1.299010309045623e-07, + "loss": 0.8651, + "step": 6904 + }, + { + "epoch": 0.950316542802092, + "grad_norm": 1.722148664865578, + "learning_rate": 1.291858840541993e-07, + "loss": 0.8284, + "step": 6905 + }, + { + "epoch": 0.9504541701073493, + "grad_norm": 1.6793670397722704, + "learning_rate": 1.284726983994833e-07, + "loss": 0.8894, + "step": 6906 + }, + { + "epoch": 0.9505917974126067, + "grad_norm": 1.5468472951583538, + "learning_rate": 1.2776147408211648e-07, + "loss": 0.9088, + "step": 6907 + }, + { + "epoch": 0.950729424717864, + "grad_norm": 1.767197458812727, + "learning_rate": 1.2705221124340806e-07, + "loss": 0.888, + "step": 6908 + }, + { + "epoch": 0.9508670520231214, + "grad_norm": 1.8940533878713974, + "learning_rate": 1.2634491002427972e-07, + "loss": 0.8935, + "step": 6909 + }, + { + "epoch": 0.9510046793283787, + "grad_norm": 2.0538176723179764, + "learning_rate": 1.2563957056526244e-07, + "loss": 0.8878, + "step": 6910 + }, + { + "epoch": 0.9511423066336361, + "grad_norm": 1.5601413797606019, + "learning_rate": 1.2493619300649628e-07, + "loss": 0.9163, + "step": 6911 + }, + { + "epoch": 0.9512799339388934, + "grad_norm": 1.7847668279717401, + "learning_rate": 1.2423477748773394e-07, + "loss": 0.9424, + "step": 6912 + }, + { + "epoch": 0.9514175612441509, + "grad_norm": 1.6621132179279845, + "learning_rate": 1.2353532414833724e-07, + "loss": 0.9495, + "step": 6913 + }, + { + "epoch": 0.9515551885494082, + "grad_norm": 1.7643640954635127, + "learning_rate": 1.2283783312727727e-07, + "loss": 0.8714, + "step": 6914 + }, + { + "epoch": 0.9516928158546656, + "grad_norm": 1.6621149191507572, + "learning_rate": 1.2214230456313646e-07, + "loss": 0.8449, + "step": 6915 + }, + { + "epoch": 0.9518304431599229, + "grad_norm": 2.3067853768737465, + "learning_rate": 1.2144873859410655e-07, + "loss": 0.91, + "step": 6916 + }, + { + "epoch": 0.9519680704651803, + "grad_norm": 1.8187932142648133, + "learning_rate": 1.207571353579895e-07, + 
"loss": 0.9583, + "step": 6917 + }, + { + "epoch": 0.9521056977704376, + "grad_norm": 1.7698829258459328, + "learning_rate": 1.2006749499219762e-07, + "loss": 0.9204, + "step": 6918 + }, + { + "epoch": 0.952243325075695, + "grad_norm": 1.8413161658003894, + "learning_rate": 1.193798176337535e-07, + "loss": 0.907, + "step": 6919 + }, + { + "epoch": 0.9523809523809523, + "grad_norm": 1.8226692976326315, + "learning_rate": 1.1869410341928678e-07, + "loss": 0.8351, + "step": 6920 + }, + { + "epoch": 0.9525185796862098, + "grad_norm": 1.677338772043695, + "learning_rate": 1.18010352485044e-07, + "loss": 0.9658, + "step": 6921 + }, + { + "epoch": 0.9526562069914671, + "grad_norm": 1.8381883995940593, + "learning_rate": 1.1732856496687207e-07, + "loss": 0.9598, + "step": 6922 + }, + { + "epoch": 0.9527938342967245, + "grad_norm": 2.175476063743664, + "learning_rate": 1.1664874100023704e-07, + "loss": 0.9099, + "step": 6923 + }, + { + "epoch": 0.9529314616019818, + "grad_norm": 1.7078863106447115, + "learning_rate": 1.1597088072020757e-07, + "loss": 0.9198, + "step": 6924 + }, + { + "epoch": 0.9530690889072392, + "grad_norm": 1.801628346378403, + "learning_rate": 1.1529498426146701e-07, + "loss": 0.9548, + "step": 6925 + }, + { + "epoch": 0.9532067162124965, + "grad_norm": 1.8010725461775434, + "learning_rate": 1.1462105175830685e-07, + "loss": 0.8791, + "step": 6926 + }, + { + "epoch": 0.9533443435177539, + "grad_norm": 1.7402721188306143, + "learning_rate": 1.1394908334462662e-07, + "loss": 0.8926, + "step": 6927 + }, + { + "epoch": 0.9534819708230113, + "grad_norm": 1.8363826000219154, + "learning_rate": 1.1327907915393843e-07, + "loss": 0.8772, + "step": 6928 + }, + { + "epoch": 0.9536195981282687, + "grad_norm": 1.8741681065445746, + "learning_rate": 1.1261103931936357e-07, + "loss": 0.872, + "step": 6929 + }, + { + "epoch": 0.953757225433526, + "grad_norm": 1.7090532301161931, + "learning_rate": 1.1194496397363142e-07, + "loss": 0.868, + "step": 6930 + }, + { + "epoch": 0.9538948527387834, + "grad_norm": 1.9148420407390783, + "learning_rate": 1.1128085324908167e-07, + "loss": 0.917, + "step": 6931 + }, + { + "epoch": 0.9540324800440407, + "grad_norm": 1.5286601520457046, + "learning_rate": 1.1061870727766432e-07, + "loss": 0.9382, + "step": 6932 + }, + { + "epoch": 0.9541701073492981, + "grad_norm": 1.6787249411567307, + "learning_rate": 1.0995852619093972e-07, + "loss": 0.9363, + "step": 6933 + }, + { + "epoch": 0.9543077346545554, + "grad_norm": 1.737895652636811, + "learning_rate": 1.0930031012007513e-07, + "loss": 0.898, + "step": 6934 + }, + { + "epoch": 0.9544453619598128, + "grad_norm": 1.6957826630059414, + "learning_rate": 1.0864405919585042e-07, + "loss": 0.8123, + "step": 6935 + }, + { + "epoch": 0.9545829892650702, + "grad_norm": 1.7927120064184359, + "learning_rate": 1.0798977354865348e-07, + "loss": 0.9282, + "step": 6936 + }, + { + "epoch": 0.9547206165703276, + "grad_norm": 1.7648377881144928, + "learning_rate": 1.0733745330848144e-07, + "loss": 0.9037, + "step": 6937 + }, + { + "epoch": 0.9548582438755849, + "grad_norm": 1.62374679435468, + "learning_rate": 1.0668709860494175e-07, + "loss": 0.8616, + "step": 6938 + }, + { + "epoch": 0.9549958711808423, + "grad_norm": 1.7772697920250673, + "learning_rate": 1.0603870956725105e-07, + "loss": 0.871, + "step": 6939 + }, + { + "epoch": 0.9551334984860996, + "grad_norm": 1.6234404492675212, + "learning_rate": 1.0539228632423515e-07, + "loss": 0.866, + "step": 6940 + }, + { + "epoch": 0.955271125791357, + "grad_norm": 
2.952890658428871, + "learning_rate": 1.0474782900432911e-07, + "loss": 0.9761, + "step": 6941 + }, + { + "epoch": 0.9554087530966143, + "grad_norm": 1.7791744873658384, + "learning_rate": 1.041053377355783e-07, + "loss": 0.9494, + "step": 6942 + }, + { + "epoch": 0.9555463804018718, + "grad_norm": 1.779703124382904, + "learning_rate": 1.0346481264563723e-07, + "loss": 0.9354, + "step": 6943 + }, + { + "epoch": 0.9556840077071291, + "grad_norm": 1.8196610948124943, + "learning_rate": 1.0282625386176858e-07, + "loss": 0.9467, + "step": 6944 + }, + { + "epoch": 0.9558216350123865, + "grad_norm": 1.6573816023128902, + "learning_rate": 1.0218966151084531e-07, + "loss": 0.8288, + "step": 6945 + }, + { + "epoch": 0.9559592623176438, + "grad_norm": 1.8216858888705667, + "learning_rate": 1.0155503571935176e-07, + "loss": 0.9409, + "step": 6946 + }, + { + "epoch": 0.9560968896229012, + "grad_norm": 1.7303447376220138, + "learning_rate": 1.0092237661337712e-07, + "loss": 0.9139, + "step": 6947 + }, + { + "epoch": 0.9562345169281585, + "grad_norm": 1.7605000367523342, + "learning_rate": 1.002916843186208e-07, + "loss": 0.8481, + "step": 6948 + }, + { + "epoch": 0.9563721442334159, + "grad_norm": 1.7386245902049096, + "learning_rate": 9.966295896039702e-08, + "loss": 0.9163, + "step": 6949 + }, + { + "epoch": 0.9565097715386732, + "grad_norm": 1.7256887638584524, + "learning_rate": 9.90362006636203e-08, + "loss": 0.9646, + "step": 6950 + }, + { + "epoch": 0.9566473988439307, + "grad_norm": 1.9453443968185502, + "learning_rate": 9.841140955282325e-08, + "loss": 0.8962, + "step": 6951 + }, + { + "epoch": 0.956785026149188, + "grad_norm": 1.7132711639508447, + "learning_rate": 9.778858575213989e-08, + "loss": 0.9286, + "step": 6952 + }, + { + "epoch": 0.9569226534544454, + "grad_norm": 1.6954225061418475, + "learning_rate": 9.716772938531793e-08, + "loss": 0.926, + "step": 6953 + }, + { + "epoch": 0.9570602807597027, + "grad_norm": 1.8555176837014946, + "learning_rate": 9.654884057571312e-08, + "loss": 0.8675, + "step": 6954 + }, + { + "epoch": 0.9571979080649601, + "grad_norm": 1.6174415267741171, + "learning_rate": 9.593191944629044e-08, + "loss": 0.9499, + "step": 6955 + }, + { + "epoch": 0.9573355353702174, + "grad_norm": 1.6253734363025731, + "learning_rate": 9.531696611962404e-08, + "loss": 0.914, + "step": 6956 + }, + { + "epoch": 0.9574731626754748, + "grad_norm": 1.7932315131048848, + "learning_rate": 9.47039807178951e-08, + "loss": 0.8301, + "step": 6957 + }, + { + "epoch": 0.9576107899807321, + "grad_norm": 1.8963226926498309, + "learning_rate": 9.409296336289731e-08, + "loss": 0.9464, + "step": 6958 + }, + { + "epoch": 0.9577484172859896, + "grad_norm": 1.8406455576947864, + "learning_rate": 9.34839141760302e-08, + "loss": 0.8934, + "step": 6959 + }, + { + "epoch": 0.9578860445912469, + "grad_norm": 1.7782969157227637, + "learning_rate": 9.287683327830365e-08, + "loss": 0.8568, + "step": 6960 + }, + { + "epoch": 0.9580236718965043, + "grad_norm": 1.8705540028502288, + "learning_rate": 9.227172079033675e-08, + "loss": 0.9157, + "step": 6961 + }, + { + "epoch": 0.9581612992017616, + "grad_norm": 2.284878545650477, + "learning_rate": 9.166857683235775e-08, + "loss": 0.8699, + "step": 6962 + }, + { + "epoch": 0.958298926507019, + "grad_norm": 1.648912133150703, + "learning_rate": 9.10674015242019e-08, + "loss": 0.8736, + "step": 6963 + }, + { + "epoch": 0.9584365538122763, + "grad_norm": 1.804291699317642, + "learning_rate": 9.046819498531367e-08, + "loss": 0.866, + "step": 6964 + }, + { + 
"epoch": 0.9585741811175337, + "grad_norm": 1.7908943194680291, + "learning_rate": 8.987095733475004e-08, + "loss": 0.7528, + "step": 6965 + }, + { + "epoch": 0.9587118084227911, + "grad_norm": 1.7643524380595477, + "learning_rate": 8.927568869117165e-08, + "loss": 0.8525, + "step": 6966 + }, + { + "epoch": 0.9588494357280485, + "grad_norm": 1.5671598855667779, + "learning_rate": 8.868238917285054e-08, + "loss": 0.9166, + "step": 6967 + }, + { + "epoch": 0.9589870630333058, + "grad_norm": 2.2313684624255723, + "learning_rate": 8.8091058897668e-08, + "loss": 0.8821, + "step": 6968 + }, + { + "epoch": 0.9591246903385632, + "grad_norm": 1.8898481906538556, + "learning_rate": 8.750169798311226e-08, + "loss": 0.909, + "step": 6969 + }, + { + "epoch": 0.9592623176438205, + "grad_norm": 1.8090622495547521, + "learning_rate": 8.691430654628186e-08, + "loss": 0.8543, + "step": 6970 + }, + { + "epoch": 0.9593999449490779, + "grad_norm": 1.7445148777120731, + "learning_rate": 8.632888470388345e-08, + "loss": 0.8649, + "step": 6971 + }, + { + "epoch": 0.9595375722543352, + "grad_norm": 2.031451180271669, + "learning_rate": 8.574543257223289e-08, + "loss": 0.8948, + "step": 6972 + }, + { + "epoch": 0.9596751995595926, + "grad_norm": 1.7517091162860967, + "learning_rate": 8.516395026725299e-08, + "loss": 0.8267, + "step": 6973 + }, + { + "epoch": 0.95981282686485, + "grad_norm": 1.8254721777825589, + "learning_rate": 8.45844379044769e-08, + "loss": 0.8579, + "step": 6974 + }, + { + "epoch": 0.9599504541701074, + "grad_norm": 1.8825797925639514, + "learning_rate": 8.400689559904585e-08, + "loss": 0.8035, + "step": 6975 + }, + { + "epoch": 0.9600880814753647, + "grad_norm": 2.073357047620673, + "learning_rate": 8.343132346571025e-08, + "loss": 0.8853, + "step": 6976 + }, + { + "epoch": 0.9602257087806221, + "grad_norm": 1.8073888233335662, + "learning_rate": 8.285772161882644e-08, + "loss": 0.8852, + "step": 6977 + }, + { + "epoch": 0.9603633360858794, + "grad_norm": 1.7165820075084854, + "learning_rate": 8.228609017236211e-08, + "loss": 0.8977, + "step": 6978 + }, + { + "epoch": 0.9605009633911368, + "grad_norm": 1.9913822552626756, + "learning_rate": 8.17164292398942e-08, + "loss": 0.9043, + "step": 6979 + }, + { + "epoch": 0.9606385906963941, + "grad_norm": 2.1315839148885685, + "learning_rate": 8.11487389346044e-08, + "loss": 0.872, + "step": 6980 + }, + { + "epoch": 0.9607762180016516, + "grad_norm": 2.0705572994385975, + "learning_rate": 8.058301936928691e-08, + "loss": 0.8246, + "step": 6981 + }, + { + "epoch": 0.9609138453069089, + "grad_norm": 1.8625344413489078, + "learning_rate": 8.001927065634075e-08, + "loss": 0.9051, + "step": 6982 + }, + { + "epoch": 0.9610514726121663, + "grad_norm": 1.5114800185758899, + "learning_rate": 7.945749290777627e-08, + "loss": 0.8641, + "step": 6983 + }, + { + "epoch": 0.9611890999174236, + "grad_norm": 2.023599806503092, + "learning_rate": 7.889768623520977e-08, + "loss": 0.9091, + "step": 6984 + }, + { + "epoch": 0.961326727222681, + "grad_norm": 1.618965657476201, + "learning_rate": 7.833985074986782e-08, + "loss": 0.8833, + "step": 6985 + }, + { + "epoch": 0.9614643545279383, + "grad_norm": 1.6148255733306132, + "learning_rate": 7.778398656258624e-08, + "loss": 0.9217, + "step": 6986 + }, + { + "epoch": 0.9616019818331957, + "grad_norm": 1.9215345961213792, + "learning_rate": 7.723009378380442e-08, + "loss": 0.8955, + "step": 6987 + }, + { + "epoch": 0.961739609138453, + "grad_norm": 1.7833526329607186, + "learning_rate": 7.667817252357656e-08, + "loss": 
0.9959, + "step": 6988 + }, + { + "epoch": 0.9618772364437105, + "grad_norm": 1.8493407618206463, + "learning_rate": 7.612822289155941e-08, + "loss": 0.9831, + "step": 6989 + }, + { + "epoch": 0.9620148637489678, + "grad_norm": 1.629222252590816, + "learning_rate": 7.55802449970211e-08, + "loss": 0.8592, + "step": 6990 + }, + { + "epoch": 0.9621524910542252, + "grad_norm": 1.995894256611542, + "learning_rate": 7.503423894883899e-08, + "loss": 0.9448, + "step": 6991 + }, + { + "epoch": 0.9622901183594825, + "grad_norm": 2.0485114804737243, + "learning_rate": 7.449020485549408e-08, + "loss": 0.8779, + "step": 6992 + }, + { + "epoch": 0.9624277456647399, + "grad_norm": 1.6756352730392277, + "learning_rate": 7.394814282508101e-08, + "loss": 0.8326, + "step": 6993 + }, + { + "epoch": 0.9625653729699972, + "grad_norm": 1.5076506885932646, + "learning_rate": 7.340805296529918e-08, + "loss": 0.9111, + "step": 6994 + }, + { + "epoch": 0.9627030002752546, + "grad_norm": 1.7752676933032443, + "learning_rate": 7.286993538345611e-08, + "loss": 0.9274, + "step": 6995 + }, + { + "epoch": 0.9628406275805119, + "grad_norm": 1.6653512229654122, + "learning_rate": 7.233379018646958e-08, + "loss": 0.8586, + "step": 6996 + }, + { + "epoch": 0.9629782548857694, + "grad_norm": 1.6178332517512912, + "learning_rate": 7.17996174808644e-08, + "loss": 0.8028, + "step": 6997 + }, + { + "epoch": 0.9631158821910267, + "grad_norm": 1.644701178168763, + "learning_rate": 7.126741737277343e-08, + "loss": 0.8919, + "step": 6998 + }, + { + "epoch": 0.9632535094962841, + "grad_norm": 1.6406366587468344, + "learning_rate": 7.073718996793544e-08, + "loss": 0.908, + "step": 6999 + }, + { + "epoch": 0.9633911368015414, + "grad_norm": 1.6196882894827915, + "learning_rate": 7.020893537170281e-08, + "loss": 0.9098, + "step": 7000 + }, + { + "epoch": 0.9635287641067988, + "grad_norm": 1.764158510497846, + "learning_rate": 6.968265368902938e-08, + "loss": 0.8192, + "step": 7001 + }, + { + "epoch": 0.9636663914120561, + "grad_norm": 1.783624002239502, + "learning_rate": 6.91583450244826e-08, + "loss": 0.8553, + "step": 7002 + }, + { + "epoch": 0.9638040187173135, + "grad_norm": 1.7430201499350384, + "learning_rate": 6.863600948223248e-08, + "loss": 0.8184, + "step": 7003 + }, + { + "epoch": 0.9639416460225709, + "grad_norm": 1.6454871583395467, + "learning_rate": 6.811564716606378e-08, + "loss": 0.9221, + "step": 7004 + }, + { + "epoch": 0.9640792733278283, + "grad_norm": 1.6234864712663126, + "learning_rate": 6.759725817936158e-08, + "loss": 0.9052, + "step": 7005 + }, + { + "epoch": 0.9642169006330856, + "grad_norm": 1.8693921712753214, + "learning_rate": 6.708084262512571e-08, + "loss": 0.9744, + "step": 7006 + }, + { + "epoch": 0.964354527938343, + "grad_norm": 1.770293438978404, + "learning_rate": 6.656640060595854e-08, + "loss": 0.9326, + "step": 7007 + }, + { + "epoch": 0.9644921552436003, + "grad_norm": 1.6874844657827082, + "learning_rate": 6.605393222407496e-08, + "loss": 0.9154, + "step": 7008 + }, + { + "epoch": 0.9646297825488577, + "grad_norm": 1.567401810734057, + "learning_rate": 6.554343758129244e-08, + "loss": 0.9151, + "step": 7009 + }, + { + "epoch": 0.964767409854115, + "grad_norm": 1.799476901678856, + "learning_rate": 6.503491677904206e-08, + "loss": 1.002, + "step": 7010 + }, + { + "epoch": 0.9649050371593724, + "grad_norm": 1.9390278608736544, + "learning_rate": 6.452836991835964e-08, + "loss": 0.9198, + "step": 7011 + }, + { + "epoch": 0.9650426644646298, + "grad_norm": 1.7870153168920437, + "learning_rate": 
6.402379709988693e-08, + "loss": 0.9473, + "step": 7012 + }, + { + "epoch": 0.9651802917698872, + "grad_norm": 1.9248514625385063, + "learning_rate": 6.352119842387816e-08, + "loss": 0.9746, + "step": 7013 + }, + { + "epoch": 0.9653179190751445, + "grad_norm": 2.1967987735886494, + "learning_rate": 6.302057399019012e-08, + "loss": 0.9037, + "step": 7014 + }, + { + "epoch": 0.9654555463804019, + "grad_norm": 2.6477680284718708, + "learning_rate": 6.252192389829326e-08, + "loss": 0.9229, + "step": 7015 + }, + { + "epoch": 0.9655931736856592, + "grad_norm": 2.159799236518315, + "learning_rate": 6.202524824725941e-08, + "loss": 0.8118, + "step": 7016 + }, + { + "epoch": 0.9657308009909166, + "grad_norm": 1.9141925848759096, + "learning_rate": 6.1530547135773e-08, + "loss": 0.798, + "step": 7017 + }, + { + "epoch": 0.9658684282961739, + "grad_norm": 2.0899251558882392, + "learning_rate": 6.103782066212427e-08, + "loss": 0.899, + "step": 7018 + }, + { + "epoch": 0.9660060556014314, + "grad_norm": 1.6179207512221074, + "learning_rate": 6.054706892421159e-08, + "loss": 0.9552, + "step": 7019 + }, + { + "epoch": 0.9661436829066887, + "grad_norm": 1.959348144592709, + "learning_rate": 6.005829201954028e-08, + "loss": 0.9204, + "step": 7020 + }, + { + "epoch": 0.9662813102119461, + "grad_norm": 1.7572820185439972, + "learning_rate": 5.957149004522267e-08, + "loss": 0.8896, + "step": 7021 + }, + { + "epoch": 0.9664189375172034, + "grad_norm": 1.5475420228345198, + "learning_rate": 5.908666309798028e-08, + "loss": 0.8669, + "step": 7022 + }, + { + "epoch": 0.9665565648224608, + "grad_norm": 1.7126660355640158, + "learning_rate": 5.860381127414161e-08, + "loss": 0.8697, + "step": 7023 + }, + { + "epoch": 0.9666941921277181, + "grad_norm": 1.627670364016188, + "learning_rate": 5.812293466964325e-08, + "loss": 0.8701, + "step": 7024 + }, + { + "epoch": 0.9668318194329755, + "grad_norm": 1.657946058586508, + "learning_rate": 5.764403338002766e-08, + "loss": 0.8259, + "step": 7025 + }, + { + "epoch": 0.9669694467382328, + "grad_norm": 1.6565077518102114, + "learning_rate": 5.7167107500446517e-08, + "loss": 0.8696, + "step": 7026 + }, + { + "epoch": 0.9671070740434903, + "grad_norm": 1.9254474491903764, + "learning_rate": 5.6692157125659564e-08, + "loss": 0.8874, + "step": 7027 + }, + { + "epoch": 0.9672447013487476, + "grad_norm": 1.910671640416932, + "learning_rate": 5.6219182350031323e-08, + "loss": 0.9159, + "step": 7028 + }, + { + "epoch": 0.967382328654005, + "grad_norm": 1.7187620750712238, + "learning_rate": 5.574818326753551e-08, + "loss": 0.8941, + "step": 7029 + }, + { + "epoch": 0.9675199559592623, + "grad_norm": 1.9654077114318835, + "learning_rate": 5.527915997175504e-08, + "loss": 0.9295, + "step": 7030 + }, + { + "epoch": 0.9676575832645197, + "grad_norm": 1.950631943657173, + "learning_rate": 5.4812112555876486e-08, + "loss": 0.9025, + "step": 7031 + }, + { + "epoch": 0.967795210569777, + "grad_norm": 1.8138899446435934, + "learning_rate": 5.434704111269562e-08, + "loss": 0.8916, + "step": 7032 + }, + { + "epoch": 0.9679328378750344, + "grad_norm": 1.497912686583352, + "learning_rate": 5.38839457346163e-08, + "loss": 0.8431, + "step": 7033 + }, + { + "epoch": 0.9680704651802917, + "grad_norm": 1.8072146789943926, + "learning_rate": 5.3422826513651604e-08, + "loss": 0.8971, + "step": 7034 + }, + { + "epoch": 0.9682080924855492, + "grad_norm": 1.857030784314519, + "learning_rate": 5.29636835414149e-08, + "loss": 0.8701, + "step": 7035 + }, + { + "epoch": 0.9683457197908065, + "grad_norm": 
1.7185076013725726, + "learning_rate": 5.250651690913655e-08, + "loss": 0.9123, + "step": 7036 + }, + { + "epoch": 0.9684833470960639, + "grad_norm": 1.8004951915791596, + "learning_rate": 5.205132670764501e-08, + "loss": 0.9014, + "step": 7037 + }, + { + "epoch": 0.9686209744013212, + "grad_norm": 1.7832089529766129, + "learning_rate": 5.159811302738349e-08, + "loss": 0.8038, + "step": 7038 + }, + { + "epoch": 0.9687586017065786, + "grad_norm": 1.607315490247505, + "learning_rate": 5.1146875958397733e-08, + "loss": 0.8913, + "step": 7039 + }, + { + "epoch": 0.9688962290118359, + "grad_norm": 1.6073677551355436, + "learning_rate": 5.06976155903427e-08, + "loss": 0.9062, + "step": 7040 + }, + { + "epoch": 0.9690338563170933, + "grad_norm": 1.7291127339334689, + "learning_rate": 5.0250332012480306e-08, + "loss": 0.9029, + "step": 7041 + }, + { + "epoch": 0.9691714836223507, + "grad_norm": 2.2705236371704043, + "learning_rate": 4.980502531367948e-08, + "loss": 0.8555, + "step": 7042 + }, + { + "epoch": 0.9693091109276081, + "grad_norm": 1.6377248069593295, + "learning_rate": 4.936169558241832e-08, + "loss": 0.9233, + "step": 7043 + }, + { + "epoch": 0.9694467382328654, + "grad_norm": 1.6983329803823561, + "learning_rate": 4.8920342906776385e-08, + "loss": 0.9009, + "step": 7044 + }, + { + "epoch": 0.9695843655381228, + "grad_norm": 1.930205798657553, + "learning_rate": 4.8480967374449074e-08, + "loss": 0.8876, + "step": 7045 + }, + { + "epoch": 0.9697219928433801, + "grad_norm": 1.6604617054859634, + "learning_rate": 4.8043569072731e-08, + "loss": 0.8088, + "step": 7046 + }, + { + "epoch": 0.9698596201486375, + "grad_norm": 1.6435404385680668, + "learning_rate": 4.7608148088528204e-08, + "loss": 0.9228, + "step": 7047 + }, + { + "epoch": 0.9699972474538948, + "grad_norm": 1.878680655909647, + "learning_rate": 4.717470450835371e-08, + "loss": 0.9732, + "step": 7048 + }, + { + "epoch": 0.9701348747591522, + "grad_norm": 1.7178339084567391, + "learning_rate": 4.674323841832529e-08, + "loss": 0.8686, + "step": 7049 + }, + { + "epoch": 0.9702725020644096, + "grad_norm": 1.9310697815721647, + "learning_rate": 4.6313749904171033e-08, + "loss": 0.8774, + "step": 7050 + }, + { + "epoch": 0.970410129369667, + "grad_norm": 1.7564715812605578, + "learning_rate": 4.5886239051222694e-08, + "loss": 0.8046, + "step": 7051 + }, + { + "epoch": 0.9705477566749243, + "grad_norm": 1.7360743681770483, + "learning_rate": 4.5460705944422315e-08, + "loss": 0.863, + "step": 7052 + }, + { + "epoch": 0.9706853839801817, + "grad_norm": 1.732559229584392, + "learning_rate": 4.503715066831671e-08, + "loss": 0.8172, + "step": 7053 + }, + { + "epoch": 0.970823011285439, + "grad_norm": 1.9029916280153099, + "learning_rate": 4.461557330705968e-08, + "loss": 0.9006, + "step": 7054 + }, + { + "epoch": 0.9709606385906964, + "grad_norm": 1.7351383194836687, + "learning_rate": 4.419597394441533e-08, + "loss": 0.889, + "step": 7055 + }, + { + "epoch": 0.9710982658959537, + "grad_norm": 1.9810484868613516, + "learning_rate": 4.37783526637503e-08, + "loss": 0.8931, + "step": 7056 + }, + { + "epoch": 0.9712358932012112, + "grad_norm": 1.6340388016239356, + "learning_rate": 4.336270954804045e-08, + "loss": 0.8946, + "step": 7057 + }, + { + "epoch": 0.9713735205064685, + "grad_norm": 2.814674002625467, + "learning_rate": 4.2949044679868603e-08, + "loss": 0.8515, + "step": 7058 + }, + { + "epoch": 0.9715111478117259, + "grad_norm": 1.670225065981339, + "learning_rate": 4.253735814142568e-08, + "loss": 0.9069, + "step": 7059 + }, + { + 
"epoch": 0.9716487751169832, + "grad_norm": 1.707548224833195, + "learning_rate": 4.212765001450514e-08, + "loss": 0.9165, + "step": 7060 + }, + { + "epoch": 0.9717864024222406, + "grad_norm": 1.9274105244583501, + "learning_rate": 4.171992038051298e-08, + "loss": 0.7479, + "step": 7061 + }, + { + "epoch": 0.9719240297274979, + "grad_norm": 1.6990494038235444, + "learning_rate": 4.131416932045884e-08, + "loss": 0.81, + "step": 7062 + }, + { + "epoch": 0.9720616570327553, + "grad_norm": 1.779826395743542, + "learning_rate": 4.091039691495935e-08, + "loss": 0.9026, + "step": 7063 + }, + { + "epoch": 0.9721992843380126, + "grad_norm": 1.5873756799538337, + "learning_rate": 4.050860324423922e-08, + "loss": 0.8841, + "step": 7064 + }, + { + "epoch": 0.9723369116432701, + "grad_norm": 1.7713466072247923, + "learning_rate": 4.0108788388129036e-08, + "loss": 0.9259, + "step": 7065 + }, + { + "epoch": 0.9724745389485274, + "grad_norm": 1.562599255512465, + "learning_rate": 3.9710952426066376e-08, + "loss": 0.834, + "step": 7066 + }, + { + "epoch": 0.9726121662537848, + "grad_norm": 1.7775577202185444, + "learning_rate": 3.9315095437095774e-08, + "loss": 0.8822, + "step": 7067 + }, + { + "epoch": 0.9727497935590421, + "grad_norm": 1.8095496986083404, + "learning_rate": 3.892121749986988e-08, + "loss": 0.8947, + "step": 7068 + }, + { + "epoch": 0.9728874208642995, + "grad_norm": 1.9787130731560385, + "learning_rate": 3.852931869264387e-08, + "loss": 0.9374, + "step": 7069 + }, + { + "epoch": 0.9730250481695568, + "grad_norm": 1.7208339392159893, + "learning_rate": 3.8139399093286564e-08, + "loss": 0.9056, + "step": 7070 + }, + { + "epoch": 0.9731626754748142, + "grad_norm": 2.0487655381513807, + "learning_rate": 3.7751458779266e-08, + "loss": 0.9751, + "step": 7071 + }, + { + "epoch": 0.9733003027800715, + "grad_norm": 1.757429371628704, + "learning_rate": 3.736549782766275e-08, + "loss": 0.9429, + "step": 7072 + }, + { + "epoch": 0.973437930085329, + "grad_norm": 1.6676552724924816, + "learning_rate": 3.698151631516211e-08, + "loss": 0.9467, + "step": 7073 + }, + { + "epoch": 0.9735755573905863, + "grad_norm": 2.3134057320639854, + "learning_rate": 3.6599514318054195e-08, + "loss": 0.8625, + "step": 7074 + }, + { + "epoch": 0.9737131846958437, + "grad_norm": 1.873842007538208, + "learning_rate": 3.6219491912240504e-08, + "loss": 0.8584, + "step": 7075 + }, + { + "epoch": 0.973850812001101, + "grad_norm": 1.7468240122028678, + "learning_rate": 3.5841449173222854e-08, + "loss": 0.8672, + "step": 7076 + }, + { + "epoch": 0.9739884393063584, + "grad_norm": 1.855062850565374, + "learning_rate": 3.546538617611561e-08, + "loss": 0.8361, + "step": 7077 + }, + { + "epoch": 0.9741260666116157, + "grad_norm": 1.9802498697079032, + "learning_rate": 3.5091302995635676e-08, + "loss": 0.9438, + "step": 7078 + }, + { + "epoch": 0.9742636939168731, + "grad_norm": 1.6309589036400995, + "learning_rate": 3.471919970610915e-08, + "loss": 0.8868, + "step": 7079 + }, + { + "epoch": 0.9744013212221305, + "grad_norm": 1.6353552114617786, + "learning_rate": 3.434907638146911e-08, + "loss": 0.9223, + "step": 7080 + }, + { + "epoch": 0.9745389485273879, + "grad_norm": 1.981548368938733, + "learning_rate": 3.398093309525119e-08, + "loss": 0.8385, + "step": 7081 + }, + { + "epoch": 0.9746765758326452, + "grad_norm": 1.9207662262798872, + "learning_rate": 3.361476992060242e-08, + "loss": 0.8541, + "step": 7082 + }, + { + "epoch": 0.9748142031379026, + "grad_norm": 2.002015667002389, + "learning_rate": 3.325058693027461e-08, + 
"loss": 0.9624, + "step": 7083 + }, + { + "epoch": 0.9749518304431599, + "grad_norm": 1.7864833524666346, + "learning_rate": 3.2888384196624325e-08, + "loss": 0.9298, + "step": 7084 + }, + { + "epoch": 0.9750894577484173, + "grad_norm": 1.93100028770135, + "learning_rate": 3.252816179161733e-08, + "loss": 0.8986, + "step": 7085 + }, + { + "epoch": 0.9752270850536746, + "grad_norm": 2.0246020688298962, + "learning_rate": 3.216991978682526e-08, + "loss": 0.9944, + "step": 7086 + }, + { + "epoch": 0.975364712358932, + "grad_norm": 1.737797327729844, + "learning_rate": 3.181365825342564e-08, + "loss": 0.9388, + "step": 7087 + }, + { + "epoch": 0.9755023396641894, + "grad_norm": 1.651356637157781, + "learning_rate": 3.145937726220183e-08, + "loss": 0.8931, + "step": 7088 + }, + { + "epoch": 0.9756399669694468, + "grad_norm": 2.0748649500398098, + "learning_rate": 3.110707688354531e-08, + "loss": 0.8894, + "step": 7089 + }, + { + "epoch": 0.9757775942747041, + "grad_norm": 1.6741498669703077, + "learning_rate": 3.0756757187453415e-08, + "loss": 0.8863, + "step": 7090 + }, + { + "epoch": 0.9759152215799615, + "grad_norm": 1.7821527478448937, + "learning_rate": 3.040841824353047e-08, + "loss": 0.9133, + "step": 7091 + }, + { + "epoch": 0.9760528488852188, + "grad_norm": 2.080579411181176, + "learning_rate": 3.006206012098556e-08, + "loss": 0.8973, + "step": 7092 + }, + { + "epoch": 0.9761904761904762, + "grad_norm": 1.6463141622308273, + "learning_rate": 2.9717682888635857e-08, + "loss": 0.8887, + "step": 7093 + }, + { + "epoch": 0.9763281034957335, + "grad_norm": 1.884228711354478, + "learning_rate": 2.93752866149033e-08, + "loss": 0.9382, + "step": 7094 + }, + { + "epoch": 0.976465730800991, + "grad_norm": 1.832278164879594, + "learning_rate": 2.9034871367819018e-08, + "loss": 0.8391, + "step": 7095 + }, + { + "epoch": 0.9766033581062483, + "grad_norm": 1.845464799891437, + "learning_rate": 2.8696437215018915e-08, + "loss": 0.8733, + "step": 7096 + }, + { + "epoch": 0.9767409854115057, + "grad_norm": 1.968923791616318, + "learning_rate": 2.8359984223743643e-08, + "loss": 0.8854, + "step": 7097 + }, + { + "epoch": 0.976878612716763, + "grad_norm": 1.877231737483089, + "learning_rate": 2.8025512460843064e-08, + "loss": 0.8104, + "step": 7098 + }, + { + "epoch": 0.9770162400220204, + "grad_norm": 2.022533927179004, + "learning_rate": 2.7693021992770685e-08, + "loss": 0.9126, + "step": 7099 + }, + { + "epoch": 0.9771538673272777, + "grad_norm": 1.9363774928982873, + "learning_rate": 2.736251288558922e-08, + "loss": 0.9292, + "step": 7100 + }, + { + "epoch": 0.9772914946325351, + "grad_norm": 1.8129414646002855, + "learning_rate": 2.703398520496614e-08, + "loss": 0.8727, + "step": 7101 + }, + { + "epoch": 0.9774291219377924, + "grad_norm": 1.7222336543085603, + "learning_rate": 2.6707439016175897e-08, + "loss": 0.8765, + "step": 7102 + }, + { + "epoch": 0.9775667492430499, + "grad_norm": 2.0180193711113543, + "learning_rate": 2.638287438409659e-08, + "loss": 0.9492, + "step": 7103 + }, + { + "epoch": 0.9777043765483072, + "grad_norm": 1.7221709607586126, + "learning_rate": 2.606029137321775e-08, + "loss": 0.9094, + "step": 7104 + }, + { + "epoch": 0.9778420038535646, + "grad_norm": 1.767951368711889, + "learning_rate": 2.5739690047629216e-08, + "loss": 0.8179, + "step": 7105 + }, + { + "epoch": 0.9779796311588219, + "grad_norm": 1.6224241246194628, + "learning_rate": 2.542107047103226e-08, + "loss": 0.8257, + "step": 7106 + }, + { + "epoch": 0.9781172584640793, + "grad_norm": 1.6655550828321986, + 
"learning_rate": 2.5104432706731796e-08, + "loss": 0.9045, + "step": 7107 + }, + { + "epoch": 0.9782548857693366, + "grad_norm": 1.6619768555904941, + "learning_rate": 2.478977681763861e-08, + "loss": 0.8761, + "step": 7108 + }, + { + "epoch": 0.978392513074594, + "grad_norm": 1.600156895390456, + "learning_rate": 2.4477102866271584e-08, + "loss": 0.8956, + "step": 7109 + }, + { + "epoch": 0.9785301403798513, + "grad_norm": 1.9348779299398633, + "learning_rate": 2.4166410914753246e-08, + "loss": 0.8323, + "step": 7110 + }, + { + "epoch": 0.9786677676851088, + "grad_norm": 1.6900605750041795, + "learning_rate": 2.3857701024815327e-08, + "loss": 0.9341, + "step": 7111 + }, + { + "epoch": 0.9788053949903661, + "grad_norm": 1.9220245514876881, + "learning_rate": 2.3550973257794317e-08, + "loss": 0.8694, + "step": 7112 + }, + { + "epoch": 0.9789430222956235, + "grad_norm": 1.819841654352463, + "learning_rate": 2.3246227674631472e-08, + "loss": 0.8453, + "step": 7113 + }, + { + "epoch": 0.9790806496008808, + "grad_norm": 1.7882061470919681, + "learning_rate": 2.294346433587613e-08, + "loss": 0.8872, + "step": 7114 + }, + { + "epoch": 0.9792182769061382, + "grad_norm": 1.6107359036725846, + "learning_rate": 2.264268330168351e-08, + "loss": 0.8894, + "step": 7115 + }, + { + "epoch": 0.9793559042113955, + "grad_norm": 2.0282243889102793, + "learning_rate": 2.2343884631814694e-08, + "loss": 0.8053, + "step": 7116 + }, + { + "epoch": 0.9794935315166529, + "grad_norm": 1.5875665012265534, + "learning_rate": 2.2047068385636638e-08, + "loss": 0.9109, + "step": 7117 + }, + { + "epoch": 0.9796311588219103, + "grad_norm": 1.695998402398623, + "learning_rate": 2.175223462212328e-08, + "loss": 0.9461, + "step": 7118 + }, + { + "epoch": 0.9797687861271677, + "grad_norm": 1.8006566939648083, + "learning_rate": 2.1459383399853318e-08, + "loss": 0.9442, + "step": 7119 + }, + { + "epoch": 0.979906413432425, + "grad_norm": 1.7083784965100186, + "learning_rate": 2.116851477701243e-08, + "loss": 0.8962, + "step": 7120 + }, + { + "epoch": 0.9800440407376824, + "grad_norm": 1.6297415725852664, + "learning_rate": 2.087962881139216e-08, + "loss": 0.8785, + "step": 7121 + }, + { + "epoch": 0.9801816680429397, + "grad_norm": 2.0103854470664295, + "learning_rate": 2.059272556039105e-08, + "loss": 0.8635, + "step": 7122 + }, + { + "epoch": 0.9803192953481971, + "grad_norm": 1.9178600599825455, + "learning_rate": 2.0307805081011266e-08, + "loss": 0.9392, + "step": 7123 + }, + { + "epoch": 0.9804569226534544, + "grad_norm": 1.8588559567919347, + "learning_rate": 2.0024867429864204e-08, + "loss": 0.9162, + "step": 7124 + }, + { + "epoch": 0.9805945499587118, + "grad_norm": 1.7798291085729843, + "learning_rate": 1.9743912663164888e-08, + "loss": 0.8889, + "step": 7125 + }, + { + "epoch": 0.9807321772639692, + "grad_norm": 1.6385197147716186, + "learning_rate": 1.9464940836735336e-08, + "loss": 0.854, + "step": 7126 + }, + { + "epoch": 0.9808698045692266, + "grad_norm": 1.9030545231460738, + "learning_rate": 1.918795200600343e-08, + "loss": 0.9209, + "step": 7127 + }, + { + "epoch": 0.9810074318744839, + "grad_norm": 1.7393012268092334, + "learning_rate": 1.8912946226002927e-08, + "loss": 0.929, + "step": 7128 + }, + { + "epoch": 0.9811450591797413, + "grad_norm": 1.7645949434336508, + "learning_rate": 1.8639923551373452e-08, + "loss": 0.802, + "step": 7129 + }, + { + "epoch": 0.9812826864849986, + "grad_norm": 1.6210850927119227, + "learning_rate": 1.8368884036361612e-08, + "loss": 0.916, + "step": 7130 + }, + { + "epoch": 
0.981420313790256, + "grad_norm": 1.6870470902510133, + "learning_rate": 1.8099827734818775e-08, + "loss": 0.9073, + "step": 7131 + }, + { + "epoch": 0.9815579410955133, + "grad_norm": 1.723240574149357, + "learning_rate": 1.7832754700203292e-08, + "loss": 0.8968, + "step": 7132 + }, + { + "epoch": 0.9816955684007708, + "grad_norm": 1.7463134039853085, + "learning_rate": 1.756766498557827e-08, + "loss": 0.9474, + "step": 7133 + }, + { + "epoch": 0.9818331957060281, + "grad_norm": 1.5540838318403067, + "learning_rate": 1.730455864361269e-08, + "loss": 0.8886, + "step": 7134 + }, + { + "epoch": 0.9819708230112855, + "grad_norm": 2.046228938546936, + "learning_rate": 1.7043435726583624e-08, + "loss": 0.9232, + "step": 7135 + }, + { + "epoch": 0.9821084503165428, + "grad_norm": 1.5883942130631237, + "learning_rate": 1.6784296286372904e-08, + "loss": 0.949, + "step": 7136 + }, + { + "epoch": 0.9822460776218002, + "grad_norm": 1.7508680171451814, + "learning_rate": 1.652714037446601e-08, + "loss": 0.8526, + "step": 7137 + }, + { + "epoch": 0.9823837049270575, + "grad_norm": 1.6884182567313746, + "learning_rate": 1.627196804195874e-08, + "loss": 0.9074, + "step": 7138 + }, + { + "epoch": 0.9825213322323149, + "grad_norm": 1.8071955475154486, + "learning_rate": 1.601877933954832e-08, + "loss": 0.9622, + "step": 7139 + }, + { + "epoch": 0.9826589595375722, + "grad_norm": 1.812399178990143, + "learning_rate": 1.5767574317540058e-08, + "loss": 0.9188, + "step": 7140 + }, + { + "epoch": 0.9827965868428297, + "grad_norm": 1.7091239674428909, + "learning_rate": 1.5518353025846255e-08, + "loss": 0.9243, + "step": 7141 + }, + { + "epoch": 0.982934214148087, + "grad_norm": 1.8651624804345133, + "learning_rate": 1.527111551398286e-08, + "loss": 0.8821, + "step": 7142 + }, + { + "epoch": 0.9830718414533444, + "grad_norm": 1.6731486557083872, + "learning_rate": 1.5025861831072795e-08, + "loss": 0.9148, + "step": 7143 + }, + { + "epoch": 0.9832094687586017, + "grad_norm": 1.8797866104253402, + "learning_rate": 1.4782592025844866e-08, + "loss": 0.8485, + "step": 7144 + }, + { + "epoch": 0.9833470960638591, + "grad_norm": 1.6941139921541544, + "learning_rate": 1.4541306146633737e-08, + "loss": 0.8823, + "step": 7145 + }, + { + "epoch": 0.9834847233691164, + "grad_norm": 1.7277284702614353, + "learning_rate": 1.4302004241377731e-08, + "loss": 0.8649, + "step": 7146 + }, + { + "epoch": 0.9836223506743738, + "grad_norm": 1.8878176929955857, + "learning_rate": 1.406468635762437e-08, + "loss": 0.7993, + "step": 7147 + }, + { + "epoch": 0.9837599779796311, + "grad_norm": 1.7430252944041433, + "learning_rate": 1.3829352542525932e-08, + "loss": 0.848, + "step": 7148 + }, + { + "epoch": 0.9838976052848886, + "grad_norm": 1.6787876598970166, + "learning_rate": 1.3596002842839461e-08, + "loss": 0.9098, + "step": 7149 + }, + { + "epoch": 0.9840352325901459, + "grad_norm": 1.802135022013292, + "learning_rate": 1.3364637304927875e-08, + "loss": 0.938, + "step": 7150 + }, + { + "epoch": 0.9841728598954033, + "grad_norm": 1.7066248869045448, + "learning_rate": 1.3135255974759952e-08, + "loss": 0.9316, + "step": 7151 + }, + { + "epoch": 0.9843104872006606, + "grad_norm": 1.688818959217137, + "learning_rate": 1.2907858897912574e-08, + "loss": 0.845, + "step": 7152 + }, + { + "epoch": 0.984448114505918, + "grad_norm": 1.6738683910176764, + "learning_rate": 1.268244611956404e-08, + "loss": 0.9179, + "step": 7153 + }, + { + "epoch": 0.9845857418111753, + "grad_norm": 1.841478877326216, + "learning_rate": 1.2459017684502971e-08, + 
"loss": 0.8658, + "step": 7154 + }, + { + "epoch": 0.9847233691164327, + "grad_norm": 1.6639405162849878, + "learning_rate": 1.2237573637120526e-08, + "loss": 0.8854, + "step": 7155 + }, + { + "epoch": 0.9848609964216901, + "grad_norm": 1.7656578461971746, + "learning_rate": 1.201811402141484e-08, + "loss": 0.9147, + "step": 7156 + }, + { + "epoch": 0.9849986237269475, + "grad_norm": 1.671497392496297, + "learning_rate": 1.1800638880988814e-08, + "loss": 0.9047, + "step": 7157 + }, + { + "epoch": 0.9851362510322048, + "grad_norm": 1.7568125475738678, + "learning_rate": 1.1585148259052326e-08, + "loss": 0.8614, + "step": 7158 + }, + { + "epoch": 0.9852738783374622, + "grad_norm": 1.8364603475377745, + "learning_rate": 1.1371642198420018e-08, + "loss": 0.9752, + "step": 7159 + }, + { + "epoch": 0.9854115056427195, + "grad_norm": 1.5406872133853837, + "learning_rate": 1.1160120741513514e-08, + "loss": 0.8888, + "step": 7160 + }, + { + "epoch": 0.9855491329479769, + "grad_norm": 1.9034274589405134, + "learning_rate": 1.0950583930358082e-08, + "loss": 0.9222, + "step": 7161 + }, + { + "epoch": 0.9856867602532342, + "grad_norm": 2.0903873546796805, + "learning_rate": 1.074303180658709e-08, + "loss": 0.8383, + "step": 7162 + }, + { + "epoch": 0.9858243875584916, + "grad_norm": 1.8757467134829442, + "learning_rate": 1.0537464411437548e-08, + "loss": 0.8267, + "step": 7163 + }, + { + "epoch": 0.985962014863749, + "grad_norm": 1.6811245777192023, + "learning_rate": 1.0333881785753452e-08, + "loss": 0.8863, + "step": 7164 + }, + { + "epoch": 0.9860996421690064, + "grad_norm": 1.7987369059245906, + "learning_rate": 1.0132283969982448e-08, + "loss": 0.9106, + "step": 7165 + }, + { + "epoch": 0.9862372694742637, + "grad_norm": 1.8398364755139685, + "learning_rate": 9.932671004180273e-09, + "loss": 0.8475, + "step": 7166 + }, + { + "epoch": 0.9863748967795211, + "grad_norm": 1.6356891369890763, + "learning_rate": 9.73504292800742e-09, + "loss": 0.8665, + "step": 7167 + }, + { + "epoch": 0.9865125240847784, + "grad_norm": 1.8368386893276074, + "learning_rate": 9.539399780730262e-09, + "loss": 0.8093, + "step": 7168 + }, + { + "epoch": 0.9866501513900358, + "grad_norm": 1.7693342834125982, + "learning_rate": 9.345741601219926e-09, + "loss": 0.8748, + "step": 7169 + }, + { + "epoch": 0.9867877786952931, + "grad_norm": 1.7533188654178768, + "learning_rate": 9.154068427952301e-09, + "loss": 0.8254, + "step": 7170 + }, + { + "epoch": 0.9869254060005506, + "grad_norm": 1.627192571262827, + "learning_rate": 8.964380299012476e-09, + "loss": 0.8866, + "step": 7171 + }, + { + "epoch": 0.9870630333058079, + "grad_norm": 1.7538591528372371, + "learning_rate": 8.776677252088084e-09, + "loss": 0.848, + "step": 7172 + }, + { + "epoch": 0.9872006606110653, + "grad_norm": 1.6214170946739905, + "learning_rate": 8.590959324472626e-09, + "loss": 0.8923, + "step": 7173 + }, + { + "epoch": 0.9873382879163226, + "grad_norm": 2.1878000251190852, + "learning_rate": 8.407226553066583e-09, + "loss": 0.8922, + "step": 7174 + }, + { + "epoch": 0.98747591522158, + "grad_norm": 1.914370840493567, + "learning_rate": 8.22547897437409e-09, + "loss": 0.9315, + "step": 7175 + }, + { + "epoch": 0.9876135425268373, + "grad_norm": 1.697390313345376, + "learning_rate": 8.045716624506262e-09, + "loss": 0.9452, + "step": 7176 + }, + { + "epoch": 0.9877511698320947, + "grad_norm": 1.717139225600654, + "learning_rate": 7.86793953918008e-09, + "loss": 0.893, + "step": 7177 + }, + { + "epoch": 0.987888797137352, + "grad_norm": 1.6663640520102418, + 
"learning_rate": 7.692147753717294e-09, + "loss": 0.8751, + "step": 7178 + }, + { + "epoch": 0.9880264244426095, + "grad_norm": 2.354178759618853, + "learning_rate": 7.518341303043298e-09, + "loss": 0.8489, + "step": 7179 + }, + { + "epoch": 0.9881640517478668, + "grad_norm": 1.871022062912179, + "learning_rate": 7.346520221693798e-09, + "loss": 0.8801, + "step": 7180 + }, + { + "epoch": 0.9883016790531242, + "grad_norm": 1.6714705860707322, + "learning_rate": 7.176684543807044e-09, + "loss": 0.8728, + "step": 7181 + }, + { + "epoch": 0.9884393063583815, + "grad_norm": 1.6633120166078814, + "learning_rate": 7.008834303124934e-09, + "loss": 0.9347, + "step": 7182 + }, + { + "epoch": 0.9885769336636389, + "grad_norm": 2.3732573888755057, + "learning_rate": 6.842969532998567e-09, + "loss": 0.7813, + "step": 7183 + }, + { + "epoch": 0.9887145609688962, + "grad_norm": 1.930358242580649, + "learning_rate": 6.679090266382693e-09, + "loss": 0.8848, + "step": 7184 + }, + { + "epoch": 0.9888521882741536, + "grad_norm": 1.6646359585491275, + "learning_rate": 6.517196535839043e-09, + "loss": 0.8924, + "step": 7185 + }, + { + "epoch": 0.9889898155794109, + "grad_norm": 1.9205596433668473, + "learning_rate": 6.357288373531889e-09, + "loss": 0.8764, + "step": 7186 + }, + { + "epoch": 0.9891274428846684, + "grad_norm": 1.7452758425073653, + "learning_rate": 6.199365811232483e-09, + "loss": 0.885, + "step": 7187 + }, + { + "epoch": 0.9892650701899257, + "grad_norm": 1.6915464698187117, + "learning_rate": 6.043428880320168e-09, + "loss": 0.9049, + "step": 7188 + }, + { + "epoch": 0.9894026974951831, + "grad_norm": 1.8021505276916292, + "learning_rate": 5.889477611775718e-09, + "loss": 0.8834, + "step": 7189 + }, + { + "epoch": 0.9895403248004404, + "grad_norm": 1.6676111625334, + "learning_rate": 5.7375120361868876e-09, + "loss": 0.9045, + "step": 7190 + }, + { + "epoch": 0.9896779521056978, + "grad_norm": 2.0276735160386994, + "learning_rate": 5.587532183748412e-09, + "loss": 0.8431, + "step": 7191 + }, + { + "epoch": 0.9898155794109551, + "grad_norm": 1.7840647250241717, + "learning_rate": 5.439538084259788e-09, + "loss": 0.877, + "step": 7192 + }, + { + "epoch": 0.9899532067162125, + "grad_norm": 1.6591409980739253, + "learning_rate": 5.293529767123051e-09, + "loss": 0.9372, + "step": 7193 + }, + { + "epoch": 0.9900908340214699, + "grad_norm": 1.7355841443800708, + "learning_rate": 5.14950726134944e-09, + "loss": 0.9776, + "step": 7194 + }, + { + "epoch": 0.9902284613267273, + "grad_norm": 1.6009324818743722, + "learning_rate": 5.007470595553843e-09, + "loss": 0.9175, + "step": 7195 + }, + { + "epoch": 0.9903660886319846, + "grad_norm": 1.5085758678570025, + "learning_rate": 4.867419797958128e-09, + "loss": 0.886, + "step": 7196 + }, + { + "epoch": 0.990503715937242, + "grad_norm": 1.8269963812930436, + "learning_rate": 4.729354896387817e-09, + "loss": 0.9232, + "step": 7197 + }, + { + "epoch": 0.9906413432424993, + "grad_norm": 2.0486648439703714, + "learning_rate": 4.593275918273188e-09, + "loss": 0.926, + "step": 7198 + }, + { + "epoch": 0.9907789705477567, + "grad_norm": 1.7664048782002812, + "learning_rate": 4.4591828906537235e-09, + "loss": 0.983, + "step": 7199 + }, + { + "epoch": 0.990916597853014, + "grad_norm": 1.7876032568880444, + "learning_rate": 4.327075840169226e-09, + "loss": 0.9074, + "step": 7200 + }, + { + "epoch": 0.9910542251582714, + "grad_norm": 1.4863395063889362, + "learning_rate": 4.196954793069807e-09, + "loss": 0.8924, + "step": 7201 + }, + { + "epoch": 0.9911918524635288, + 
"grad_norm": 2.4224974981447023, + "learning_rate": 4.068819775208122e-09, + "loss": 0.8228, + "step": 7202 + }, + { + "epoch": 0.9913294797687862, + "grad_norm": 1.8670057646834823, + "learning_rate": 3.942670812041583e-09, + "loss": 0.8476, + "step": 7203 + }, + { + "epoch": 0.9914671070740435, + "grad_norm": 1.5374751589277778, + "learning_rate": 3.818507928635695e-09, + "loss": 0.9175, + "step": 7204 + }, + { + "epoch": 0.9916047343793009, + "grad_norm": 1.9344898542373, + "learning_rate": 3.6963311496596153e-09, + "loss": 0.8507, + "step": 7205 + }, + { + "epoch": 0.9917423616845582, + "grad_norm": 1.866868848122515, + "learning_rate": 3.5761404993883696e-09, + "loss": 0.9843, + "step": 7206 + }, + { + "epoch": 0.9918799889898156, + "grad_norm": 1.6443382459047127, + "learning_rate": 3.4579360017006347e-09, + "loss": 0.9257, + "step": 7207 + }, + { + "epoch": 0.9920176162950729, + "grad_norm": 2.0249260568886043, + "learning_rate": 3.341717680084289e-09, + "loss": 0.9139, + "step": 7208 + }, + { + "epoch": 0.9921552436003304, + "grad_norm": 1.6176446174493126, + "learning_rate": 3.2274855576297504e-09, + "loss": 0.9083, + "step": 7209 + }, + { + "epoch": 0.9922928709055877, + "grad_norm": 1.8328209013930994, + "learning_rate": 3.1152396570321986e-09, + "loss": 0.9116, + "step": 7210 + }, + { + "epoch": 0.9924304982108451, + "grad_norm": 1.7733833238101095, + "learning_rate": 3.0049800005937934e-09, + "loss": 0.8739, + "step": 7211 + }, + { + "epoch": 0.9925681255161024, + "grad_norm": 1.773101807202807, + "learning_rate": 2.896706610222566e-09, + "loss": 0.913, + "step": 7212 + }, + { + "epoch": 0.9927057528213598, + "grad_norm": 1.7250848979914113, + "learning_rate": 2.7904195074301976e-09, + "loss": 0.9128, + "step": 7213 + }, + { + "epoch": 0.9928433801266171, + "grad_norm": 1.5710101725478094, + "learning_rate": 2.6861187133342402e-09, + "loss": 0.8911, + "step": 7214 + }, + { + "epoch": 0.9929810074318745, + "grad_norm": 1.607077490723191, + "learning_rate": 2.5838042486581172e-09, + "loss": 0.9104, + "step": 7215 + }, + { + "epoch": 0.9931186347371318, + "grad_norm": 1.7471667402672224, + "learning_rate": 2.483476133730012e-09, + "loss": 0.9068, + "step": 7216 + }, + { + "epoch": 0.9932562620423893, + "grad_norm": 1.6388262276604368, + "learning_rate": 2.3851343884850885e-09, + "loss": 0.8962, + "step": 7217 + }, + { + "epoch": 0.9933938893476466, + "grad_norm": 1.6416106440544962, + "learning_rate": 2.2887790324610526e-09, + "loss": 0.9077, + "step": 7218 + }, + { + "epoch": 0.993531516652904, + "grad_norm": 1.8303266911909823, + "learning_rate": 2.1944100848025896e-09, + "loss": 0.9481, + "step": 7219 + }, + { + "epoch": 0.9936691439581613, + "grad_norm": 1.9798627145381948, + "learning_rate": 2.102027564260256e-09, + "loss": 0.8361, + "step": 7220 + }, + { + "epoch": 0.9938067712634187, + "grad_norm": 1.8522145317892034, + "learning_rate": 2.0116314891893694e-09, + "loss": 0.8778, + "step": 7221 + }, + { + "epoch": 0.993944398568676, + "grad_norm": 2.555581321355594, + "learning_rate": 1.9232218775488976e-09, + "loss": 0.9617, + "step": 7222 + }, + { + "epoch": 0.9940820258739334, + "grad_norm": 1.8090208334745586, + "learning_rate": 1.836798746905899e-09, + "loss": 0.9101, + "step": 7223 + }, + { + "epoch": 0.9942196531791907, + "grad_norm": 1.8893466543300572, + "learning_rate": 1.752362114429973e-09, + "loss": 0.9297, + "step": 7224 + }, + { + "epoch": 0.9943572804844482, + "grad_norm": 1.9970676149799507, + "learning_rate": 1.66991199689992e-09, + "loss": 0.9157, + 
"step": 7225 + }, + { + "epoch": 0.9944949077897055, + "grad_norm": 1.8116763067699089, + "learning_rate": 1.5894484106959706e-09, + "loss": 0.8939, + "step": 7226 + }, + { + "epoch": 0.9946325350949629, + "grad_norm": 1.682717522225496, + "learning_rate": 1.5109713718042262e-09, + "loss": 0.9293, + "step": 7227 + }, + { + "epoch": 0.9947701624002202, + "grad_norm": 1.6954144217200076, + "learning_rate": 1.4344808958188794e-09, + "loss": 0.9107, + "step": 7228 + }, + { + "epoch": 0.9949077897054776, + "grad_norm": 1.6244666882231236, + "learning_rate": 1.3599769979366629e-09, + "loss": 0.7895, + "step": 7229 + }, + { + "epoch": 0.9950454170107349, + "grad_norm": 1.9278337293288486, + "learning_rate": 1.28745969296018e-09, + "loss": 0.9036, + "step": 7230 + }, + { + "epoch": 0.9951830443159922, + "grad_norm": 1.423921089428278, + "learning_rate": 1.2169289952990159e-09, + "loss": 0.888, + "step": 7231 + }, + { + "epoch": 0.9953206716212497, + "grad_norm": 1.6769835867933292, + "learning_rate": 1.148384918964185e-09, + "loss": 0.8599, + "step": 7232 + }, + { + "epoch": 0.9954582989265071, + "grad_norm": 1.975109384235802, + "learning_rate": 1.0818274775770132e-09, + "loss": 0.8535, + "step": 7233 + }, + { + "epoch": 0.9955959262317644, + "grad_norm": 2.183982952391194, + "learning_rate": 1.0172566843602571e-09, + "loss": 0.9815, + "step": 7234 + }, + { + "epoch": 0.9957335535370218, + "grad_norm": 1.6946357384558168, + "learning_rate": 9.546725521425437e-10, + "loss": 0.9489, + "step": 7235 + }, + { + "epoch": 0.9958711808422791, + "grad_norm": 1.7365714638028211, + "learning_rate": 8.940750933605912e-10, + "loss": 0.9244, + "step": 7236 + }, + { + "epoch": 0.9960088081475365, + "grad_norm": 1.7723060904959356, + "learning_rate": 8.35464320052548e-10, + "loss": 0.8988, + "step": 7237 + }, + { + "epoch": 0.9961464354527938, + "grad_norm": 2.4314326930622556, + "learning_rate": 7.788402438646536e-10, + "loss": 0.8811, + "step": 7238 + }, + { + "epoch": 0.9962840627580511, + "grad_norm": 1.7308191369354555, + "learning_rate": 7.242028760456876e-10, + "loss": 0.8604, + "step": 7239 + }, + { + "epoch": 0.9964216900633086, + "grad_norm": 1.8478710866104708, + "learning_rate": 6.715522274536312e-10, + "loss": 0.8873, + "step": 7240 + }, + { + "epoch": 0.996559317368566, + "grad_norm": 2.726233529420824, + "learning_rate": 6.208883085467854e-10, + "loss": 0.9578, + "step": 7241 + }, + { + "epoch": 0.9966969446738233, + "grad_norm": 1.702964764368626, + "learning_rate": 5.722111293937626e-10, + "loss": 0.8969, + "step": 7242 + }, + { + "epoch": 0.9968345719790807, + "grad_norm": 1.627391315594623, + "learning_rate": 5.255206996657158e-10, + "loss": 0.9395, + "step": 7243 + }, + { + "epoch": 0.996972199284338, + "grad_norm": 1.6857255285555028, + "learning_rate": 4.80817028637448e-10, + "loss": 0.8893, + "step": 7244 + }, + { + "epoch": 0.9971098265895953, + "grad_norm": 1.8055265930315514, + "learning_rate": 4.381001251940742e-10, + "loss": 0.7719, + "step": 7245 + }, + { + "epoch": 0.9972474538948527, + "grad_norm": 1.8320218215201705, + "learning_rate": 3.973699978199186e-10, + "loss": 0.847, + "step": 7246 + }, + { + "epoch": 0.9973850812001102, + "grad_norm": 1.8760038641452677, + "learning_rate": 3.586266546085071e-10, + "loss": 0.9716, + "step": 7247 + }, + { + "epoch": 0.9975227085053675, + "grad_norm": 1.7235155603671224, + "learning_rate": 3.2187010325812617e-10, + "loss": 0.8192, + "step": 7248 + }, + { + "epoch": 0.9976603358106249, + "grad_norm": 1.7155100422710021, + "learning_rate": 
2.871003510718229e-10, + "loss": 0.8443, + "step": 7249 + }, + { + "epoch": 0.9977979631158822, + "grad_norm": 2.109932552655822, + "learning_rate": 2.543174049574049e-10, + "loss": 0.9018, + "step": 7250 + }, + { + "epoch": 0.9979355904211396, + "grad_norm": 2.0033343001823662, + "learning_rate": 2.2352127142855063e-10, + "loss": 0.8536, + "step": 7251 + }, + { + "epoch": 0.9980732177263969, + "grad_norm": 1.719209198321801, + "learning_rate": 1.9471195660369925e-10, + "loss": 0.8949, + "step": 7252 + }, + { + "epoch": 0.9982108450316542, + "grad_norm": 1.9271712515601436, + "learning_rate": 1.6788946620716063e-10, + "loss": 0.9116, + "step": 7253 + }, + { + "epoch": 0.9983484723369116, + "grad_norm": 1.876080886910594, + "learning_rate": 1.4305380556800529e-10, + "loss": 0.8995, + "step": 7254 + }, + { + "epoch": 0.9984860996421691, + "grad_norm": 1.8420027205658958, + "learning_rate": 1.202049796211746e-10, + "loss": 0.9426, + "step": 7255 + }, + { + "epoch": 0.9986237269474264, + "grad_norm": 1.6243810005406427, + "learning_rate": 9.93429929063705e-11, + "loss": 0.9549, + "step": 7256 + }, + { + "epoch": 0.9987613542526838, + "grad_norm": 1.8848285743983697, + "learning_rate": 8.046784956805553e-11, + "loss": 0.9488, + "step": 7257 + }, + { + "epoch": 0.9988989815579411, + "grad_norm": 1.6938589434188889, + "learning_rate": 6.357955335767329e-11, + "loss": 0.9334, + "step": 7258 + }, + { + "epoch": 0.9990366088631984, + "grad_norm": 1.6163266701171075, + "learning_rate": 4.867810762920755e-11, + "loss": 0.8548, + "step": 7259 + }, + { + "epoch": 0.9991742361684558, + "grad_norm": 2.0646338993543925, + "learning_rate": 3.57635153436231e-11, + "loss": 0.8011, + "step": 7260 + }, + { + "epoch": 0.9993118634737131, + "grad_norm": 1.7544564251939918, + "learning_rate": 2.4835779067755583e-11, + "loss": 0.8468, + "step": 7261 + }, + { + "epoch": 0.9994494907789705, + "grad_norm": 1.9342070082883098, + "learning_rate": 1.5894900972091008e-11, + "loss": 0.9321, + "step": 7262 + }, + { + "epoch": 0.999587118084228, + "grad_norm": 1.8564244616293597, + "learning_rate": 8.940882832986219e-12, + "loss": 0.9073, + "step": 7263 + }, + { + "epoch": 0.9997247453894853, + "grad_norm": 1.5797573733611097, + "learning_rate": 3.973726032668879e-12, + "loss": 0.8586, + "step": 7264 + }, + { + "epoch": 0.9998623726947427, + "grad_norm": 1.688711086309991, + "learning_rate": 9.934315570170327e-13, + "loss": 0.9282, + "step": 7265 + }, + { + "epoch": 1.0, + "grad_norm": 1.6864384435827913, + "learning_rate": 0.0, + "loss": 0.8906, + "step": 7266 + }, + { + "epoch": 1.0, + "step": 7266, + "total_flos": 3.5387205742913126e+18, + "train_loss": 1.064733874492661, + "train_runtime": 10499.2866, + "train_samples_per_second": 88.582, + "train_steps_per_second": 0.692 + } + ], + "logging_steps": 1.0, + "max_steps": 7266, + "num_input_tokens_seen": 0, + "num_train_epochs": 1, + "save_steps": 500, + "stateful_callbacks": { + "TrainerControl": { + "args": { + "should_epoch_stop": false, + "should_evaluate": false, + "should_log": false, + "should_save": true, + "should_training_stop": false + }, + "attributes": {} + } + }, + "total_flos": 3.5387205742913126e+18, + "train_batch_size": 4, + "trial_name": null, + "trial_params": null +}