{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 10.0,
  "global_step": 198580,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.03, "learning_rate": 4.987410615369121e-05, "loss": 1.5195, "step": 500 },
    { "epoch": 0.05, "learning_rate": 4.974821230738242e-05, "loss": 0.9589, "step": 1000 },
    { "epoch": 0.08, "learning_rate": 4.962231846107363e-05, "loss": 0.849, "step": 1500 },
    { "epoch": 0.1, "learning_rate": 4.949642461476483e-05, "loss": 0.7958, "step": 2000 },
    { "epoch": 0.13, "learning_rate": 4.937053076845604e-05, "loss": 0.7606, "step": 2500 },
    { "epoch": 0.15, "learning_rate": 4.9244636922147244e-05, "loss": 0.7385, "step": 3000 },
    { "epoch": 0.18, "learning_rate": 4.911874307583846e-05, "loss": 0.7186, "step": 3500 },
    { "epoch": 0.2, "learning_rate": 4.899284922952966e-05, "loss": 0.6996, "step": 4000 },
    { "epoch": 0.23, "learning_rate": 4.886695538322087e-05, "loss": 0.6902, "step": 4500 },
    { "epoch": 0.25, "learning_rate": 4.874106153691208e-05, "loss": 0.6809, "step": 5000 },
    { "epoch": 0.28, "learning_rate": 4.861516769060329e-05, "loss": 0.6723, "step": 5500 },
    { "epoch": 0.3, "learning_rate": 4.848927384429449e-05, "loss": 0.6689, "step": 6000 },
    { "epoch": 0.33, "learning_rate": 4.8363379997985705e-05, "loss": 0.6621, "step": 6500 },
    { "epoch": 0.35, "learning_rate": 4.823748615167691e-05, "loss": 0.6569, "step": 7000 },
    { "epoch": 0.38, "learning_rate": 4.811159230536812e-05, "loss": 0.6519, "step": 7500 },
    { "epoch": 0.4, "learning_rate": 4.798569845905932e-05, "loss": 0.6445, "step": 8000 },
    { "epoch": 0.43, "learning_rate": 4.785980461275053e-05, "loss": 0.643, "step": 8500 },
    { "epoch": 0.45, "learning_rate": 4.7733910766441734e-05, "loss": 0.6377, "step": 9000 },
    { "epoch": 0.48, "learning_rate": 4.760801692013295e-05, "loss": 0.6386, "step": 9500 },
    { "epoch": 0.5, "learning_rate": 4.748212307382415e-05, "loss": 0.6264, "step": 10000 },
    { "epoch": 0.53, "learning_rate": 4.7356229227515365e-05, "loss": 0.6231, "step": 10500 },
    { "epoch": 0.55, "learning_rate": 4.723033538120657e-05, "loss": 0.6254, "step": 11000 },
    { "epoch": 0.58, "learning_rate": 4.710444153489778e-05, "loss": 0.6205, "step": 11500 },
    { "epoch": 0.6, "learning_rate": 4.697854768858899e-05, "loss": 0.6222, "step": 12000 },
    { "epoch": 0.63, "learning_rate": 4.685265384228019e-05, "loss": 0.6156, "step": 12500 },
    { "epoch": 0.65, "learning_rate": 4.6726759995971395e-05, "loss": 0.612, "step": 13000 },
    { "epoch": 0.68, "learning_rate": 4.660086614966261e-05, "loss": 0.6182, "step": 13500 },
    { "epoch": 0.71, "learning_rate": 4.647497230335381e-05, "loss": 0.6127, "step": 14000 },
    { "epoch": 0.73, "learning_rate": 4.634907845704502e-05, "loss": 0.6087, "step": 14500 },
    { "epoch": 0.76, "learning_rate": 4.622318461073623e-05, "loss": 0.6062, "step": 15000 },
    { "epoch": 0.78, "learning_rate": 4.609729076442744e-05, "loss": 0.604, "step": 15500 },
    { "epoch": 0.81, "learning_rate": 4.597139691811864e-05, "loss": 0.6029, "step": 16000 },
    { "epoch": 0.83, "learning_rate": 4.5845503071809856e-05, "loss": 0.6024, "step": 16500 },
    { "epoch": 0.86, "learning_rate": 4.571960922550106e-05, "loss": 0.599, "step": 17000 },
    { "epoch": 0.88, "learning_rate": 4.559371537919227e-05, "loss": 0.5967, "step": 17500 },
    { "epoch": 0.91, "learning_rate": 4.546782153288347e-05, "loss": 0.5961, "step": 18000 },
    { "epoch": 0.93, "learning_rate": 4.534192768657468e-05, "loss": 0.5954, "step": 18500 },
    { "epoch": 0.96, "learning_rate": 4.521603384026589e-05, "loss": 0.5927, "step": 19000 },
    { "epoch": 0.98, "learning_rate": 4.50901399939571e-05, "loss": 0.5859, "step": 19500 },
    { "epoch": 1.0, "eval_loss": 0.6686062812805176, "eval_runtime": 51.5648, "eval_samples_per_second": 342.346, "step": 19858 },
    { "epoch": 1.01, "learning_rate": 4.49642461476483e-05, "loss": 0.5871, "step": 20000 },
    { "epoch": 1.03, "learning_rate": 4.4838352301339516e-05, "loss": 0.579, "step": 20500 },
    { "epoch": 1.06, "learning_rate": 4.471245845503072e-05, "loss": 0.5806, "step": 21000 },
    { "epoch": 1.08, "learning_rate": 4.458656460872193e-05, "loss": 0.5788, "step": 21500 },
    { "epoch": 1.11, "learning_rate": 4.446067076241314e-05, "loss": 0.5765, "step": 22000 },
    { "epoch": 1.13, "learning_rate": 4.433477691610434e-05, "loss": 0.5756, "step": 22500 },
    { "epoch": 1.16, "learning_rate": 4.4208883069795545e-05, "loss": 0.5756, "step": 23000 },
    { "epoch": 1.18, "learning_rate": 4.408298922348676e-05, "loss": 0.5758, "step": 23500 },
    { "epoch": 1.21, "learning_rate": 4.3957095377177963e-05, "loss": 0.5712, "step": 24000 },
    { "epoch": 1.23, "learning_rate": 4.3831201530869176e-05, "loss": 0.5708, "step": 24500 },
    { "epoch": 1.26, "learning_rate": 4.370530768456038e-05, "loss": 0.5684, "step": 25000 },
    { "epoch": 1.28, "learning_rate": 4.357941383825159e-05, "loss": 0.5738, "step": 25500 },
    { "epoch": 1.31, "learning_rate": 4.34535199919428e-05, "loss": 0.5697, "step": 26000 },
    { "epoch": 1.33, "learning_rate": 4.3327626145634006e-05, "loss": 0.5659, "step": 26500 },
    { "epoch": 1.36, "learning_rate": 4.320173229932521e-05, "loss": 0.569, "step": 27000 },
    { "epoch": 1.38, "learning_rate": 4.307583845301642e-05, "loss": 0.5688, "step": 27500 },
    { "epoch": 1.41, "learning_rate": 4.2949944606707624e-05, "loss": 0.5695, "step": 28000 },
    { "epoch": 1.44, "learning_rate": 4.282405076039883e-05, "loss": 0.5659, "step": 28500 },
    { "epoch": 1.46, "learning_rate": 4.269815691409004e-05, "loss": 0.5641, "step": 29000 },
    { "epoch": 1.49, "learning_rate": 4.257226306778125e-05, "loss": 0.5628, "step": 29500 },
    { "epoch": 1.51, "learning_rate": 4.2446369221472454e-05, "loss": 0.5611, "step": 30000 },
    { "epoch": 1.54, "learning_rate": 4.2320475375163666e-05, "loss": 0.5604, "step": 30500 },
    { "epoch": 1.56, "learning_rate": 4.219458152885487e-05, "loss": 0.5647, "step": 31000 },
    { "epoch": 1.59, "learning_rate": 4.2068687682546085e-05, "loss": 0.5629, "step": 31500 },
    { "epoch": 1.61, "learning_rate": 4.194279383623729e-05, "loss": 0.5619, "step": 32000 },
    { "epoch": 1.64, "learning_rate": 4.181689998992849e-05, "loss": 0.5574, "step": 32500 },
    { "epoch": 1.66, "learning_rate": 4.16910061436197e-05, "loss": 0.5568, "step": 33000 },
    { "epoch": 1.69, "learning_rate": 4.156511229731091e-05, "loss": 0.5585, "step": 33500 },
    { "epoch": 1.71, "learning_rate": 4.1439218451002114e-05, "loss": 0.556, "step": 34000 },
    { "epoch": 1.74, "learning_rate": 4.1313324604693327e-05, "loss": 0.5582, "step": 34500 },
    { "epoch": 1.76, "learning_rate": 4.118743075838453e-05, "loss": 0.5541, "step": 35000 },
    { "epoch": 1.79, "learning_rate": 4.106153691207574e-05, "loss": 0.5524, "step": 35500 },
    { "epoch": 1.81, "learning_rate": 4.093564306576695e-05, "loss": 0.5565, "step": 36000 },
    { "epoch": 1.84, "learning_rate": 4.0809749219458157e-05, "loss": 0.5544, "step": 36500 },
    { "epoch": 1.86, "learning_rate": 4.068385537314936e-05, "loss": 0.5534, "step": 37000 },
    { "epoch": 1.89, "learning_rate": 4.055796152684057e-05, "loss": 0.5545, "step": 37500 },
    { "epoch": 1.91, "learning_rate": 4.0432067680531774e-05, "loss": 0.5521, "step": 38000 },
    { "epoch": 1.94, "learning_rate": 4.030617383422299e-05, "loss": 0.5467, "step": 38500 },
    { "epoch": 1.96, "learning_rate": 4.018027998791419e-05, "loss": 0.5512, "step": 39000 },
    { "epoch": 1.99, "learning_rate": 4.00543861416054e-05, "loss": 0.5488, "step": 39500 },
    { "epoch": 2.0, "eval_loss": 0.6351193785667419, "eval_runtime": 51.4996, "eval_samples_per_second": 342.779, "step": 39716 },
    { "epoch": 2.01, "learning_rate": 3.992849229529661e-05, "loss": 0.5438, "step": 40000 },
    { "epoch": 2.04, "learning_rate": 3.980259844898782e-05, "loss": 0.5404, "step": 40500 },
    { "epoch": 2.06, "learning_rate": 3.967670460267902e-05, "loss": 0.5412, "step": 41000 },
    { "epoch": 2.09, "learning_rate": 3.9550810756370235e-05, "loss": 0.5414, "step": 41500 },
    { "epoch": 2.12, "learning_rate": 3.942491691006144e-05, "loss": 0.5452, "step": 42000 },
    { "epoch": 2.14, "learning_rate": 3.929902306375265e-05, "loss": 0.5373, "step": 42500 },
    { "epoch": 2.17, "learning_rate": 3.917312921744385e-05, "loss": 0.5373, "step": 43000 },
    { "epoch": 2.19, "learning_rate": 3.904723537113506e-05, "loss": 0.539, "step": 43500 },
    { "epoch": 2.22, "learning_rate": 3.8921341524826264e-05, "loss": 0.5363, "step": 44000 },
    { "epoch": 2.24, "learning_rate": 3.879544767851748e-05, "loss": 0.5376, "step": 44500 },
    { "epoch": 2.27, "learning_rate": 3.866955383220868e-05, "loss": 0.5421, "step": 45000 },
    { "epoch": 2.29, "learning_rate": 3.8543659985899895e-05, "loss": 0.5399, "step": 45500 },
    { "epoch": 2.32, "learning_rate": 3.84177661395911e-05, "loss": 0.5343, "step": 46000 },
    { "epoch": 2.34, "learning_rate": 3.829187229328231e-05, "loss": 0.5365, "step": 46500 },
    { "epoch": 2.37, "learning_rate": 3.816597844697351e-05, "loss": 0.5397, "step": 47000 },
    { "epoch": 2.39, "learning_rate": 3.804008460066472e-05, "loss": 0.5342, "step": 47500 },
    { "epoch": 2.42, "learning_rate": 3.7914190754355925e-05, "loss": 0.5327, "step": 48000 },
    { "epoch": 2.44, "learning_rate": 3.778829690804714e-05, "loss": 0.54, "step": 48500 },
    { "epoch": 2.47, "learning_rate": 3.766240306173834e-05, "loss": 0.5362, "step": 49000 },
    { "epoch": 2.49, "learning_rate": 3.753650921542955e-05, "loss": 0.5351, "step": 49500 },
    { "epoch": 2.52, "learning_rate": 3.741061536912076e-05, "loss": 0.5351, "step": 50000 },
    { "epoch": 2.54, "learning_rate": 3.728472152281197e-05, "loss": 0.5284, "step": 50500 },
    { "epoch": 2.57, "learning_rate": 3.715882767650317e-05, "loss": 0.5345, "step": 51000 },
    { "epoch": 2.59, "learning_rate": 3.7032933830194386e-05, "loss": 0.5352, "step": 51500 },
    { "epoch": 2.62, "learning_rate": 3.690703998388559e-05, "loss": 0.5328, "step": 52000 },
    { "epoch": 2.64, "learning_rate": 3.67811461375768e-05, "loss": 0.5362, "step": 52500 },
    { "epoch": 2.67, "learning_rate": 3.6655252291268e-05, "loss": 0.5304, "step": 53000 },
    { "epoch": 2.69, "learning_rate": 3.652935844495921e-05, "loss": 0.5322, "step": 53500 },
    { "epoch": 2.72, "learning_rate": 3.640346459865042e-05, "loss": 0.5295, "step": 54000 },
    { "epoch": 2.74, "learning_rate": 3.627757075234163e-05, "loss": 0.5284, "step": 54500 },
    { "epoch": 2.77, "learning_rate": 3.615167690603283e-05, "loss": 0.5318, "step": 55000 },
    { "epoch": 2.79, "learning_rate": 3.6025783059724046e-05, "loss": 0.5312, "step": 55500 },
    { "epoch": 2.82, "learning_rate": 3.589988921341525e-05, "loss": 0.5282, "step": 56000 },
    { "epoch": 2.85, "learning_rate": 3.577399536710646e-05, "loss": 0.5292, "step": 56500 },
    { "epoch": 2.87, "learning_rate": 3.564810152079766e-05, "loss": 0.5289, "step": 57000 },
    { "epoch": 2.9, "learning_rate": 3.552220767448887e-05, "loss": 0.5293, "step": 57500 },
    { "epoch": 2.92, "learning_rate": 3.5396313828180075e-05, "loss": 0.5241, "step": 58000 },
    { "epoch": 2.95, "learning_rate": 3.527041998187129e-05, "loss": 0.5275, "step": 58500 },
    { "epoch": 2.97, "learning_rate": 3.5144526135562493e-05, "loss": 0.5335, "step": 59000 },
    { "epoch": 3.0, "learning_rate": 3.5018632289253706e-05, "loss": 0.5244, "step": 59500 },
    { "epoch": 3.0, "eval_loss": 0.6179068088531494, "eval_runtime": 51.415, "eval_samples_per_second": 343.344, "step": 59574 },
    { "epoch": 3.02, "learning_rate": 3.489273844294491e-05, "loss": 0.5182, "step": 60000 },
    { "epoch": 3.05, "learning_rate": 3.476684459663612e-05, "loss": 0.5147, "step": 60500 },
    { "epoch": 3.07, "learning_rate": 3.464095075032733e-05, "loss": 0.5187, "step": 61000 },
    { "epoch": 3.1, "learning_rate": 3.4515056904018536e-05, "loss": 0.5201, "step": 61500 },
    { "epoch": 3.12, "learning_rate": 3.438916305770974e-05, "loss": 0.5199, "step": 62000 },
    { "epoch": 3.15, "learning_rate": 3.426326921140095e-05, "loss": 0.5239, "step": 62500 },
    { "epoch": 3.17, "learning_rate": 3.4137375365092154e-05, "loss": 0.5176, "step": 63000 },
    { "epoch": 3.2, "learning_rate": 3.401148151878336e-05, "loss": 0.5199, "step": 63500 },
    { "epoch": 3.22, "learning_rate": 3.388558767247457e-05, "loss": 0.5216, "step": 64000 },
    { "epoch": 3.25, "learning_rate": 3.375969382616578e-05, "loss": 0.5152, "step": 64500 },
    { "epoch": 3.27, "learning_rate": 3.3633799979856984e-05, "loss": 0.5176, "step": 65000 },
    { "epoch": 3.3, "learning_rate": 3.3507906133548196e-05, "loss": 0.5155, "step": 65500 },
    { "epoch": 3.32, "learning_rate": 3.33820122872394e-05, "loss": 0.5151, "step": 66000 },
    { "epoch": 3.35, "learning_rate": 3.3256118440930615e-05, "loss": 0.5176, "step": 66500 },
    { "epoch": 3.37, "learning_rate": 3.3130224594621814e-05, "loss": 0.5144, "step": 67000 },
    { "epoch": 3.4, "learning_rate": 3.300433074831302e-05, "loss": 0.5172, "step": 67500 },
    { "epoch": 3.42, "learning_rate": 3.287843690200423e-05, "loss": 0.5173, "step": 68000 },
    { "epoch": 3.45, "learning_rate": 3.275254305569544e-05, "loss": 0.5172, "step": 68500 },
    { "epoch": 3.47, "learning_rate": 3.2626649209386644e-05, "loss": 0.5124, "step": 69000 },
    { "epoch": 3.5, "learning_rate": 3.2500755363077856e-05, "loss": 0.518, "step": 69500 },
    { "epoch": 3.53, "learning_rate": 3.237486151676906e-05, "loss": 0.5164, "step": 70000 },
    { "epoch": 3.55, "learning_rate": 3.224896767046027e-05, "loss": 0.5175, "step": 70500 },
    { "epoch": 3.58, "learning_rate": 3.212307382415148e-05, "loss": 0.5156, "step": 71000 },
    { "epoch": 3.6, "learning_rate": 3.1997179977842687e-05, "loss": 0.5164, "step": 71500 },
    { "epoch": 3.63, "learning_rate": 3.187128613153389e-05, "loss": 0.5165, "step": 72000 },
    { "epoch": 3.65, "learning_rate": 3.17453922852251e-05, "loss": 0.5171, "step": 72500 },
    { "epoch": 3.68, "learning_rate": 3.1619498438916304e-05, "loss": 0.5133, "step": 73000 },
    { "epoch": 3.7, "learning_rate": 3.149360459260752e-05, "loss": 0.5157, "step": 73500 },
    { "epoch": 3.73, "learning_rate": 3.136771074629872e-05, "loss": 0.5116, "step": 74000 },
    { "epoch": 3.75, "learning_rate": 3.124181689998993e-05, "loss": 0.5137, "step": 74500 },
    { "epoch": 3.78, "learning_rate": 3.111592305368114e-05, "loss": 0.5154, "step": 75000 },
    { "epoch": 3.8, "learning_rate": 3.099002920737235e-05, "loss": 0.5147, "step": 75500 },
    { "epoch": 3.83, "learning_rate": 3.086413536106355e-05, "loss": 0.5158, "step": 76000 },
    { "epoch": 3.85, "learning_rate": 3.0738241514754765e-05, "loss": 0.5139, "step": 76500 },
    { "epoch": 3.88, "learning_rate": 3.0612347668445964e-05, "loss": 0.5162, "step": 77000 },
    { "epoch": 3.9, "learning_rate": 3.0486453822137173e-05, "loss": 0.5133, "step": 77500 },
    { "epoch": 3.93, "learning_rate": 3.0360559975828383e-05, "loss": 0.5126, "step": 78000 },
    { "epoch": 3.95, "learning_rate": 3.023466612951959e-05, "loss": 0.5128, "step": 78500 },
    { "epoch": 3.98, "learning_rate": 3.0108772283210794e-05, "loss": 0.5135, "step": 79000 },
    { "epoch": 4.0, "eval_loss": 0.6064777374267578, "eval_runtime": 51.6259, "eval_samples_per_second": 341.941, "step": 79432 },
    { "epoch": 4.0, "learning_rate": 2.9982878436902007e-05, "loss": 0.507, "step": 79500 },
    { "epoch": 4.03, "learning_rate": 2.9856984590593213e-05, "loss": 0.4969, "step": 80000 },
    { "epoch": 4.05, "learning_rate": 2.9731090744284422e-05, "loss": 0.5011, "step": 80500 },
    { "epoch": 4.08, "learning_rate": 2.9605196897975628e-05, "loss": 0.5023, "step": 81000 },
    { "epoch": 4.1, "learning_rate": 2.9479303051666834e-05, "loss": 0.5025, "step": 81500 },
    { "epoch": 4.13, "learning_rate": 2.9353409205358046e-05, "loss": 0.5032, "step": 82000 },
    { "epoch": 4.15, "learning_rate": 2.9227515359049252e-05, "loss": 0.5036, "step": 82500 },
    { "epoch": 4.18, "learning_rate": 2.9101621512740458e-05, "loss": 0.5004, "step": 83000 },
    { "epoch": 4.2, "learning_rate": 2.8975727666431667e-05, "loss": 0.5041, "step": 83500 },
    { "epoch": 4.23, "learning_rate": 2.8849833820122873e-05, "loss": 0.505, "step": 84000 },
    { "epoch": 4.26, "learning_rate": 2.872393997381408e-05, "loss": 0.5072, "step": 84500 },
    { "epoch": 4.28, "learning_rate": 2.859804612750529e-05, "loss": 0.5012, "step": 85000 },
    { "epoch": 4.31, "learning_rate": 2.8472152281196497e-05, "loss": 0.505, "step": 85500 },
    { "epoch": 4.33, "learning_rate": 2.8346258434887703e-05, "loss": 0.5039, "step": 86000 },
    { "epoch": 4.36, "learning_rate": 2.8220364588578912e-05, "loss": 0.5006, "step": 86500 },
    { "epoch": 4.38, "learning_rate": 2.8094470742270118e-05, "loss": 0.5061, "step": 87000 },
    { "epoch": 4.41, "learning_rate": 2.796857689596133e-05, "loss": 0.5018, "step": 87500 },
    { "epoch": 4.43, "learning_rate": 2.7842683049652536e-05, "loss": 0.5009, "step": 88000 },
    { "epoch": 4.46, "learning_rate": 2.771678920334374e-05, "loss": 0.503, "step": 88500 },
    { "epoch": 4.48, "learning_rate": 2.759089535703495e-05, "loss": 0.5018, "step": 89000 },
    { "epoch": 4.51, "learning_rate": 2.7465001510726157e-05, "loss": 0.502, "step": 89500 },
    { "epoch": 4.53, "learning_rate": 2.7339107664417363e-05, "loss": 0.5039, "step": 90000 },
    { "epoch": 4.56, "learning_rate": 2.7213213818108572e-05, "loss": 0.5044, "step": 90500 },
    { "epoch": 4.58, "learning_rate": 2.7087319971799778e-05, "loss": 0.4998, "step": 91000 },
    { "epoch": 4.61, "learning_rate": 2.6961426125490984e-05, "loss": 0.5046, "step": 91500 },
    { "epoch": 4.63, "learning_rate": 2.6835532279182197e-05, "loss": 0.5039, "step": 92000 },
    { "epoch": 4.66, "learning_rate": 2.6709638432873402e-05, "loss": 0.5024, "step": 92500 },
    { "epoch": 4.68, "learning_rate": 2.6583744586564608e-05, "loss": 0.503, "step": 93000 },
    { "epoch": 4.71, "learning_rate": 2.6457850740255818e-05, "loss": 0.5007, "step": 93500 },
    { "epoch": 4.73, "learning_rate": 2.6331956893947023e-05, "loss": 0.5027, "step": 94000 },
    { "epoch": 4.76, "learning_rate": 2.6206063047638236e-05, "loss": 0.5081, "step": 94500 },
    { "epoch": 4.78, "learning_rate": 2.6080169201329442e-05, "loss": 0.5002, "step": 95000 },
    { "epoch": 4.81, "learning_rate": 2.5954275355020648e-05, "loss": 0.5045, "step": 95500 },
    { "epoch": 4.83, "learning_rate": 2.5828381508711857e-05, "loss": 0.4986, "step": 96000 },
    { "epoch": 4.86, "learning_rate": 2.5702487662403063e-05, "loss": 0.5026, "step": 96500 },
    { "epoch": 4.88, "learning_rate": 2.557659381609427e-05, "loss": 0.502, "step": 97000 },
    { "epoch": 4.91, "learning_rate": 2.545069996978548e-05, "loss": 0.5016, "step": 97500 },
    { "epoch": 4.94, "learning_rate": 2.5324806123476687e-05, "loss": 0.5056, "step": 98000 },
    { "epoch": 4.96, "learning_rate": 2.519891227716789e-05, "loss": 0.4983, "step": 98500 },
    { "epoch": 4.99, "learning_rate": 2.5073018430859102e-05, "loss": 0.5002, "step": 99000 },
    { "epoch": 5.0, "eval_loss": 0.6010987758636475, "eval_runtime": 51.6361, "eval_samples_per_second": 341.873, "step": 99290 },
    { "epoch": 5.01, "learning_rate": 2.4947124584550308e-05, "loss": 0.4916, "step": 99500 },
    { "epoch": 5.04, "learning_rate": 2.4821230738241517e-05, "loss": 0.4912, "step": 100000 },
    { "epoch": 5.06, "learning_rate": 2.4695336891932723e-05, "loss": 0.4941, "step": 100500 },
    { "epoch": 5.09, "learning_rate": 2.456944304562393e-05, "loss": 0.4876, "step": 101000 },
    { "epoch": 5.11, "learning_rate": 2.4443549199315138e-05, "loss": 0.4942, "step": 101500 },
    { "epoch": 5.14, "learning_rate": 2.4317655353006347e-05, "loss": 0.4937, "step": 102000 },
    { "epoch": 5.16, "learning_rate": 2.4191761506697556e-05, "loss": 0.4902, "step": 102500 },
    { "epoch": 5.19, "learning_rate": 2.4065867660388762e-05, "loss": 0.4909, "step": 103000 },
    { "epoch": 5.21, "learning_rate": 2.3939973814079968e-05, "loss": 0.4933, "step": 103500 },
    { "epoch": 5.24, "learning_rate": 2.3814079967771177e-05, "loss": 0.4935, "step": 104000 },
    { "epoch": 5.26, "learning_rate": 2.3688186121462383e-05, "loss": 0.4951, "step": 104500 },
    { "epoch": 5.29, "learning_rate": 2.3562292275153592e-05, "loss": 0.4913, "step": 105000 },
    { "epoch": 5.31, "learning_rate": 2.3436398428844798e-05, "loss": 0.4973, "step": 105500 },
    { "epoch": 5.34, "learning_rate": 2.3310504582536007e-05, "loss": 0.49, "step": 106000 },
    { "epoch": 5.36, "learning_rate": 2.3184610736227213e-05, "loss": 0.4897, "step": 106500 },
    { "epoch": 5.39, "learning_rate": 2.3058716889918422e-05, "loss": 0.4929, "step": 107000 },
    { "epoch": 5.41, "learning_rate": 2.293282304360963e-05, "loss": 0.4876, "step": 107500 },
    { "epoch": 5.44, "learning_rate": 2.2806929197300837e-05, "loss": 0.4951, "step": 108000 },
    { "epoch": 5.46, "learning_rate": 2.2681035350992043e-05, "loss": 0.4913, "step": 108500 },
    { "epoch": 5.49, "learning_rate": 2.2555141504683252e-05, "loss": 0.4952, "step": 109000 },
    { "epoch": 5.51, "learning_rate": 2.242924765837446e-05, "loss": 0.4894, "step": 109500 },
    { "epoch": 5.54, "learning_rate": 2.2303353812065667e-05, "loss": 0.4936, "step": 110000 },
    { "epoch": 5.56, "learning_rate": 2.2177459965756873e-05, "loss": 0.488, "step": 110500 },
    { "epoch": 5.59, "learning_rate": 2.2051566119448082e-05, "loss": 0.4908, "step": 111000 },
    { "epoch": 5.61, "learning_rate": 2.192567227313929e-05, "loss": 0.4935, "step": 111500 },
    { "epoch": 5.64, "learning_rate": 2.1799778426830498e-05, "loss": 0.4883, "step": 112000 },
    { "epoch": 5.67, "learning_rate": 2.1673884580521707e-05, "loss": 0.4912, "step": 112500 },
    { "epoch": 5.69, "learning_rate": 2.1547990734212913e-05, "loss": 0.4903, "step": 113000 },
    { "epoch": 5.72, "learning_rate": 2.142209688790412e-05, "loss": 0.4914, "step": 113500 },
    { "epoch": 5.74, "learning_rate": 2.1296203041595328e-05, "loss": 0.489, "step": 114000 },
    { "epoch": 5.77, "learning_rate": 2.1170309195286537e-05, "loss": 0.4929, "step": 114500 },
    { "epoch": 5.79, "learning_rate": 2.1044415348977743e-05, "loss": 0.4892, "step": 115000 },
    { "epoch": 5.82, "learning_rate": 2.091852150266895e-05, "loss": 0.4854, "step": 115500 },
    { "epoch": 5.84, "learning_rate": 2.0792627656360158e-05, "loss": 0.4938, "step": 116000 },
    { "epoch": 5.87, "learning_rate": 2.0666733810051367e-05, "loss": 0.4916, "step": 116500 },
    { "epoch": 5.89, "learning_rate": 2.0540839963742573e-05, "loss": 0.4945, "step": 117000 },
    { "epoch": 5.92, "learning_rate": 2.0414946117433782e-05, "loss": 0.4901, "step": 117500 },
    { "epoch": 5.94, "learning_rate": 2.0289052271124988e-05, "loss": 0.4922, "step": 118000 },
    { "epoch": 5.97, "learning_rate": 2.0163158424816194e-05, "loss": 0.4903, "step": 118500 },
    { "epoch": 5.99, "learning_rate": 2.0037264578507403e-05, "loss": 0.4911, "step": 119000 },
    { "epoch": 6.0, "eval_loss": 0.5948862433433533, "eval_runtime": 51.885, "eval_samples_per_second": 340.233, "step": 119148 },
    { "epoch": 6.02, "learning_rate": 1.9911370732198612e-05, "loss": 0.4846, "step": 119500 },
    { "epoch": 6.04, "learning_rate": 1.978547688588982e-05, "loss": 0.4849, "step": 120000 },
    { "epoch": 6.07, "learning_rate": 1.9659583039581027e-05, "loss": 0.4815, "step": 120500 },
    { "epoch": 6.09, "learning_rate": 1.9533689193272233e-05, "loss": 0.4835, "step": 121000 },
    { "epoch": 6.12, "learning_rate": 1.9407795346963442e-05, "loss": 0.4802, "step": 121500 },
    { "epoch": 6.14, "learning_rate": 1.9281901500654648e-05, "loss": 0.482, "step": 122000 },
    { "epoch": 6.17, "learning_rate": 1.9156007654345857e-05, "loss": 0.4865, "step": 122500 },
    { "epoch": 6.19, "learning_rate": 1.9030113808037063e-05, "loss": 0.4846, "step": 123000 },
    { "epoch": 6.22, "learning_rate": 1.8904219961728272e-05, "loss": 0.4826, "step": 123500 },
    { "epoch": 6.24, "learning_rate": 1.8778326115419478e-05, "loss": 0.4782, "step": 124000 },
    { "epoch": 6.27, "learning_rate": 1.8652432269110687e-05, "loss": 0.4839, "step": 124500 },
    { "epoch": 6.29, "learning_rate": 1.8526538422801896e-05, "loss": 0.485, "step": 125000 },
    { "epoch": 6.32, "learning_rate": 1.8400644576493102e-05, "loss": 0.4803, "step": 125500 },
    { "epoch": 6.35, "learning_rate": 1.8274750730184308e-05, "loss": 0.4792, "step": 126000 },
    { "epoch": 6.37, "learning_rate": 1.8148856883875517e-05, "loss": 0.4859, "step": 126500 },
    { "epoch": 6.4, "learning_rate": 1.8022963037566727e-05, "loss": 0.4847, "step": 127000 },
    { "epoch": 6.42, "learning_rate": 1.7897069191257932e-05, "loss": 0.4832, "step": 127500 },
    { "epoch": 6.45, "learning_rate": 1.7771175344949138e-05, "loss": 0.4865, "step": 128000 },
    { "epoch": 6.47, "learning_rate": 1.7645281498640347e-05, "loss": 0.4836, "step": 128500 },
    { "epoch": 6.5, "learning_rate": 1.7519387652331553e-05, "loss": 0.4855, "step": 129000 },
    { "epoch": 6.52, "learning_rate": 1.7393493806022763e-05, "loss": 0.4806, "step": 129500 },
    { "epoch": 6.55, "learning_rate": 1.7267599959713972e-05, "loss": 0.4801, "step": 130000 },
    { "epoch": 6.57, "learning_rate": 1.7141706113405178e-05, "loss": 0.4807, "step": 130500 },
    { "epoch": 6.6, "learning_rate": 1.7015812267096383e-05, "loss": 0.4832, "step": 131000 },
    { "epoch": 6.62, "learning_rate": 1.6889918420787593e-05, "loss": 0.4803, "step": 131500 },
    { "epoch": 6.65, "learning_rate": 1.6764024574478802e-05, "loss": 0.4821, "step": 132000 },
    { "epoch": 6.67, "learning_rate": 1.6638130728170008e-05, "loss": 0.4792, "step": 132500 },
    { "epoch": 6.7, "learning_rate": 1.6512236881861213e-05, "loss": 0.4827, "step": 133000 },
    { "epoch": 6.72, "learning_rate": 1.6386343035552423e-05, "loss": 0.4821, "step": 133500 },
    { "epoch": 6.75, "learning_rate": 1.6260449189243632e-05, "loss": 0.4754, "step": 134000 },
    { "epoch": 6.77, "learning_rate": 1.6134555342934838e-05, "loss": 0.4822, "step": 134500 },
    { "epoch": 6.8, "learning_rate": 1.6008661496626047e-05, "loss": 0.4813, "step": 135000 },
    { "epoch": 6.82, "learning_rate": 1.5882767650317253e-05, "loss": 0.4802, "step": 135500 },
    { "epoch": 6.85, "learning_rate": 1.575687380400846e-05, "loss": 0.4835, "step": 136000 },
    { "epoch": 6.87, "learning_rate": 1.5630979957699668e-05, "loss": 0.4842, "step": 136500 },
    { "epoch": 6.9, "learning_rate": 1.5505086111390877e-05, "loss": 0.4867, "step": 137000 },
    { "epoch": 6.92, "learning_rate": 1.5379192265082086e-05, "loss": 0.4832, "step": 137500 },
    { "epoch": 6.95, "learning_rate": 1.525329841877329e-05, "loss": 0.4835, "step": 138000 },
    { "epoch": 6.97, "learning_rate": 1.5127404572464498e-05, "loss": 0.4813, "step": 138500 },
    { "epoch": 7.0, "learning_rate": 1.5001510726155707e-05, "loss": 0.4799, "step": 139000 },
    { "epoch": 7.0, "eval_loss": 0.5898594856262207, "eval_runtime": 51.3831, "eval_samples_per_second": 343.557, "step": 139006 },
    { "epoch": 7.02, "learning_rate": 1.4875616879846913e-05, "loss": 0.4741, "step": 139500 },
    { "epoch": 7.05, "learning_rate": 1.474972303353812e-05, "loss": 0.4743, "step": 140000 },
    { "epoch": 7.08, "learning_rate": 1.462382918722933e-05, "loss": 0.477, "step": 140500 },
    { "epoch": 7.1, "learning_rate": 1.4497935340920537e-05, "loss": 0.476, "step": 141000 },
    { "epoch": 7.13, "learning_rate": 1.4372041494611743e-05, "loss": 0.474, "step": 141500 },
    { "epoch": 7.15, "learning_rate": 1.4246147648302952e-05, "loss": 0.4761, "step": 142000 },
    { "epoch": 7.18, "learning_rate": 1.412025380199416e-05, "loss": 0.4757, "step": 142500 },
    { "epoch": 7.2, "learning_rate": 1.3994359955685366e-05, "loss": 0.4725, "step": 143000 },
    { "epoch": 7.23, "learning_rate": 1.3868466109376573e-05, "loss": 0.4743, "step": 143500 },
    { "epoch": 7.25, "learning_rate": 1.3742572263067782e-05, "loss": 0.4789, "step": 144000 },
    { "epoch": 7.28, "learning_rate": 1.361667841675899e-05, "loss": 0.4683, "step": 144500 },
    { "epoch": 7.3, "learning_rate": 1.3490784570450196e-05, "loss": 0.4746, "step": 145000 },
    { "epoch": 7.33, "learning_rate": 1.3364890724141405e-05, "loss": 0.4698, "step": 145500 },
    { "epoch": 7.35, "learning_rate": 1.3238996877832612e-05, "loss": 0.4726, "step": 146000 },
    { "epoch": 7.38, "learning_rate": 1.3113103031523818e-05, "loss": 0.4759, "step": 146500 },
    { "epoch": 7.4, "learning_rate": 1.2987209185215027e-05, "loss": 0.4727, "step": 147000 },
    { "epoch": 7.43, "learning_rate": 1.2861315338906235e-05, "loss": 0.4737, "step": 147500 },
    { "epoch": 7.45, "learning_rate": 1.2735421492597444e-05, "loss": 0.4732, "step": 148000 },
    { "epoch": 7.48, "learning_rate": 1.2609527646288648e-05, "loss": 0.476, "step": 148500 },
    { "epoch": 7.5, "learning_rate": 1.2483633799979858e-05, "loss": 0.4751, "step": 149000 },
    { "epoch": 7.53, "learning_rate": 1.2357739953671065e-05, "loss": 0.4769, "step": 149500 },
    { "epoch": 7.55, "learning_rate": 1.2231846107362273e-05, "loss": 0.4751, "step": 150000 },
    { "epoch": 7.58, "learning_rate": 1.210595226105348e-05, "loss": 0.4732, "step": 150500 },
    { "epoch": 7.6, "learning_rate": 1.1980058414744688e-05, "loss": 0.4745, "step": 151000 },
    { "epoch": 7.63, "learning_rate": 1.1854164568435895e-05, "loss": 0.471, "step": 151500 },
    { "epoch": 7.65, "learning_rate": 1.1728270722127103e-05, "loss": 0.4744, "step": 152000 },
    { "epoch": 7.68, "learning_rate": 1.160237687581831e-05, "loss": 0.4799, "step": 152500 },
    { "epoch": 7.7, "learning_rate": 1.147648302950952e-05, "loss": 0.474, "step": 153000 },
    { "epoch": 7.73, "learning_rate": 1.1350589183200725e-05, "loss": 0.4768, "step": 153500 },
    { "epoch": 7.76, "learning_rate": 1.1224695336891933e-05, "loss": 0.4751, "step": 154000 },
    { "epoch": 7.78, "learning_rate": 1.109880149058314e-05, "loss": 0.477, "step": 154500 },
    { "epoch": 7.81, "learning_rate": 1.0972907644274348e-05, "loss": 0.4759, "step": 155000 },
    { "epoch": 7.83, "learning_rate": 1.0847013797965557e-05, "loss": 0.472, "step": 155500 },
    { "epoch": 7.86, "learning_rate": 1.0721119951656763e-05, "loss": 0.4741, "step": 156000 },
    { "epoch": 7.88, "learning_rate": 1.0595226105347972e-05, "loss": 0.4776, "step": 156500 },
    { "epoch": 7.91, "learning_rate": 1.0469332259039178e-05, "loss": 0.4735, "step": 157000 },
    { "epoch": 7.93, "learning_rate": 1.0343438412730385e-05, "loss": 0.4773, "step": 157500 },
    { "epoch": 7.96, "learning_rate": 1.0217544566421595e-05, "loss": 0.474, "step": 158000 },
    { "epoch": 7.98, "learning_rate": 1.00916507201128e-05, "loss": 0.4749, "step": 158500 },
    { "epoch": 8.0, "eval_loss": 0.5894228219985962, "eval_runtime": 51.3606, "eval_samples_per_second": 343.707, "step": 158864 },
    { "epoch": 8.01, "learning_rate": 9.96575687380401e-06, "loss": 0.4705, "step": 159000 },
    { "epoch": 8.03, "learning_rate": 9.839863027495217e-06, "loss": 0.4665, "step": 159500 },
    { "epoch": 8.06, "learning_rate": 9.713969181186425e-06, "loss": 0.4634, "step": 160000 },
    { "epoch": 8.08, "learning_rate": 9.588075334877632e-06, "loss": 0.4673, "step": 160500 },
    { "epoch": 8.11, "learning_rate": 9.462181488568838e-06, "loss": 0.4659, "step": 161000 },
    { "epoch": 8.13, "learning_rate": 9.336287642260047e-06, "loss": 0.469, "step": 161500 },
    { "epoch": 8.16, "learning_rate": 9.210393795951255e-06, "loss": 0.4635, "step": 162000 },
    { "epoch": 8.18, "learning_rate": 9.084499949642462e-06, "loss": 0.4695, "step": 162500 },
    { "epoch": 8.21, "learning_rate": 8.95860610333367e-06, "loss": 0.4697, "step": 163000 },
    { "epoch": 8.23, "learning_rate": 8.832712257024877e-06, "loss": 0.4686, "step": 163500 },
    { "epoch": 8.26, "learning_rate": 8.706818410716085e-06, "loss": 0.4683, "step": 164000 },
    { "epoch": 8.28, "learning_rate": 8.580924564407292e-06, "loss": 0.4716, "step": 164500 },
    { "epoch": 8.31, "learning_rate": 8.4550307180985e-06, "loss": 0.4632, "step": 165000 },
    { "epoch": 8.33, "learning_rate": 8.329136871789707e-06, "loss": 0.4687, "step": 165500 },
    { "epoch": 8.36, "learning_rate": 8.203243025480915e-06, "loss": 0.4683, "step": 166000 },
    { "epoch": 8.38, "learning_rate": 8.077349179172123e-06, "loss": 0.4652, "step": 166500 },
    { "epoch": 8.41, "learning_rate": 7.95145533286333e-06, "loss": 0.4688, "step": 167000 },
    { "epoch": 8.43, "learning_rate": 7.825561486554538e-06, "loss": 0.4674, "step": 167500 },
    { "epoch": 8.46, "learning_rate": 7.699667640245745e-06, "loss": 0.4707, "step": 168000 },
    { "epoch": 8.49, "learning_rate": 7.573773793936953e-06, "loss": 0.4718, "step": 168500 },
    { "epoch": 8.51, "learning_rate": 7.44787994762816e-06, "loss": 0.4693, "step": 169000 },
    { "epoch": 8.54, "learning_rate": 7.3219861013193685e-06, "loss": 0.4655, "step": 169500 },
    { "epoch": 8.56, "learning_rate": 7.196092255010575e-06, "loss": 0.4683, "step": 170000 },
    { "epoch": 8.59, "learning_rate": 7.0701984087017836e-06, "loss": 0.471, "step": 170500 },
    { "epoch": 8.61, "learning_rate": 6.94430456239299e-06, "loss": 0.4665, "step": 171000 },
    { "epoch": 8.64, "learning_rate": 6.818410716084198e-06, "loss": 0.4693, "step": 171500 },
    { "epoch": 8.66, "learning_rate": 6.692516869775406e-06, "loss": 0.4708, "step": 172000 },
    { "epoch": 8.69, "learning_rate": 6.566623023466613e-06, "loss": 0.468, "step": 172500 },
    { "epoch": 8.71, "learning_rate": 6.440729177157821e-06, "loss": 0.4656, "step": 173000 },
    { "epoch": 8.74, "learning_rate": 6.314835330849028e-06, "loss": 0.4693, "step": 173500 },
    { "epoch": 8.76, "learning_rate": 6.188941484540236e-06, "loss": 0.4716, "step": 174000 },
    { "epoch": 8.79, "learning_rate": 6.063047638231444e-06, "loss": 0.4671, "step": 174500 },
    { "epoch": 8.81, "learning_rate": 5.937153791922651e-06, "loss": 0.4663, "step": 175000 },
    { "epoch": 8.84, "learning_rate": 5.811259945613859e-06, "loss": 0.4729, "step": 175500 },
    { "epoch": 8.86, "learning_rate": 5.685366099305066e-06, "loss": 0.4655, "step": 176000 },
    { "epoch": 8.89, "learning_rate": 5.559472252996274e-06, "loss": 0.4647, "step": 176500 },
    { "epoch": 8.91, "learning_rate": 5.433578406687481e-06, "loss": 0.4676, "step": 177000 },
    { "epoch": 8.94, "learning_rate": 5.307684560378689e-06, "loss": 0.4658, "step": 177500 },
    { "epoch": 8.96, "learning_rate": 5.181790714069896e-06, "loss": 0.4681, "step": 178000 },
    { "epoch": 8.99, "learning_rate": 5.055896867761104e-06, "loss": 0.4675, "step": 178500 },
    { "epoch": 9.0, "eval_loss": 0.5858681201934814, "eval_runtime": 51.7912, "eval_samples_per_second": 340.849, "step": 178722 },
    { "epoch": 9.01, "learning_rate": 4.9300030214523114e-06, "loss": 0.4637, "step": 179000 },
    { "epoch": 9.04, "learning_rate": 4.80410917514352e-06, "loss": 0.4622, "step": 179500 },
    { "epoch": 9.06, "learning_rate": 4.6782153288347265e-06, "loss": 0.4619, "step": 180000 },
    { "epoch": 9.09, "learning_rate": 4.552321482525934e-06, "loss": 0.4614, "step": 180500 },
    { "epoch": 9.11, "learning_rate": 4.4264276362171415e-06, "loss": 0.4597, "step": 181000 },
    { "epoch": 9.14, "learning_rate": 4.30053378990835e-06, "loss": 0.4625, "step": 181500 },
    { "epoch": 9.17, "learning_rate": 4.174639943599557e-06, "loss": 0.464, "step": 182000 },
    { "epoch": 9.19, "learning_rate": 4.048746097290765e-06, "loss": 0.4601, "step": 182500 },
    { "epoch": 9.22, "learning_rate": 3.9228522509819725e-06, "loss": 0.4649, "step": 183000 },
    { "epoch": 9.24, "learning_rate": 3.7969584046731796e-06, "loss": 0.4634, "step": 183500 },
    { "epoch": 9.27, "learning_rate": 3.671064558364387e-06, "loss": 0.466, "step": 184000 },
    { "epoch": 9.29, "learning_rate": 3.5451707120555946e-06, "loss": 0.4663, "step": 184500 },
    { "epoch": 9.32, "learning_rate": 3.4192768657468025e-06, "loss": 0.465, "step": 185000 },
    { "epoch": 9.34, "learning_rate": 3.29338301943801e-06, "loss": 0.4639, "step": 185500 },
    { "epoch": 9.37, "learning_rate": 3.1674891731292176e-06, "loss": 0.4631, "step": 186000 },
    { "epoch": 9.39, "learning_rate": 3.041595326820425e-06, "loss": 0.4633, "step": 186500 },
    { "epoch": 9.42, "learning_rate": 2.9157014805116326e-06, "loss": 0.4609, "step": 187000 },
    { "epoch": 9.44, "learning_rate": 2.7898076342028406e-06, "loss": 0.461, "step": 187500 },
    { "epoch": 9.47, "learning_rate": 2.6639137878940477e-06, "loss": 0.4593, "step": 188000 },
    { "epoch": 9.49, "learning_rate": 2.538019941585255e-06, "loss": 0.4623, "step": 188500 },
    { "epoch": 9.52, "learning_rate": 2.412126095276463e-06, "loss": 0.4625, "step": 189000 },
    { "epoch": 9.54, "learning_rate": 2.2862322489676707e-06, "loss": 0.4607, "step": 189500 },
    { "epoch": 9.57, "learning_rate": 2.160338402658878e-06, "loss": 0.4638, "step": 190000 },
    { "epoch": 9.59, "learning_rate": 2.0344445563500857e-06, "loss": 0.4613, "step": 190500 },
    { "epoch": 9.62, "learning_rate": 1.9085507100412932e-06, "loss": 0.4652, "step": 191000 },
    { "epoch": 9.64, "learning_rate": 1.7826568637325008e-06, "loss": 0.4652, "step": 191500 },
    { "epoch": 9.67, "learning_rate": 1.6567630174237083e-06, "loss": 0.4627, "step": 192000 },
    { "epoch": 9.69, "learning_rate": 1.530869171114916e-06, "loss": 0.4628, "step": 192500 },
    { "epoch": 9.72, "learning_rate": 1.4049753248061235e-06, "loss": 0.462, "step": 193000 },
    { "epoch": 9.74, "learning_rate": 1.2790814784973313e-06, "loss": 0.4646, "step": 193500 },
    { "epoch": 9.77, "learning_rate": 1.1531876321885388e-06, "loss": 0.4626, "step": 194000 },
    { "epoch": 9.79, "learning_rate": 1.027293785879746e-06, "loss": 0.4618, "step": 194500 },
    { "epoch": 9.82, "learning_rate": 9.013999395709538e-07, "loss": 0.4622, "step": 195000 },
    { "epoch": 9.84, "learning_rate": 7.755060932621615e-07, "loss": 0.4616, "step": 195500 },
    { "epoch": 9.87, "learning_rate": 6.496122469533689e-07, "loss": 0.4631, "step": 196000 },
    { "epoch": 9.9, "learning_rate": 5.237184006445765e-07, "loss": 0.4625, "step": 196500 },
    { "epoch": 9.92, "learning_rate": 3.978245543357841e-07, "loss": 0.4621, "step": 197000 },
    { "epoch": 9.95, "learning_rate": 2.7193070802699166e-07, "loss": 0.461, "step": 197500 },
    { "epoch": 9.97, "learning_rate": 1.4603686171819923e-07, "loss": 0.4617, "step": 198000 },
    { "epoch": 10.0, "learning_rate": 2.014301540940679e-08, "loss": 0.4652, "step": 198500 },
    { "epoch": 10.0, "eval_loss": 0.5854414701461792, "eval_runtime": 51.7785, "eval_samples_per_second": 340.933, "step": 198580 },
    { "epoch": 10.0, "step": 198580, "total_flos": 1.2104979074144256e+17, "train_runtime": 21748.254, "train_samples_per_second": 9.131 }
  ],
  "max_steps": 198580,
  "num_train_epochs": 10,
  "total_flos": 1.2104979074144256e+17,
  "trial_name": null,
  "trial_params": null
}