{
  "best_metric": 0.3029824197292328,
  "best_model_checkpoint": "./convnext-base-3e-5-batch-8/checkpoint-8792",
  "epoch": 10.0,
  "eval_steps": 500,
  "global_step": 21980,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.05,
      "grad_norm": 20.464111328125,
      "learning_rate": 2.999846786074732e-05,
      "loss": 2.7431,
      "step": 100
    },
    {
      "epoch": 0.09,
      "grad_norm": 26.436141967773438,
      "learning_rate": 2.999387175598269e-05,
      "loss": 1.6104,
      "step": 200
    },
    {
      "epoch": 0.14,
      "grad_norm": 54.28024673461914,
      "learning_rate": 2.998621262462245e-05,
      "loss": 1.2012,
      "step": 300
    },
    {
      "epoch": 0.18,
      "grad_norm": 53.153194427490234,
      "learning_rate": 2.9975492031314045e-05,
      "loss": 0.974,
      "step": 400
    },
    {
      "epoch": 0.23,
      "grad_norm": 38.65731430053711,
      "learning_rate": 2.996171216611638e-05,
      "loss": 0.8435,
      "step": 500
    },
    {
      "epoch": 0.27,
      "grad_norm": 23.02427101135254,
      "learning_rate": 2.994487584405244e-05,
      "loss": 0.7449,
      "step": 600
    },
    {
      "epoch": 0.32,
      "grad_norm": 19.776226043701172,
      "learning_rate": 2.992498650453421e-05,
      "loss": 0.6626,
      "step": 700
    },
    {
      "epoch": 0.36,
      "grad_norm": 19.967565536499023,
      "learning_rate": 2.990204821066006e-05,
      "loss": 0.5485,
      "step": 800
    },
    {
      "epoch": 0.41,
      "grad_norm": 32.89971923828125,
      "learning_rate": 2.9876065648384715e-05,
      "loss": 0.6233,
      "step": 900
    },
    {
      "epoch": 0.45,
      "grad_norm": 23.158945083618164,
      "learning_rate": 2.984704412556199e-05,
      "loss": 0.595,
      "step": 1000
    },
    {
      "epoch": 0.5,
      "grad_norm": 33.01531219482422,
      "learning_rate": 2.981498957086044e-05,
      "loss": 0.5844,
      "step": 1100
    },
    {
      "epoch": 0.55,
      "grad_norm": 36.02910614013672,
      "learning_rate": 2.977990853255228e-05,
      "loss": 0.4787,
      "step": 1200
    },
    {
      "epoch": 0.59,
      "grad_norm": 25.670297622680664,
      "learning_rate": 2.974180817717561e-05,
      "loss": 0.6183,
      "step": 1300
    },
    {
      "epoch": 0.64,
      "grad_norm": 16.535247802734375,
      "learning_rate": 2.970069628807043e-05,
      "loss": 0.509,
      "step": 1400
    },
    {
      "epoch": 0.68,
      "grad_norm": 20.26650047302246,
      "learning_rate": 2.965658126378862e-05,
      "loss": 0.5619,
      "step": 1500
    },
    {
      "epoch": 0.73,
      "grad_norm": 51.08666229248047,
      "learning_rate": 2.9609472116378222e-05,
      "loss": 0.4814,
      "step": 1600
    },
    {
      "epoch": 0.77,
      "grad_norm": 13.978100776672363,
      "learning_rate": 2.955937846954242e-05,
      "loss": 0.4695,
      "step": 1700
    },
    {
      "epoch": 0.82,
      "grad_norm": 2.855776071548462,
      "learning_rate": 2.9506310556673573e-05,
      "loss": 0.5061,
      "step": 1800
    },
    {
      "epoch": 0.86,
      "grad_norm": 5.468057155609131,
      "learning_rate": 2.945027921876265e-05,
      "loss": 0.5227,
      "step": 1900
    },
    {
      "epoch": 0.91,
      "grad_norm": 2.113645315170288,
      "learning_rate": 2.9391295902184625e-05,
      "loss": 0.4312,
      "step": 2000
    },
    {
      "epoch": 0.96,
      "grad_norm": 37.98606872558594,
      "learning_rate": 2.93293726563601e-05,
      "loss": 0.5851,
      "step": 2100
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.8918489065606362,
      "eval_loss": 0.38079071044921875,
      "eval_runtime": 114.5109,
      "eval_samples_per_second": 21.963,
      "eval_steps_per_second": 2.751,
      "step": 2198
    },
    {
      "epoch": 1.0,
      "grad_norm": 20.0971622467041,
      "learning_rate": 2.9264522131293818e-05,
      "loss": 0.5127,
      "step": 2200
    },
    {
      "epoch": 1.05,
      "grad_norm": 26.824159622192383,
      "learning_rate": 2.919675757499045e-05,
      "loss": 0.3676,
      "step": 2300
    },
    {
      "epoch": 1.09,
      "grad_norm": 14.176966667175293,
      "learning_rate": 2.9126092830748217e-05,
      "loss": 0.3944,
      "step": 2400
    },
    {
      "epoch": 1.14,
      "grad_norm": 31.566120147705078,
      "learning_rate": 2.9052542334330916e-05,
      "loss": 0.3678,
      "step": 2500
    },
    {
      "epoch": 1.18,
      "grad_norm": 30.576416015625,
      "learning_rate": 2.897612111101888e-05,
      "loss": 0.465,
      "step": 2600
    },
    {
      "epoch": 1.23,
      "grad_norm": 52.94912338256836,
      "learning_rate": 2.889684477253959e-05,
      "loss": 0.3741,
      "step": 2700
    },
    {
      "epoch": 1.27,
      "grad_norm": 26.88067054748535,
      "learning_rate": 2.8814729513878365e-05,
      "loss": 0.3993,
      "step": 2800
    },
    {
      "epoch": 1.32,
      "grad_norm": 26.82840919494629,
      "learning_rate": 2.8729792109970015e-05,
      "loss": 0.3801,
      "step": 2900
    },
    {
      "epoch": 1.36,
      "grad_norm": 52.661399841308594,
      "learning_rate": 2.864204991227195e-05,
      "loss": 0.326,
      "step": 3000
    },
    {
      "epoch": 1.41,
      "grad_norm": 11.977348327636719,
      "learning_rate": 2.855152084521953e-05,
      "loss": 0.3646,
      "step": 3100
    },
    {
      "epoch": 1.46,
      "grad_norm": 28.014543533325195,
      "learning_rate": 2.8458223402564366e-05,
      "loss": 0.3409,
      "step": 3200
    },
    {
      "epoch": 1.5,
      "grad_norm": 1.3806567192077637,
      "learning_rate": 2.836217664359634e-05,
      "loss": 0.467,
      "step": 3300
    },
    {
      "epoch": 1.55,
      "grad_norm": 33.71268844604492,
      "learning_rate": 2.826340018925006e-05,
      "loss": 0.4337,
      "step": 3400
    },
    {
      "epoch": 1.59,
      "grad_norm": 34.983802795410156,
      "learning_rate": 2.8161914218096568e-05,
      "loss": 0.3762,
      "step": 3500
    },
    {
      "epoch": 1.64,
      "grad_norm": 23.66938591003418,
      "learning_rate": 2.8057739462221215e-05,
      "loss": 0.4465,
      "step": 3600
    },
    {
      "epoch": 1.68,
      "grad_norm": 16.07572364807129,
      "learning_rate": 2.7950897202988338e-05,
      "loss": 0.3359,
      "step": 3700
    },
    {
      "epoch": 1.73,
      "grad_norm": 25.094499588012695,
      "learning_rate": 2.7841409266693838e-05,
      "loss": 0.4376,
      "step": 3800
    },
    {
      "epoch": 1.77,
      "grad_norm": 44.122901916503906,
      "learning_rate": 2.7729298020106363e-05,
      "loss": 0.3529,
      "step": 3900
    },
    {
      "epoch": 1.82,
      "grad_norm": 29.057430267333984,
      "learning_rate": 2.761458636589813e-05,
      "loss": 0.3423,
      "step": 4000
    },
    {
      "epoch": 1.87,
      "grad_norm": 39.68132019042969,
      "learning_rate": 2.7497297737966217e-05,
      "loss": 0.3745,
      "step": 4100
    },
    {
      "epoch": 1.91,
      "grad_norm": 25.461807250976562,
      "learning_rate": 2.7377456096645395e-05,
      "loss": 0.4214,
      "step": 4200
    },
    {
      "epoch": 1.96,
      "grad_norm": 34.991153717041016,
      "learning_rate": 2.725508592381337e-05,
      "loss": 0.3975,
      "step": 4300
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.9093439363817097,
      "eval_loss": 0.3232283592224121,
      "eval_runtime": 114.6327,
      "eval_samples_per_second": 21.94,
      "eval_steps_per_second": 2.748,
      "step": 4396
    },
    {
      "epoch": 2.0,
      "grad_norm": 23.15580177307129,
      "learning_rate": 2.7130212217889484e-05,
      "loss": 0.3681,
      "step": 4400
    },
    {
      "epoch": 2.05,
      "grad_norm": 28.531185150146484,
      "learning_rate": 2.7002860488727944e-05,
      "loss": 0.3262,
      "step": 4500
    },
    {
      "epoch": 2.09,
      "grad_norm": 38.52848434448242,
      "learning_rate": 2.6873056752406504e-05,
      "loss": 0.3097,
      "step": 4600
    },
    {
      "epoch": 2.14,
      "grad_norm": 1.2146570682525635,
      "learning_rate": 2.6740827525911766e-05,
      "loss": 0.4274,
      "step": 4700
    },
    {
      "epoch": 2.18,
      "grad_norm": 19.732980728149414,
      "learning_rate": 2.6606199821722166e-05,
      "loss": 0.268,
      "step": 4800
    },
    {
      "epoch": 2.23,
      "grad_norm": 15.875640869140625,
      "learning_rate": 2.646920114228972e-05,
      "loss": 0.2734,
      "step": 4900
    },
    {
      "epoch": 2.27,
      "grad_norm": 18.1243953704834,
      "learning_rate": 2.632985947442167e-05,
      "loss": 0.2739,
      "step": 5000
    },
    {
      "epoch": 2.32,
      "grad_norm": 17.237104415893555,
      "learning_rate": 2.6188203283563198e-05,
      "loss": 0.2917,
      "step": 5100
    },
    {
      "epoch": 2.37,
      "grad_norm": 18.371570587158203,
      "learning_rate": 2.6044261507982356e-05,
      "loss": 0.3551,
      "step": 5200
    },
    {
      "epoch": 2.41,
      "grad_norm": 14.930416107177734,
      "learning_rate": 2.589806355285841e-05,
      "loss": 0.3264,
      "step": 5300
    },
    {
      "epoch": 2.46,
      "grad_norm": 30.236698150634766,
      "learning_rate": 2.5749639284274782e-05,
      "loss": 0.3388,
      "step": 5400
    },
    {
      "epoch": 2.5,
      "grad_norm": 0.13715609908103943,
      "learning_rate": 2.5599019023117872e-05,
      "loss": 0.2575,
      "step": 5500
    },
    {
      "epoch": 2.55,
      "grad_norm": 6.84519624710083,
      "learning_rate": 2.5446233538882924e-05,
      "loss": 0.3263,
      "step": 5600
    },
    {
      "epoch": 2.59,
      "grad_norm": 58.51634979248047,
      "learning_rate": 2.5291314043388295e-05,
      "loss": 0.3716,
      "step": 5700
    },
    {
      "epoch": 2.64,
      "grad_norm": 0.961408257484436,
      "learning_rate": 2.513429218439932e-05,
      "loss": 0.3012,
      "step": 5800
    },
    {
      "epoch": 2.68,
      "grad_norm": 13.59073257446289,
      "learning_rate": 2.497520003916316e-05,
      "loss": 0.2911,
      "step": 5900
    },
    {
      "epoch": 2.73,
      "grad_norm": 12.26279354095459,
      "learning_rate": 2.4814070107855878e-05,
      "loss": 0.279,
      "step": 6000
    },
    {
      "epoch": 2.78,
      "grad_norm": 30.789432525634766,
      "learning_rate": 2.465093530694315e-05,
      "loss": 0.29,
      "step": 6100
    },
    {
      "epoch": 2.82,
      "grad_norm": 0.28742390871047974,
      "learning_rate": 2.448582896245591e-05,
      "loss": 0.3051,
      "step": 6200
    },
    {
      "epoch": 2.87,
      "grad_norm": 2.97086501121521,
      "learning_rate": 2.4318784803182317e-05,
      "loss": 0.2735,
      "step": 6300
    },
    {
      "epoch": 2.91,
      "grad_norm": 32.147727966308594,
      "learning_rate": 2.4149836953777488e-05,
      "loss": 0.2992,
      "step": 6400
    },
    {
      "epoch": 2.96,
      "grad_norm": 28.64170265197754,
      "learning_rate": 2.3979019927792315e-05,
      "loss": 0.3337,
      "step": 6500
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.925248508946322,
      "eval_loss": 0.3209765553474426,
      "eval_runtime": 113.6693,
      "eval_samples_per_second": 22.126,
      "eval_steps_per_second": 2.771,
      "step": 6594
    },
    {
      "epoch": 3.0,
      "grad_norm": 14.05492115020752,
      "learning_rate": 2.3806368620622876e-05,
      "loss": 0.3353,
      "step": 6600
    },
    {
      "epoch": 3.05,
      "grad_norm": 5.791962623596191,
      "learning_rate": 2.3631918302381803e-05,
      "loss": 0.2148,
      "step": 6700
    },
    {
      "epoch": 3.09,
      "grad_norm": 0.8230623602867126,
      "learning_rate": 2.345570461069312e-05,
      "loss": 0.267,
      "step": 6800
    },
    {
      "epoch": 3.14,
      "grad_norm": 0.2485540211200714,
      "learning_rate": 2.327776354341202e-05,
      "loss": 0.2472,
      "step": 6900
    },
    {
      "epoch": 3.18,
      "grad_norm": 0.06697408854961395,
      "learning_rate": 2.3098131451271016e-05,
      "loss": 0.2568,
      "step": 7000
    },
    {
      "epoch": 3.23,
      "grad_norm": 5.22568941116333,
      "learning_rate": 2.291684503045402e-05,
      "loss": 0.2392,
      "step": 7100
    },
    {
      "epoch": 3.28,
      "grad_norm": 58.771671295166016,
      "learning_rate": 2.2733941315099883e-05,
      "loss": 0.2867,
      "step": 7200
    },
    {
      "epoch": 3.32,
      "grad_norm": 21.552099227905273,
      "learning_rate": 2.2549457669736836e-05,
      "loss": 0.2652,
      "step": 7300
    },
    {
      "epoch": 3.37,
      "grad_norm": 1.265632152557373,
      "learning_rate": 2.2363431781649483e-05,
      "loss": 0.2284,
      "step": 7400
    },
    {
      "epoch": 3.41,
      "grad_norm": 5.533514499664307,
      "learning_rate": 2.2175901653179847e-05,
      "loss": 0.232,
      "step": 7500
    },
    {
      "epoch": 3.46,
      "grad_norm": 0.05318501219153404,
      "learning_rate": 2.1986905593964048e-05,
      "loss": 0.2094,
      "step": 7600
    },
    {
      "epoch": 3.5,
      "grad_norm": 20.877737045288086,
      "learning_rate": 2.1796482213106203e-05,
      "loss": 0.3036,
      "step": 7700
    },
    {
      "epoch": 3.55,
      "grad_norm": 43.25149917602539,
      "learning_rate": 2.1604670411291174e-05,
      "loss": 0.2388,
      "step": 7800
    },
    {
      "epoch": 3.59,
      "grad_norm": 15.92627239227295,
      "learning_rate": 2.1411509372837724e-05,
      "loss": 0.3357,
      "step": 7900
    },
    {
      "epoch": 3.64,
      "grad_norm": 12.71852970123291,
      "learning_rate": 2.121703855769373e-05,
      "loss": 0.2069,
      "step": 8000
    },
    {
      "epoch": 3.69,
      "grad_norm": 12.913580894470215,
      "learning_rate": 2.102129769337511e-05,
      "loss": 0.2867,
      "step": 8100
    },
    {
      "epoch": 3.73,
      "grad_norm": 0.06732411682605743,
      "learning_rate": 2.0824326766850072e-05,
      "loss": 0.28,
      "step": 8200
    },
    {
      "epoch": 3.78,
      "grad_norm": 12.845504760742188,
      "learning_rate": 2.0626166016370375e-05,
      "loss": 0.2245,
      "step": 8300
    },
    {
      "epoch": 3.82,
      "grad_norm": 0.00928166788071394,
      "learning_rate": 2.042685592325123e-05,
      "loss": 0.2359,
      "step": 8400
    },
    {
      "epoch": 3.87,
      "grad_norm": 9.65197467803955,
      "learning_rate": 2.0226437203601602e-05,
      "loss": 0.2984,
      "step": 8500
    },
    {
      "epoch": 3.91,
      "grad_norm": 25.082759857177734,
      "learning_rate": 2.0024950800006463e-05,
      "loss": 0.2164,
      "step": 8600
    },
    {
      "epoch": 3.96,
      "grad_norm": 9.887031555175781,
      "learning_rate": 1.9822437873162863e-05,
      "loss": 0.2279,
      "step": 8700
    },
    {
      "epoch": 4.0,
      "eval_accuracy": 0.9308151093439364,
      "eval_loss": 0.3029824197292328,
      "eval_runtime": 112.1873,
      "eval_samples_per_second": 22.418,
      "eval_steps_per_second": 2.808,
      "step": 8792
    },
    {
      "epoch": 4.0,
      "grad_norm": 6.438531398773193,
      "learning_rate": 1.961893979347137e-05,
      "loss": 0.2683,
      "step": 8800
    },
    {
      "epoch": 4.05,
      "grad_norm": 0.2623905837535858,
      "learning_rate": 1.9414498132584773e-05,
      "loss": 0.2297,
      "step": 8900
    },
    {
      "epoch": 4.09,
      "grad_norm": 13.5099515914917,
      "learning_rate": 1.9209154654915524e-05,
      "loss": 0.1985,
      "step": 9000
    },
    {
      "epoch": 4.14,
      "grad_norm": 0.006642199121415615,
      "learning_rate": 1.900295130910396e-05,
      "loss": 0.2145,
      "step": 9100
    },
    {
      "epoch": 4.19,
      "grad_norm": 41.91756057739258,
      "learning_rate": 1.879593021944875e-05,
      "loss": 0.2221,
      "step": 9200
    },
    {
      "epoch": 4.23,
      "grad_norm": 21.074771881103516,
      "learning_rate": 1.8588133677301595e-05,
      "loss": 0.2129,
      "step": 9300
    },
    {
      "epoch": 4.28,
      "grad_norm": 0.3562418520450592,
      "learning_rate": 1.837960413242765e-05,
      "loss": 0.2438,
      "step": 9400
    },
    {
      "epoch": 4.32,
      "grad_norm": 40.30086898803711,
      "learning_rate": 1.817038418433373e-05,
      "loss": 0.2036,
      "step": 9500
    },
    {
      "epoch": 4.37,
      "grad_norm": 0.014717689715325832,
      "learning_rate": 1.796051657356582e-05,
      "loss": 0.2003,
      "step": 9600
    },
    {
      "epoch": 4.41,
      "grad_norm": 17.778074264526367,
      "learning_rate": 1.7750044172977838e-05,
      "loss": 0.2179,
      "step": 9700
    },
    {
      "epoch": 4.46,
      "grad_norm": 5.801839351654053,
      "learning_rate": 1.7539009978973312e-05,
      "loss": 0.2142,
      "step": 9800
    },
    {
      "epoch": 4.5,
      "grad_norm": 5.123954772949219,
      "learning_rate": 1.7327457102721887e-05,
      "loss": 0.1686,
      "step": 9900
    },
    {
      "epoch": 4.55,
      "grad_norm": 4.904812812805176,
      "learning_rate": 1.711542876135233e-05,
      "loss": 0.2073,
      "step": 10000
    },
    {
      "epoch": 4.6,
      "grad_norm": 15.447718620300293,
      "learning_rate": 1.6902968269123902e-05,
      "loss": 0.2276,
      "step": 10100
    },
    {
      "epoch": 4.64,
      "grad_norm": 0.12395219504833221,
      "learning_rate": 1.669011902857791e-05,
      "loss": 0.1882,
      "step": 10200
    },
    {
      "epoch": 4.69,
      "grad_norm": 17.49830436706543,
      "learning_rate": 1.6476924521671194e-05,
      "loss": 0.2127,
      "step": 10300
    },
    {
      "epoch": 4.73,
      "grad_norm": 10.882052421569824,
      "learning_rate": 1.6263428300893422e-05,
      "loss": 0.2202,
      "step": 10400
    },
    {
      "epoch": 4.78,
      "grad_norm": 37.238731384277344,
      "learning_rate": 1.604967398036996e-05,
      "loss": 0.1663,
      "step": 10500
    },
    {
      "epoch": 4.82,
      "grad_norm": 9.180378913879395,
      "learning_rate": 1.5835705226952112e-05,
      "loss": 0.2547,
      "step": 10600
    },
    {
      "epoch": 4.87,
      "grad_norm": 0.04440607875585556,
      "learning_rate": 1.5621565751296676e-05,
      "loss": 0.2186,
      "step": 10700
    },
    {
      "epoch": 4.91,
      "grad_norm": 0.13343572616577148,
      "learning_rate": 1.540729929893649e-05,
      "loss": 0.2779,
      "step": 10800
    },
    {
      "epoch": 4.96,
      "grad_norm": 1.3157250881195068,
      "learning_rate": 1.5192949641343834e-05,
      "loss": 0.1696,
      "step": 10900
    },
    {
      "epoch": 5.0,
      "eval_accuracy": 0.9292246520874752,
      "eval_loss": 0.34775686264038086,
      "eval_runtime": 111.4119,
      "eval_samples_per_second": 22.574,
      "eval_steps_per_second": 2.827,
      "step": 10990
    },
    {
      "epoch": 5.0,
      "grad_norm": 0.44680455327033997,
      "learning_rate": 1.4978560566988603e-05,
      "loss": 0.23,
      "step": 11000
    },
    {
      "epoch": 5.05,
      "grad_norm": 0.041424062103033066,
      "learning_rate": 1.4764175872392958e-05,
      "loss": 0.1913,
      "step": 11100
    },
    {
      "epoch": 5.1,
      "grad_norm": 2.6414008140563965,
      "learning_rate": 1.454983935318433e-05,
      "loss": 0.1374,
      "step": 11200
    },
    {
      "epoch": 5.14,
      "grad_norm": 0.396395206451416,
      "learning_rate": 1.433559479514864e-05,
      "loss": 0.1807,
      "step": 11300
    },
    {
      "epoch": 5.19,
      "grad_norm": 8.14771842956543,
      "learning_rate": 1.4121485965285485e-05,
      "loss": 0.1888,
      "step": 11400
    },
    {
      "epoch": 5.23,
      "grad_norm": 0.3801160454750061,
      "learning_rate": 1.3907556602867213e-05,
      "loss": 0.1838,
      "step": 11500
    },
    {
      "epoch": 5.28,
      "grad_norm": 1.0655605792999268,
      "learning_rate": 1.3693850410503614e-05,
      "loss": 0.1483,
      "step": 11600
    },
    {
      "epoch": 5.32,
      "grad_norm": 0.37717050313949585,
      "learning_rate": 1.3480411045214147e-05,
      "loss": 0.1635,
      "step": 11700
    },
    {
      "epoch": 5.37,
      "grad_norm": 0.017707068473100662,
      "learning_rate": 1.326728210950942e-05,
      "loss": 0.1871,
      "step": 11800
    },
    {
      "epoch": 5.41,
      "grad_norm": 0.01935901865363121,
      "learning_rate": 1.3054507142483875e-05,
      "loss": 0.1479,
      "step": 11900
    },
    {
      "epoch": 5.46,
      "grad_norm": 0.2556498050689697,
      "learning_rate": 1.2842129610921378e-05,
      "loss": 0.1754,
      "step": 12000
    },
    {
      "epoch": 5.51,
      "grad_norm": 1.742725133895874,
      "learning_rate": 1.2630192900415582e-05,
      "loss": 0.2713,
      "step": 12100
    },
    {
      "epoch": 5.55,
      "grad_norm": 40.924644470214844,
      "learning_rate": 1.2418740306506923e-05,
      "loss": 0.1909,
      "step": 12200
    },
    {
      "epoch": 5.6,
      "grad_norm": 49.58342361450195,
      "learning_rate": 1.2207815025837977e-05,
      "loss": 0.159,
      "step": 12300
    },
    {
      "epoch": 5.64,
      "grad_norm": 0.03460393100976944,
      "learning_rate": 1.1997460147328984e-05,
      "loss": 0.1559,
      "step": 12400
    },
    {
      "epoch": 5.69,
      "grad_norm": 0.0873652920126915,
      "learning_rate": 1.178771864337546e-05,
      "loss": 0.1713,
      "step": 12500
    },
    {
      "epoch": 5.73,
      "grad_norm": 0.1891886442899704,
      "learning_rate": 1.1578633361069559e-05,
      "loss": 0.2004,
      "step": 12600
    },
    {
      "epoch": 5.78,
      "grad_norm": 0.3393521010875702,
      "learning_rate": 1.1370247013447035e-05,
      "loss": 0.1993,
      "step": 12700
    },
    {
      "epoch": 5.82,
      "grad_norm": 3.934659719467163,
      "learning_rate": 1.1162602170761611e-05,
      "loss": 0.1507,
      "step": 12800
    },
    {
      "epoch": 5.87,
      "grad_norm": 1.6851441860198975,
      "learning_rate": 1.095574125178849e-05,
      "loss": 0.1649,
      "step": 12900
    },
    {
      "epoch": 5.91,
      "grad_norm": 74.29869079589844,
      "learning_rate": 1.0749706515158863e-05,
      "loss": 0.2056,
      "step": 13000
    },
    {
      "epoch": 5.96,
      "grad_norm": 23.88290786743164,
      "learning_rate": 1.0544540050727048e-05,
      "loss": 0.1658,
      "step": 13100
    },
    {
      "epoch": 6.0,
      "eval_accuracy": 0.9427435387673956,
      "eval_loss": 0.3084171712398529,
      "eval_runtime": 111.2598,
      "eval_samples_per_second": 22.605,
      "eval_steps_per_second": 2.831,
      "step": 13188
    },
    {
      "epoch": 6.01,
      "grad_norm": 15.39907169342041,
      "learning_rate": 1.0340283770972167e-05,
      "loss": 0.2241,
      "step": 13200
    },
    {
      "epoch": 6.05,
      "grad_norm": 0.004868438933044672,
      "learning_rate": 1.0136979402436069e-05,
      "loss": 0.1529,
      "step": 13300
    },
    {
      "epoch": 6.1,
      "grad_norm": 45.70091247558594,
      "learning_rate": 9.93466847719919e-06,
      "loss": 0.1543,
      "step": 13400
    },
    {
      "epoch": 6.14,
      "grad_norm": 0.8413438200950623,
      "learning_rate": 9.733392324396167e-06,
      "loss": 0.1236,
      "step": 13500
    },
    {
      "epoch": 6.19,
      "grad_norm": 13.94447135925293,
      "learning_rate": 9.533192061772919e-06,
      "loss": 0.1151,
      "step": 13600
    },
    {
      "epoch": 6.23,
      "grad_norm": 0.031886328011751175,
      "learning_rate": 9.334108587286877e-06,
      "loss": 0.1213,
      "step": 13700
    },
    {
      "epoch": 6.28,
      "grad_norm": 1.3224759101867676,
      "learning_rate": 9.136182570752153e-06,
      "loss": 0.1151,
      "step": 13800
    },
    {
      "epoch": 6.32,
      "grad_norm": 0.02312690019607544,
      "learning_rate": 8.93945444553128e-06,
      "loss": 0.1601,
      "step": 13900
    },
    {
      "epoch": 6.37,
      "grad_norm": 0.02013530395925045,
      "learning_rate": 8.743964400275304e-06,
      "loss": 0.133,
      "step": 14000
    },
    {
      "epoch": 6.41,
      "grad_norm": 0.02437993884086609,
      "learning_rate": 8.549752370713798e-06,
      "loss": 0.1754,
      "step": 14100
    },
    {
      "epoch": 6.46,
      "grad_norm": 0.005770612042397261,
      "learning_rate": 8.356858031496596e-06,
      "loss": 0.1421,
      "step": 14200
    },
    {
      "epoch": 6.51,
      "grad_norm": 0.008364029228687286,
      "learning_rate": 8.165320788088888e-06,
      "loss": 0.1988,
      "step": 14300
    },
    {
      "epoch": 6.55,
      "grad_norm": 0.06003904342651367,
      "learning_rate": 7.975179768721187e-06,
      "loss": 0.073,
      "step": 14400
    },
    {
      "epoch": 6.6,
      "grad_norm": 13.212935447692871,
      "learning_rate": 7.78647381639607e-06,
      "loss": 0.1858,
      "step": 14500
    },
    {
      "epoch": 6.64,
      "grad_norm": 0.03764009103178978,
      "learning_rate": 7.599241480953112e-06,
      "loss": 0.1373,
      "step": 14600
    },
    {
      "epoch": 6.69,
      "grad_norm": 0.01994572952389717,
      "learning_rate": 7.413521011193705e-06,
      "loss": 0.1165,
      "step": 14700
    },
    {
      "epoch": 6.73,
      "grad_norm": 36.3884162902832,
      "learning_rate": 7.229350347067426e-06,
      "loss": 0.1989,
      "step": 14800
    },
    {
      "epoch": 6.78,
      "grad_norm": 0.005731828045099974,
      "learning_rate": 7.046767111921425e-06,
      "loss": 0.0917,
      "step": 14900
    },
    {
      "epoch": 6.82,
      "grad_norm": 48.87177658081055,
      "learning_rate": 6.865808604814564e-06,
      "loss": 0.1263,
      "step": 15000
    },
    {
      "epoch": 6.87,
      "grad_norm": 29.0344295501709,
      "learning_rate": 6.686511792897767e-06,
      "loss": 0.1724,
      "step": 15100
    },
    {
      "epoch": 6.92,
      "grad_norm": 0.013815321959555149,
      "learning_rate": 6.508913303862144e-06,
      "loss": 0.1795,
      "step": 15200
    },
    {
      "epoch": 6.96,
      "grad_norm": 29.06551742553711,
      "learning_rate": 6.333049418456533e-06,
      "loss": 0.1383,
      "step": 15300
    },
    {
      "epoch": 7.0,
      "eval_accuracy": 0.9391650099403579,
      "eval_loss": 0.3318726718425751,
      "eval_runtime": 111.0425,
      "eval_samples_per_second": 22.649,
      "eval_steps_per_second": 2.837,
      "step": 15386
    },
    {
      "epoch": 7.01,
      "grad_norm": 54.77537536621094,
      "learning_rate": 6.1589560630758656e-06,
      "loss": 0.102,
      "step": 15400
    },
    {
      "epoch": 7.05,
      "grad_norm": 1.445946455001831,
      "learning_rate": 5.986668802421924e-06,
      "loss": 0.1061,
      "step": 15500
    },
    {
      "epoch": 7.1,
      "grad_norm": 30.50276756286621,
      "learning_rate": 5.8162228322380155e-06,
      "loss": 0.1004,
      "step": 15600
    },
    {
      "epoch": 7.14,
      "grad_norm": 0.28580981492996216,
      "learning_rate": 5.647652972118998e-06,
      "loss": 0.095,
      "step": 15700
    },
    {
      "epoch": 7.19,
      "grad_norm": 8.433425903320312,
      "learning_rate": 5.480993658398129e-06,
      "loss": 0.2191,
      "step": 15800
    },
    {
      "epoch": 7.23,
      "grad_norm": 23.46518898010254,
      "learning_rate": 5.316278937112267e-06,
      "loss": 0.1039,
      "step": 15900
    },
    {
      "epoch": 7.28,
      "grad_norm": 0.1998785138130188,
      "learning_rate": 5.153542457046737e-06,
      "loss": 0.1161,
      "step": 16000
    },
    {
      "epoch": 7.32,
      "grad_norm": 0.5174553990364075,
      "learning_rate": 4.992817462861397e-06,
      "loss": 0.1093,
      "step": 16100
    },
    {
      "epoch": 7.37,
      "grad_norm": 10.745183944702148,
      "learning_rate": 4.834136788299248e-06,
      "loss": 0.152,
      "step": 16200
    },
    {
      "epoch": 7.42,
      "grad_norm": 0.035963889211416245,
      "learning_rate": 4.67753284947898e-06,
      "loss": 0.0887,
      "step": 16300
    },
    {
      "epoch": 7.46,
      "grad_norm": 0.02713492140173912,
      "learning_rate": 4.523037638272822e-06,
      "loss": 0.0964,
      "step": 16400
    },
    {
      "epoch": 7.51,
      "grad_norm": 0.04027700796723366,
      "learning_rate": 4.370682715771108e-06,
      "loss": 0.0898,
      "step": 16500
    },
    {
      "epoch": 7.55,
      "grad_norm": 32.831809997558594,
      "learning_rate": 4.220499205834783e-06,
      "loss": 0.1376,
      "step": 16600
    },
    {
      "epoch": 7.6,
      "grad_norm": 1.195786952972412,
      "learning_rate": 4.072517788737264e-06,
      "loss": 0.1386,
      "step": 16700
    },
    {
      "epoch": 7.64,
      "grad_norm": 0.0985260009765625,
      "learning_rate": 3.926768694896931e-06,
      "loss": 0.1031,
      "step": 16800
    },
    {
      "epoch": 7.69,
      "grad_norm": 66.71458435058594,
      "learning_rate": 3.783281698701482e-06,
      "loss": 0.1584,
      "step": 16900
    },
    {
      "epoch": 7.73,
      "grad_norm": 0.06048920005559921,
      "learning_rate": 3.6420861124254607e-06,
      "loss": 0.1557,
      "step": 17000
    },
    {
      "epoch": 7.78,
      "grad_norm": 35.74193572998047,
      "learning_rate": 3.5032107802422107e-06,
      "loss": 0.1509,
      "step": 17100
    },
    {
      "epoch": 7.83,
      "grad_norm": 0.14910955727100372,
      "learning_rate": 3.3666840723314145e-06,
      "loss": 0.1421,
      "step": 17200
    },
    {
      "epoch": 7.87,
      "grad_norm": 44.823760986328125,
      "learning_rate": 3.232533879083511e-06,
      "loss": 0.1181,
      "step": 17300
    },
    {
      "epoch": 7.92,
      "grad_norm": 7.532151699066162,
      "learning_rate": 3.1007876054020724e-06,
      "loss": 0.1385,
      "step": 17400
    },
    {
      "epoch": 7.96,
      "grad_norm": 28.50664520263672,
      "learning_rate": 2.9714721651054e-06,
      "loss": 0.1222,
      "step": 17500
    },
    {
      "epoch": 8.0,
      "eval_accuracy": 0.9479125248508946,
      "eval_loss": 0.3132196068763733,
      "eval_runtime": 111.2173,
      "eval_samples_per_second": 22.613,
      "eval_steps_per_second": 2.832,
      "step": 17584
    },
    {
      "epoch": 8.01,
      "grad_norm": 0.00245882966555655,
      "learning_rate": 2.8446139754284486e-06,
      "loss": 0.0782,
      "step": 17600
    },
    {
      "epoch": 8.05,
      "grad_norm": 0.3096585273742676,
      "learning_rate": 2.7202389516261346e-06,
      "loss": 0.1482,
      "step": 17700
    },
    {
      "epoch": 8.1,
      "grad_norm": 38.84019088745117,
      "learning_rate": 2.5983725016792574e-06,
      "loss": 0.1191,
      "step": 17800
    },
    {
      "epoch": 8.14,
      "grad_norm": 0.24393437802791595,
      "learning_rate": 2.4790395211040296e-06,
      "loss": 0.1095,
      "step": 17900
    },
    {
      "epoch": 8.19,
      "grad_norm": 0.32781603932380676,
      "learning_rate": 2.36226438786627e-06,
      "loss": 0.121,
      "step": 18000
    },
    {
      "epoch": 8.23,
      "grad_norm": 8.179335594177246,
      "learning_rate": 2.2480709574013637e-06,
      "loss": 0.0564,
      "step": 18100
    },
    {
      "epoch": 8.28,
      "grad_norm": 0.03705134242773056,
      "learning_rate": 2.1364825577409424e-06,
      "loss": 0.1466,
      "step": 18200
    },
    {
      "epoch": 8.33,
      "grad_norm": 0.8985774517059326,
      "learning_rate": 2.0275219847473026e-06,
      "loss": 0.1097,
      "step": 18300
    },
    {
      "epoch": 8.37,
      "grad_norm": 7.916272163391113,
      "learning_rate": 1.9212114974565664e-06,
      "loss": 0.1213,
      "step": 18400
    },
    {
      "epoch": 8.42,
      "grad_norm": 0.16036684811115265,
      "learning_rate": 1.8175728135314707e-06,
      "loss": 0.0848,
      "step": 18500
    },
    {
      "epoch": 8.46,
      "grad_norm": 0.01501951552927494,
      "learning_rate": 1.7166271048247796e-06,
      "loss": 0.089,
      "step": 18600
    },
    {
      "epoch": 8.51,
      "grad_norm": 80.24330139160156,
      "learning_rate": 1.6183949930541898e-06,
      "loss": 0.1014,
      "step": 18700
    },
    {
      "epoch": 8.55,
      "grad_norm": 0.014790402725338936,
      "learning_rate": 1.5228965455896054e-06,
      "loss": 0.1042,
      "step": 18800
    },
    {
      "epoch": 8.6,
      "grad_norm": 0.005228283815085888,
      "learning_rate": 1.4301512713536873e-06,
      "loss": 0.1056,
      "step": 18900
    },
    {
      "epoch": 8.64,
      "grad_norm": 0.0038773128762841225,
      "learning_rate": 1.3401781168364591e-06,
      "loss": 0.0963,
      "step": 19000
    },
    {
      "epoch": 8.69,
      "grad_norm": 0.009148034267127514,
      "learning_rate": 1.2529954622248114e-06,
      "loss": 0.109,
      "step": 19100
    },
    {
      "epoch": 8.74,
      "grad_norm": 0.10844717919826508,
      "learning_rate": 1.1686211176477208e-06,
      "loss": 0.1075,
      "step": 19200
    },
    {
      "epoch": 8.78,
      "grad_norm": 0.017569424584507942,
      "learning_rate": 1.0870723195378852e-06,
      "loss": 0.1228,
      "step": 19300
    },
    {
      "epoch": 8.83,
      "grad_norm": 39.32170104980469,
      "learning_rate": 1.00836572711058e-06,
      "loss": 0.1414,
      "step": 19400
    },
    {
      "epoch": 8.87,
      "grad_norm": 28.396100997924805,
      "learning_rate": 9.325174189604346e-07,
      "loss": 0.129,
      "step": 19500
    },
    {
      "epoch": 8.92,
      "grad_norm": 0.031751640141010284,
      "learning_rate": 8.595428897768071e-07,
      "loss": 0.1163,
      "step": 19600
    },
    {
      "epoch": 8.96,
      "grad_norm": 1.8334925174713135,
      "learning_rate": 7.894570471784418e-07,
      "loss": 0.1196,
      "step": 19700
    },
    {
      "epoch": 9.0,
      "eval_accuracy": 0.9467196819085487,
      "eval_loss": 0.3136024475097656,
      "eval_runtime": 111.3615,
      "eval_samples_per_second": 22.584,
      "eval_steps_per_second": 2.829,
      "step": 19782
    },
    {
      "epoch": 9.01,
      "grad_norm": 9.599562644958496,
      "learning_rate": 7.222742086680756e-07,
      "loss": 0.1225,
      "step": 19800
    },
    {
      "epoch": 9.05,
      "grad_norm": 20.813501358032227,
      "learning_rate": 6.580080987075721e-07,
      "loss": 0.0845,
      "step": 19900
    },
    {
      "epoch": 9.1,
      "grad_norm": 0.00961952656507492,
      "learning_rate": 5.966718459142196e-07,
      "loss": 0.107,
      "step": 20000
    },
    {
      "epoch": 9.14,
      "grad_norm": 0.0361330471932888,
      "learning_rate": 5.382779803787579e-07,
      "loss": 0.0993,
      "step": 20100
    },
    {
      "epoch": 9.19,
      "grad_norm": 0.8669134378433228,
      "learning_rate": 4.82838431105655e-07,
      "loss": 0.0874,
      "step": 20200
    },
    {
      "epoch": 9.24,
      "grad_norm": 0.02739112637937069,
      "learning_rate": 4.303645235761866e-07,
      "loss": 0.1095,
      "step": 20300
    },
    {
      "epoch": 9.28,
      "grad_norm": 0.019733713939785957,
      "learning_rate": 3.808669774348167e-07,
      "loss": 0.1325,
      "step": 20400
    },
    {
      "epoch": 9.33,
      "grad_norm": 0.0024758102372288704,
      "learning_rate": 3.3435590429932493e-07,
      "loss": 0.0883,
      "step": 20500
    },
    {
      "epoch": 9.37,
      "grad_norm": 0.04354145750403404,
      "learning_rate": 2.908408056951578e-07,
      "loss": 0.0939,
      "step": 20600
    },
    {
      "epoch": 9.42,
      "grad_norm": 10.014009475708008,
      "learning_rate": 2.5033057111440106e-07,
      "loss": 0.0856,
      "step": 20700
    },
    {
      "epoch": 9.46,
      "grad_norm": 0.00877867080271244,
      "learning_rate": 2.1283347619979243e-07,
      "loss": 0.0977,
      "step": 20800
    },
    {
      "epoch": 9.51,
      "grad_norm": 0.0024496885016560555,
      "learning_rate": 1.7835718105413235e-07,
      "loss": 0.1331,
      "step": 20900
    },
    {
      "epoch": 9.55,
      "grad_norm": 0.2954551577568054,
      "learning_rate": 1.4690872867542892e-07,
      "loss": 0.126,
      "step": 21000
    },
    {
      "epoch": 9.6,
      "grad_norm": 0.06035974249243736,
      "learning_rate": 1.1849454351812394e-07,
      "loss": 0.077,
      "step": 21100
    },
    {
      "epoch": 9.65,
      "grad_norm": 0.7805526852607727,
      "learning_rate": 9.312043018067762e-08,
      "loss": 0.1167,
      "step": 21200
    },
    {
      "epoch": 9.69,
      "grad_norm": 0.09065116941928864,
      "learning_rate": 7.079157221975718e-08,
      "loss": 0.1658,
      "step": 21300
    },
    {
      "epoch": 9.74,
      "grad_norm": 0.00335172307677567,
      "learning_rate": 5.1512531091333914e-08,
      "loss": 0.126,
      "step": 21400
    },
    {
      "epoch": 9.78,
      "grad_norm": 1.6308414936065674,
      "learning_rate": 3.528724521882687e-08,
      "loss": 0.1088,
      "step": 21500
    },
    {
      "epoch": 9.83,
      "grad_norm": 22.293624877929688,
      "learning_rate": 2.211902918855313e-08,
      "loss": 0.1201,
      "step": 21600
    },
    {
      "epoch": 9.87,
      "grad_norm": 0.030308537185192108,
      "learning_rate": 1.2010573072602783e-08,
      "loss": 0.0758,
      "step": 21700
    },
    {
      "epoch": 9.92,
      "grad_norm": 5.30928897857666,
      "learning_rate": 4.963941879295164e-09,
      "loss": 0.0952,
      "step": 21800
    },
    {
      "epoch": 9.96,
      "grad_norm": 0.04648282751441002,
      "learning_rate": 9.805751313296529e-10,
      "loss": 0.1257,
      "step": 21900
    },
    {
      "epoch": 10.0,
      "eval_accuracy": 0.94831013916501,
      "eval_loss": 0.3119918704032898,
      "eval_runtime": 111.4853,
      "eval_samples_per_second": 22.559,
      "eval_steps_per_second": 2.825,
      "step": 21980
    },
    {
      "epoch": 10.0,
      "step": 21980,
      "total_flos": 4.09349935387607e+19,
      "train_loss": 0.2596938943081926,
      "train_runtime": 18130.6882,
      "train_samples_per_second": 9.697,
      "train_steps_per_second": 1.212
    }
  ],
  "logging_steps": 100,
  "max_steps": 21980,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 10,
  "save_steps": 500,
  "total_flos": 4.09349935387607e+19,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}