{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 5.870503597122302, |
|
"eval_steps": 500, |
|
"global_step": 204, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 7.142857142857143e-06, |
|
"loss": 3.7385, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.4285714285714285e-05, |
|
"loss": 2.9618, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 2.1428571428571428e-05, |
|
"loss": 3.7651, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 2.857142857142857e-05, |
|
"loss": 2.5572, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 3.571428571428572e-05, |
|
"loss": 3.2352, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.2857142857142856e-05, |
|
"loss": 3.7965, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 5e-05, |
|
"loss": 3.4411, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.999682116415026e-05, |
|
"loss": 3.2539, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.998728546500082e-05, |
|
"loss": 3.2295, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.9971395327545466e-05, |
|
"loss": 3.8146, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 4.9949154792755286e-05, |
|
"loss": 2.9204, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 4.992056951655103e-05, |
|
"loss": 2.4845, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 4.988564676836475e-05, |
|
"loss": 2.3794, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 4.984439542929117e-05, |
|
"loss": 2.3144, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 4.979682598982912e-05, |
|
"loss": 2.8456, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 4.974295054721377e-05, |
|
"loss": 3.0537, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 4.9682782802340184e-05, |
|
"loss": 2.7413, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 4.9616338056279124e-05, |
|
"loss": 2.8134, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.9543633206385834e-05, |
|
"loss": 3.6205, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.9464686742003006e-05, |
|
"loss": 3.5697, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.937951873975871e-05, |
|
"loss": 2.8348, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 4.928815085846087e-05, |
|
"loss": 3.3747, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.9190606333589194e-05, |
|
"loss": 3.0038, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.9086909971386305e-05, |
|
"loss": 3.1921, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 4.8977088142549285e-05, |
|
"loss": 2.6579, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 4.886116877552347e-05, |
|
"loss": 3.2293, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 4.873918134940002e-05, |
|
"loss": 4.1384, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 4.8611156886419206e-05, |
|
"loss": 2.6833, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 4.847712794408124e-05, |
|
"loss": 3.4143, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 4.833712860686666e-05, |
|
"loss": 3.9069, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 4.8191194477568435e-05, |
|
"loss": 3.1464, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 4.803936266823792e-05, |
|
"loss": 3.9239, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 4.7881671790747e-05, |
|
"loss": 2.982, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 4.7718161946968835e-05, |
|
"loss": 2.9096, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.754887471857969e-05, |
|
"loss": 2.772, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 4.7373853156484406e-05, |
|
"loss": 2.9948, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 4.7193141769868265e-05, |
|
"loss": 2.9775, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 4.7006786514877997e-05, |
|
"loss": 2.534, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 4.6814834782934844e-05, |
|
"loss": 2.3817, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 4.6617335388682556e-05, |
|
"loss": 3.223, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 4.641433855757351e-05, |
|
"loss": 3.3482, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 4.620589591309603e-05, |
|
"loss": 2.992, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 4.59920604636462e-05, |
|
"loss": 2.4652, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 4.577288658904741e-05, |
|
"loss": 2.8348, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 4.554843002672129e-05, |
|
"loss": 4.1209, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 4.531874785751317e-05, |
|
"loss": 3.5458, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 4.5083898491176136e-05, |
|
"loss": 3.3499, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 4.4843941651517e-05, |
|
"loss": 2.5327, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 4.4598938361208095e-05, |
|
"loss": 2.9724, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 4.434895092626883e-05, |
|
"loss": 2.4149, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 4.409404292022081e-05, |
|
"loss": 2.8688, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 4.38342791679207e-05, |
|
"loss": 2.9715, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 4.356972572907473e-05, |
|
"loss": 2.6849, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 4.3300449881439375e-05, |
|
"loss": 3.185, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 4.302652010371205e-05, |
|
"loss": 3.1415, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 4.274800605811658e-05, |
|
"loss": 2.5797, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 4.246497857268759e-05, |
|
"loss": 2.2357, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 4.2177509623258456e-05, |
|
"loss": 2.8965, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 4.1885672315157346e-05, |
|
"loss": 3.1428, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 4.1589540864616025e-05, |
|
"loss": 2.3255, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 4.128919057989622e-05, |
|
"loss": 3.001, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 4.098469784213812e-05, |
|
"loss": 3.0732, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 4.0676140085936186e-05, |
|
"loss": 2.7686, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 4.0363595779647e-05, |
|
"loss": 3.1581, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 4.0047144405434175e-05, |
|
"loss": 3.8875, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 3.972686643905558e-05, |
|
"loss": 3.9603, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 3.940284332939771e-05, |
|
"loss": 3.6382, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 3.9075157477762744e-05, |
|
"loss": 3.5762, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 3.874389221691329e-05, |
|
"loss": 2.7994, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 3.84091317898803e-05, |
|
"loss": 3.0237, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 3.8070961328539525e-05, |
|
"loss": 2.4911, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 3.772946683196179e-05, |
|
"loss": 3.264, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 3.738473514454297e-05, |
|
"loss": 3.1732, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 3.7036853933918784e-05, |
|
"loss": 3.5421, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 3.668591166867035e-05, |
|
"loss": 3.8679, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 3.633199759582596e-05, |
|
"loss": 3.2613, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 3.597520171816503e-05, |
|
"loss": 3.3075, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 3.5615614771329706e-05, |
|
"loss": 2.6811, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 3.5253328200750224e-05, |
|
"loss": 3.0083, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 3.488843413838963e-05, |
|
"loss": 3.3417, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 3.452102537931408e-05, |
|
"loss": 2.4357, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 3.4151195358094365e-05, |
|
"loss": 3.2885, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 3.377903812504487e-05, |
|
"loss": 3.3224, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 3.340464832230592e-05, |
|
"loss": 3.2192, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 3.3028121159775656e-05, |
|
"loss": 2.8256, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 3.2649552390897494e-05, |
|
"loss": 2.0816, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 3.226903828830935e-05, |
|
"loss": 3.1104, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 3.1886675619360885e-05, |
|
"loss": 3.0006, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 3.1502561621504874e-05, |
|
"loss": 2.4053, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 3.111679397756906e-05, |
|
"loss": 3.3762, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 3.072947079091472e-05, |
|
"loss": 2.9438, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 3.034069056048837e-05, |
|
"loss": 3.9727, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 2.9950552155772743e-05, |
|
"loss": 3.3063, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 2.95591547916436e-05, |
|
"loss": 2.9004, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 2.9166598003138766e-05, |
|
"loss": 3.149, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 2.8772981620145623e-05, |
|
"loss": 2.7673, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 2.8378405742013713e-05, |
|
"loss": 2.4541, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 2.7982970712098794e-05, |
|
"loss": 2.7791, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 2.7586777092244804e-05, |
|
"loss": 3.8837, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 2.7189925637210323e-05, |
|
"loss": 2.5179, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 2.6792517269045943e-05, |
|
"loss": 3.7649, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 2.6394653051429064e-05, |
|
"loss": 3.5913, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 2.5996434163962763e-05, |
|
"loss": 5.0055, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 2.5597961876445077e-05, |
|
"loss": 2.6826, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 2.5199337523115418e-05, |
|
"loss": 3.1971, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 3.05, |
|
"learning_rate": 2.480066247688459e-05, |
|
"loss": 3.0641, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"learning_rate": 2.4402038123554933e-05, |
|
"loss": 2.9264, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"learning_rate": 2.4003565836037246e-05, |
|
"loss": 2.5069, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"learning_rate": 2.360534694857094e-05, |
|
"loss": 4.365, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"learning_rate": 2.3207482730954063e-05, |
|
"loss": 3.4356, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 3.19, |
|
"learning_rate": 2.2810074362789676e-05, |
|
"loss": 3.2688, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"learning_rate": 2.2413222907755195e-05, |
|
"loss": 2.7195, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"learning_rate": 2.2017029287901212e-05, |
|
"loss": 2.6272, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"learning_rate": 2.162159425798629e-05, |
|
"loss": 2.8288, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 3.31, |
|
"learning_rate": 2.1227018379854383e-05, |
|
"loss": 2.0868, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"learning_rate": 2.083340199686124e-05, |
|
"loss": 3.5304, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"learning_rate": 2.0440845208356402e-05, |
|
"loss": 3.0078, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"learning_rate": 2.0049447844227266e-05, |
|
"loss": 3.5883, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"learning_rate": 1.9659309439511628e-05, |
|
"loss": 2.6867, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"learning_rate": 1.927052920908528e-05, |
|
"loss": 2.3407, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"learning_rate": 1.8883206022430956e-05, |
|
"loss": 3.6757, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"learning_rate": 1.849743837849513e-05, |
|
"loss": 4.9815, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"learning_rate": 1.8113324380639117e-05, |
|
"loss": 3.0576, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"learning_rate": 1.7730961711690655e-05, |
|
"loss": 2.4852, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"learning_rate": 1.735044760910251e-05, |
|
"loss": 2.6612, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"learning_rate": 1.6971878840224346e-05, |
|
"loss": 2.9253, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 1.6595351677694083e-05, |
|
"loss": 3.4379, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"learning_rate": 1.6220961874955134e-05, |
|
"loss": 2.8997, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"learning_rate": 1.5848804641905634e-05, |
|
"loss": 3.2326, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"learning_rate": 1.547897462068592e-05, |
|
"loss": 2.8364, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 1.5111565861610378e-05, |
|
"loss": 2.5979, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"learning_rate": 1.4746671799249784e-05, |
|
"loss": 3.2094, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 1.4384385228670288e-05, |
|
"loss": 2.2841, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"learning_rate": 1.4024798281834966e-05, |
|
"loss": 3.333, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"learning_rate": 1.3668002404174047e-05, |
|
"loss": 3.1955, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"learning_rate": 1.331408833132966e-05, |
|
"loss": 1.9727, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"learning_rate": 1.2963146066081217e-05, |
|
"loss": 2.5208, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"learning_rate": 1.2615264855457038e-05, |
|
"loss": 2.4915, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"learning_rate": 1.2270533168038217e-05, |
|
"loss": 2.6322, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 4.03, |
|
"learning_rate": 1.1929038671460486e-05, |
|
"loss": 3.6504, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"learning_rate": 1.1590868210119692e-05, |
|
"loss": 2.9824, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 4.09, |
|
"learning_rate": 1.125610778308672e-05, |
|
"loss": 2.6982, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 4.12, |
|
"learning_rate": 1.0924842522237267e-05, |
|
"loss": 4.0364, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 4.14, |
|
"learning_rate": 1.0597156670602299e-05, |
|
"loss": 2.9809, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 4.17, |
|
"learning_rate": 1.027313356094443e-05, |
|
"loss": 1.7772, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"learning_rate": 9.95285559456583e-06, |
|
"loss": 1.9795, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 4.23, |
|
"learning_rate": 9.636404220353013e-06, |
|
"loss": 2.0211, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 4.26, |
|
"learning_rate": 9.323859914063814e-06, |
|
"loss": 2.3984, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"learning_rate": 9.015302157861883e-06, |
|
"loss": 2.1658, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"learning_rate": 8.710809420103789e-06, |
|
"loss": 2.0633, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 4.35, |
|
"learning_rate": 8.41045913538398e-06, |
|
"loss": 2.5502, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 4.37, |
|
"learning_rate": 8.11432768484267e-06, |
|
"loss": 2.2849, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"learning_rate": 7.822490376741554e-06, |
|
"loss": 2.1318, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 4.43, |
|
"learning_rate": 7.535021427312417e-06, |
|
"loss": 3.1577, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 4.46, |
|
"learning_rate": 7.251993941883428e-06, |
|
"loss": 2.4369, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 4.49, |
|
"learning_rate": 6.9734798962879575e-06, |
|
"loss": 2.7381, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 4.52, |
|
"learning_rate": 6.699550118560632e-06, |
|
"loss": 2.8945, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 4.55, |
|
"learning_rate": 6.430274270925271e-06, |
|
"loss": 1.8788, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 4.58, |
|
"learning_rate": 6.1657208320793054e-06, |
|
"loss": 3.3476, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 4.6, |
|
"learning_rate": 5.905957079779187e-06, |
|
"loss": 2.3672, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 4.63, |
|
"learning_rate": 5.6510490737311735e-06, |
|
"loss": 3.0826, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 4.66, |
|
"learning_rate": 5.4010616387919095e-06, |
|
"loss": 2.4247, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 4.69, |
|
"learning_rate": 5.156058348483006e-06, |
|
"loss": 2.9849, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 4.72, |
|
"learning_rate": 4.916101508823873e-06, |
|
"loss": 3.2181, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 4.75, |
|
"learning_rate": 4.681252142486841e-06, |
|
"loss": 2.756, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 4.78, |
|
"learning_rate": 4.451569973278719e-06, |
|
"loss": 2.5613, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 4.81, |
|
"learning_rate": 4.227113410952585e-06, |
|
"loss": 3.7162, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 4.83, |
|
"learning_rate": 4.007939536353805e-06, |
|
"loss": 2.9746, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 4.86, |
|
"learning_rate": 3.7941040869039714e-06, |
|
"loss": 2.4908, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 4.89, |
|
"learning_rate": 3.585661442426494e-06, |
|
"loss": 2.2047, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 4.92, |
|
"learning_rate": 3.382664611317446e-06, |
|
"loss": 2.5563, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 4.95, |
|
"learning_rate": 3.1851652170651584e-06, |
|
"loss": 2.804, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 4.98, |
|
"learning_rate": 2.9932134851220038e-06, |
|
"loss": 3.0241, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 5.01, |
|
"learning_rate": 2.8068582301317425e-06, |
|
"loss": 2.2791, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 5.04, |
|
"learning_rate": 2.6261468435155978e-06, |
|
"loss": 2.997, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 5.06, |
|
"learning_rate": 2.4511252814203107e-06, |
|
"loss": 2.8508, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 5.09, |
|
"learning_rate": 2.2818380530311655e-06, |
|
"loss": 2.4406, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 5.12, |
|
"learning_rate": 2.1183282092530065e-06, |
|
"loss": 2.5544, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 5.15, |
|
"learning_rate": 1.960637331762091e-06, |
|
"loss": 2.9358, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 5.18, |
|
"learning_rate": 1.8088055224315697e-06, |
|
"loss": 2.2767, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 5.21, |
|
"learning_rate": 1.6628713931333445e-06, |
|
"loss": 3.5501, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 5.24, |
|
"learning_rate": 1.5228720559187642e-06, |
|
"loss": 2.3521, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 5.27, |
|
"learning_rate": 1.3888431135807956e-06, |
|
"loss": 2.6828, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 5.29, |
|
"learning_rate": 1.2608186505999847e-06, |
|
"loss": 3.3842, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 5.32, |
|
"learning_rate": 1.138831224476533e-06, |
|
"loss": 3.7512, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 5.35, |
|
"learning_rate": 1.0229118574507174e-06, |
|
"loss": 2.5144, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 5.38, |
|
"learning_rate": 9.130900286137001e-07, |
|
"loss": 1.8449, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 5.41, |
|
"learning_rate": 8.093936664108071e-07, |
|
"loss": 2.3796, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 5.44, |
|
"learning_rate": 7.118491415391337e-07, |
|
"loss": 3.1911, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 5.47, |
|
"learning_rate": 6.204812602412902e-07, |
|
"loss": 3.3982, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 5.5, |
|
"learning_rate": 5.353132579969972e-07, |
|
"loss": 2.5562, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 5.53, |
|
"learning_rate": 4.56366793614163e-07, |
|
"loss": 3.1778, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 5.55, |
|
"learning_rate": 3.8366194372088384e-07, |
|
"loss": 2.8666, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 5.58, |
|
"learning_rate": 3.1721719765981926e-07, |
|
"loss": 1.849, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 5.61, |
|
"learning_rate": 2.5704945278623436e-07, |
|
"loss": 2.9817, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 5.64, |
|
"learning_rate": 2.0317401017088122e-07, |
|
"loss": 2.8708, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 5.67, |
|
"learning_rate": 1.5560457070883105e-07, |
|
"loss": 1.9914, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 5.7, |
|
"learning_rate": 1.1435323163525025e-07, |
|
"loss": 2.631, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 5.73, |
|
"learning_rate": 7.94304834489723e-08, |
|
"loss": 3.1964, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 5.76, |
|
"learning_rate": 5.0845207244715196e-08, |
|
"loss": 3.1362, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 5.78, |
|
"learning_rate": 2.8604672454538018e-08, |
|
"loss": 2.4684, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 5.81, |
|
"learning_rate": 1.2714534999183625e-08, |
|
"loss": 2.9911, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 5.84, |
|
"learning_rate": 3.1788358497431005e-09, |
|
"loss": 2.7763, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 5.87, |
|
"learning_rate": 0.0, |
|
"loss": 3.1651, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 5.87, |
|
"step": 204, |
|
"total_flos": 0.0, |
|
"train_loss": 2.9658100090774835, |
|
"train_runtime": 18093.1922, |
|
"train_samples_per_second": 23.54, |
|
"train_steps_per_second": 0.011 |
|
} |
|
], |
|
"logging_steps": 1.0, |
|
"max_steps": 204, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 6, |
|
"save_steps": 50, |
|
"total_flos": 0.0, |
|
"train_batch_size": 2, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |