{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.9998846198223146,
  "eval_steps": 500,
  "global_step": 9750,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.02,
      "grad_norm": 2.7104153633117676,
      "learning_rate": 4.999532814343219e-05,
      "loss": 1.5746,
      "step": 60
    },
    {
      "epoch": 0.04,
      "grad_norm": 6.236631393432617,
      "learning_rate": 4.998131431982826e-05,
      "loss": 1.3364,
      "step": 120
    },
    {
      "epoch": 0.06,
      "grad_norm": 3.3378114700317383,
      "learning_rate": 4.99579637668341e-05,
      "loss": 1.297,
      "step": 180
    },
    {
      "epoch": 0.07,
      "grad_norm": 3.676436185836792,
      "learning_rate": 4.992528521168449e-05,
      "loss": 1.3101,
      "step": 240
    },
    {
      "epoch": 0.09,
      "grad_norm": 2.182366132736206,
      "learning_rate": 4.988329086794122e-05,
      "loss": 1.3038,
      "step": 300
    },
    {
      "epoch": 0.11,
      "grad_norm": 3.702284097671509,
      "learning_rate": 4.9831996430928326e-05,
      "loss": 1.2762,
      "step": 360
    },
    {
      "epoch": 0.13,
      "grad_norm": 2.2854039669036865,
      "learning_rate": 4.977142107186602e-05,
      "loss": 1.2709,
      "step": 420
    },
    {
      "epoch": 0.15,
      "grad_norm": 4.770021438598633,
      "learning_rate": 4.9701587430705415e-05,
      "loss": 1.2656,
      "step": 480
    },
    {
      "epoch": 0.17,
      "grad_norm": 3.181934118270874,
      "learning_rate": 4.962252160766693e-05,
      "loss": 1.2751,
      "step": 540
    },
    {
      "epoch": 0.18,
      "grad_norm": 2.0251986980438232,
      "learning_rate": 4.953425315348534e-05,
      "loss": 1.261,
      "step": 600
    },
    {
      "epoch": 0.2,
      "grad_norm": 2.5135626792907715,
      "learning_rate": 4.943681505836523e-05,
      "loss": 1.2627,
      "step": 660
    },
    {
      "epoch": 0.22,
      "grad_norm": 2.7317075729370117,
      "learning_rate": 4.9330243739650964e-05,
      "loss": 1.2619,
      "step": 720
    },
    {
      "epoch": 0.24,
      "grad_norm": 4.191065311431885,
      "learning_rate": 4.9214579028215776e-05,
      "loss": 1.274,
      "step": 780
    },
    {
      "epoch": 0.26,
      "grad_norm": 2.875460147857666,
      "learning_rate": 4.9089864153575016e-05,
      "loss": 1.2138,
      "step": 840
    },
    {
      "epoch": 0.28,
      "grad_norm": 3.3708271980285645,
      "learning_rate": 4.8956145727729156e-05,
      "loss": 1.2295,
      "step": 900
    },
    {
      "epoch": 0.3,
      "grad_norm": 2.718130111694336,
      "learning_rate": 4.88134737277427e-05,
      "loss": 1.2281,
      "step": 960
    },
    {
      "epoch": 0.31,
      "grad_norm": 2.6917874813079834,
      "learning_rate": 4.8661901477065244e-05,
      "loss": 1.2429,
      "step": 1020
    },
    {
      "epoch": 0.33,
      "grad_norm": 2.3287134170532227,
      "learning_rate": 4.8501485625601996e-05,
      "loss": 1.2524,
      "step": 1080
    },
    {
      "epoch": 0.35,
      "grad_norm": 2.6867642402648926,
      "learning_rate": 4.833228612854087e-05,
      "loss": 1.2149,
      "step": 1140
    },
    {
      "epoch": 0.37,
      "grad_norm": 3.3543832302093506,
      "learning_rate": 4.815436622394441e-05,
      "loss": 1.2188,
      "step": 1200
    },
    {
      "epoch": 0.39,
      "grad_norm": 4.045291900634766,
      "learning_rate": 4.7967792409114606e-05,
      "loss": 1.2227,
      "step": 1260
    },
    {
      "epoch": 0.41,
      "grad_norm": 2.516897439956665,
      "learning_rate": 4.7772634415739624e-05,
      "loss": 1.2365,
      "step": 1320
    },
    {
      "epoch": 0.42,
      "grad_norm": 3.2049200534820557,
      "learning_rate": 4.7568965183831726e-05,
      "loss": 1.2102,
      "step": 1380
    },
    {
      "epoch": 0.44,
      "grad_norm": 3.082902669906616,
      "learning_rate": 4.735686083446599e-05,
      "loss": 1.2465,
      "step": 1440
    },
    {
      "epoch": 0.46,
      "grad_norm": 3.1766459941864014,
      "learning_rate": 4.713640064133025e-05,
      "loss": 1.2446,
      "step": 1500
    },
    {
      "epoch": 0.48,
      "grad_norm": 3.313269853591919,
      "learning_rate": 4.690766700109659e-05,
      "loss": 1.2042,
      "step": 1560
    },
    {
      "epoch": 0.5,
      "grad_norm": 3.3025588989257812,
      "learning_rate": 4.667074540262577e-05,
      "loss": 1.2229,
      "step": 1620
    },
    {
      "epoch": 0.52,
      "grad_norm": 3.4626569747924805,
      "learning_rate": 4.6425724395015865e-05,
      "loss": 1.2064,
      "step": 1680
    },
    {
      "epoch": 0.54,
      "grad_norm": 3.679563522338867,
      "learning_rate": 4.617269555450715e-05,
      "loss": 1.214,
      "step": 1740
    },
    {
      "epoch": 0.55,
      "grad_norm": 2.328498363494873,
      "learning_rate": 4.5911753450255665e-05,
      "loss": 1.2208,
      "step": 1800
    },
    {
      "epoch": 0.57,
      "grad_norm": 2.3970301151275635,
      "learning_rate": 4.56429956089881e-05,
      "loss": 1.1879,
      "step": 1860
    },
    {
      "epoch": 0.59,
      "grad_norm": 3.7843685150146484,
      "learning_rate": 4.5366522478551335e-05,
      "loss": 1.2135,
      "step": 1920
    },
    {
      "epoch": 0.61,
      "grad_norm": 3.4662420749664307,
      "learning_rate": 4.508243739037016e-05,
      "loss": 1.2137,
      "step": 1980
    },
    {
      "epoch": 0.63,
      "grad_norm": 3.565755844116211,
      "learning_rate": 4.47908465208274e-05,
      "loss": 1.2167,
      "step": 2040
    },
    {
      "epoch": 0.65,
      "grad_norm": 6.096432685852051,
      "learning_rate": 4.449185885158056e-05,
      "loss": 1.2095,
      "step": 2100
    },
    {
      "epoch": 0.66,
      "grad_norm": 2.6640658378601074,
      "learning_rate": 4.418558612883016e-05,
      "loss": 1.2176,
      "step": 2160
    },
    {
      "epoch": 0.68,
      "grad_norm": 3.7319626808166504,
      "learning_rate": 4.387214282155469e-05,
      "loss": 1.2092,
      "step": 2220
    },
    {
      "epoch": 0.7,
      "grad_norm": 2.938650131225586,
      "learning_rate": 4.355164607872806e-05,
      "loss": 1.2288,
      "step": 2280
    },
    {
      "epoch": 0.72,
      "grad_norm": 3.252643346786499,
      "learning_rate": 4.3224215685535294e-05,
      "loss": 1.2019,
      "step": 2340
    },
    {
      "epoch": 0.74,
      "grad_norm": 4.297436714172363,
      "learning_rate": 4.28955998543643e-05,
      "loss": 1.2178,
      "step": 2400
    },
    {
      "epoch": 0.76,
      "grad_norm": 2.8940541744232178,
      "learning_rate": 4.255478223794045e-05,
      "loss": 1.193,
      "step": 2460
    },
    {
      "epoch": 0.78,
      "grad_norm": 2.981809377670288,
      "learning_rate": 4.2207403547541e-05,
      "loss": 1.2246,
      "step": 2520
    },
    {
      "epoch": 0.79,
      "grad_norm": 2.6929879188537598,
      "learning_rate": 4.185359361543927e-05,
      "loss": 1.2228,
      "step": 2580
    },
    {
      "epoch": 0.81,
      "grad_norm": 2.4627578258514404,
      "learning_rate": 4.149348467757566e-05,
      "loss": 1.2085,
      "step": 2640
    },
    {
      "epoch": 0.83,
      "grad_norm": 2.8424439430236816,
      "learning_rate": 4.112721132413467e-05,
      "loss": 1.1943,
      "step": 2700
    },
    {
      "epoch": 0.85,
      "grad_norm": 6.201737403869629,
      "learning_rate": 4.075491044924209e-05,
      "loss": 1.2187,
      "step": 2760
    },
    {
      "epoch": 0.87,
      "grad_norm": 3.3698055744171143,
      "learning_rate": 4.0376721199800896e-05,
      "loss": 1.1978,
      "step": 2820
    },
    {
      "epoch": 0.89,
      "grad_norm": 3.9884934425354004,
      "learning_rate": 3.999278492348539e-05,
      "loss": 1.2312,
      "step": 2880
    },
    {
      "epoch": 0.9,
      "grad_norm": 2.8434813022613525,
      "learning_rate": 3.9603245115912736e-05,
      "loss": 1.1852,
      "step": 2940
    },
    {
      "epoch": 0.92,
      "grad_norm": 3.6574010848999023,
      "learning_rate": 3.9208247367011574e-05,
      "loss": 1.162,
      "step": 3000
    },
    {
      "epoch": 0.94,
      "grad_norm": 3.347731828689575,
      "learning_rate": 3.880793930660813e-05,
      "loss": 1.2028,
      "step": 3060
    },
    {
      "epoch": 0.96,
      "grad_norm": 2.4751527309417725,
      "learning_rate": 3.840247054924968e-05,
      "loss": 1.2244,
      "step": 3120
    },
    {
      "epoch": 0.98,
      "grad_norm": 2.099729299545288,
      "learning_rate": 3.79919926382864e-05,
      "loss": 1.2152,
      "step": 3180
    },
    {
      "epoch": 1.0,
      "grad_norm": 3.765007972717285,
      "learning_rate": 3.757665898923223e-05,
      "loss": 1.1827,
      "step": 3240
    },
    {
      "epoch": 1.02,
      "grad_norm": 2.1012563705444336,
      "learning_rate": 3.715662483242605e-05,
      "loss": 0.9704,
      "step": 3300
    },
    {
      "epoch": 1.03,
      "grad_norm": 2.3256635665893555,
      "learning_rate": 3.673204715501461e-05,
      "loss": 0.9513,
      "step": 3360
    },
    {
      "epoch": 1.05,
      "grad_norm": 3.2609546184539795,
      "learning_rate": 3.630308464227877e-05,
      "loss": 0.9259,
      "step": 3420
    },
    {
      "epoch": 1.07,
      "grad_norm": 3.2246479988098145,
      "learning_rate": 3.5869897618325126e-05,
      "loss": 0.9585,
      "step": 3480
    },
    {
      "epoch": 1.09,
      "grad_norm": 4.168481826782227,
      "learning_rate": 3.54326479861651e-05,
      "loss": 0.9518,
      "step": 3540
    },
    {
      "epoch": 1.11,
      "grad_norm": 3.362656354904175,
      "learning_rate": 3.499149916720398e-05,
      "loss": 0.9482,
      "step": 3600
    },
    {
      "epoch": 1.13,
      "grad_norm": 3.152392625808716,
      "learning_rate": 3.4546616040162334e-05,
      "loss": 0.9565,
      "step": 3660
    },
    {
      "epoch": 1.14,
      "grad_norm": 2.6556153297424316,
      "learning_rate": 3.409816487945286e-05,
      "loss": 0.9448,
      "step": 3720
    },
    {
      "epoch": 1.16,
      "grad_norm": 2.465056896209717,
      "learning_rate": 3.364631329303564e-05,
      "loss": 0.9558,
      "step": 3780
    },
    {
      "epoch": 1.18,
      "grad_norm": 3.3087334632873535,
      "learning_rate": 3.319123015977478e-05,
      "loss": 0.9664,
      "step": 3840
    },
    {
      "epoch": 1.2,
      "grad_norm": 2.0656557083129883,
      "learning_rate": 3.2733085566320285e-05,
      "loss": 0.9527,
      "step": 3900
    },
    {
      "epoch": 1.22,
      "grad_norm": 3.9231972694396973,
      "learning_rate": 3.2272050743538385e-05,
      "loss": 0.9605,
      "step": 3960
    },
    {
      "epoch": 1.24,
      "grad_norm": 1.9942598342895508,
      "learning_rate": 3.180829800251428e-05,
      "loss": 0.9232,
      "step": 4020
    },
    {
      "epoch": 1.26,
      "grad_norm": 2.5485622882843018,
      "learning_rate": 3.134200067015108e-05,
      "loss": 0.9459,
      "step": 4080
    },
    {
      "epoch": 1.27,
      "grad_norm": 3.6776840686798096,
      "learning_rate": 3.087333302438916e-05,
      "loss": 0.9388,
      "step": 4140
    },
    {
      "epoch": 1.29,
      "grad_norm": 2.463665723800659,
      "learning_rate": 3.0402470229070056e-05,
      "loss": 0.9466,
      "step": 4200
    },
    {
      "epoch": 1.31,
      "grad_norm": 2.8538758754730225,
      "learning_rate": 2.992958826846918e-05,
      "loss": 0.927,
      "step": 4260
    },
    {
      "epoch": 1.33,
      "grad_norm": 3.169262409210205,
      "learning_rate": 2.945486388152201e-05,
      "loss": 0.9406,
      "step": 4320
    },
    {
      "epoch": 1.35,
      "grad_norm": 2.700239896774292,
      "learning_rate": 2.897847449576815e-05,
      "loss": 0.9546,
      "step": 4380
    },
    {
      "epoch": 1.37,
      "grad_norm": 2.7255053520202637,
      "learning_rate": 2.8500598161038057e-05,
      "loss": 0.9512,
      "step": 4440
    },
    {
      "epoch": 1.38,
      "grad_norm": 2.7375125885009766,
      "learning_rate": 2.8021413482907176e-05,
      "loss": 0.9426,
      "step": 4500
    },
    {
      "epoch": 1.4,
      "grad_norm": 2.461376667022705,
      "learning_rate": 2.754109955594235e-05,
      "loss": 0.9381,
      "step": 4560
    },
    {
      "epoch": 1.42,
      "grad_norm": 3.127470016479492,
      "learning_rate": 2.705983589676554e-05,
      "loss": 0.9522,
      "step": 4620
    },
    {
      "epoch": 1.44,
      "grad_norm": 3.07511830329895,
      "learning_rate": 2.6577802376959698e-05,
      "loss": 0.955,
      "step": 4680
    },
    {
      "epoch": 1.46,
      "grad_norm": 2.239698648452759,
      "learning_rate": 2.609517915584204e-05,
      "loss": 0.9558,
      "step": 4740
    },
    {
      "epoch": 1.48,
      "grad_norm": 3.7481932640075684,
      "learning_rate": 2.5612146613129828e-05,
      "loss": 0.9607,
      "step": 4800
    },
    {
      "epoch": 1.5,
      "grad_norm": 2.2911975383758545,
      "learning_rate": 2.5128885281523606e-05,
      "loss": 0.9354,
      "step": 4860
    },
    {
      "epoch": 1.51,
      "grad_norm": 3.977375030517578,
      "learning_rate": 2.4645575779233464e-05,
      "loss": 0.9593,
      "step": 4920
    },
    {
      "epoch": 1.53,
      "grad_norm": 2.6258833408355713,
      "learning_rate": 2.4162398742473214e-05,
      "loss": 0.9403,
      "step": 4980
    },
    {
      "epoch": 1.55,
      "grad_norm": 3.0968546867370605,
      "learning_rate": 2.3679534757947862e-05,
      "loss": 0.9555,
      "step": 5040
    },
    {
      "epoch": 1.57,
      "grad_norm": 3.982872486114502,
      "learning_rate": 2.3197164295359593e-05,
      "loss": 0.9482,
      "step": 5100
    },
    {
      "epoch": 1.59,
      "grad_norm": 2.4326817989349365,
      "learning_rate": 2.271546763995752e-05,
      "loss": 0.9209,
      "step": 5160
    },
    {
      "epoch": 1.61,
      "grad_norm": 2.619610071182251,
      "learning_rate": 2.2234624825156293e-05,
      "loss": 0.9113,
      "step": 5220
    },
    {
      "epoch": 1.62,
      "grad_norm": 2.6039981842041016,
      "learning_rate": 2.175481556524892e-05,
      "loss": 0.9225,
      "step": 5280
    },
    {
      "epoch": 1.64,
      "grad_norm": 2.5391623973846436,
      "learning_rate": 2.1276219188238768e-05,
      "loss": 0.933,
      "step": 5340
    },
    {
      "epoch": 1.66,
      "grad_norm": 2.85158109664917,
      "learning_rate": 2.079901456881601e-05,
      "loss": 0.9185,
      "step": 5400
    },
    {
      "epoch": 1.68,
      "grad_norm": 2.5199923515319824,
      "learning_rate": 2.0323380061503494e-05,
      "loss": 0.9338,
      "step": 5460
    },
    {
      "epoch": 1.7,
      "grad_norm": 3.02559757232666,
      "learning_rate": 1.9849493433996963e-05,
      "loss": 0.9386,
      "step": 5520
    },
    {
      "epoch": 1.72,
      "grad_norm": 2.78646183013916,
      "learning_rate": 1.937753180072466e-05,
      "loss": 0.9361,
      "step": 5580
    },
    {
      "epoch": 1.74,
      "grad_norm": 3.28619122505188,
      "learning_rate": 1.8907671556651102e-05,
      "loss": 0.9106,
      "step": 5640
    },
    {
      "epoch": 1.75,
      "grad_norm": 2.7548022270202637,
      "learning_rate": 1.8440088311349634e-05,
      "loss": 0.9431,
      "step": 5700
    },
    {
      "epoch": 1.77,
      "grad_norm": 2.226672649383545,
      "learning_rate": 1.7974956823368727e-05,
      "loss": 0.947,
      "step": 5760
    },
    {
      "epoch": 1.79,
      "grad_norm": 2.304502010345459,
      "learning_rate": 1.7512450934916128e-05,
      "loss": 0.9505,
      "step": 5820
    },
    {
      "epoch": 1.81,
      "grad_norm": 4.344946384429932,
      "learning_rate": 1.7052743506885652e-05,
      "loss": 0.9486,
      "step": 5880
    },
    {
      "epoch": 1.83,
      "grad_norm": 3.1504874229431152,
      "learning_rate": 1.659600635425066e-05,
      "loss": 0.9548,
      "step": 5940
    },
    {
      "epoch": 1.85,
      "grad_norm": 2.1094934940338135,
      "learning_rate": 1.614241018184844e-05,
      "loss": 0.9227,
      "step": 6000
    },
    {
      "epoch": 1.86,
      "grad_norm": 3.3043620586395264,
      "learning_rate": 1.5692124520579565e-05,
      "loss": 0.9363,
      "step": 6060
    },
    {
      "epoch": 1.88,
      "grad_norm": 2.956177234649658,
      "learning_rate": 1.5245317664045971e-05,
      "loss": 0.9332,
      "step": 6120
    },
    {
      "epoch": 1.9,
      "grad_norm": 2.5686097145080566,
      "learning_rate": 1.4802156605651432e-05,
      "loss": 0.9186,
      "step": 6180
    },
    {
      "epoch": 1.92,
      "grad_norm": 1.969947099685669,
      "learning_rate": 1.4362806976188054e-05,
      "loss": 0.9313,
      "step": 6240
    },
    {
      "epoch": 1.94,
      "grad_norm": 2.0967071056365967,
      "learning_rate": 1.392743298193197e-05,
      "loss": 0.9473,
      "step": 6300
    },
    {
      "epoch": 1.96,
      "grad_norm": 2.932309150695801,
      "learning_rate": 1.349619734327156e-05,
      "loss": 0.9312,
      "step": 6360
    },
    {
      "epoch": 1.98,
      "grad_norm": 3.2197911739349365,
      "learning_rate": 1.3069261233890891e-05,
      "loss": 0.9315,
      "step": 6420
    },
    {
      "epoch": 1.99,
      "grad_norm": Infinity,
      "learning_rate": 1.2653788106514852e-05,
      "loss": 0.9393,
      "step": 6480
    },
    {
      "epoch": 2.01,
      "grad_norm": 1.9021306037902832,
      "learning_rate": 1.2235849854764194e-05,
      "loss": 0.865,
      "step": 6540
    },
    {
      "epoch": 2.03,
      "grad_norm": 2.515909194946289,
      "learning_rate": 1.1822682185308612e-05,
      "loss": 0.8174,
      "step": 6600
    },
    {
      "epoch": 2.05,
      "grad_norm": 2.4534690380096436,
      "learning_rate": 1.1414439518955334e-05,
      "loss": 0.8246,
      "step": 6660
    },
    {
      "epoch": 2.07,
      "grad_norm": 3.6022934913635254,
      "learning_rate": 1.101127443579891e-05,
      "loss": 0.8026,
      "step": 6720
    },
    {
      "epoch": 2.09,
      "grad_norm": 2.551551103591919,
      "learning_rate": 1.0613337618194691e-05,
      "loss": 0.8174,
      "step": 6780
    },
    {
      "epoch": 2.1,
      "grad_norm": 2.128619432449341,
      "learning_rate": 1.022077779444145e-05,
      "loss": 0.8298,
      "step": 6840
    },
    {
      "epoch": 2.12,
      "grad_norm": 2.290801763534546,
      "learning_rate": 9.833741683194475e-06,
      "loss": 0.7925,
      "step": 6900
    },
    {
      "epoch": 2.14,
      "grad_norm": 2.4129796028137207,
      "learning_rate": 9.452373938629619e-06,
      "loss": 0.8262,
      "step": 6960
    },
    {
      "epoch": 2.16,
      "grad_norm": 2.9145517349243164,
      "learning_rate": 9.07681709637905e-06,
      "loss": 0.8367,
      "step": 7020
    },
    {
      "epoch": 2.18,
      "grad_norm": 1.9673601388931274,
      "learning_rate": 8.707211520258782e-06,
      "loss": 0.8133,
      "step": 7080
    },
    {
      "epoch": 2.2,
      "grad_norm": 2.545464515686035,
      "learning_rate": 8.34369534980789e-06,
      "loss": 0.795,
      "step": 7140
    },
    {
      "epoch": 2.22,
      "grad_norm": 2.039494752883911,
      "learning_rate": 7.986404448659023e-06,
      "loss": 0.8183,
      "step": 7200
    },
    {
      "epoch": 2.23,
      "grad_norm": 2.6971044540405273,
      "learning_rate": 7.63547235375966e-06,
      "loss": 0.8219,
      "step": 7260
    },
    {
      "epoch": 2.25,
      "grad_norm": 2.3640589714050293,
      "learning_rate": 7.291030225462781e-06,
      "loss": 0.8124,
      "step": 7320
    },
    {
      "epoch": 2.27,
      "grad_norm": 2.031846761703491,
      "learning_rate": 6.953206798505918e-06,
      "loss": 0.8174,
      "step": 7380
    },
    {
      "epoch": 2.29,
      "grad_norm": 2.62103533744812,
      "learning_rate": 6.622128333896768e-06,
      "loss": 0.806,
      "step": 7440
    },
    {
      "epoch": 2.31,
      "grad_norm": 2.6120152473449707,
      "learning_rate": 6.297918571723288e-06,
      "loss": 0.8114,
      "step": 7500
    },
    {
      "epoch": 2.33,
      "grad_norm": 2.473111391067505,
      "learning_rate": 5.980698684905989e-06,
      "loss": 0.8098,
      "step": 7560
    },
    {
      "epoch": 2.34,
      "grad_norm": 2.5968923568725586,
      "learning_rate": 5.6705872339098186e-06,
      "loss": 0.8205,
      "step": 7620
    },
    {
      "epoch": 2.36,
      "grad_norm": 2.617664098739624,
      "learning_rate": 5.367700122432315e-06,
      "loss": 0.8067,
      "step": 7680
    },
    {
      "epoch": 2.38,
      "grad_norm": 1.9892934560775757,
      "learning_rate": 5.072150554084745e-06,
      "loss": 0.8287,
      "step": 7740
    },
    {
      "epoch": 2.4,
      "grad_norm": 2.1314797401428223,
      "learning_rate": 4.784048990082484e-06,
      "loss": 0.8267,
      "step": 7800
    },
    {
      "epoch": 2.42,
      "grad_norm": 3.0698599815368652,
      "learning_rate": 4.5035031079602445e-06,
      "loss": 0.8079,
      "step": 7860
    },
    {
      "epoch": 2.44,
      "grad_norm": 2.567479133605957,
      "learning_rate": 4.2306177613277765e-06,
      "loss": 0.8262,
      "step": 7920
    },
    {
      "epoch": 2.46,
      "grad_norm": 2.587759017944336,
      "learning_rate": 3.9654949406809995e-06,
      "loss": 0.8223,
      "step": 7980
    },
    {
      "epoch": 2.47,
      "grad_norm": 2.034844398498535,
      "learning_rate": 3.7082337352831923e-06,
      "loss": 0.8114,
      "step": 8040
    },
    {
      "epoch": 2.49,
      "grad_norm": 3.257702589035034,
      "learning_rate": 3.458930296130519e-06,
      "loss": 0.8267,
      "step": 8100
    },
    {
      "epoch": 2.51,
      "grad_norm": 2.539806365966797,
      "learning_rate": 3.2176778000157367e-06,
      "loss": 0.8251,
      "step": 8160
    },
    {
      "epoch": 2.53,
      "grad_norm": 2.7473506927490234,
      "learning_rate": 2.9845664147035326e-06,
      "loss": 0.7826,
      "step": 8220
    },
    {
      "epoch": 2.55,
      "grad_norm": 2.324519395828247,
      "learning_rate": 2.7596832652304283e-06,
      "loss": 0.8178,
      "step": 8280
    },
    {
      "epoch": 2.57,
      "grad_norm": 2.691235065460205,
      "learning_rate": 2.5431124013419237e-06,
      "loss": 0.7982,
      "step": 8340
    },
    {
      "epoch": 2.58,
      "grad_norm": 3.084143877029419,
      "learning_rate": 2.338335189634505e-06,
      "loss": 0.8007,
      "step": 8400
    },
    {
      "epoch": 2.6,
      "grad_norm": 2.8486242294311523,
      "learning_rate": 2.138486784704746e-06,
      "loss": 0.8192,
      "step": 8460
    },
    {
      "epoch": 2.62,
      "grad_norm": 2.290395975112915,
      "learning_rate": 1.947182836628192e-06,
      "loss": 0.8303,
      "step": 8520
    },
    {
      "epoch": 2.64,
      "grad_norm": 2.120853900909424,
      "learning_rate": 1.7644948449733384e-06,
      "loss": 0.8317,
      "step": 8580
    },
    {
      "epoch": 2.66,
      "grad_norm": 2.403571605682373,
      "learning_rate": 1.590491089107679e-06,
      "loss": 0.8012,
      "step": 8640
    },
    {
      "epoch": 2.68,
      "grad_norm": 1.6690127849578857,
      "learning_rate": 1.425236602678387e-06,
      "loss": 0.8241,
      "step": 8700
    },
    {
      "epoch": 2.7,
      "grad_norm": 3.3557851314544678,
      "learning_rate": 1.268793149306091e-06,
      "loss": 0.8234,
      "step": 8760
    },
    {
      "epoch": 2.71,
      "grad_norm": 3.1359851360321045,
      "learning_rate": 1.1212191995007975e-06,
      "loss": 0.802,
      "step": 8820
    },
    {
      "epoch": 2.73,
      "grad_norm": 2.1641125679016113,
      "learning_rate": 9.825699088086338e-07,
      "loss": 0.8138,
      "step": 8880
    },
    {
      "epoch": 2.75,
      "grad_norm": 2.6551270484924316,
      "learning_rate": 8.528970971975553e-07,
      "loss": 0.8375,
      "step": 8940
    },
    {
      "epoch": 2.77,
      "grad_norm": 1.8573861122131348,
      "learning_rate": 7.322492296896799e-07,
      "loss": 0.8166,
      "step": 9000
    },
    {
      "epoch": 2.79,
      "grad_norm": 2.5303752422332764,
      "learning_rate": 6.206713982475909e-07,
      "loss": 0.8346,
      "step": 9060
    },
    {
      "epoch": 2.81,
      "grad_norm": 2.4010512828826904,
      "learning_rate": 5.182053049212626e-07,
      "loss": 0.8077,
      "step": 9120
    },
    {
      "epoch": 2.82,
      "grad_norm": 2.183361053466797,
      "learning_rate": 4.248892462619725e-07,
      "loss": 0.799,
      "step": 9180
    },
    {
      "epoch": 2.84,
      "grad_norm": 2.452626943588257,
      "learning_rate": 3.4075809900904756e-07,
      "loss": 0.8143,
      "step": 9240
    },
    {
      "epoch": 2.86,
      "grad_norm": 2.959763526916504,
      "learning_rate": 2.658433070547195e-07,
      "loss": 0.8163,
      "step": 9300
    },
    {
      "epoch": 2.88,
      "grad_norm": 2.3562393188476562,
      "learning_rate": 2.0017286969200578e-07,
      "loss": 0.8134,
      "step": 9360
    },
    {
      "epoch": 2.9,
      "grad_norm": 1.8572157621383667,
      "learning_rate": 1.4377133115004438e-07,
      "loss": 0.8188,
      "step": 9420
    },
    {
      "epoch": 2.92,
      "grad_norm": 3.5713188648223877,
      "learning_rate": 9.665977142068738e-08,
      "loss": 0.8248,
      "step": 9480
    },
    {
      "epoch": 2.94,
      "grad_norm": 2.22605562210083,
      "learning_rate": 5.885579837992261e-08,
      "loss": 0.7936,
      "step": 9540
    },
    {
      "epoch": 2.95,
      "grad_norm": 2.264847993850708,
      "learning_rate": 3.037354120692393e-08,
      "loss": 0.8273,
      "step": 9600
    },
    {
      "epoch": 2.97,
      "grad_norm": 2.1906256675720215,
      "learning_rate": 1.1223645103311531e-08,
      "loss": 0.8199,
      "step": 9660
    },
    {
      "epoch": 2.99,
      "grad_norm": 1.983068823814392,
      "learning_rate": 1.413267314517852e-09,
      "loss": 0.813,
      "step": 9720
    },
    {
      "epoch": 3.0,
      "step": 9750,
      "total_flos": 1.6697107709546988e+18,
      "train_loss": 0.9979035949707031,
      "train_runtime": 4718.1281,
      "train_samples_per_second": 33.065,
      "train_steps_per_second": 2.066
    }
  ],
  "logging_steps": 60,
  "max_steps": 9750,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 5000,
  "total_flos": 1.6697107709546988e+18,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}