{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 64.51612903225806,
  "eval_steps": 500,
  "global_step": 2000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.3225806451612903,
      "grad_norm": 4.83341646194458,
      "learning_rate": 1.8e-07,
      "loss": 0.6146,
      "step": 10
    },
    {
      "epoch": 0.6451612903225806,
      "grad_norm": 4.601212978363037,
      "learning_rate": 3.8e-07,
      "loss": 0.6156,
      "step": 20
    },
    {
      "epoch": 0.967741935483871,
      "grad_norm": 4.288501739501953,
      "learning_rate": 5.8e-07,
      "loss": 0.6041,
      "step": 30
    },
    {
      "epoch": 1.2903225806451613,
      "grad_norm": 3.9819118976593018,
      "learning_rate": 7.8e-07,
      "loss": 0.5691,
      "step": 40
    },
    {
      "epoch": 1.6129032258064515,
      "grad_norm": 3.2057559490203857,
      "learning_rate": 9.8e-07,
      "loss": 0.4867,
      "step": 50
    },
    {
      "epoch": 1.935483870967742,
      "grad_norm": 1.8752663135528564,
      "learning_rate": 1.18e-06,
      "loss": 0.4109,
      "step": 60
    },
    {
      "epoch": 2.258064516129032,
      "grad_norm": 1.5188192129135132,
      "learning_rate": 1.3800000000000001e-06,
      "loss": 0.3164,
      "step": 70
    },
    {
      "epoch": 2.5806451612903225,
      "grad_norm": 0.7869158983230591,
      "learning_rate": 1.5800000000000003e-06,
      "loss": 0.2642,
      "step": 80
    },
    {
      "epoch": 2.903225806451613,
      "grad_norm": 0.499815970659256,
      "learning_rate": 1.7800000000000001e-06,
      "loss": 0.2078,
      "step": 90
    },
    {
      "epoch": 3.225806451612903,
      "grad_norm": 0.36157462000846863,
      "learning_rate": 1.98e-06,
      "loss": 0.1876,
      "step": 100
    },
    {
      "epoch": 3.5483870967741935,
      "grad_norm": 0.4228762686252594,
      "learning_rate": 2.1800000000000003e-06,
      "loss": 0.167,
      "step": 110
    },
    {
      "epoch": 3.870967741935484,
      "grad_norm": 0.22709406912326813,
      "learning_rate": 2.38e-06,
      "loss": 0.1528,
      "step": 120
    },
    {
      "epoch": 4.193548387096774,
      "grad_norm": 0.2506711184978485,
      "learning_rate": 2.5800000000000003e-06,
      "loss": 0.141,
      "step": 130
    },
    {
      "epoch": 4.516129032258064,
      "grad_norm": 0.17586937546730042,
      "learning_rate": 2.78e-06,
      "loss": 0.1301,
      "step": 140
    },
    {
      "epoch": 4.838709677419355,
      "grad_norm": 0.2080131471157074,
      "learning_rate": 2.9800000000000003e-06,
      "loss": 0.1252,
      "step": 150
    },
    {
      "epoch": 5.161290322580645,
      "grad_norm": 0.14357304573059082,
      "learning_rate": 3.1800000000000005e-06,
      "loss": 0.1174,
      "step": 160
    },
    {
      "epoch": 5.483870967741936,
      "grad_norm": 0.15768013894557953,
      "learning_rate": 3.38e-06,
      "loss": 0.1123,
      "step": 170
    },
    {
      "epoch": 5.806451612903226,
      "grad_norm": 0.1478148102760315,
      "learning_rate": 3.58e-06,
      "loss": 0.1059,
      "step": 180
    },
    {
      "epoch": 6.129032258064516,
      "grad_norm": 0.11791401356458664,
      "learning_rate": 3.7800000000000002e-06,
      "loss": 0.1028,
      "step": 190
    },
    {
      "epoch": 6.451612903225806,
      "grad_norm": 0.12674356997013092,
      "learning_rate": 3.98e-06,
      "loss": 0.1002,
      "step": 200
    },
    {
      "epoch": 6.774193548387097,
      "grad_norm": 0.10726606845855713,
      "learning_rate": 4.18e-06,
      "loss": 0.0936,
      "step": 210
    },
    {
      "epoch": 7.096774193548387,
      "grad_norm": 0.13341137766838074,
      "learning_rate": 4.38e-06,
      "loss": 0.093,
      "step": 220
    },
    {
      "epoch": 7.419354838709677,
      "grad_norm": 0.09624793380498886,
      "learning_rate": 4.58e-06,
      "loss": 0.0891,
      "step": 230
    },
    {
      "epoch": 7.741935483870968,
      "grad_norm": 0.14156094193458557,
      "learning_rate": 4.780000000000001e-06,
      "loss": 0.0861,
      "step": 240
    },
    {
      "epoch": 8.064516129032258,
      "grad_norm": 0.10326485335826874,
      "learning_rate": 4.98e-06,
      "loss": 0.0835,
      "step": 250
    },
    {
      "epoch": 8.387096774193548,
      "grad_norm": 0.08262120187282562,
      "learning_rate": 5.18e-06,
      "loss": 0.0805,
      "step": 260
    },
    {
      "epoch": 8.709677419354838,
      "grad_norm": 0.1273198425769806,
      "learning_rate": 5.38e-06,
      "loss": 0.0779,
      "step": 270
    },
    {
      "epoch": 9.03225806451613,
      "grad_norm": 0.12214396893978119,
      "learning_rate": 5.580000000000001e-06,
      "loss": 0.0757,
      "step": 280
    },
    {
      "epoch": 9.35483870967742,
      "grad_norm": 0.1394161581993103,
      "learning_rate": 5.78e-06,
      "loss": 0.0728,
      "step": 290
    },
    {
      "epoch": 9.67741935483871,
      "grad_norm": 0.12285201996564865,
      "learning_rate": 5.98e-06,
      "loss": 0.0722,
      "step": 300
    },
    {
      "epoch": 10.0,
      "grad_norm": 0.15410321950912476,
      "learning_rate": 6.18e-06,
      "loss": 0.0692,
      "step": 310
    },
    {
      "epoch": 10.32258064516129,
      "grad_norm": 0.07469774037599564,
      "learning_rate": 6.38e-06,
      "loss": 0.0676,
      "step": 320
    },
    {
      "epoch": 10.64516129032258,
      "grad_norm": 0.16188545525074005,
      "learning_rate": 6.58e-06,
      "loss": 0.0643,
      "step": 330
    },
    {
      "epoch": 10.967741935483872,
      "grad_norm": 0.07787156105041504,
      "learning_rate": 6.78e-06,
      "loss": 0.0632,
      "step": 340
    },
    {
      "epoch": 11.290322580645162,
      "grad_norm": 0.13963359594345093,
      "learning_rate": 6.98e-06,
      "loss": 0.0593,
      "step": 350
    },
    {
      "epoch": 11.612903225806452,
      "grad_norm": 0.12019138038158417,
      "learning_rate": 7.180000000000001e-06,
      "loss": 0.0594,
      "step": 360
    },
    {
      "epoch": 11.935483870967742,
      "grad_norm": 0.13999758660793304,
      "learning_rate": 7.3800000000000005e-06,
      "loss": 0.0578,
      "step": 370
    },
    {
      "epoch": 12.258064516129032,
      "grad_norm": 0.11083265393972397,
      "learning_rate": 7.580000000000001e-06,
      "loss": 0.0544,
      "step": 380
    },
    {
      "epoch": 12.580645161290322,
      "grad_norm": 0.10924753546714783,
      "learning_rate": 7.78e-06,
      "loss": 0.0529,
      "step": 390
    },
    {
      "epoch": 12.903225806451612,
      "grad_norm": 0.10007922351360321,
      "learning_rate": 7.98e-06,
      "loss": 0.0519,
      "step": 400
    },
    {
      "epoch": 13.225806451612904,
      "grad_norm": 0.09409403055906296,
      "learning_rate": 8.18e-06,
      "loss": 0.0499,
      "step": 410
    },
    {
      "epoch": 13.548387096774194,
      "grad_norm": 0.1096971333026886,
      "learning_rate": 8.380000000000001e-06,
      "loss": 0.049,
      "step": 420
    },
    {
      "epoch": 13.870967741935484,
      "grad_norm": 0.10034970194101334,
      "learning_rate": 8.580000000000001e-06,
      "loss": 0.0462,
      "step": 430
    },
    {
      "epoch": 14.193548387096774,
      "grad_norm": 0.13287365436553955,
      "learning_rate": 8.78e-06,
      "loss": 0.0454,
      "step": 440
    },
    {
      "epoch": 14.516129032258064,
      "grad_norm": 0.10294642299413681,
      "learning_rate": 8.98e-06,
      "loss": 0.0436,
      "step": 450
    },
    {
      "epoch": 14.838709677419354,
      "grad_norm": 0.11570604145526886,
      "learning_rate": 9.180000000000002e-06,
      "loss": 0.0404,
      "step": 460
    },
    {
      "epoch": 15.161290322580646,
      "grad_norm": 0.08866477012634277,
      "learning_rate": 9.38e-06,
      "loss": 0.0395,
      "step": 470
    },
    {
      "epoch": 15.483870967741936,
      "grad_norm": 0.1237388625741005,
      "learning_rate": 9.58e-06,
      "loss": 0.0377,
      "step": 480
    },
    {
      "epoch": 15.806451612903226,
      "grad_norm": 0.09724581241607666,
      "learning_rate": 9.78e-06,
      "loss": 0.0364,
      "step": 490
    },
    {
      "epoch": 16.129032258064516,
      "grad_norm": 0.1216067299246788,
      "learning_rate": 9.980000000000001e-06,
      "loss": 0.0369,
      "step": 500
    },
    {
      "epoch": 16.451612903225808,
      "grad_norm": 0.14183978736400604,
      "learning_rate": 1.018e-05,
      "loss": 0.0335,
      "step": 510
    },
    {
      "epoch": 16.774193548387096,
      "grad_norm": 0.10971418768167496,
      "learning_rate": 1.038e-05,
      "loss": 0.0337,
      "step": 520
    },
    {
      "epoch": 17.096774193548388,
      "grad_norm": 0.10205938667058945,
      "learning_rate": 1.058e-05,
      "loss": 0.0314,
      "step": 530
    },
    {
      "epoch": 17.419354838709676,
      "grad_norm": 0.08642183989286423,
      "learning_rate": 1.0780000000000002e-05,
      "loss": 0.0309,
      "step": 540
    },
    {
      "epoch": 17.741935483870968,
      "grad_norm": 0.11878576874732971,
      "learning_rate": 1.098e-05,
      "loss": 0.0313,
      "step": 550
    },
    {
      "epoch": 18.06451612903226,
      "grad_norm": 0.11662951111793518,
      "learning_rate": 1.118e-05,
      "loss": 0.03,
      "step": 560
    },
    {
      "epoch": 18.387096774193548,
      "grad_norm": 0.13768725097179413,
      "learning_rate": 1.1380000000000001e-05,
      "loss": 0.0285,
      "step": 570
    },
    {
      "epoch": 18.70967741935484,
      "grad_norm": 0.1288958489894867,
      "learning_rate": 1.1580000000000001e-05,
      "loss": 0.0288,
      "step": 580
    },
    {
      "epoch": 19.032258064516128,
      "grad_norm": 0.11418294161558151,
      "learning_rate": 1.178e-05,
      "loss": 0.0289,
      "step": 590
    },
    {
      "epoch": 19.35483870967742,
      "grad_norm": 0.12079351395368576,
      "learning_rate": 1.198e-05,
      "loss": 0.0287,
      "step": 600
    },
    {
      "epoch": 19.677419354838708,
      "grad_norm": 0.2023964524269104,
      "learning_rate": 1.2180000000000002e-05,
      "loss": 0.0279,
      "step": 610
    },
    {
      "epoch": 20.0,
      "grad_norm": 0.19349494576454163,
      "learning_rate": 1.238e-05,
      "loss": 0.0278,
      "step": 620
    },
    {
      "epoch": 20.322580645161292,
      "grad_norm": 0.17760895192623138,
      "learning_rate": 1.258e-05,
      "loss": 0.0273,
      "step": 630
    },
    {
      "epoch": 20.64516129032258,
      "grad_norm": 0.1515321135520935,
      "learning_rate": 1.278e-05,
      "loss": 0.0257,
      "step": 640
    },
    {
      "epoch": 20.967741935483872,
      "grad_norm": 0.10156595706939697,
      "learning_rate": 1.2980000000000001e-05,
      "loss": 0.0257,
      "step": 650
    },
    {
      "epoch": 21.29032258064516,
      "grad_norm": 0.12676751613616943,
      "learning_rate": 1.3180000000000001e-05,
      "loss": 0.0249,
      "step": 660
    },
    {
      "epoch": 21.612903225806452,
      "grad_norm": 0.13134433329105377,
      "learning_rate": 1.338e-05,
      "loss": 0.0239,
      "step": 670
    },
    {
      "epoch": 21.93548387096774,
      "grad_norm": 0.13195613026618958,
      "learning_rate": 1.358e-05,
      "loss": 0.0242,
      "step": 680
    },
    {
      "epoch": 22.258064516129032,
      "grad_norm": 0.12307468056678772,
      "learning_rate": 1.3780000000000002e-05,
      "loss": 0.0238,
      "step": 690
    },
    {
      "epoch": 22.580645161290324,
      "grad_norm": 0.1110624298453331,
      "learning_rate": 1.3980000000000002e-05,
      "loss": 0.0253,
      "step": 700
    },
    {
      "epoch": 22.903225806451612,
      "grad_norm": 0.13264206051826477,
      "learning_rate": 1.4180000000000001e-05,
      "loss": 0.0239,
      "step": 710
    },
    {
      "epoch": 23.225806451612904,
      "grad_norm": 0.11219354718923569,
      "learning_rate": 1.4380000000000001e-05,
      "loss": 0.0234,
      "step": 720
    },
    {
      "epoch": 23.548387096774192,
      "grad_norm": 0.14356958866119385,
      "learning_rate": 1.4580000000000003e-05,
      "loss": 0.0225,
      "step": 730
    },
    {
      "epoch": 23.870967741935484,
      "grad_norm": 0.13088181614875793,
      "learning_rate": 1.4779999999999999e-05,
      "loss": 0.0221,
      "step": 740
    },
    {
      "epoch": 24.193548387096776,
      "grad_norm": 0.15026481449604034,
      "learning_rate": 1.4979999999999999e-05,
      "loss": 0.0225,
      "step": 750
    },
    {
      "epoch": 24.516129032258064,
      "grad_norm": 0.20722658932209015,
      "learning_rate": 1.518e-05,
      "loss": 0.0225,
      "step": 760
    },
    {
      "epoch": 24.838709677419356,
      "grad_norm": 0.13866043090820312,
      "learning_rate": 1.538e-05,
      "loss": 0.0221,
      "step": 770
    },
    {
      "epoch": 25.161290322580644,
      "grad_norm": 0.11805257201194763,
      "learning_rate": 1.558e-05,
      "loss": 0.0214,
      "step": 780
    },
    {
      "epoch": 25.483870967741936,
      "grad_norm": 0.1360214352607727,
      "learning_rate": 1.578e-05,
      "loss": 0.0205,
      "step": 790
    },
    {
      "epoch": 25.806451612903224,
      "grad_norm": 0.15285663306713104,
      "learning_rate": 1.598e-05,
      "loss": 0.0215,
      "step": 800
    },
    {
      "epoch": 26.129032258064516,
      "grad_norm": 0.13831627368927002,
      "learning_rate": 1.618e-05,
      "loss": 0.021,
      "step": 810
    },
    {
      "epoch": 26.451612903225808,
      "grad_norm": 0.1219727173447609,
      "learning_rate": 1.6380000000000002e-05,
      "loss": 0.0208,
      "step": 820
    },
    {
      "epoch": 26.774193548387096,
      "grad_norm": 0.14780209958553314,
      "learning_rate": 1.658e-05,
      "loss": 0.0212,
      "step": 830
    },
    {
      "epoch": 27.096774193548388,
      "grad_norm": 0.13057461380958557,
      "learning_rate": 1.6780000000000002e-05,
      "loss": 0.0206,
      "step": 840
    },
    {
      "epoch": 27.419354838709676,
      "grad_norm": 0.13308848440647125,
      "learning_rate": 1.698e-05,
      "loss": 0.021,
      "step": 850
    },
    {
      "epoch": 27.741935483870968,
      "grad_norm": 0.1406048834323883,
      "learning_rate": 1.718e-05,
      "loss": 0.0204,
      "step": 860
    },
    {
      "epoch": 28.06451612903226,
      "grad_norm": 0.1681479662656784,
      "learning_rate": 1.7380000000000003e-05,
      "loss": 0.0203,
      "step": 870
    },
    {
      "epoch": 28.387096774193548,
      "grad_norm": 0.15317517518997192,
      "learning_rate": 1.758e-05,
      "loss": 0.0192,
      "step": 880
    },
    {
      "epoch": 28.70967741935484,
      "grad_norm": 0.17327739298343658,
      "learning_rate": 1.7780000000000003e-05,
      "loss": 0.0191,
      "step": 890
    },
    {
      "epoch": 29.032258064516128,
      "grad_norm": 0.13825540244579315,
      "learning_rate": 1.798e-05,
      "loss": 0.0194,
      "step": 900
    },
    {
      "epoch": 29.35483870967742,
      "grad_norm": 0.14294251799583435,
      "learning_rate": 1.818e-05,
      "loss": 0.0191,
      "step": 910
    },
    {
      "epoch": 29.677419354838708,
      "grad_norm": 0.1148597002029419,
      "learning_rate": 1.838e-05,
      "loss": 0.0194,
      "step": 920
    },
    {
      "epoch": 30.0,
      "grad_norm": 0.10468383878469467,
      "learning_rate": 1.858e-05,
      "loss": 0.019,
      "step": 930
    },
    {
      "epoch": 30.322580645161292,
      "grad_norm": 0.16523204743862152,
      "learning_rate": 1.878e-05,
      "loss": 0.0183,
      "step": 940
    },
    {
      "epoch": 30.64516129032258,
      "grad_norm": 0.1483301818370819,
      "learning_rate": 1.898e-05,
      "loss": 0.0183,
      "step": 950
    },
    {
      "epoch": 30.967741935483872,
      "grad_norm": 0.19092631340026855,
      "learning_rate": 1.918e-05,
      "loss": 0.0188,
      "step": 960
    },
    {
      "epoch": 31.29032258064516,
      "grad_norm": 0.19763915240764618,
      "learning_rate": 1.938e-05,
      "loss": 0.0181,
      "step": 970
    },
    {
      "epoch": 31.612903225806452,
      "grad_norm": 0.18357335031032562,
      "learning_rate": 1.9580000000000002e-05,
      "loss": 0.0179,
      "step": 980
    },
    {
      "epoch": 31.93548387096774,
      "grad_norm": 0.1674022376537323,
      "learning_rate": 1.978e-05,
      "loss": 0.0181,
      "step": 990
    },
    {
      "epoch": 32.25806451612903,
      "grad_norm": 0.1773853302001953,
      "learning_rate": 1.9980000000000002e-05,
      "loss": 0.0187,
      "step": 1000
    },
    {
      "epoch": 32.58064516129032,
      "grad_norm": 0.17372146248817444,
      "learning_rate": 2.0180000000000003e-05,
      "loss": 0.0175,
      "step": 1010
    },
    {
      "epoch": 32.903225806451616,
      "grad_norm": 0.16912385821342468,
      "learning_rate": 2.038e-05,
      "loss": 0.0177,
      "step": 1020
    },
    {
      "epoch": 33.225806451612904,
      "grad_norm": 0.1924871802330017,
      "learning_rate": 2.0580000000000003e-05,
      "loss": 0.017,
      "step": 1030
    },
    {
      "epoch": 33.54838709677419,
      "grad_norm": 0.1980140060186386,
      "learning_rate": 2.078e-05,
      "loss": 0.0174,
      "step": 1040
    },
    {
      "epoch": 33.87096774193548,
      "grad_norm": 0.15361692011356354,
      "learning_rate": 2.098e-05,
      "loss": 0.0169,
      "step": 1050
    },
    {
      "epoch": 34.193548387096776,
      "grad_norm": 0.15744690597057343,
      "learning_rate": 2.118e-05,
      "loss": 0.0174,
      "step": 1060
    },
    {
      "epoch": 34.516129032258064,
      "grad_norm": 0.18941748142242432,
      "learning_rate": 2.138e-05,
      "loss": 0.0173,
      "step": 1070
    },
    {
      "epoch": 34.83870967741935,
      "grad_norm": 0.18401823937892914,
      "learning_rate": 2.158e-05,
      "loss": 0.0176,
      "step": 1080
    },
    {
      "epoch": 35.16129032258065,
      "grad_norm": 0.1347324103116989,
      "learning_rate": 2.178e-05,
      "loss": 0.017,
      "step": 1090
    },
    {
      "epoch": 35.483870967741936,
      "grad_norm": 0.13733318448066711,
      "learning_rate": 2.198e-05,
      "loss": 0.0163,
      "step": 1100
    },
    {
      "epoch": 35.806451612903224,
      "grad_norm": 0.17490868270397186,
      "learning_rate": 2.218e-05,
      "loss": 0.0167,
      "step": 1110
    },
    {
      "epoch": 36.12903225806452,
      "grad_norm": 0.17697739601135254,
      "learning_rate": 2.2380000000000003e-05,
      "loss": 0.0173,
      "step": 1120
    },
    {
      "epoch": 36.45161290322581,
      "grad_norm": 0.19351543486118317,
      "learning_rate": 2.258e-05,
      "loss": 0.0174,
      "step": 1130
    },
    {
      "epoch": 36.774193548387096,
      "grad_norm": 0.17104071378707886,
      "learning_rate": 2.2780000000000002e-05,
      "loss": 0.0159,
      "step": 1140
    },
    {
      "epoch": 37.096774193548384,
      "grad_norm": 0.1536385416984558,
      "learning_rate": 2.298e-05,
      "loss": 0.016,
      "step": 1150
    },
    {
      "epoch": 37.41935483870968,
      "grad_norm": 0.14215905964374542,
      "learning_rate": 2.318e-05,
      "loss": 0.0167,
      "step": 1160
    },
    {
      "epoch": 37.74193548387097,
      "grad_norm": 0.17754510045051575,
      "learning_rate": 2.3380000000000003e-05,
      "loss": 0.0163,
      "step": 1170
    },
    {
      "epoch": 38.064516129032256,
      "grad_norm": 0.15013834834098816,
      "learning_rate": 2.358e-05,
      "loss": 0.0152,
      "step": 1180
    },
    {
      "epoch": 38.38709677419355,
      "grad_norm": 0.17872834205627441,
      "learning_rate": 2.3780000000000003e-05,
      "loss": 0.0152,
      "step": 1190
    },
    {
      "epoch": 38.70967741935484,
      "grad_norm": 0.16451333463191986,
      "learning_rate": 2.398e-05,
      "loss": 0.015,
      "step": 1200
    },
    {
      "epoch": 39.03225806451613,
      "grad_norm": 0.17044350504875183,
      "learning_rate": 2.418e-05,
      "loss": 0.0149,
      "step": 1210
    },
    {
      "epoch": 39.354838709677416,
      "grad_norm": 0.16590650379657745,
      "learning_rate": 2.438e-05,
      "loss": 0.0155,
      "step": 1220
    },
    {
      "epoch": 39.67741935483871,
      "grad_norm": 0.1371670961380005,
      "learning_rate": 2.4580000000000002e-05,
      "loss": 0.015,
      "step": 1230
    },
    {
      "epoch": 40.0,
      "grad_norm": 0.11834592372179031,
      "learning_rate": 2.478e-05,
      "loss": 0.0158,
      "step": 1240
    },
    {
      "epoch": 40.32258064516129,
      "grad_norm": 0.17337927222251892,
      "learning_rate": 2.498e-05,
      "loss": 0.0148,
      "step": 1250
    },
    {
      "epoch": 40.645161290322584,
      "grad_norm": 0.17919541895389557,
      "learning_rate": 2.5180000000000003e-05,
      "loss": 0.0146,
      "step": 1260
    },
    {
      "epoch": 40.96774193548387,
      "grad_norm": 0.1446114331483841,
      "learning_rate": 2.5380000000000004e-05,
      "loss": 0.0144,
      "step": 1270
    },
    {
      "epoch": 41.29032258064516,
      "grad_norm": 0.12958332896232605,
      "learning_rate": 2.5580000000000002e-05,
      "loss": 0.0143,
      "step": 1280
    },
    {
      "epoch": 41.61290322580645,
      "grad_norm": 0.208790123462677,
      "learning_rate": 2.5779999999999997e-05,
      "loss": 0.0145,
      "step": 1290
    },
    {
      "epoch": 41.935483870967744,
      "grad_norm": 0.13380534946918488,
      "learning_rate": 2.598e-05,
      "loss": 0.0144,
      "step": 1300
    },
    {
      "epoch": 42.25806451612903,
      "grad_norm": 0.202678382396698,
      "learning_rate": 2.618e-05,
      "loss": 0.0137,
      "step": 1310
    },
    {
      "epoch": 42.58064516129032,
      "grad_norm": 0.19495125114917755,
      "learning_rate": 2.6379999999999998e-05,
      "loss": 0.0147,
      "step": 1320
    },
    {
      "epoch": 42.903225806451616,
      "grad_norm": 0.17326615750789642,
      "learning_rate": 2.658e-05,
      "loss": 0.0143,
      "step": 1330
    },
    {
      "epoch": 43.225806451612904,
      "grad_norm": 0.17888951301574707,
      "learning_rate": 2.678e-05,
      "loss": 0.0143,
      "step": 1340
    },
    {
      "epoch": 43.54838709677419,
      "grad_norm": 0.1571645885705948,
      "learning_rate": 2.698e-05,
      "loss": 0.0136,
      "step": 1350
    },
    {
      "epoch": 43.87096774193548,
      "grad_norm": 0.13882234692573547,
      "learning_rate": 2.718e-05,
      "loss": 0.0136,
      "step": 1360
    },
    {
      "epoch": 44.193548387096776,
      "grad_norm": 0.12413440644741058,
      "learning_rate": 2.738e-05,
      "loss": 0.0132,
      "step": 1370
    },
    {
      "epoch": 44.516129032258064,
      "grad_norm": 0.13164107501506805,
      "learning_rate": 2.758e-05,
      "loss": 0.0138,
      "step": 1380
    },
    {
      "epoch": 44.83870967741935,
      "grad_norm": 0.14705562591552734,
      "learning_rate": 2.778e-05,
      "loss": 0.0135,
      "step": 1390
    },
    {
      "epoch": 45.16129032258065,
      "grad_norm": 0.17040109634399414,
      "learning_rate": 2.798e-05,
      "loss": 0.0136,
      "step": 1400
    },
    {
      "epoch": 45.483870967741936,
      "grad_norm": 0.17892764508724213,
      "learning_rate": 2.818e-05,
      "loss": 0.0135,
      "step": 1410
    },
    {
      "epoch": 45.806451612903224,
      "grad_norm": 0.14825959503650665,
      "learning_rate": 2.8380000000000003e-05,
      "loss": 0.013,
      "step": 1420
    },
    {
      "epoch": 46.12903225806452,
      "grad_norm": 0.11769016087055206,
      "learning_rate": 2.858e-05,
      "loss": 0.0134,
      "step": 1430
    },
    {
      "epoch": 46.45161290322581,
      "grad_norm": 0.16785204410552979,
      "learning_rate": 2.8780000000000002e-05,
      "loss": 0.0131,
      "step": 1440
    },
    {
      "epoch": 46.774193548387096,
      "grad_norm": 0.12980397045612335,
      "learning_rate": 2.898e-05,
      "loss": 0.0141,
      "step": 1450
    },
    {
      "epoch": 47.096774193548384,
      "grad_norm": 0.19282320141792297,
      "learning_rate": 2.9180000000000002e-05,
      "loss": 0.014,
      "step": 1460
    },
    {
      "epoch": 47.41935483870968,
      "grad_norm": 0.17180287837982178,
      "learning_rate": 2.9380000000000003e-05,
      "loss": 0.0133,
      "step": 1470
    },
    {
      "epoch": 47.74193548387097,
      "grad_norm": 0.1480427086353302,
      "learning_rate": 2.958e-05,
      "loss": 0.0133,
      "step": 1480
    },
    {
      "epoch": 48.064516129032256,
      "grad_norm": 0.18842412531375885,
      "learning_rate": 2.9780000000000003e-05,
      "loss": 0.0135,
      "step": 1490
    },
    {
      "epoch": 48.38709677419355,
      "grad_norm": 0.2088242620229721,
      "learning_rate": 2.998e-05,
      "loss": 0.0142,
      "step": 1500
    },
    {
      "epoch": 48.70967741935484,
      "grad_norm": 0.1986250877380371,
      "learning_rate": 3.0180000000000002e-05,
      "loss": 0.0128,
      "step": 1510
    },
    {
      "epoch": 49.03225806451613,
      "grad_norm": 0.1687200963497162,
      "learning_rate": 3.0380000000000004e-05,
      "loss": 0.0127,
      "step": 1520
    },
    {
      "epoch": 49.354838709677416,
      "grad_norm": 0.1558302789926529,
      "learning_rate": 3.058e-05,
      "loss": 0.0131,
      "step": 1530
    },
    {
      "epoch": 49.67741935483871,
      "grad_norm": 0.17784525454044342,
      "learning_rate": 3.078e-05,
      "loss": 0.0125,
      "step": 1540
    },
    {
      "epoch": 50.0,
      "grad_norm": 0.22291983664035797,
      "learning_rate": 3.0980000000000005e-05,
      "loss": 0.0121,
      "step": 1550
    },
    {
      "epoch": 50.32258064516129,
      "grad_norm": 0.21824175119400024,
      "learning_rate": 3.118e-05,
      "loss": 0.012,
      "step": 1560
    },
    {
      "epoch": 50.645161290322584,
      "grad_norm": 0.2027992457151413,
      "learning_rate": 3.138e-05,
      "loss": 0.0127,
      "step": 1570
    },
    {
      "epoch": 50.96774193548387,
      "grad_norm": 0.2533424496650696,
      "learning_rate": 3.1580000000000006e-05,
      "loss": 0.0121,
      "step": 1580
    },
    {
      "epoch": 51.29032258064516,
      "grad_norm": 0.15423865616321564,
      "learning_rate": 3.1780000000000004e-05,
      "loss": 0.0125,
      "step": 1590
    },
    {
      "epoch": 51.61290322580645,
      "grad_norm": 0.13885457813739777,
      "learning_rate": 3.198e-05,
      "loss": 0.013,
      "step": 1600
    },
    {
      "epoch": 51.935483870967744,
      "grad_norm": 0.1417354941368103,
      "learning_rate": 3.218e-05,
      "loss": 0.0119,
      "step": 1610
    },
    {
      "epoch": 52.25806451612903,
      "grad_norm": 0.17335128784179688,
      "learning_rate": 3.238e-05,
      "loss": 0.012,
      "step": 1620
    },
    {
      "epoch": 52.58064516129032,
      "grad_norm": 0.19661465287208557,
      "learning_rate": 3.2579999999999996e-05,
      "loss": 0.0119,
      "step": 1630
    },
    {
      "epoch": 52.903225806451616,
      "grad_norm": 0.2602691650390625,
      "learning_rate": 3.278e-05,
      "loss": 0.0122,
      "step": 1640
    },
    {
      "epoch": 53.225806451612904,
      "grad_norm": 0.21529579162597656,
      "learning_rate": 3.298e-05,
      "loss": 0.0117,
      "step": 1650
    },
    {
      "epoch": 53.54838709677419,
      "grad_norm": 0.1529514640569687,
      "learning_rate": 3.318e-05,
      "loss": 0.0116,
      "step": 1660
    },
    {
      "epoch": 53.87096774193548,
      "grad_norm": 0.1834433674812317,
      "learning_rate": 3.338e-05,
      "loss": 0.0125,
      "step": 1670
    },
    {
      "epoch": 54.193548387096776,
      "grad_norm": 0.24152186512947083,
      "learning_rate": 3.358e-05,
      "loss": 0.0122,
      "step": 1680
    },
    {
      "epoch": 54.516129032258064,
      "grad_norm": 0.1319815069437027,
      "learning_rate": 3.378e-05,
      "loss": 0.0116,
      "step": 1690
    },
    {
      "epoch": 54.83870967741935,
      "grad_norm": 0.16622133553028107,
      "learning_rate": 3.398e-05,
      "loss": 0.0123,
      "step": 1700
    },
    {
      "epoch": 55.16129032258065,
      "grad_norm": 0.12881875038146973,
      "learning_rate": 3.418e-05,
      "loss": 0.0115,
      "step": 1710
    },
    {
      "epoch": 55.483870967741936,
      "grad_norm": 0.16083110868930817,
      "learning_rate": 3.438e-05,
      "loss": 0.0121,
      "step": 1720
    },
    {
      "epoch": 55.806451612903224,
      "grad_norm": 0.17213866114616394,
      "learning_rate": 3.4580000000000004e-05,
      "loss": 0.0111,
      "step": 1730
    },
    {
      "epoch": 56.12903225806452,
      "grad_norm": 0.1291566640138626,
      "learning_rate": 3.478e-05,
      "loss": 0.0113,
      "step": 1740
    },
    {
      "epoch": 56.45161290322581,
      "grad_norm": 0.1637270748615265,
      "learning_rate": 3.498e-05,
      "loss": 0.011,
      "step": 1750
    },
    {
      "epoch": 56.774193548387096,
      "grad_norm": 0.1748403012752533,
      "learning_rate": 3.518e-05,
      "loss": 0.0108,
      "step": 1760
    },
    {
      "epoch": 57.096774193548384,
      "grad_norm": 0.18834678828716278,
      "learning_rate": 3.5380000000000003e-05,
      "loss": 0.0109,
      "step": 1770
    },
    {
      "epoch": 57.41935483870968,
      "grad_norm": 0.1650463044643402,
      "learning_rate": 3.558e-05,
      "loss": 0.0115,
      "step": 1780
    },
    {
      "epoch": 57.74193548387097,
      "grad_norm": 0.17039620876312256,
      "learning_rate": 3.578e-05,
      "loss": 0.0114,
      "step": 1790
    },
    {
      "epoch": 58.064516129032256,
      "grad_norm": 0.13908374309539795,
      "learning_rate": 3.5980000000000004e-05,
      "loss": 0.0107,
      "step": 1800
    },
    {
      "epoch": 58.38709677419355,
      "grad_norm": 0.1755029857158661,
      "learning_rate": 3.618e-05,
      "loss": 0.0104,
      "step": 1810
    },
    {
      "epoch": 58.70967741935484,
      "grad_norm": 0.16456303000450134,
      "learning_rate": 3.638e-05,
      "loss": 0.011,
      "step": 1820
    },
    {
      "epoch": 59.03225806451613,
      "grad_norm": 0.1215883269906044,
      "learning_rate": 3.6580000000000006e-05,
      "loss": 0.0106,
      "step": 1830
    },
    {
      "epoch": 59.354838709677416,
      "grad_norm": 0.13105925917625427,
      "learning_rate": 3.6780000000000004e-05,
      "loss": 0.0104,
      "step": 1840
    },
    {
      "epoch": 59.67741935483871,
      "grad_norm": 0.14026036858558655,
      "learning_rate": 3.698e-05,
      "loss": 0.0105,
      "step": 1850
    },
    {
      "epoch": 60.0,
      "grad_norm": 0.15687642991542816,
      "learning_rate": 3.7180000000000007e-05,
      "loss": 0.0105,
      "step": 1860
    },
    {
      "epoch": 60.32258064516129,
      "grad_norm": 0.1946769654750824,
      "learning_rate": 3.7380000000000005e-05,
      "loss": 0.011,
      "step": 1870
    },
    {
      "epoch": 60.645161290322584,
      "grad_norm": 0.2352798581123352,
      "learning_rate": 3.758e-05,
      "loss": 0.0108,
      "step": 1880
    },
    {
      "epoch": 60.96774193548387,
      "grad_norm": 0.2311507761478424,
      "learning_rate": 3.778000000000001e-05,
      "loss": 0.0119,
      "step": 1890
    },
    {
      "epoch": 61.29032258064516,
      "grad_norm": 0.24302849173545837,
      "learning_rate": 3.7980000000000006e-05,
      "loss": 0.0104,
      "step": 1900
    },
    {
      "epoch": 61.61290322580645,
      "grad_norm": 0.20787030458450317,
      "learning_rate": 3.818e-05,
      "loss": 0.011,
      "step": 1910
    },
    {
      "epoch": 61.935483870967744,
      "grad_norm": 0.21964043378829956,
      "learning_rate": 3.838e-05,
      "loss": 0.0105,
      "step": 1920
    },
    {
      "epoch": 62.25806451612903,
      "grad_norm": 0.17259149253368378,
      "learning_rate": 3.858e-05,
      "loss": 0.011,
      "step": 1930
    },
    {
      "epoch": 62.58064516129032,
      "grad_norm": 0.2735665440559387,
      "learning_rate": 3.878e-05,
      "loss": 0.0108,
      "step": 1940
    },
    {
      "epoch": 62.903225806451616,
      "grad_norm": 0.19282570481300354,
      "learning_rate": 3.898e-05,
      "loss": 0.0108,
      "step": 1950
    },
    {
      "epoch": 63.225806451612904,
      "grad_norm": 0.21735602617263794,
      "learning_rate": 3.918e-05,
      "loss": 0.0098,
      "step": 1960
    },
    {
      "epoch": 63.54838709677419,
      "grad_norm": 0.192997008562088,
      "learning_rate": 3.938e-05,
      "loss": 0.0101,
      "step": 1970
    },
    {
      "epoch": 63.87096774193548,
      "grad_norm": 0.21661442518234253,
      "learning_rate": 3.958e-05,
      "loss": 0.0107,
      "step": 1980
    },
    {
      "epoch": 64.19354838709677,
      "grad_norm": 0.23013737797737122,
      "learning_rate": 3.978e-05,
      "loss": 0.0106,
      "step": 1990
    },
    {
      "epoch": 64.51612903225806,
      "grad_norm": 0.17058345675468445,
      "learning_rate": 3.998e-05,
      "loss": 0.0101,
      "step": 2000
    }
  ],
  "logging_steps": 10,
  "max_steps": 100000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3226,
  "save_steps": 2000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 0.0,
  "train_batch_size": 512,
  "trial_name": null,
  "trial_params": null
}