{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0687871403531273,
  "eval_steps": 100000000,
  "global_step": 115000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 5e-05,
      "loss": 7.7296,
      "step": 500
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9999535305620204e-05,
      "loss": 6.5786,
      "step": 1000
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.99990706112404e-05,
      "loss": 6.2317,
      "step": 1500
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.99986059168606e-05,
      "loss": 6.0053,
      "step": 2000
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.99981412224808e-05,
      "loss": 5.8615,
      "step": 2500
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9997676528101e-05,
      "loss": 5.6794,
      "step": 3000
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.99972118337212e-05,
      "loss": 5.5481,
      "step": 3500
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9996747139341396e-05,
      "loss": 5.4375,
      "step": 4000
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.99962824449616e-05,
      "loss": 5.3391,
      "step": 4500
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.99958177505818e-05,
      "loss": 5.2598,
      "step": 5000
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9995353056202e-05,
      "loss": 5.17,
      "step": 5500
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9994888361822204e-05,
      "loss": 5.0886,
      "step": 6000
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.99944236674424e-05,
      "loss": 5.0128,
      "step": 6500
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.99939589730626e-05,
      "loss": 4.9504,
      "step": 7000
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.99934942786828e-05,
      "loss": 4.8755,
      "step": 7500
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9993029584303e-05,
      "loss": 4.8402,
      "step": 8000
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.99925648899232e-05,
      "loss": 4.7674,
      "step": 8500
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9992100195543396e-05,
      "loss": 4.7254,
      "step": 9000
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.99916355011636e-05,
      "loss": 4.6749,
      "step": 9500
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.99911708067838e-05,
      "loss": 4.6135,
      "step": 10000
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9990706112403995e-05,
      "loss": 4.5239,
      "step": 10500
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.99902414180242e-05,
      "loss": 4.4729,
      "step": 11000
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.998977672364439e-05,
      "loss": 4.4434,
      "step": 11500
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9989312029264594e-05,
      "loss": 4.3587,
      "step": 12000
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9988847334884796e-05,
      "loss": 4.3126,
      "step": 12500
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.998838264050499e-05,
      "loss": 4.2658,
      "step": 13000
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.998791794612519e-05,
      "loss": 4.2412,
      "step": 13500
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9987453251745395e-05,
      "loss": 4.2035,
      "step": 14000
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.998698855736559e-05,
      "loss": 4.1695,
      "step": 14500
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.998652386298579e-05,
      "loss": 4.1406,
      "step": 15000
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9986059168605994e-05,
      "loss": 4.1275,
      "step": 15500
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9985594474226196e-05,
      "loss": 4.1032,
      "step": 16000
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.99851297798464e-05,
      "loss": 4.0588,
      "step": 16500
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9984665085466594e-05,
      "loss": 4.0536,
      "step": 17000
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9984200391086796e-05,
      "loss": 3.9979,
      "step": 17500
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.998373569670699e-05,
      "loss": 3.9913,
      "step": 18000
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.998327100232719e-05,
      "loss": 3.988,
      "step": 18500
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9982806307947395e-05,
      "loss": 3.9575,
      "step": 19000
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.998234161356759e-05,
      "loss": 3.942,
      "step": 19500
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.998187691918779e-05,
      "loss": 3.9258,
      "step": 20000
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.998141222480799e-05,
      "loss": 3.8736,
      "step": 20500
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.998094753042819e-05,
      "loss": 3.903,
      "step": 21000
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.9804810945458605e-05,
      "loss": 3.7108,
      "step": 21500
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.980016358701714e-05,
      "loss": 3.6276,
      "step": 22000
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.979551622857568e-05,
      "loss": 3.568,
      "step": 22500
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.9790868870134216e-05,
      "loss": 3.5307,
      "step": 23000
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.978622151169275e-05,
      "loss": 3.4857,
      "step": 23500
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.978157415325129e-05,
      "loss": 3.4463,
      "step": 24000
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.977692679480983e-05,
      "loss": 3.4396,
      "step": 24500
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.977227943636837e-05,
      "loss": 3.402,
      "step": 25000
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.9767632077926904e-05,
      "loss": 3.3716,
      "step": 25500
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.9762984719485446e-05,
      "loss": 3.3533,
      "step": 26000
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.975833736104398e-05,
      "loss": 3.3084,
      "step": 26500
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.975369000260252e-05,
      "loss": 3.293,
      "step": 27000
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.974904264416106e-05,
      "loss": 3.2707,
      "step": 27500
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.97443952857196e-05,
      "loss": 3.2561,
      "step": 28000
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.9739747927278134e-05,
      "loss": 3.2219,
      "step": 28500
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.9735100568836676e-05,
      "loss": 3.2014,
      "step": 29000
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.973045321039521e-05,
      "loss": 3.1892,
      "step": 29500
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.972580585195375e-05,
      "loss": 3.1734,
      "step": 30000
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.972115849351229e-05,
      "loss": 3.1624,
      "step": 30500
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.971651113507082e-05,
      "loss": 3.1416,
      "step": 31000
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.9711863776629364e-05,
      "loss": 3.1255,
      "step": 31500
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.97072164181879e-05,
      "loss": 3.1074,
      "step": 32000
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.970256905974644e-05,
      "loss": 3.0992,
      "step": 32500
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.9697921701304976e-05,
      "loss": 3.0867,
      "step": 33000
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.969327434286352e-05,
      "loss": 3.0711,
      "step": 33500
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.968862698442205e-05,
      "loss": 3.0621,
      "step": 34000
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.9683979625980594e-05,
      "loss": 3.0543,
      "step": 34500
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.967933226753913e-05,
      "loss": 3.0407,
      "step": 35000
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.967468490909768e-05,
      "loss": 3.0363,
      "step": 35500
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.967003755065621e-05,
      "loss": 3.0204,
      "step": 36000
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.966539019221475e-05,
      "loss": 3.0192,
      "step": 36500
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.966074283377329e-05,
      "loss": 3.0018,
      "step": 37000
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.9656095475331824e-05,
      "loss": 2.992,
      "step": 37500
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.9651448116890365e-05,
      "loss": 2.9887,
      "step": 38000
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.96468007584489e-05,
      "loss": 2.9729,
      "step": 38500
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.964215340000744e-05,
      "loss": 2.9743,
      "step": 39000
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.963750604156598e-05,
      "loss": 2.9579,
      "step": 39500
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.963285868312452e-05,
      "loss": 2.9507,
      "step": 40000
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.9628211324683054e-05,
      "loss": 2.9553,
      "step": 40500
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.9623563966241595e-05,
      "loss": 2.9419,
      "step": 41000
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.961891660780013e-05,
      "loss": 2.9302,
      "step": 41500
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.961426924935867e-05,
      "loss": 2.934,
      "step": 42000
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.960962189091721e-05,
      "loss": 2.9192,
      "step": 42500
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.960497453247575e-05,
      "loss": 2.9089,
      "step": 43000
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.9600327174034283e-05,
      "loss": 2.9131,
      "step": 43500
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.959567981559282e-05,
      "loss": 2.8963,
      "step": 44000
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.959103245715136e-05,
      "loss": 2.8968,
      "step": 44500
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.9586385098709895e-05,
      "loss": 2.8909,
      "step": 45000
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.958173774026844e-05,
      "loss": 2.8808,
      "step": 45500
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.957709038182697e-05,
      "loss": 2.879,
      "step": 46000
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.957244302338551e-05,
      "loss": 2.8674,
      "step": 46500
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.956779566494405e-05,
      "loss": 2.8615,
      "step": 47000
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.956314830650259e-05,
      "loss": 2.8619,
      "step": 47500
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.9558500948061125e-05,
      "loss": 2.8642,
      "step": 48000
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.9553853589619667e-05,
      "loss": 2.8486,
      "step": 48500
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.95492062311782e-05,
      "loss": 2.8469,
      "step": 49000
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.954455887273674e-05,
      "loss": 2.8438,
      "step": 49500
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.953991151429528e-05,
      "loss": 2.832,
      "step": 50000
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.953526415585382e-05,
      "loss": 2.8319,
      "step": 50500
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.9530616797412355e-05,
      "loss": 2.8332,
      "step": 51000
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.952596943897089e-05,
      "loss": 2.8333,
      "step": 51500
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.952132208052943e-05,
      "loss": 2.8224,
      "step": 52000
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.9516674722087966e-05,
      "loss": 2.8138,
      "step": 52500
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.951202736364651e-05,
      "loss": 2.8129,
      "step": 53000
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.950738000520504e-05,
      "loss": 2.8076,
      "step": 53500
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.9502732646763585e-05,
      "loss": 2.7981,
      "step": 54000
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.949808528832212e-05,
      "loss": 2.8029,
      "step": 54500
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.949343792988066e-05,
      "loss": 2.7994,
      "step": 55000
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.9488790571439196e-05,
      "loss": 2.793,
      "step": 55500
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.948414321299774e-05,
      "loss": 2.7945,
      "step": 56000
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.947949585455627e-05,
      "loss": 2.7819,
      "step": 56500
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.9474848496114814e-05,
      "loss": 2.7854,
      "step": 57000
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.947020113767335e-05,
      "loss": 2.7698,
      "step": 57500
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.946555377923189e-05,
      "loss": 2.7791,
      "step": 58000
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.9460906420790426e-05,
      "loss": 2.7766,
      "step": 58500
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.945625906234896e-05,
      "loss": 2.7627,
      "step": 59000
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.94516117039075e-05,
      "loss": 2.7705,
      "step": 59500
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.944696434546604e-05,
      "loss": 2.7582,
      "step": 60000
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.944231698702458e-05,
      "loss": 2.7632,
      "step": 60500
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.9437669628583114e-05,
      "loss": 2.764,
      "step": 61000
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.9433022270141656e-05,
      "loss": 2.753,
      "step": 61500
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.942837491170019e-05,
      "loss": 2.7469,
      "step": 62000
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.942372755325873e-05,
      "loss": 2.7506,
      "step": 62500
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.941908019481727e-05,
      "loss": 2.746,
      "step": 63000
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.941443283637581e-05,
      "loss": 2.7373,
      "step": 63500
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.9409785477934344e-05,
      "loss": 2.7325,
      "step": 64000
    },
    {
      "epoch": 0.6,
      "learning_rate": 4.9405138119492886e-05,
      "loss": 2.7329,
      "step": 64500
    },
    {
      "epoch": 0.6,
      "learning_rate": 4.940049076105142e-05,
      "loss": 2.7313,
      "step": 65000
    },
    {
      "epoch": 0.61,
      "learning_rate": 4.939584340260996e-05,
      "loss": 2.728,
      "step": 65500
    },
    {
      "epoch": 0.61,
      "learning_rate": 4.93911960441685e-05,
      "loss": 2.7244,
      "step": 66000
    },
    {
      "epoch": 0.62,
      "learning_rate": 4.938654868572703e-05,
      "loss": 2.722,
      "step": 66500
    },
    {
      "epoch": 0.62,
      "learning_rate": 4.9381901327285574e-05,
      "loss": 2.7278,
      "step": 67000
    },
    {
      "epoch": 0.63,
      "learning_rate": 4.937725396884411e-05,
      "loss": 2.7148,
      "step": 67500
    },
    {
      "epoch": 0.63,
      "learning_rate": 4.937260661040265e-05,
      "loss": 2.7251,
      "step": 68000
    },
    {
      "epoch": 0.64,
      "learning_rate": 4.9367959251961185e-05,
      "loss": 2.7076,
      "step": 68500
    },
    {
      "epoch": 0.64,
      "learning_rate": 4.936331189351973e-05,
      "loss": 2.717,
      "step": 69000
    },
    {
      "epoch": 0.65,
      "learning_rate": 4.935866453507826e-05,
      "loss": 2.7075,
      "step": 69500
    },
    {
      "epoch": 0.65,
      "learning_rate": 4.9354017176636804e-05,
      "loss": 2.7132,
      "step": 70000
    },
    {
      "epoch": 0.66,
      "learning_rate": 4.934936981819534e-05,
      "loss": 2.7055,
      "step": 70500
    },
    {
      "epoch": 0.66,
      "learning_rate": 4.934472245975388e-05,
      "loss": 2.7106,
      "step": 71000
    },
    {
      "epoch": 0.66,
      "learning_rate": 4.9340075101312415e-05,
      "loss": 2.71,
      "step": 71500
    },
    {
      "epoch": 0.67,
      "learning_rate": 4.933542774287096e-05,
      "loss": 2.6932,
      "step": 72000
    },
    {
      "epoch": 0.67,
      "learning_rate": 4.933078038442949e-05,
      "loss": 2.7055,
      "step": 72500
    },
    {
      "epoch": 0.68,
      "learning_rate": 4.9326133025988034e-05,
      "loss": 2.7001,
      "step": 73000
    },
    {
      "epoch": 0.68,
      "learning_rate": 4.932148566754657e-05,
      "loss": 2.6966,
      "step": 73500
    },
    {
      "epoch": 0.69,
      "learning_rate": 4.93168383091051e-05,
      "loss": 2.6817,
      "step": 74000
    },
    {
      "epoch": 0.69,
      "learning_rate": 4.9312190950663645e-05,
      "loss": 2.6838,
      "step": 74500
    },
    {
      "epoch": 0.7,
      "learning_rate": 4.930754359222218e-05,
      "loss": 2.6867,
      "step": 75000
    },
    {
      "epoch": 0.7,
      "learning_rate": 4.930289623378072e-05,
      "loss": 2.6786,
      "step": 75500
    },
    {
      "epoch": 0.71,
      "learning_rate": 4.929824887533926e-05,
      "loss": 2.6769,
      "step": 76000
    },
    {
      "epoch": 0.71,
      "learning_rate": 4.92936015168978e-05,
      "loss": 2.668,
      "step": 76500
    },
    {
      "epoch": 0.72,
      "learning_rate": 4.928895415845633e-05,
      "loss": 2.6775,
      "step": 77000
    },
    {
      "epoch": 0.72,
      "learning_rate": 4.9284306800014875e-05,
      "loss": 2.6715,
      "step": 77500
    },
    {
      "epoch": 0.72,
      "learning_rate": 4.927965944157341e-05,
      "loss": 2.67,
      "step": 78000
    },
    {
      "epoch": 0.73,
      "learning_rate": 4.927501208313195e-05,
      "loss": 2.6713,
      "step": 78500
    },
    {
      "epoch": 0.73,
      "learning_rate": 4.9270364724690486e-05,
      "loss": 2.6632,
      "step": 79000
    },
    {
      "epoch": 0.74,
      "learning_rate": 4.926571736624903e-05,
      "loss": 2.6631,
      "step": 79500
    },
    {
      "epoch": 0.74,
      "learning_rate": 4.926107000780756e-05,
      "loss": 2.6666,
      "step": 80000
    },
    {
      "epoch": 0.75,
      "learning_rate": 4.9256422649366105e-05,
      "loss": 2.6637,
      "step": 80500
    },
    {
      "epoch": 0.75,
      "learning_rate": 4.925177529092464e-05,
      "loss": 2.6602,
      "step": 81000
    },
    {
      "epoch": 0.76,
      "learning_rate": 4.924712793248318e-05,
      "loss": 2.657,
      "step": 81500
    },
    {
      "epoch": 0.76,
      "learning_rate": 4.9242480574041716e-05,
      "loss": 2.6548,
      "step": 82000
    },
    {
      "epoch": 0.77,
      "learning_rate": 4.923783321560025e-05,
      "loss": 2.6514,
      "step": 82500
    },
    {
      "epoch": 0.77,
      "learning_rate": 4.923318585715879e-05,
      "loss": 2.6485,
      "step": 83000
    },
    {
      "epoch": 0.78,
      "learning_rate": 4.922853849871733e-05,
      "loss": 2.6519,
      "step": 83500
    },
    {
      "epoch": 0.78,
      "learning_rate": 4.922389114027587e-05,
      "loss": 2.6462,
      "step": 84000
    },
    {
      "epoch": 0.79,
      "learning_rate": 4.9219243781834404e-05,
      "loss": 2.6415,
      "step": 84500
    },
    {
      "epoch": 0.79,
      "learning_rate": 4.9214596423392946e-05,
      "loss": 2.6441,
      "step": 85000
    },
    {
      "epoch": 0.79,
      "learning_rate": 4.920994906495148e-05,
      "loss": 2.6473,
      "step": 85500
    },
    {
      "epoch": 0.8,
      "learning_rate": 4.920530170651002e-05,
      "loss": 2.6343,
      "step": 86000
    },
    {
      "epoch": 0.8,
      "learning_rate": 4.920065434806856e-05,
      "loss": 2.634,
      "step": 86500
    },
    {
      "epoch": 0.81,
      "learning_rate": 4.91960069896271e-05,
      "loss": 2.6393,
      "step": 87000
    },
    {
      "epoch": 0.81,
      "learning_rate": 4.9191359631185634e-05,
      "loss": 2.6328,
      "step": 87500
    },
    {
      "epoch": 0.82,
      "learning_rate": 4.9186712272744176e-05,
      "loss": 2.6286,
      "step": 88000
    },
    {
      "epoch": 0.82,
      "learning_rate": 4.918206491430271e-05,
      "loss": 2.6346,
      "step": 88500
    },
    {
      "epoch": 0.83,
      "learning_rate": 4.917741755586125e-05,
      "loss": 2.6302,
      "step": 89000
    },
    {
      "epoch": 0.83,
      "learning_rate": 4.917277019741979e-05,
      "loss": 2.6394,
      "step": 89500
    },
    {
      "epoch": 0.84,
      "learning_rate": 4.916812283897832e-05,
      "loss": 2.6319,
      "step": 90000
    },
    {
      "epoch": 0.84,
      "learning_rate": 4.9163475480536864e-05,
      "loss": 2.6225,
      "step": 90500
    },
    {
      "epoch": 0.85,
      "learning_rate": 4.91588281220954e-05,
      "loss": 2.6239,
      "step": 91000
    },
    {
      "epoch": 0.85,
      "learning_rate": 4.915418076365394e-05,
      "loss": 2.6232,
      "step": 91500
    },
    {
      "epoch": 0.86,
      "learning_rate": 4.9149533405212476e-05,
      "loss": 2.6222,
      "step": 92000
    },
    {
      "epoch": 0.86,
      "learning_rate": 4.914488604677102e-05,
      "loss": 2.6206,
      "step": 92500
    },
    {
      "epoch": 0.86,
      "learning_rate": 4.914023868832955e-05,
      "loss": 2.6182,
      "step": 93000
    },
    {
      "epoch": 0.87,
      "learning_rate": 4.9135591329888094e-05,
      "loss": 2.6104,
      "step": 93500
    },
    {
      "epoch": 0.87,
      "learning_rate": 4.913094397144663e-05,
      "loss": 2.6129,
      "step": 94000
    },
    {
      "epoch": 0.88,
      "learning_rate": 4.912629661300517e-05,
      "loss": 2.606,
      "step": 94500
    },
    {
      "epoch": 0.88,
      "learning_rate": 4.9121649254563706e-05,
      "loss": 2.6154,
      "step": 95000
    },
    {
      "epoch": 0.89,
      "learning_rate": 4.911700189612225e-05,
      "loss": 2.6098,
      "step": 95500
    },
    {
      "epoch": 0.89,
      "learning_rate": 4.911235453768078e-05,
      "loss": 2.6139,
      "step": 96000
    },
    {
      "epoch": 0.9,
      "learning_rate": 4.9107707179239324e-05,
      "loss": 2.6098,
      "step": 96500
    },
    {
      "epoch": 0.9,
      "learning_rate": 4.910305982079786e-05,
      "loss": 2.601,
      "step": 97000
    },
    {
      "epoch": 0.91,
      "learning_rate": 4.9098412462356394e-05,
      "loss": 2.6003,
      "step": 97500
    },
    {
      "epoch": 0.91,
      "learning_rate": 4.9093765103914935e-05,
      "loss": 2.6065,
      "step": 98000
    },
    {
      "epoch": 0.92,
      "learning_rate": 4.908911774547347e-05,
      "loss": 2.6015,
      "step": 98500
    },
    {
      "epoch": 0.92,
      "learning_rate": 4.908447038703201e-05,
      "loss": 2.5993,
      "step": 99000
    },
    {
      "epoch": 0.92,
      "learning_rate": 4.907982302859055e-05,
      "loss": 2.6024,
      "step": 99500
    },
    {
      "epoch": 0.93,
      "learning_rate": 4.907517567014909e-05,
      "loss": 2.5903,
      "step": 100000
    },
    {
      "epoch": 0.93,
      "learning_rate": 4.9070528311707624e-05,
      "loss": 2.5874,
      "step": 100500
    },
    {
      "epoch": 0.94,
      "learning_rate": 4.9065880953266165e-05,
      "loss": 2.5897,
      "step": 101000
    },
    {
      "epoch": 0.94,
      "learning_rate": 4.90612335948247e-05,
      "loss": 2.5892,
      "step": 101500
    },
    {
      "epoch": 0.95,
      "learning_rate": 4.905658623638324e-05,
      "loss": 2.5923,
      "step": 102000
    },
    {
      "epoch": 0.95,
      "learning_rate": 4.905193887794178e-05,
      "loss": 2.5904,
      "step": 102500
    },
    {
      "epoch": 0.96,
      "learning_rate": 4.904729151950032e-05,
      "loss": 2.5814,
      "step": 103000
    },
    {
      "epoch": 0.96,
      "learning_rate": 4.9042644161058853e-05,
      "loss": 2.5814,
      "step": 103500
    },
    {
      "epoch": 0.97,
      "learning_rate": 4.9037996802617395e-05,
      "loss": 2.5822,
      "step": 104000
    },
    {
      "epoch": 0.97,
      "learning_rate": 4.903334944417593e-05,
      "loss": 2.5875,
      "step": 104500
    },
    {
      "epoch": 0.98,
      "learning_rate": 4.9028702085734465e-05,
      "loss": 2.5868,
      "step": 105000
    },
    {
      "epoch": 0.98,
      "learning_rate": 4.9024054727293013e-05,
      "loss": 2.5837,
      "step": 105500
    },
    {
      "epoch": 0.99,
      "learning_rate": 4.901940736885155e-05,
      "loss": 2.5794,
      "step": 106000
    },
    {
      "epoch": 0.99,
      "learning_rate": 4.901476001041009e-05,
      "loss": 2.5774,
      "step": 106500
    },
    {
      "epoch": 0.99,
      "learning_rate": 4.9010112651968625e-05,
      "loss": 2.5793,
      "step": 107000
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.900546529352717e-05,
      "loss": 2.5743,
      "step": 107500
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.90008179350857e-05,
      "loss": 2.5456,
      "step": 108000
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.899617057664424e-05,
      "loss": 2.5337,
      "step": 108500
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.899152321820278e-05,
      "loss": 2.542,
      "step": 109000
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.898687585976131e-05,
      "loss": 2.5439,
      "step": 109500
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.8982228501319855e-05,
      "loss": 2.5341,
      "step": 110000
    },
    {
      "epoch": 1.03,
      "learning_rate": 4.897758114287839e-05,
      "loss": 2.5415,
      "step": 110500
    },
    {
      "epoch": 1.03,
      "learning_rate": 4.897293378443693e-05,
      "loss": 2.5367,
      "step": 111000
    },
    {
      "epoch": 1.04,
      "learning_rate": 4.8968286425995466e-05,
      "loss": 2.5401,
      "step": 111500
    },
    {
      "epoch": 1.04,
      "learning_rate": 4.896363906755401e-05,
      "loss": 2.5385,
      "step": 112000
    },
    {
      "epoch": 1.05,
      "learning_rate": 4.895899170911254e-05,
      "loss": 2.5412,
      "step": 112500
    },
    {
      "epoch": 1.05,
      "learning_rate": 4.8954344350671085e-05,
      "loss": 2.5451,
      "step": 113000
    },
    {
      "epoch": 1.05,
      "learning_rate": 4.894969699222962e-05,
      "loss": 2.5377,
      "step": 113500
    },
    {
      "epoch": 1.06,
      "learning_rate": 4.894504963378816e-05,
      "loss": 2.5349,
      "step": 114000
    },
    {
      "epoch": 1.06,
      "learning_rate": 4.8940402275346696e-05,
      "loss": 2.5364,
      "step": 114500
    },
    {
      "epoch": 1.07,
      "learning_rate": 4.893575491690524e-05,
      "loss": 2.5315,
      "step": 115000
    }
  ],
  "logging_steps": 500,
  "max_steps": 5379900,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 50,
  "save_steps": 1000,
  "total_flos": 1.46391313022976e+19,
  "train_batch_size": 7,
  "trial_name": null,
  "trial_params": null
}