| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 2.9971830985915493, |
| "eval_steps": 500, |
| "global_step": 2661, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.0011267605633802818, |
| "grad_norm": 54.608844611192445, |
| "learning_rate": 1.8726591760299626e-07, |
| "loss": 11.127, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.0022535211267605635, |
| "grad_norm": 55.36601190256884, |
| "learning_rate": 3.7453183520599253e-07, |
| "loss": 11.0439, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.0033802816901408453, |
| "grad_norm": 57.0063901888475, |
| "learning_rate": 5.617977528089887e-07, |
| "loss": 11.0579, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.004507042253521127, |
| "grad_norm": 56.35810808194075, |
| "learning_rate": 7.490636704119851e-07, |
| "loss": 11.0157, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.005633802816901409, |
| "grad_norm": 55.58312483866836, |
| "learning_rate": 9.363295880149814e-07, |
| "loss": 10.9188, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.0067605633802816905, |
| "grad_norm": 53.77064024372009, |
| "learning_rate": 1.1235955056179775e-06, |
| "loss": 11.0218, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.007887323943661971, |
| "grad_norm": 56.27070673773756, |
| "learning_rate": 1.310861423220974e-06, |
| "loss": 10.8824, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.009014084507042254, |
| "grad_norm": 60.218078994693784, |
| "learning_rate": 1.4981273408239701e-06, |
| "loss": 10.7133, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.010140845070422535, |
| "grad_norm": 61.64507136781477, |
| "learning_rate": 1.6853932584269663e-06, |
| "loss": 10.633, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.011267605633802818, |
| "grad_norm": 61.97384688440408, |
| "learning_rate": 1.8726591760299627e-06, |
| "loss": 10.6376, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.012394366197183098, |
| "grad_norm": 80.39629204947155, |
| "learning_rate": 2.0599250936329587e-06, |
| "loss": 9.5566, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.013521126760563381, |
| "grad_norm": 86.33183007861814, |
| "learning_rate": 2.247191011235955e-06, |
| "loss": 9.394, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.014647887323943662, |
| "grad_norm": 95.91392176867679, |
| "learning_rate": 2.4344569288389516e-06, |
| "loss": 8.8441, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.015774647887323943, |
| "grad_norm": 103.31532121583676, |
| "learning_rate": 2.621722846441948e-06, |
| "loss": 8.6743, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.016901408450704224, |
| "grad_norm": 65.1185503810022, |
| "learning_rate": 2.808988764044944e-06, |
| "loss": 3.687, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.018028169014084508, |
| "grad_norm": 59.49401022581223, |
| "learning_rate": 2.9962546816479402e-06, |
| "loss": 3.345, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.01915492957746479, |
| "grad_norm": 51.01948856952058, |
| "learning_rate": 3.1835205992509364e-06, |
| "loss": 3.1423, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.02028169014084507, |
| "grad_norm": 37.11138317252327, |
| "learning_rate": 3.3707865168539327e-06, |
| "loss": 2.5809, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.02140845070422535, |
| "grad_norm": 31.987438014939944, |
| "learning_rate": 3.558052434456929e-06, |
| "loss": 2.3297, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.022535211267605635, |
| "grad_norm": 6.423839988685857, |
| "learning_rate": 3.7453183520599255e-06, |
| "loss": 1.364, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.023661971830985916, |
| "grad_norm": 5.045295439788782, |
| "learning_rate": 3.932584269662922e-06, |
| "loss": 1.2811, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.024788732394366197, |
| "grad_norm": 4.170853030544661, |
| "learning_rate": 4.1198501872659175e-06, |
| "loss": 1.2669, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.025915492957746478, |
| "grad_norm": 3.3349694092652165, |
| "learning_rate": 4.307116104868914e-06, |
| "loss": 1.1987, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.027042253521126762, |
| "grad_norm": 2.533758218153574, |
| "learning_rate": 4.49438202247191e-06, |
| "loss": 1.0448, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.028169014084507043, |
| "grad_norm": 2.155231765906746, |
| "learning_rate": 4.6816479400749066e-06, |
| "loss": 1.0711, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.029295774647887324, |
| "grad_norm": 1.8042975450695744, |
| "learning_rate": 4.868913857677903e-06, |
| "loss": 1.0416, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.030422535211267605, |
| "grad_norm": 1.3444349342706392, |
| "learning_rate": 5.056179775280899e-06, |
| "loss": 0.9522, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.031549295774647886, |
| "grad_norm": 11.864058093901697, |
| "learning_rate": 5.243445692883896e-06, |
| "loss": 0.9194, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.03267605633802817, |
| "grad_norm": 6.2119522899213475, |
| "learning_rate": 5.430711610486891e-06, |
| "loss": 0.8908, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.03380281690140845, |
| "grad_norm": 1.4146654224158117, |
| "learning_rate": 5.617977528089888e-06, |
| "loss": 0.8566, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.03492957746478873, |
| "grad_norm": 1.0329328673745717, |
| "learning_rate": 5.805243445692885e-06, |
| "loss": 0.8422, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.036056338028169016, |
| "grad_norm": 0.9928258011925138, |
| "learning_rate": 5.9925093632958805e-06, |
| "loss": 0.8016, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.03718309859154929, |
| "grad_norm": 0.8349238341651046, |
| "learning_rate": 6.179775280898876e-06, |
| "loss": 0.7795, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.03830985915492958, |
| "grad_norm": 0.7208913642406135, |
| "learning_rate": 6.367041198501873e-06, |
| "loss": 0.7584, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.03943661971830986, |
| "grad_norm": 0.7512239641572525, |
| "learning_rate": 6.554307116104869e-06, |
| "loss": 0.7576, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.04056338028169014, |
| "grad_norm": 0.7667204457282304, |
| "learning_rate": 6.741573033707865e-06, |
| "loss": 0.7354, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.041690140845070424, |
| "grad_norm": 0.5845579314338065, |
| "learning_rate": 6.928838951310862e-06, |
| "loss": 0.7496, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.0428169014084507, |
| "grad_norm": 0.6139154497318552, |
| "learning_rate": 7.116104868913858e-06, |
| "loss": 0.7276, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.043943661971830986, |
| "grad_norm": 0.6594049952301563, |
| "learning_rate": 7.303370786516854e-06, |
| "loss": 0.7092, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.04507042253521127, |
| "grad_norm": 0.6420935015482123, |
| "learning_rate": 7.490636704119851e-06, |
| "loss": 0.7326, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.04619718309859155, |
| "grad_norm": 0.5281343129019502, |
| "learning_rate": 7.677902621722846e-06, |
| "loss": 0.6908, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.04732394366197183, |
| "grad_norm": 0.5290984786213891, |
| "learning_rate": 7.865168539325843e-06, |
| "loss": 0.6736, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.048450704225352116, |
| "grad_norm": 0.4441023144930531, |
| "learning_rate": 8.05243445692884e-06, |
| "loss": 0.6482, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.049577464788732394, |
| "grad_norm": 0.494465466454887, |
| "learning_rate": 8.239700374531835e-06, |
| "loss": 0.6487, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.05070422535211268, |
| "grad_norm": 0.46992378200427287, |
| "learning_rate": 8.426966292134832e-06, |
| "loss": 0.6515, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.051830985915492955, |
| "grad_norm": 0.48528089193349894, |
| "learning_rate": 8.614232209737828e-06, |
| "loss": 0.6652, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.05295774647887324, |
| "grad_norm": 0.4309300506022204, |
| "learning_rate": 8.801498127340826e-06, |
| "loss": 0.6395, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.054084507042253524, |
| "grad_norm": 0.46983425215048863, |
| "learning_rate": 8.98876404494382e-06, |
| "loss": 0.6498, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.0552112676056338, |
| "grad_norm": 0.4232542955369631, |
| "learning_rate": 9.176029962546817e-06, |
| "loss": 0.6074, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.056338028169014086, |
| "grad_norm": 0.4950602179089511, |
| "learning_rate": 9.363295880149813e-06, |
| "loss": 0.6537, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.05746478873239436, |
| "grad_norm": 0.40781412615064017, |
| "learning_rate": 9.550561797752809e-06, |
| "loss": 0.6075, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.05859154929577465, |
| "grad_norm": 0.36716946070233086, |
| "learning_rate": 9.737827715355806e-06, |
| "loss": 0.5996, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.05971830985915493, |
| "grad_norm": 0.3285197883333934, |
| "learning_rate": 9.925093632958802e-06, |
| "loss": 0.5855, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.06084507042253521, |
| "grad_norm": 0.37133706441288794, |
| "learning_rate": 1.0112359550561798e-05, |
| "loss": 0.6064, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.061971830985915494, |
| "grad_norm": 0.39194875781607186, |
| "learning_rate": 1.0299625468164795e-05, |
| "loss": 0.6192, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.06309859154929577, |
| "grad_norm": 0.35740092268419, |
| "learning_rate": 1.0486891385767791e-05, |
| "loss": 0.5782, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.06422535211267606, |
| "grad_norm": 0.3684352593616929, |
| "learning_rate": 1.0674157303370787e-05, |
| "loss": 0.5872, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.06535211267605634, |
| "grad_norm": 0.3025401149269732, |
| "learning_rate": 1.0861423220973783e-05, |
| "loss": 0.5798, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.06647887323943662, |
| "grad_norm": 0.3590555360599909, |
| "learning_rate": 1.104868913857678e-05, |
| "loss": 0.5729, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.0676056338028169, |
| "grad_norm": 0.34830243471060585, |
| "learning_rate": 1.1235955056179776e-05, |
| "loss": 0.5957, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.06873239436619719, |
| "grad_norm": 0.3460855247633223, |
| "learning_rate": 1.1423220973782772e-05, |
| "loss": 0.6068, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.06985915492957746, |
| "grad_norm": 0.35193542528227495, |
| "learning_rate": 1.161048689138577e-05, |
| "loss": 0.5972, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.07098591549295774, |
| "grad_norm": 0.31692586734081885, |
| "learning_rate": 1.1797752808988765e-05, |
| "loss": 0.556, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.07211267605633803, |
| "grad_norm": 0.37008606019965695, |
| "learning_rate": 1.1985018726591761e-05, |
| "loss": 0.5631, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.07323943661971831, |
| "grad_norm": 0.3526075220813616, |
| "learning_rate": 1.2172284644194758e-05, |
| "loss": 0.5881, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.07436619718309859, |
| "grad_norm": 0.2850697798787257, |
| "learning_rate": 1.2359550561797752e-05, |
| "loss": 0.5767, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.07549295774647888, |
| "grad_norm": 0.3287887407319393, |
| "learning_rate": 1.254681647940075e-05, |
| "loss": 0.5817, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.07661971830985916, |
| "grad_norm": 0.28737949408045615, |
| "learning_rate": 1.2734082397003746e-05, |
| "loss": 0.5433, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.07774647887323943, |
| "grad_norm": 0.3319938188307383, |
| "learning_rate": 1.2921348314606743e-05, |
| "loss": 0.6024, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.07887323943661972, |
| "grad_norm": 0.30064037993492726, |
| "learning_rate": 1.3108614232209737e-05, |
| "loss": 0.5634, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.08, |
| "grad_norm": 0.2990949423538674, |
| "learning_rate": 1.3295880149812733e-05, |
| "loss": 0.5624, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.08112676056338028, |
| "grad_norm": 0.3711818897681024, |
| "learning_rate": 1.348314606741573e-05, |
| "loss": 0.5755, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.08225352112676056, |
| "grad_norm": 0.30557570361690733, |
| "learning_rate": 1.3670411985018728e-05, |
| "loss": 0.5373, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.08338028169014085, |
| "grad_norm": 0.28899560683771475, |
| "learning_rate": 1.3857677902621724e-05, |
| "loss": 0.5435, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.08450704225352113, |
| "grad_norm": 0.33073697214599473, |
| "learning_rate": 1.4044943820224721e-05, |
| "loss": 0.5498, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.0856338028169014, |
| "grad_norm": 0.34803723973746536, |
| "learning_rate": 1.4232209737827715e-05, |
| "loss": 0.5429, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.0867605633802817, |
| "grad_norm": 0.3509830944935367, |
| "learning_rate": 1.4419475655430711e-05, |
| "loss": 0.5601, |
| "step": 77 |
| }, |
| { |
| "epoch": 0.08788732394366197, |
| "grad_norm": 0.2550870509542074, |
| "learning_rate": 1.4606741573033709e-05, |
| "loss": 0.5507, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.08901408450704225, |
| "grad_norm": 0.35308533996212627, |
| "learning_rate": 1.4794007490636705e-05, |
| "loss": 0.5496, |
| "step": 79 |
| }, |
| { |
| "epoch": 0.09014084507042254, |
| "grad_norm": 0.3081240884981584, |
| "learning_rate": 1.4981273408239702e-05, |
| "loss": 0.5218, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.09126760563380282, |
| "grad_norm": 0.33145750299141974, |
| "learning_rate": 1.5168539325842698e-05, |
| "loss": 0.5071, |
| "step": 81 |
| }, |
| { |
| "epoch": 0.0923943661971831, |
| "grad_norm": 0.26989050721529634, |
| "learning_rate": 1.5355805243445692e-05, |
| "loss": 0.5137, |
| "step": 82 |
| }, |
| { |
| "epoch": 0.09352112676056339, |
| "grad_norm": 0.3076846617884833, |
| "learning_rate": 1.554307116104869e-05, |
| "loss": 0.511, |
| "step": 83 |
| }, |
| { |
| "epoch": 0.09464788732394366, |
| "grad_norm": 0.34576789102985467, |
| "learning_rate": 1.5730337078651687e-05, |
| "loss": 0.5382, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.09577464788732394, |
| "grad_norm": 0.3102686540327493, |
| "learning_rate": 1.591760299625468e-05, |
| "loss": 0.5391, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.09690140845070423, |
| "grad_norm": 0.2907807397746215, |
| "learning_rate": 1.610486891385768e-05, |
| "loss": 0.5421, |
| "step": 86 |
| }, |
| { |
| "epoch": 0.09802816901408451, |
| "grad_norm": 0.3538353366658328, |
| "learning_rate": 1.6292134831460676e-05, |
| "loss": 0.54, |
| "step": 87 |
| }, |
| { |
| "epoch": 0.09915492957746479, |
| "grad_norm": 0.32089888058996763, |
| "learning_rate": 1.647940074906367e-05, |
| "loss": 0.5281, |
| "step": 88 |
| }, |
| { |
| "epoch": 0.10028169014084506, |
| "grad_norm": 0.30311268558188664, |
| "learning_rate": 1.6666666666666667e-05, |
| "loss": 0.5136, |
| "step": 89 |
| }, |
| { |
| "epoch": 0.10140845070422536, |
| "grad_norm": 0.32804689946336774, |
| "learning_rate": 1.6853932584269665e-05, |
| "loss": 0.5344, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.10253521126760563, |
| "grad_norm": 0.3634597759205688, |
| "learning_rate": 1.704119850187266e-05, |
| "loss": 0.5401, |
| "step": 91 |
| }, |
| { |
| "epoch": 0.10366197183098591, |
| "grad_norm": 0.3671107837131908, |
| "learning_rate": 1.7228464419475657e-05, |
| "loss": 0.5274, |
| "step": 92 |
| }, |
| { |
| "epoch": 0.1047887323943662, |
| "grad_norm": 0.3126280631131301, |
| "learning_rate": 1.7415730337078654e-05, |
| "loss": 0.5585, |
| "step": 93 |
| }, |
| { |
| "epoch": 0.10591549295774648, |
| "grad_norm": 0.3840901646645938, |
| "learning_rate": 1.760299625468165e-05, |
| "loss": 0.5543, |
| "step": 94 |
| }, |
| { |
| "epoch": 0.10704225352112676, |
| "grad_norm": 0.34406766779885367, |
| "learning_rate": 1.7790262172284646e-05, |
| "loss": 0.5155, |
| "step": 95 |
| }, |
| { |
| "epoch": 0.10816901408450705, |
| "grad_norm": 0.36234031937492145, |
| "learning_rate": 1.797752808988764e-05, |
| "loss": 0.5551, |
| "step": 96 |
| }, |
| { |
| "epoch": 0.10929577464788733, |
| "grad_norm": 0.36566069027244064, |
| "learning_rate": 1.8164794007490637e-05, |
| "loss": 0.5217, |
| "step": 97 |
| }, |
| { |
| "epoch": 0.1104225352112676, |
| "grad_norm": 0.3763936721342812, |
| "learning_rate": 1.8352059925093635e-05, |
| "loss": 0.5074, |
| "step": 98 |
| }, |
| { |
| "epoch": 0.1115492957746479, |
| "grad_norm": 0.3095163828509057, |
| "learning_rate": 1.8539325842696632e-05, |
| "loss": 0.5049, |
| "step": 99 |
| }, |
| { |
| "epoch": 0.11267605633802817, |
| "grad_norm": 0.32865175173186806, |
| "learning_rate": 1.8726591760299626e-05, |
| "loss": 0.5154, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.11380281690140845, |
| "grad_norm": 0.3018690447008865, |
| "learning_rate": 1.891385767790262e-05, |
| "loss": 0.5111, |
| "step": 101 |
| }, |
| { |
| "epoch": 0.11492957746478873, |
| "grad_norm": 0.32640387711607555, |
| "learning_rate": 1.9101123595505618e-05, |
| "loss": 0.5083, |
| "step": 102 |
| }, |
| { |
| "epoch": 0.11605633802816902, |
| "grad_norm": 0.2905811810611324, |
| "learning_rate": 1.9288389513108615e-05, |
| "loss": 0.5241, |
| "step": 103 |
| }, |
| { |
| "epoch": 0.1171830985915493, |
| "grad_norm": 0.2871053244214264, |
| "learning_rate": 1.9475655430711613e-05, |
| "loss": 0.5136, |
| "step": 104 |
| }, |
| { |
| "epoch": 0.11830985915492957, |
| "grad_norm": 0.3024184852425039, |
| "learning_rate": 1.9662921348314607e-05, |
| "loss": 0.5238, |
| "step": 105 |
| }, |
| { |
| "epoch": 0.11943661971830986, |
| "grad_norm": 0.29008239870487684, |
| "learning_rate": 1.9850187265917604e-05, |
| "loss": 0.4903, |
| "step": 106 |
| }, |
| { |
| "epoch": 0.12056338028169014, |
| "grad_norm": 0.28736122238768774, |
| "learning_rate": 2.00374531835206e-05, |
| "loss": 0.5224, |
| "step": 107 |
| }, |
| { |
| "epoch": 0.12169014084507042, |
| "grad_norm": 0.33884824146385534, |
| "learning_rate": 2.0224719101123596e-05, |
| "loss": 0.5159, |
| "step": 108 |
| }, |
| { |
| "epoch": 0.12281690140845071, |
| "grad_norm": 0.33764167173784454, |
| "learning_rate": 2.0411985018726593e-05, |
| "loss": 0.4998, |
| "step": 109 |
| }, |
| { |
| "epoch": 0.12394366197183099, |
| "grad_norm": 0.28667282492308055, |
| "learning_rate": 2.059925093632959e-05, |
| "loss": 0.4886, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.12507042253521128, |
| "grad_norm": 0.3586302431156048, |
| "learning_rate": 2.0786516853932585e-05, |
| "loss": 0.535, |
| "step": 111 |
| }, |
| { |
| "epoch": 0.12619718309859154, |
| "grad_norm": 0.29139506020299266, |
| "learning_rate": 2.0973782771535582e-05, |
| "loss": 0.5203, |
| "step": 112 |
| }, |
| { |
| "epoch": 0.12732394366197183, |
| "grad_norm": 0.32466629756389626, |
| "learning_rate": 2.1161048689138577e-05, |
| "loss": 0.4974, |
| "step": 113 |
| }, |
| { |
| "epoch": 0.12845070422535212, |
| "grad_norm": 0.319478891916245, |
| "learning_rate": 2.1348314606741574e-05, |
| "loss": 0.5037, |
| "step": 114 |
| }, |
| { |
| "epoch": 0.1295774647887324, |
| "grad_norm": 0.30895243254092425, |
| "learning_rate": 2.153558052434457e-05, |
| "loss": 0.506, |
| "step": 115 |
| }, |
| { |
| "epoch": 0.13070422535211268, |
| "grad_norm": 0.3081561843950191, |
| "learning_rate": 2.1722846441947566e-05, |
| "loss": 0.4933, |
| "step": 116 |
| }, |
| { |
| "epoch": 0.13183098591549297, |
| "grad_norm": 0.35620398362072353, |
| "learning_rate": 2.1910112359550563e-05, |
| "loss": 0.4953, |
| "step": 117 |
| }, |
| { |
| "epoch": 0.13295774647887323, |
| "grad_norm": 0.29870939906727606, |
| "learning_rate": 2.209737827715356e-05, |
| "loss": 0.4991, |
| "step": 118 |
| }, |
| { |
| "epoch": 0.13408450704225353, |
| "grad_norm": 0.34582247341621225, |
| "learning_rate": 2.2284644194756555e-05, |
| "loss": 0.5069, |
| "step": 119 |
| }, |
| { |
| "epoch": 0.1352112676056338, |
| "grad_norm": 0.3137351594240408, |
| "learning_rate": 2.2471910112359552e-05, |
| "loss": 0.4734, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.13633802816901408, |
| "grad_norm": 0.3390962431731741, |
| "learning_rate": 2.2659176029962546e-05, |
| "loss": 0.4921, |
| "step": 121 |
| }, |
| { |
| "epoch": 0.13746478873239437, |
| "grad_norm": 0.35843870700131947, |
| "learning_rate": 2.2846441947565544e-05, |
| "loss": 0.4967, |
| "step": 122 |
| }, |
| { |
| "epoch": 0.13859154929577464, |
| "grad_norm": 0.3454143874126859, |
| "learning_rate": 2.303370786516854e-05, |
| "loss": 0.4949, |
| "step": 123 |
| }, |
| { |
| "epoch": 0.13971830985915493, |
| "grad_norm": 0.3793108056331812, |
| "learning_rate": 2.322097378277154e-05, |
| "loss": 0.4919, |
| "step": 124 |
| }, |
| { |
| "epoch": 0.14084507042253522, |
| "grad_norm": 0.33289196984913244, |
| "learning_rate": 2.3408239700374533e-05, |
| "loss": 0.4913, |
| "step": 125 |
| }, |
| { |
| "epoch": 0.14197183098591548, |
| "grad_norm": 0.3425874685500103, |
| "learning_rate": 2.359550561797753e-05, |
| "loss": 0.5134, |
| "step": 126 |
| }, |
| { |
| "epoch": 0.14309859154929577, |
| "grad_norm": 0.35144318220532755, |
| "learning_rate": 2.3782771535580524e-05, |
| "loss": 0.4847, |
| "step": 127 |
| }, |
| { |
| "epoch": 0.14422535211267606, |
| "grad_norm": 0.36947792672202345, |
| "learning_rate": 2.3970037453183522e-05, |
| "loss": 0.5171, |
| "step": 128 |
| }, |
| { |
| "epoch": 0.14535211267605633, |
| "grad_norm": 0.3525506964360257, |
| "learning_rate": 2.415730337078652e-05, |
| "loss": 0.5108, |
| "step": 129 |
| }, |
| { |
| "epoch": 0.14647887323943662, |
| "grad_norm": 0.36146203432070717, |
| "learning_rate": 2.4344569288389517e-05, |
| "loss": 0.5098, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.1476056338028169, |
| "grad_norm": 0.35620473132880365, |
| "learning_rate": 2.453183520599251e-05, |
| "loss": 0.4795, |
| "step": 131 |
| }, |
| { |
| "epoch": 0.14873239436619717, |
| "grad_norm": 0.3630204881069924, |
| "learning_rate": 2.4719101123595505e-05, |
| "loss": 0.5187, |
| "step": 132 |
| }, |
| { |
| "epoch": 0.14985915492957746, |
| "grad_norm": 0.3482086458727476, |
| "learning_rate": 2.4906367041198502e-05, |
| "loss": 0.4977, |
| "step": 133 |
| }, |
| { |
| "epoch": 0.15098591549295776, |
| "grad_norm": 0.368109870697486, |
| "learning_rate": 2.50936329588015e-05, |
| "loss": 0.5202, |
| "step": 134 |
| }, |
| { |
| "epoch": 0.15211267605633802, |
| "grad_norm": 0.32473683043927754, |
| "learning_rate": 2.5280898876404497e-05, |
| "loss": 0.491, |
| "step": 135 |
| }, |
| { |
| "epoch": 0.1532394366197183, |
| "grad_norm": 0.39170978189739086, |
| "learning_rate": 2.546816479400749e-05, |
| "loss": 0.4819, |
| "step": 136 |
| }, |
| { |
| "epoch": 0.1543661971830986, |
| "grad_norm": 0.33051320119460775, |
| "learning_rate": 2.565543071161049e-05, |
| "loss": 0.4803, |
| "step": 137 |
| }, |
| { |
| "epoch": 0.15549295774647887, |
| "grad_norm": 0.39201639325298016, |
| "learning_rate": 2.5842696629213486e-05, |
| "loss": 0.4796, |
| "step": 138 |
| }, |
| { |
| "epoch": 0.15661971830985916, |
| "grad_norm": 0.3436149030584173, |
| "learning_rate": 2.6029962546816484e-05, |
| "loss": 0.4883, |
| "step": 139 |
| }, |
| { |
| "epoch": 0.15774647887323945, |
| "grad_norm": 0.366083590088758, |
| "learning_rate": 2.6217228464419475e-05, |
| "loss": 0.4894, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.1588732394366197, |
| "grad_norm": 0.30735207167025824, |
| "learning_rate": 2.6404494382022472e-05, |
| "loss": 0.501, |
| "step": 141 |
| }, |
| { |
| "epoch": 0.16, |
| "grad_norm": 0.4232062943090275, |
| "learning_rate": 2.6591760299625466e-05, |
| "loss": 0.5067, |
| "step": 142 |
| }, |
| { |
| "epoch": 0.1611267605633803, |
| "grad_norm": 0.3322052225992824, |
| "learning_rate": 2.6779026217228464e-05, |
| "loss": 0.4802, |
| "step": 143 |
| }, |
| { |
| "epoch": 0.16225352112676056, |
| "grad_norm": 0.383878101272465, |
| "learning_rate": 2.696629213483146e-05, |
| "loss": 0.498, |
| "step": 144 |
| }, |
| { |
| "epoch": 0.16338028169014085, |
| "grad_norm": 0.34217239742624306, |
| "learning_rate": 2.715355805243446e-05, |
| "loss": 0.4757, |
| "step": 145 |
| }, |
| { |
| "epoch": 0.1645070422535211, |
| "grad_norm": 0.38048598444904064, |
| "learning_rate": 2.7340823970037456e-05, |
| "loss": 0.5133, |
| "step": 146 |
| }, |
| { |
| "epoch": 0.1656338028169014, |
| "grad_norm": 0.3704985899909714, |
| "learning_rate": 2.752808988764045e-05, |
| "loss": 0.5114, |
| "step": 147 |
| }, |
| { |
| "epoch": 0.1667605633802817, |
| "grad_norm": 0.347020727924182, |
| "learning_rate": 2.7715355805243448e-05, |
| "loss": 0.4881, |
| "step": 148 |
| }, |
| { |
| "epoch": 0.16788732394366196, |
| "grad_norm": 0.3636357042062005, |
| "learning_rate": 2.7902621722846445e-05, |
| "loss": 0.4777, |
| "step": 149 |
| }, |
| { |
| "epoch": 0.16901408450704225, |
| "grad_norm": 0.3634097825691515, |
| "learning_rate": 2.8089887640449443e-05, |
| "loss": 0.4889, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.17014084507042254, |
| "grad_norm": 0.38132967790819516, |
| "learning_rate": 2.8277153558052437e-05, |
| "loss": 0.4746, |
| "step": 151 |
| }, |
| { |
| "epoch": 0.1712676056338028, |
| "grad_norm": 0.3685509255363722, |
| "learning_rate": 2.846441947565543e-05, |
| "loss": 0.4751, |
| "step": 152 |
| }, |
| { |
| "epoch": 0.1723943661971831, |
| "grad_norm": 0.3677251643575248, |
| "learning_rate": 2.8651685393258425e-05, |
| "loss": 0.4847, |
| "step": 153 |
| }, |
| { |
| "epoch": 0.1735211267605634, |
| "grad_norm": 0.3699177372205316, |
| "learning_rate": 2.8838951310861422e-05, |
| "loss": 0.4738, |
| "step": 154 |
| }, |
| { |
| "epoch": 0.17464788732394365, |
| "grad_norm": 0.3483871961624536, |
| "learning_rate": 2.902621722846442e-05, |
| "loss": 0.4805, |
| "step": 155 |
| }, |
| { |
| "epoch": 0.17577464788732394, |
| "grad_norm": 0.3920789722753322, |
| "learning_rate": 2.9213483146067417e-05, |
| "loss": 0.4712, |
| "step": 156 |
| }, |
| { |
| "epoch": 0.17690140845070423, |
| "grad_norm": 0.3551926481565757, |
| "learning_rate": 2.940074906367041e-05, |
| "loss": 0.5052, |
| "step": 157 |
| }, |
| { |
| "epoch": 0.1780281690140845, |
| "grad_norm": 0.36364018726777314, |
| "learning_rate": 2.958801498127341e-05, |
| "loss": 0.4584, |
| "step": 158 |
| }, |
| { |
| "epoch": 0.1791549295774648, |
| "grad_norm": 0.3972954045737472, |
| "learning_rate": 2.9775280898876406e-05, |
| "loss": 0.514, |
| "step": 159 |
| }, |
| { |
| "epoch": 0.18028169014084508, |
| "grad_norm": 0.4587946650119913, |
| "learning_rate": 2.9962546816479404e-05, |
| "loss": 0.4727, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.18140845070422534, |
| "grad_norm": 0.351499766618042, |
| "learning_rate": 3.01498127340824e-05, |
| "loss": 0.4992, |
| "step": 161 |
| }, |
| { |
| "epoch": 0.18253521126760563, |
| "grad_norm": 0.5422721694050262, |
| "learning_rate": 3.0337078651685396e-05, |
| "loss": 0.4764, |
| "step": 162 |
| }, |
| { |
| "epoch": 0.18366197183098593, |
| "grad_norm": 0.5273905666988745, |
| "learning_rate": 3.052434456928839e-05, |
| "loss": 0.4953, |
| "step": 163 |
| }, |
| { |
| "epoch": 0.1847887323943662, |
| "grad_norm": 0.399247691576563, |
| "learning_rate": 3.0711610486891384e-05, |
| "loss": 0.4579, |
| "step": 164 |
| }, |
| { |
| "epoch": 0.18591549295774648, |
| "grad_norm": 0.43479889451138115, |
| "learning_rate": 3.089887640449438e-05, |
| "loss": 0.4643, |
| "step": 165 |
| }, |
| { |
| "epoch": 0.18704225352112677, |
| "grad_norm": 0.5429791467443568, |
| "learning_rate": 3.108614232209738e-05, |
| "loss": 0.5066, |
| "step": 166 |
| }, |
| { |
| "epoch": 0.18816901408450704, |
| "grad_norm": 0.4540405451965911, |
| "learning_rate": 3.1273408239700376e-05, |
| "loss": 0.4893, |
| "step": 167 |
| }, |
| { |
| "epoch": 0.18929577464788733, |
| "grad_norm": 0.5823441022105608, |
| "learning_rate": 3.1460674157303374e-05, |
| "loss": 0.4755, |
| "step": 168 |
| }, |
| { |
| "epoch": 0.19042253521126762, |
| "grad_norm": 0.7004005424258571, |
| "learning_rate": 3.164794007490637e-05, |
| "loss": 0.4891, |
| "step": 169 |
| }, |
| { |
| "epoch": 0.19154929577464788, |
| "grad_norm": 0.40931277269381217, |
| "learning_rate": 3.183520599250936e-05, |
| "loss": 0.4499, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.19267605633802817, |
| "grad_norm": 0.6973058224617746, |
| "learning_rate": 3.202247191011236e-05, |
| "loss": 0.482, |
| "step": 171 |
| }, |
| { |
| "epoch": 0.19380281690140846, |
| "grad_norm": 0.7957866592242848, |
| "learning_rate": 3.220973782771536e-05, |
| "loss": 0.4844, |
| "step": 172 |
| }, |
| { |
| "epoch": 0.19492957746478873, |
| "grad_norm": 0.5275061025504003, |
| "learning_rate": 3.2397003745318354e-05, |
| "loss": 0.4652, |
| "step": 173 |
| }, |
| { |
| "epoch": 0.19605633802816902, |
| "grad_norm": 1.1842902748039579, |
| "learning_rate": 3.258426966292135e-05, |
| "loss": 0.4868, |
| "step": 174 |
| }, |
| { |
| "epoch": 0.19718309859154928, |
| "grad_norm": 0.6115542815114193, |
| "learning_rate": 3.277153558052435e-05, |
| "loss": 0.4724, |
| "step": 175 |
| }, |
| { |
| "epoch": 0.19830985915492957, |
| "grad_norm": 0.6492777695364972, |
| "learning_rate": 3.295880149812734e-05, |
| "loss": 0.441, |
| "step": 176 |
| }, |
| { |
| "epoch": 0.19943661971830987, |
| "grad_norm": 0.5988372578158707, |
| "learning_rate": 3.314606741573034e-05, |
| "loss": 0.4792, |
| "step": 177 |
| }, |
| { |
| "epoch": 0.20056338028169013, |
| "grad_norm": 0.5653319864850155, |
| "learning_rate": 3.3333333333333335e-05, |
| "loss": 0.4868, |
| "step": 178 |
| }, |
| { |
| "epoch": 0.20169014084507042, |
| "grad_norm": 0.5435905257267866, |
| "learning_rate": 3.352059925093633e-05, |
| "loss": 0.4747, |
| "step": 179 |
| }, |
| { |
| "epoch": 0.2028169014084507, |
| "grad_norm": 0.5572162635083778, |
| "learning_rate": 3.370786516853933e-05, |
| "loss": 0.4752, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.20394366197183098, |
| "grad_norm": 0.5904914688697619, |
| "learning_rate": 3.389513108614232e-05, |
| "loss": 0.5, |
| "step": 181 |
| }, |
| { |
| "epoch": 0.20507042253521127, |
| "grad_norm": 0.5718739497949387, |
| "learning_rate": 3.408239700374532e-05, |
| "loss": 0.4834, |
| "step": 182 |
| }, |
| { |
| "epoch": 0.20619718309859156, |
| "grad_norm": 0.5136731587567704, |
| "learning_rate": 3.4269662921348316e-05, |
| "loss": 0.5129, |
| "step": 183 |
| }, |
| { |
| "epoch": 0.20732394366197182, |
| "grad_norm": 0.5553306091609278, |
| "learning_rate": 3.445692883895131e-05, |
| "loss": 0.4769, |
| "step": 184 |
| }, |
| { |
| "epoch": 0.2084507042253521, |
| "grad_norm": 0.5041333309153727, |
| "learning_rate": 3.464419475655431e-05, |
| "loss": 0.4673, |
| "step": 185 |
| }, |
| { |
| "epoch": 0.2095774647887324, |
| "grad_norm": 0.526418919436349, |
| "learning_rate": 3.483146067415731e-05, |
| "loss": 0.482, |
| "step": 186 |
| }, |
| { |
| "epoch": 0.21070422535211267, |
| "grad_norm": 0.5365226140722351, |
| "learning_rate": 3.5018726591760305e-05, |
| "loss": 0.4771, |
| "step": 187 |
| }, |
| { |
| "epoch": 0.21183098591549296, |
| "grad_norm": 0.456310763127666, |
| "learning_rate": 3.52059925093633e-05, |
| "loss": 0.4917, |
| "step": 188 |
| }, |
| { |
| "epoch": 0.21295774647887325, |
| "grad_norm": 0.5038033687577937, |
| "learning_rate": 3.5393258426966294e-05, |
| "loss": 0.4648, |
| "step": 189 |
| }, |
| { |
| "epoch": 0.2140845070422535, |
| "grad_norm": 0.3999119007137244, |
| "learning_rate": 3.558052434456929e-05, |
| "loss": 0.4736, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.2152112676056338, |
| "grad_norm": 0.5384182633924429, |
| "learning_rate": 3.576779026217228e-05, |
| "loss": 0.4614, |
| "step": 191 |
| }, |
| { |
| "epoch": 0.2163380281690141, |
| "grad_norm": 0.3938798987316564, |
| "learning_rate": 3.595505617977528e-05, |
| "loss": 0.4824, |
| "step": 192 |
| }, |
| { |
| "epoch": 0.21746478873239436, |
| "grad_norm": 0.47825665590170224, |
| "learning_rate": 3.614232209737828e-05, |
| "loss": 0.4944, |
| "step": 193 |
| }, |
| { |
| "epoch": 0.21859154929577465, |
| "grad_norm": 0.4739210393787863, |
| "learning_rate": 3.6329588014981274e-05, |
| "loss": 0.4854, |
| "step": 194 |
| }, |
| { |
| "epoch": 0.21971830985915494, |
| "grad_norm": 0.4000560342009286, |
| "learning_rate": 3.651685393258427e-05, |
| "loss": 0.4537, |
| "step": 195 |
| }, |
| { |
| "epoch": 0.2208450704225352, |
| "grad_norm": 0.4745488866447151, |
| "learning_rate": 3.670411985018727e-05, |
| "loss": 0.4925, |
| "step": 196 |
| }, |
| { |
| "epoch": 0.2219718309859155, |
| "grad_norm": 0.40427608903936857, |
| "learning_rate": 3.689138576779027e-05, |
| "loss": 0.4759, |
| "step": 197 |
| }, |
| { |
| "epoch": 0.2230985915492958, |
| "grad_norm": 0.36830227225633555, |
| "learning_rate": 3.7078651685393264e-05, |
| "loss": 0.4736, |
| "step": 198 |
| }, |
| { |
| "epoch": 0.22422535211267605, |
| "grad_norm": 0.35731267597771954, |
| "learning_rate": 3.726591760299626e-05, |
| "loss": 0.4655, |
| "step": 199 |
| }, |
| { |
| "epoch": 0.22535211267605634, |
| "grad_norm": 0.36040514424773884, |
| "learning_rate": 3.745318352059925e-05, |
| "loss": 0.5123, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.2264788732394366, |
| "grad_norm": 0.341416480034844, |
| "learning_rate": 3.764044943820225e-05, |
| "loss": 0.4817, |
| "step": 201 |
| }, |
| { |
| "epoch": 0.2276056338028169, |
| "grad_norm": 0.38281780990844233, |
| "learning_rate": 3.782771535580524e-05, |
| "loss": 0.4464, |
| "step": 202 |
| }, |
| { |
| "epoch": 0.2287323943661972, |
| "grad_norm": 0.39091583875242464, |
| "learning_rate": 3.801498127340824e-05, |
| "loss": 0.459, |
| "step": 203 |
| }, |
| { |
| "epoch": 0.22985915492957745, |
| "grad_norm": 0.4484240195809876, |
| "learning_rate": 3.8202247191011236e-05, |
| "loss": 0.4722, |
| "step": 204 |
| }, |
| { |
| "epoch": 0.23098591549295774, |
| "grad_norm": 0.3557165364605743, |
| "learning_rate": 3.838951310861423e-05, |
| "loss": 0.4481, |
| "step": 205 |
| }, |
| { |
| "epoch": 0.23211267605633804, |
| "grad_norm": 0.4351009901681479, |
| "learning_rate": 3.857677902621723e-05, |
| "loss": 0.471, |
| "step": 206 |
| }, |
| { |
| "epoch": 0.2332394366197183, |
| "grad_norm": 0.38692072509933034, |
| "learning_rate": 3.876404494382023e-05, |
| "loss": 0.4286, |
| "step": 207 |
| }, |
| { |
| "epoch": 0.2343661971830986, |
| "grad_norm": 0.4675415655077608, |
| "learning_rate": 3.8951310861423226e-05, |
| "loss": 0.4576, |
| "step": 208 |
| }, |
| { |
| "epoch": 0.23549295774647888, |
| "grad_norm": 0.39761524002543014, |
| "learning_rate": 3.913857677902622e-05, |
| "loss": 0.4794, |
| "step": 209 |
| }, |
| { |
| "epoch": 0.23661971830985915, |
| "grad_norm": 0.4540277389661571, |
| "learning_rate": 3.9325842696629214e-05, |
| "loss": 0.4759, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.23774647887323944, |
| "grad_norm": 0.4440657059092484, |
| "learning_rate": 3.951310861423221e-05, |
| "loss": 0.4534, |
| "step": 211 |
| }, |
| { |
| "epoch": 0.23887323943661973, |
| "grad_norm": 0.3939795789051039, |
| "learning_rate": 3.970037453183521e-05, |
| "loss": 0.4612, |
| "step": 212 |
| }, |
| { |
| "epoch": 0.24, |
| "grad_norm": 0.4762851872749466, |
| "learning_rate": 3.98876404494382e-05, |
| "loss": 0.4566, |
| "step": 213 |
| }, |
| { |
| "epoch": 0.24112676056338028, |
| "grad_norm": 0.41770476809342594, |
| "learning_rate": 4.00749063670412e-05, |
| "loss": 0.4532, |
| "step": 214 |
| }, |
| { |
| "epoch": 0.24225352112676057, |
| "grad_norm": 0.48777063502172047, |
| "learning_rate": 4.0262172284644194e-05, |
| "loss": 0.4432, |
| "step": 215 |
| }, |
| { |
| "epoch": 0.24338028169014084, |
| "grad_norm": 0.3690439757602066, |
| "learning_rate": 4.044943820224719e-05, |
| "loss": 0.4408, |
| "step": 216 |
| }, |
| { |
| "epoch": 0.24450704225352113, |
| "grad_norm": 0.49741320292973695, |
| "learning_rate": 4.063670411985019e-05, |
| "loss": 0.4659, |
| "step": 217 |
| }, |
| { |
| "epoch": 0.24563380281690142, |
| "grad_norm": 0.522242568777597, |
| "learning_rate": 4.082397003745319e-05, |
| "loss": 0.4474, |
| "step": 218 |
| }, |
| { |
| "epoch": 0.24676056338028168, |
| "grad_norm": 0.37597023810209546, |
| "learning_rate": 4.1011235955056184e-05, |
| "loss": 0.4863, |
| "step": 219 |
| }, |
| { |
| "epoch": 0.24788732394366197, |
| "grad_norm": 0.5868290025238638, |
| "learning_rate": 4.119850187265918e-05, |
| "loss": 0.4829, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.24901408450704227, |
| "grad_norm": 0.5148405897733493, |
| "learning_rate": 4.138576779026217e-05, |
| "loss": 0.4518, |
| "step": 221 |
| }, |
| { |
| "epoch": 0.25014084507042256, |
| "grad_norm": 0.44695171811561535, |
| "learning_rate": 4.157303370786517e-05, |
| "loss": 0.4548, |
| "step": 222 |
| }, |
| { |
| "epoch": 0.2512676056338028, |
| "grad_norm": 0.5467900665481616, |
| "learning_rate": 4.176029962546817e-05, |
| "loss": 0.4716, |
| "step": 223 |
| }, |
| { |
| "epoch": 0.2523943661971831, |
| "grad_norm": 0.41846730812607025, |
| "learning_rate": 4.1947565543071165e-05, |
| "loss": 0.4661, |
| "step": 224 |
| }, |
| { |
| "epoch": 0.2535211267605634, |
| "grad_norm": 0.45662423635892097, |
| "learning_rate": 4.2134831460674156e-05, |
| "loss": 0.4534, |
| "step": 225 |
| }, |
| { |
| "epoch": 0.25464788732394367, |
| "grad_norm": 0.6325159390990795, |
| "learning_rate": 4.232209737827715e-05, |
| "loss": 0.4512, |
| "step": 226 |
| }, |
| { |
| "epoch": 0.25577464788732396, |
| "grad_norm": 0.3891612621892786, |
| "learning_rate": 4.250936329588015e-05, |
| "loss": 0.456, |
| "step": 227 |
| }, |
| { |
| "epoch": 0.25690140845070425, |
| "grad_norm": 0.49703727890231636, |
| "learning_rate": 4.269662921348315e-05, |
| "loss": 0.4563, |
| "step": 228 |
| }, |
| { |
| "epoch": 0.2580281690140845, |
| "grad_norm": 0.4866546409272407, |
| "learning_rate": 4.2883895131086146e-05, |
| "loss": 0.4717, |
| "step": 229 |
| }, |
| { |
| "epoch": 0.2591549295774648, |
| "grad_norm": 0.4531740529278661, |
| "learning_rate": 4.307116104868914e-05, |
| "loss": 0.4446, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.26028169014084507, |
| "grad_norm": 0.5386248688297244, |
| "learning_rate": 4.3258426966292134e-05, |
| "loss": 0.4674, |
| "step": 231 |
| }, |
| { |
| "epoch": 0.26140845070422536, |
| "grad_norm": 0.42614317035006366, |
| "learning_rate": 4.344569288389513e-05, |
| "loss": 0.4583, |
| "step": 232 |
| }, |
| { |
| "epoch": 0.26253521126760565, |
| "grad_norm": 0.5720498969311514, |
| "learning_rate": 4.363295880149813e-05, |
| "loss": 0.4501, |
| "step": 233 |
| }, |
| { |
| "epoch": 0.26366197183098594, |
| "grad_norm": 0.5617737038927428, |
| "learning_rate": 4.3820224719101126e-05, |
| "loss": 0.4759, |
| "step": 234 |
| }, |
| { |
| "epoch": 0.2647887323943662, |
| "grad_norm": 0.4432550193181308, |
| "learning_rate": 4.4007490636704124e-05, |
| "loss": 0.4658, |
| "step": 235 |
| }, |
| { |
| "epoch": 0.26591549295774647, |
| "grad_norm": 0.5733406217420002, |
| "learning_rate": 4.419475655430712e-05, |
| "loss": 0.4739, |
| "step": 236 |
| }, |
| { |
| "epoch": 0.26704225352112676, |
| "grad_norm": 0.4228087393204907, |
| "learning_rate": 4.438202247191011e-05, |
| "loss": 0.4549, |
| "step": 237 |
| }, |
| { |
| "epoch": 0.26816901408450705, |
| "grad_norm": 0.4865503254588127, |
| "learning_rate": 4.456928838951311e-05, |
| "loss": 0.4413, |
| "step": 238 |
| }, |
| { |
| "epoch": 0.26929577464788734, |
| "grad_norm": 0.5412922689255075, |
| "learning_rate": 4.475655430711611e-05, |
| "loss": 0.4627, |
| "step": 239 |
| }, |
| { |
| "epoch": 0.2704225352112676, |
| "grad_norm": 0.42876550267809127, |
| "learning_rate": 4.4943820224719104e-05, |
| "loss": 0.4547, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.27154929577464787, |
| "grad_norm": 0.5373861362677359, |
| "learning_rate": 4.51310861423221e-05, |
| "loss": 0.4612, |
| "step": 241 |
| }, |
| { |
| "epoch": 0.27267605633802816, |
| "grad_norm": 0.5804462215718947, |
| "learning_rate": 4.531835205992509e-05, |
| "loss": 0.4645, |
| "step": 242 |
| }, |
| { |
| "epoch": 0.27380281690140845, |
| "grad_norm": 0.4122842878113385, |
| "learning_rate": 4.550561797752809e-05, |
| "loss": 0.4451, |
| "step": 243 |
| }, |
| { |
| "epoch": 0.27492957746478874, |
| "grad_norm": 0.5336968424824022, |
| "learning_rate": 4.569288389513109e-05, |
| "loss": 0.4481, |
| "step": 244 |
| }, |
| { |
| "epoch": 0.27605633802816903, |
| "grad_norm": 0.42951583225710527, |
| "learning_rate": 4.5880149812734085e-05, |
| "loss": 0.4567, |
| "step": 245 |
| }, |
| { |
| "epoch": 0.27718309859154927, |
| "grad_norm": 0.48609553240950315, |
| "learning_rate": 4.606741573033708e-05, |
| "loss": 0.447, |
| "step": 246 |
| }, |
| { |
| "epoch": 0.27830985915492956, |
| "grad_norm": 0.5840157329385284, |
| "learning_rate": 4.625468164794008e-05, |
| "loss": 0.4683, |
| "step": 247 |
| }, |
| { |
| "epoch": 0.27943661971830985, |
| "grad_norm": 0.464308042264615, |
| "learning_rate": 4.644194756554308e-05, |
| "loss": 0.4435, |
| "step": 248 |
| }, |
| { |
| "epoch": 0.28056338028169014, |
| "grad_norm": 0.48568184619644433, |
| "learning_rate": 4.662921348314607e-05, |
| "loss": 0.4202, |
| "step": 249 |
| }, |
| { |
| "epoch": 0.28169014084507044, |
| "grad_norm": 0.5421087431500514, |
| "learning_rate": 4.6816479400749066e-05, |
| "loss": 0.4802, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.2828169014084507, |
| "grad_norm": 0.5726166139181046, |
| "learning_rate": 4.700374531835206e-05, |
| "loss": 0.4082, |
| "step": 251 |
| }, |
| { |
| "epoch": 0.28394366197183096, |
| "grad_norm": 0.38708381470148295, |
| "learning_rate": 4.719101123595506e-05, |
| "loss": 0.4201, |
| "step": 252 |
| }, |
| { |
| "epoch": 0.28507042253521125, |
| "grad_norm": 0.6599296604136585, |
| "learning_rate": 4.737827715355805e-05, |
| "loss": 0.4574, |
| "step": 253 |
| }, |
| { |
| "epoch": 0.28619718309859155, |
| "grad_norm": 0.3689919503814012, |
| "learning_rate": 4.756554307116105e-05, |
| "loss": 0.443, |
| "step": 254 |
| }, |
| { |
| "epoch": 0.28732394366197184, |
| "grad_norm": 0.6554671527107907, |
| "learning_rate": 4.7752808988764046e-05, |
| "loss": 0.4521, |
| "step": 255 |
| }, |
| { |
| "epoch": 0.28845070422535213, |
| "grad_norm": 0.4732513325463823, |
| "learning_rate": 4.7940074906367044e-05, |
| "loss": 0.4612, |
| "step": 256 |
| }, |
| { |
| "epoch": 0.2895774647887324, |
| "grad_norm": 0.6422946109709861, |
| "learning_rate": 4.812734082397004e-05, |
| "loss": 0.4592, |
| "step": 257 |
| }, |
| { |
| "epoch": 0.29070422535211266, |
| "grad_norm": 0.5006208962628628, |
| "learning_rate": 4.831460674157304e-05, |
| "loss": 0.4591, |
| "step": 258 |
| }, |
| { |
| "epoch": 0.29183098591549295, |
| "grad_norm": 0.6352179330776139, |
| "learning_rate": 4.8501872659176036e-05, |
| "loss": 0.4421, |
| "step": 259 |
| }, |
| { |
| "epoch": 0.29295774647887324, |
| "grad_norm": 0.47815163623650375, |
| "learning_rate": 4.8689138576779034e-05, |
| "loss": 0.4416, |
| "step": 260 |
| }, |
| { |
| "epoch": 0.29408450704225353, |
| "grad_norm": 0.5761319167007566, |
| "learning_rate": 4.8876404494382024e-05, |
| "loss": 0.4423, |
| "step": 261 |
| }, |
| { |
| "epoch": 0.2952112676056338, |
| "grad_norm": 0.664916170697062, |
| "learning_rate": 4.906367041198502e-05, |
| "loss": 0.4752, |
| "step": 262 |
| }, |
| { |
| "epoch": 0.2963380281690141, |
| "grad_norm": 0.4686347528012791, |
| "learning_rate": 4.925093632958801e-05, |
| "loss": 0.4549, |
| "step": 263 |
| }, |
| { |
| "epoch": 0.29746478873239435, |
| "grad_norm": 0.6535509019931558, |
| "learning_rate": 4.943820224719101e-05, |
| "loss": 0.4185, |
| "step": 264 |
| }, |
| { |
| "epoch": 0.29859154929577464, |
| "grad_norm": 0.4418063367523136, |
| "learning_rate": 4.962546816479401e-05, |
| "loss": 0.445, |
| "step": 265 |
| }, |
| { |
| "epoch": 0.29971830985915493, |
| "grad_norm": 0.4780381630674905, |
| "learning_rate": 4.9812734082397005e-05, |
| "loss": 0.4396, |
| "step": 266 |
| }, |
| { |
| "epoch": 0.3008450704225352, |
| "grad_norm": 0.5799759746220542, |
| "learning_rate": 5e-05, |
| "loss": 0.4581, |
| "step": 267 |
| }, |
| { |
| "epoch": 0.3019718309859155, |
| "grad_norm": 0.5140227732840926, |
| "learning_rate": 4.997911445279866e-05, |
| "loss": 0.4258, |
| "step": 268 |
| }, |
| { |
| "epoch": 0.30309859154929575, |
| "grad_norm": 0.5093369543361972, |
| "learning_rate": 4.995822890559733e-05, |
| "loss": 0.447, |
| "step": 269 |
| }, |
| { |
| "epoch": 0.30422535211267604, |
| "grad_norm": 0.536665447881301, |
| "learning_rate": 4.9937343358395996e-05, |
| "loss": 0.4375, |
| "step": 270 |
| }, |
| { |
| "epoch": 0.30535211267605633, |
| "grad_norm": 0.6045725882311949, |
| "learning_rate": 4.9916457811194656e-05, |
| "loss": 0.4596, |
| "step": 271 |
| }, |
| { |
| "epoch": 0.3064788732394366, |
| "grad_norm": 0.4513638559820733, |
| "learning_rate": 4.9895572263993316e-05, |
| "loss": 0.4213, |
| "step": 272 |
| }, |
| { |
| "epoch": 0.3076056338028169, |
| "grad_norm": 0.5579346841610208, |
| "learning_rate": 4.987468671679198e-05, |
| "loss": 0.4496, |
| "step": 273 |
| }, |
| { |
| "epoch": 0.3087323943661972, |
| "grad_norm": 0.721853421554717, |
| "learning_rate": 4.985380116959065e-05, |
| "loss": 0.4447, |
| "step": 274 |
| }, |
| { |
| "epoch": 0.30985915492957744, |
| "grad_norm": 0.6403095657433422, |
| "learning_rate": 4.983291562238931e-05, |
| "loss": 0.4502, |
| "step": 275 |
| }, |
| { |
| "epoch": 0.31098591549295773, |
| "grad_norm": 0.6518049489007518, |
| "learning_rate": 4.981203007518797e-05, |
| "loss": 0.4632, |
| "step": 276 |
| }, |
| { |
| "epoch": 0.312112676056338, |
| "grad_norm": 0.8147228654510137, |
| "learning_rate": 4.9791144527986636e-05, |
| "loss": 0.4759, |
| "step": 277 |
| }, |
| { |
| "epoch": 0.3132394366197183, |
| "grad_norm": 0.9543881154945372, |
| "learning_rate": 4.97702589807853e-05, |
| "loss": 0.466, |
| "step": 278 |
| }, |
| { |
| "epoch": 0.3143661971830986, |
| "grad_norm": 0.711807527160549, |
| "learning_rate": 4.974937343358396e-05, |
| "loss": 0.4458, |
| "step": 279 |
| }, |
| { |
| "epoch": 0.3154929577464789, |
| "grad_norm": 0.9703166053609333, |
| "learning_rate": 4.972848788638262e-05, |
| "loss": 0.4548, |
| "step": 280 |
| }, |
| { |
| "epoch": 0.31661971830985913, |
| "grad_norm": 0.8568539146313636, |
| "learning_rate": 4.970760233918128e-05, |
| "loss": 0.4397, |
| "step": 281 |
| }, |
| { |
| "epoch": 0.3177464788732394, |
| "grad_norm": 0.8062901175799372, |
| "learning_rate": 4.9686716791979956e-05, |
| "loss": 0.4529, |
| "step": 282 |
| }, |
| { |
| "epoch": 0.3188732394366197, |
| "grad_norm": 0.8565696839712605, |
| "learning_rate": 4.9665831244778616e-05, |
| "loss": 0.4577, |
| "step": 283 |
| }, |
| { |
| "epoch": 0.32, |
| "grad_norm": 0.9299611575587775, |
| "learning_rate": 4.9644945697577276e-05, |
| "loss": 0.4351, |
| "step": 284 |
| }, |
| { |
| "epoch": 0.3211267605633803, |
| "grad_norm": 0.7290633308595302, |
| "learning_rate": 4.9624060150375936e-05, |
| "loss": 0.4337, |
| "step": 285 |
| }, |
| { |
| "epoch": 0.3222535211267606, |
| "grad_norm": 0.7827462663607737, |
| "learning_rate": 4.960317460317461e-05, |
| "loss": 0.4419, |
| "step": 286 |
| }, |
| { |
| "epoch": 0.3233802816901408, |
| "grad_norm": 0.8289797573873895, |
| "learning_rate": 4.958228905597327e-05, |
| "loss": 0.4313, |
| "step": 287 |
| }, |
| { |
| "epoch": 0.3245070422535211, |
| "grad_norm": 0.44578585581376295, |
| "learning_rate": 4.956140350877193e-05, |
| "loss": 0.4597, |
| "step": 288 |
| }, |
| { |
| "epoch": 0.3256338028169014, |
| "grad_norm": 0.8766044076142936, |
| "learning_rate": 4.954051796157059e-05, |
| "loss": 0.4812, |
| "step": 289 |
| }, |
| { |
| "epoch": 0.3267605633802817, |
| "grad_norm": 0.4914683054076262, |
| "learning_rate": 4.9519632414369263e-05, |
| "loss": 0.4461, |
| "step": 290 |
| }, |
| { |
| "epoch": 0.327887323943662, |
| "grad_norm": 0.7848770318839384, |
| "learning_rate": 4.9498746867167923e-05, |
| "loss": 0.4615, |
| "step": 291 |
| }, |
| { |
| "epoch": 0.3290140845070422, |
| "grad_norm": 0.5878732648655055, |
| "learning_rate": 4.947786131996658e-05, |
| "loss": 0.4487, |
| "step": 292 |
| }, |
| { |
| "epoch": 0.3301408450704225, |
| "grad_norm": 0.6268082137586906, |
| "learning_rate": 4.945697577276524e-05, |
| "loss": 0.4317, |
| "step": 293 |
| }, |
| { |
| "epoch": 0.3312676056338028, |
| "grad_norm": 0.5757058247458672, |
| "learning_rate": 4.943609022556392e-05, |
| "loss": 0.4319, |
| "step": 294 |
| }, |
| { |
| "epoch": 0.3323943661971831, |
| "grad_norm": 0.5478319671642797, |
| "learning_rate": 4.941520467836258e-05, |
| "loss": 0.4414, |
| "step": 295 |
| }, |
| { |
| "epoch": 0.3335211267605634, |
| "grad_norm": 0.6073166154579848, |
| "learning_rate": 4.939431913116124e-05, |
| "loss": 0.437, |
| "step": 296 |
| }, |
| { |
| "epoch": 0.3346478873239437, |
| "grad_norm": 0.5288488847365398, |
| "learning_rate": 4.93734335839599e-05, |
| "loss": 0.4634, |
| "step": 297 |
| }, |
| { |
| "epoch": 0.3357746478873239, |
| "grad_norm": 0.665813028870457, |
| "learning_rate": 4.9352548036758564e-05, |
| "loss": 0.4169, |
| "step": 298 |
| }, |
| { |
| "epoch": 0.3369014084507042, |
| "grad_norm": 0.5021503452806892, |
| "learning_rate": 4.933166248955723e-05, |
| "loss": 0.4362, |
| "step": 299 |
| }, |
| { |
| "epoch": 0.3380281690140845, |
| "grad_norm": 0.62033447140205, |
| "learning_rate": 4.931077694235589e-05, |
| "loss": 0.4563, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.3391549295774648, |
| "grad_norm": 0.5274838444890766, |
| "learning_rate": 4.928989139515455e-05, |
| "loss": 0.4441, |
| "step": 301 |
| }, |
| { |
| "epoch": 0.3402816901408451, |
| "grad_norm": 0.612414648195828, |
| "learning_rate": 4.926900584795322e-05, |
| "loss": 0.4527, |
| "step": 302 |
| }, |
| { |
| "epoch": 0.3414084507042254, |
| "grad_norm": 0.575726524433347, |
| "learning_rate": 4.9248120300751884e-05, |
| "loss": 0.4618, |
| "step": 303 |
| }, |
| { |
| "epoch": 0.3425352112676056, |
| "grad_norm": 0.4857128803230017, |
| "learning_rate": 4.9227234753550544e-05, |
| "loss": 0.455, |
| "step": 304 |
| }, |
| { |
| "epoch": 0.3436619718309859, |
| "grad_norm": 0.4651119564318662, |
| "learning_rate": 4.9206349206349204e-05, |
| "loss": 0.4415, |
| "step": 305 |
| }, |
| { |
| "epoch": 0.3447887323943662, |
| "grad_norm": 0.43833590031904573, |
| "learning_rate": 4.918546365914787e-05, |
| "loss": 0.4461, |
| "step": 306 |
| }, |
| { |
| "epoch": 0.3459154929577465, |
| "grad_norm": 0.39155081982450585, |
| "learning_rate": 4.916457811194654e-05, |
| "loss": 0.4385, |
| "step": 307 |
| }, |
| { |
| "epoch": 0.3470422535211268, |
| "grad_norm": 0.4515617162267813, |
| "learning_rate": 4.91436925647452e-05, |
| "loss": 0.4397, |
| "step": 308 |
| }, |
| { |
| "epoch": 0.34816901408450707, |
| "grad_norm": 0.39034501069443966, |
| "learning_rate": 4.912280701754386e-05, |
| "loss": 0.4275, |
| "step": 309 |
| }, |
| { |
| "epoch": 0.3492957746478873, |
| "grad_norm": 0.434941416892916, |
| "learning_rate": 4.9101921470342524e-05, |
| "loss": 0.4516, |
| "step": 310 |
| }, |
| { |
| "epoch": 0.3504225352112676, |
| "grad_norm": 0.405450124656521, |
| "learning_rate": 4.908103592314119e-05, |
| "loss": 0.4444, |
| "step": 311 |
| }, |
| { |
| "epoch": 0.3515492957746479, |
| "grad_norm": 0.3890876078168863, |
| "learning_rate": 4.906015037593985e-05, |
| "loss": 0.4337, |
| "step": 312 |
| }, |
| { |
| "epoch": 0.3526760563380282, |
| "grad_norm": 0.4368474819090546, |
| "learning_rate": 4.903926482873851e-05, |
| "loss": 0.4465, |
| "step": 313 |
| }, |
| { |
| "epoch": 0.35380281690140847, |
| "grad_norm": 0.49321100467381773, |
| "learning_rate": 4.901837928153718e-05, |
| "loss": 0.4418, |
| "step": 314 |
| }, |
| { |
| "epoch": 0.35492957746478876, |
| "grad_norm": 0.4007160642334723, |
| "learning_rate": 4.8997493734335844e-05, |
| "loss": 0.4354, |
| "step": 315 |
| }, |
| { |
| "epoch": 0.356056338028169, |
| "grad_norm": 0.34955955912720427, |
| "learning_rate": 4.8976608187134504e-05, |
| "loss": 0.4651, |
| "step": 316 |
| }, |
| { |
| "epoch": 0.3571830985915493, |
| "grad_norm": 0.45158037335045736, |
| "learning_rate": 4.8955722639933164e-05, |
| "loss": 0.4476, |
| "step": 317 |
| }, |
| { |
| "epoch": 0.3583098591549296, |
| "grad_norm": 0.3750679094506128, |
| "learning_rate": 4.893483709273183e-05, |
| "loss": 0.4296, |
| "step": 318 |
| }, |
| { |
| "epoch": 0.35943661971830987, |
| "grad_norm": 0.3949654475565995, |
| "learning_rate": 4.89139515455305e-05, |
| "loss": 0.4274, |
| "step": 319 |
| }, |
| { |
| "epoch": 0.36056338028169016, |
| "grad_norm": 0.4279246749378893, |
| "learning_rate": 4.889306599832916e-05, |
| "loss": 0.4531, |
| "step": 320 |
| }, |
| { |
| "epoch": 0.3616901408450704, |
| "grad_norm": 0.4464764928096698, |
| "learning_rate": 4.887218045112782e-05, |
| "loss": 0.4626, |
| "step": 321 |
| }, |
| { |
| "epoch": 0.3628169014084507, |
| "grad_norm": 0.4094055448948909, |
| "learning_rate": 4.8851294903926485e-05, |
| "loss": 0.4574, |
| "step": 322 |
| }, |
| { |
| "epoch": 0.363943661971831, |
| "grad_norm": 0.433464486212505, |
| "learning_rate": 4.883040935672515e-05, |
| "loss": 0.4419, |
| "step": 323 |
| }, |
| { |
| "epoch": 0.36507042253521127, |
| "grad_norm": 0.3943121841578179, |
| "learning_rate": 4.880952380952381e-05, |
| "loss": 0.4379, |
| "step": 324 |
| }, |
| { |
| "epoch": 0.36619718309859156, |
| "grad_norm": 0.43899232439651487, |
| "learning_rate": 4.878863826232247e-05, |
| "loss": 0.4465, |
| "step": 325 |
| }, |
| { |
| "epoch": 0.36732394366197185, |
| "grad_norm": 0.4193670484154254, |
| "learning_rate": 4.876775271512114e-05, |
| "loss": 0.4518, |
| "step": 326 |
| }, |
| { |
| "epoch": 0.3684507042253521, |
| "grad_norm": 0.4047850180694318, |
| "learning_rate": 4.8746867167919805e-05, |
| "loss": 0.4237, |
| "step": 327 |
| }, |
| { |
| "epoch": 0.3695774647887324, |
| "grad_norm": 0.32930872482740814, |
| "learning_rate": 4.8725981620718465e-05, |
| "loss": 0.438, |
| "step": 328 |
| }, |
| { |
| "epoch": 0.37070422535211267, |
| "grad_norm": 0.4319956598214786, |
| "learning_rate": 4.8705096073517125e-05, |
| "loss": 0.4342, |
| "step": 329 |
| }, |
| { |
| "epoch": 0.37183098591549296, |
| "grad_norm": 0.39983873887542376, |
| "learning_rate": 4.868421052631579e-05, |
| "loss": 0.4323, |
| "step": 330 |
| }, |
| { |
| "epoch": 0.37295774647887325, |
| "grad_norm": 0.42371687498447236, |
| "learning_rate": 4.866332497911446e-05, |
| "loss": 0.4601, |
| "step": 331 |
| }, |
| { |
| "epoch": 0.37408450704225354, |
| "grad_norm": 0.48052857625941175, |
| "learning_rate": 4.864243943191312e-05, |
| "loss": 0.4791, |
| "step": 332 |
| }, |
| { |
| "epoch": 0.3752112676056338, |
| "grad_norm": 0.40445062983039526, |
| "learning_rate": 4.862155388471178e-05, |
| "loss": 0.457, |
| "step": 333 |
| }, |
| { |
| "epoch": 0.37633802816901407, |
| "grad_norm": 0.4419492432404764, |
| "learning_rate": 4.8600668337510445e-05, |
| "loss": 0.4396, |
| "step": 334 |
| }, |
| { |
| "epoch": 0.37746478873239436, |
| "grad_norm": 0.3831902844997573, |
| "learning_rate": 4.857978279030911e-05, |
| "loss": 0.4775, |
| "step": 335 |
| }, |
| { |
| "epoch": 0.37859154929577465, |
| "grad_norm": 0.46441513775498133, |
| "learning_rate": 4.855889724310777e-05, |
| "loss": 0.4292, |
| "step": 336 |
| }, |
| { |
| "epoch": 0.37971830985915495, |
| "grad_norm": 0.4558569399558645, |
| "learning_rate": 4.853801169590643e-05, |
| "loss": 0.4321, |
| "step": 337 |
| }, |
| { |
| "epoch": 0.38084507042253524, |
| "grad_norm": 0.46584322528549255, |
| "learning_rate": 4.85171261487051e-05, |
| "loss": 0.4381, |
| "step": 338 |
| }, |
| { |
| "epoch": 0.3819718309859155, |
| "grad_norm": 0.4477280961298829, |
| "learning_rate": 4.849624060150376e-05, |
| "loss": 0.4149, |
| "step": 339 |
| }, |
| { |
| "epoch": 0.38309859154929576, |
| "grad_norm": 0.4000418205043348, |
| "learning_rate": 4.8475355054302425e-05, |
| "loss": 0.4278, |
| "step": 340 |
| }, |
| { |
| "epoch": 0.38422535211267606, |
| "grad_norm": 0.43292724989992054, |
| "learning_rate": 4.8454469507101085e-05, |
| "loss": 0.4582, |
| "step": 341 |
| }, |
| { |
| "epoch": 0.38535211267605635, |
| "grad_norm": 0.6115776129330851, |
| "learning_rate": 4.843358395989975e-05, |
| "loss": 0.4502, |
| "step": 342 |
| }, |
| { |
| "epoch": 0.38647887323943664, |
| "grad_norm": 0.4753406969860461, |
| "learning_rate": 4.841269841269841e-05, |
| "loss": 0.4546, |
| "step": 343 |
| }, |
| { |
| "epoch": 0.38760563380281693, |
| "grad_norm": 0.6453081760079101, |
| "learning_rate": 4.839181286549708e-05, |
| "loss": 0.4647, |
| "step": 344 |
| }, |
| { |
| "epoch": 0.38873239436619716, |
| "grad_norm": 0.6874748404475204, |
| "learning_rate": 4.837092731829574e-05, |
| "loss": 0.4372, |
| "step": 345 |
| }, |
| { |
| "epoch": 0.38985915492957746, |
| "grad_norm": 0.40443377712949796, |
| "learning_rate": 4.8350041771094406e-05, |
| "loss": 0.4357, |
| "step": 346 |
| }, |
| { |
| "epoch": 0.39098591549295775, |
| "grad_norm": 0.5991286906041724, |
| "learning_rate": 4.8329156223893066e-05, |
| "loss": 0.4635, |
| "step": 347 |
| }, |
| { |
| "epoch": 0.39211267605633804, |
| "grad_norm": 0.4722265848003038, |
| "learning_rate": 4.830827067669173e-05, |
| "loss": 0.4489, |
| "step": 348 |
| }, |
| { |
| "epoch": 0.39323943661971833, |
| "grad_norm": 0.5058106439502263, |
| "learning_rate": 4.828738512949039e-05, |
| "loss": 0.4393, |
| "step": 349 |
| }, |
| { |
| "epoch": 0.39436619718309857, |
| "grad_norm": 0.5121245224918637, |
| "learning_rate": 4.826649958228906e-05, |
| "loss": 0.4495, |
| "step": 350 |
| }, |
| { |
| "epoch": 0.39549295774647886, |
| "grad_norm": 0.45671179599721246, |
| "learning_rate": 4.824561403508772e-05, |
| "loss": 0.4507, |
| "step": 351 |
| }, |
| { |
| "epoch": 0.39661971830985915, |
| "grad_norm": 0.45937663080556684, |
| "learning_rate": 4.8224728487886386e-05, |
| "loss": 0.4209, |
| "step": 352 |
| }, |
| { |
| "epoch": 0.39774647887323944, |
| "grad_norm": 0.6104067835573581, |
| "learning_rate": 4.8203842940685046e-05, |
| "loss": 0.4418, |
| "step": 353 |
| }, |
| { |
| "epoch": 0.39887323943661973, |
| "grad_norm": 0.420277131165378, |
| "learning_rate": 4.818295739348371e-05, |
| "loss": 0.4243, |
| "step": 354 |
| }, |
| { |
| "epoch": 0.4, |
| "grad_norm": 0.5086625172059411, |
| "learning_rate": 4.816207184628237e-05, |
| "loss": 0.443, |
| "step": 355 |
| }, |
| { |
| "epoch": 0.40112676056338026, |
| "grad_norm": 0.4470016709990632, |
| "learning_rate": 4.814118629908104e-05, |
| "loss": 0.4179, |
| "step": 356 |
| }, |
| { |
| "epoch": 0.40225352112676055, |
| "grad_norm": 0.44297926877619226, |
| "learning_rate": 4.81203007518797e-05, |
| "loss": 0.4402, |
| "step": 357 |
| }, |
| { |
| "epoch": 0.40338028169014084, |
| "grad_norm": 0.4010493977276293, |
| "learning_rate": 4.8099415204678366e-05, |
| "loss": 0.4588, |
| "step": 358 |
| }, |
| { |
| "epoch": 0.40450704225352113, |
| "grad_norm": 0.3506045656678495, |
| "learning_rate": 4.8078529657477026e-05, |
| "loss": 0.4151, |
| "step": 359 |
| }, |
| { |
| "epoch": 0.4056338028169014, |
| "grad_norm": 0.41248134787582563, |
| "learning_rate": 4.805764411027569e-05, |
| "loss": 0.4308, |
| "step": 360 |
| }, |
| { |
| "epoch": 0.4067605633802817, |
| "grad_norm": 0.42669347587090345, |
| "learning_rate": 4.803675856307435e-05, |
| "loss": 0.4395, |
| "step": 361 |
| }, |
| { |
| "epoch": 0.40788732394366195, |
| "grad_norm": 0.37272375532810725, |
| "learning_rate": 4.801587301587302e-05, |
| "loss": 0.4364, |
| "step": 362 |
| }, |
| { |
| "epoch": 0.40901408450704224, |
| "grad_norm": 0.4570821983785425, |
| "learning_rate": 4.799498746867168e-05, |
| "loss": 0.4418, |
| "step": 363 |
| }, |
| { |
| "epoch": 0.41014084507042253, |
| "grad_norm": 0.398872273320948, |
| "learning_rate": 4.7974101921470346e-05, |
| "loss": 0.4436, |
| "step": 364 |
| }, |
| { |
| "epoch": 0.4112676056338028, |
| "grad_norm": 0.511403087399427, |
| "learning_rate": 4.7953216374269006e-05, |
| "loss": 0.4255, |
| "step": 365 |
| }, |
| { |
| "epoch": 0.4123943661971831, |
| "grad_norm": 0.3713182569695666, |
| "learning_rate": 4.793233082706767e-05, |
| "loss": 0.4411, |
| "step": 366 |
| }, |
| { |
| "epoch": 0.4135211267605634, |
| "grad_norm": 0.4584153156070977, |
| "learning_rate": 4.791144527986633e-05, |
| "loss": 0.4372, |
| "step": 367 |
| }, |
| { |
| "epoch": 0.41464788732394364, |
| "grad_norm": 0.42578321791979223, |
| "learning_rate": 4.7890559732665e-05, |
| "loss": 0.4291, |
| "step": 368 |
| }, |
| { |
| "epoch": 0.41577464788732393, |
| "grad_norm": 0.4177132674416258, |
| "learning_rate": 4.786967418546366e-05, |
| "loss": 0.4204, |
| "step": 369 |
| }, |
| { |
| "epoch": 0.4169014084507042, |
| "grad_norm": 0.4666753953413797, |
| "learning_rate": 4.784878863826233e-05, |
| "loss": 0.443, |
| "step": 370 |
| }, |
| { |
| "epoch": 0.4180281690140845, |
| "grad_norm": 0.48466090823607716, |
| "learning_rate": 4.782790309106099e-05, |
| "loss": 0.451, |
| "step": 371 |
| }, |
| { |
| "epoch": 0.4191549295774648, |
| "grad_norm": 0.46837220316117795, |
| "learning_rate": 4.780701754385965e-05, |
| "loss": 0.4063, |
| "step": 372 |
| }, |
| { |
| "epoch": 0.42028169014084504, |
| "grad_norm": 0.5287131069297187, |
| "learning_rate": 4.778613199665831e-05, |
| "loss": 0.4419, |
| "step": 373 |
| }, |
| { |
| "epoch": 0.42140845070422533, |
| "grad_norm": 0.4684724501520272, |
| "learning_rate": 4.776524644945698e-05, |
| "loss": 0.4197, |
| "step": 374 |
| }, |
| { |
| "epoch": 0.4225352112676056, |
| "grad_norm": 0.45380336381720876, |
| "learning_rate": 4.774436090225564e-05, |
| "loss": 0.4391, |
| "step": 375 |
| }, |
| { |
| "epoch": 0.4236619718309859, |
| "grad_norm": 0.44925990919244896, |
| "learning_rate": 4.77234753550543e-05, |
| "loss": 0.4406, |
| "step": 376 |
| }, |
| { |
| "epoch": 0.4247887323943662, |
| "grad_norm": 0.3873662129780452, |
| "learning_rate": 4.770258980785297e-05, |
| "loss": 0.4088, |
| "step": 377 |
| }, |
| { |
| "epoch": 0.4259154929577465, |
| "grad_norm": 0.37778608304923517, |
| "learning_rate": 4.7681704260651634e-05, |
| "loss": 0.4254, |
| "step": 378 |
| }, |
| { |
| "epoch": 0.42704225352112674, |
| "grad_norm": 0.44423925673574166, |
| "learning_rate": 4.7660818713450294e-05, |
| "loss": 0.4595, |
| "step": 379 |
| }, |
| { |
| "epoch": 0.428169014084507, |
| "grad_norm": 0.3663991698191898, |
| "learning_rate": 4.7639933166248954e-05, |
| "loss": 0.4286, |
| "step": 380 |
| }, |
| { |
| "epoch": 0.4292957746478873, |
| "grad_norm": 0.4099685876093145, |
| "learning_rate": 4.761904761904762e-05, |
| "loss": 0.4363, |
| "step": 381 |
| }, |
| { |
| "epoch": 0.4304225352112676, |
| "grad_norm": 0.4309445114432002, |
| "learning_rate": 4.759816207184629e-05, |
| "loss": 0.4276, |
| "step": 382 |
| }, |
| { |
| "epoch": 0.4315492957746479, |
| "grad_norm": 0.4082416446454451, |
| "learning_rate": 4.757727652464495e-05, |
| "loss": 0.4468, |
| "step": 383 |
| }, |
| { |
| "epoch": 0.4326760563380282, |
| "grad_norm": 0.3823707441932576, |
| "learning_rate": 4.755639097744361e-05, |
| "loss": 0.4353, |
| "step": 384 |
| }, |
| { |
| "epoch": 0.43380281690140843, |
| "grad_norm": 0.4984043757512769, |
| "learning_rate": 4.7535505430242274e-05, |
| "loss": 0.4578, |
| "step": 385 |
| }, |
| { |
| "epoch": 0.4349295774647887, |
| "grad_norm": 0.43584279497841244, |
| "learning_rate": 4.751461988304094e-05, |
| "loss": 0.4258, |
| "step": 386 |
| }, |
| { |
| "epoch": 0.436056338028169, |
| "grad_norm": 0.43822399751636953, |
| "learning_rate": 4.74937343358396e-05, |
| "loss": 0.4353, |
| "step": 387 |
| }, |
| { |
| "epoch": 0.4371830985915493, |
| "grad_norm": 0.42344449072896734, |
| "learning_rate": 4.747284878863826e-05, |
| "loss": 0.4778, |
| "step": 388 |
| }, |
| { |
| "epoch": 0.4383098591549296, |
| "grad_norm": 0.4056081290584808, |
| "learning_rate": 4.745196324143693e-05, |
| "loss": 0.4237, |
| "step": 389 |
| }, |
| { |
| "epoch": 0.4394366197183099, |
| "grad_norm": 0.3732430125687137, |
| "learning_rate": 4.7431077694235594e-05, |
| "loss": 0.4554, |
| "step": 390 |
| }, |
| { |
| "epoch": 0.4405633802816901, |
| "grad_norm": 0.3647079020199324, |
| "learning_rate": 4.7410192147034254e-05, |
| "loss": 0.439, |
| "step": 391 |
| }, |
| { |
| "epoch": 0.4416901408450704, |
| "grad_norm": 0.4000271217336532, |
| "learning_rate": 4.7389306599832914e-05, |
| "loss": 0.4393, |
| "step": 392 |
| }, |
| { |
| "epoch": 0.4428169014084507, |
| "grad_norm": 0.374753617359853, |
| "learning_rate": 4.736842105263158e-05, |
| "loss": 0.4256, |
| "step": 393 |
| }, |
| { |
| "epoch": 0.443943661971831, |
| "grad_norm": 0.3305173070867681, |
| "learning_rate": 4.734753550543025e-05, |
| "loss": 0.4329, |
| "step": 394 |
| }, |
| { |
| "epoch": 0.4450704225352113, |
| "grad_norm": 0.36944667056910313, |
| "learning_rate": 4.732664995822891e-05, |
| "loss": 0.4695, |
| "step": 395 |
| }, |
| { |
| "epoch": 0.4461971830985916, |
| "grad_norm": 0.3928435655072016, |
| "learning_rate": 4.730576441102757e-05, |
| "loss": 0.4319, |
| "step": 396 |
| }, |
| { |
| "epoch": 0.4473239436619718, |
| "grad_norm": 0.336375328408579, |
| "learning_rate": 4.7284878863826234e-05, |
| "loss": 0.425, |
| "step": 397 |
| }, |
| { |
| "epoch": 0.4484507042253521, |
| "grad_norm": 0.39132740901645385, |
| "learning_rate": 4.72639933166249e-05, |
| "loss": 0.4431, |
| "step": 398 |
| }, |
| { |
| "epoch": 0.4495774647887324, |
| "grad_norm": 0.369926735816109, |
| "learning_rate": 4.724310776942356e-05, |
| "loss": 0.429, |
| "step": 399 |
| }, |
| { |
| "epoch": 0.4507042253521127, |
| "grad_norm": 0.39992553614477316, |
| "learning_rate": 4.722222222222222e-05, |
| "loss": 0.4484, |
| "step": 400 |
| }, |
| { |
| "epoch": 0.451830985915493, |
| "grad_norm": 0.4458014520019311, |
| "learning_rate": 4.720133667502089e-05, |
| "loss": 0.4483, |
| "step": 401 |
| }, |
| { |
| "epoch": 0.4529577464788732, |
| "grad_norm": 0.4549007653653714, |
| "learning_rate": 4.7180451127819555e-05, |
| "loss": 0.4745, |
| "step": 402 |
| }, |
| { |
| "epoch": 0.4540845070422535, |
| "grad_norm": 0.43605986381583667, |
| "learning_rate": 4.7159565580618215e-05, |
| "loss": 0.4234, |
| "step": 403 |
| }, |
| { |
| "epoch": 0.4552112676056338, |
| "grad_norm": 0.43415229649505843, |
| "learning_rate": 4.7138680033416875e-05, |
| "loss": 0.417, |
| "step": 404 |
| }, |
| { |
| "epoch": 0.4563380281690141, |
| "grad_norm": 0.45516649193571096, |
| "learning_rate": 4.711779448621554e-05, |
| "loss": 0.4116, |
| "step": 405 |
| }, |
| { |
| "epoch": 0.4574647887323944, |
| "grad_norm": 0.47614856097142705, |
| "learning_rate": 4.709690893901421e-05, |
| "loss": 0.4447, |
| "step": 406 |
| }, |
| { |
| "epoch": 0.45859154929577467, |
| "grad_norm": 0.3842022687208916, |
| "learning_rate": 4.707602339181287e-05, |
| "loss": 0.4265, |
| "step": 407 |
| }, |
| { |
| "epoch": 0.4597183098591549, |
| "grad_norm": 0.5003337639832289, |
| "learning_rate": 4.705513784461153e-05, |
| "loss": 0.4576, |
| "step": 408 |
| }, |
| { |
| "epoch": 0.4608450704225352, |
| "grad_norm": 0.371920341514909, |
| "learning_rate": 4.7034252297410195e-05, |
| "loss": 0.4139, |
| "step": 409 |
| }, |
| { |
| "epoch": 0.4619718309859155, |
| "grad_norm": 0.4327864270513005, |
| "learning_rate": 4.701336675020886e-05, |
| "loss": 0.4482, |
| "step": 410 |
| }, |
| { |
| "epoch": 0.4630985915492958, |
| "grad_norm": 0.4134719706292766, |
| "learning_rate": 4.699248120300752e-05, |
| "loss": 0.4187, |
| "step": 411 |
| }, |
| { |
| "epoch": 0.46422535211267607, |
| "grad_norm": 0.40524197759616115, |
| "learning_rate": 4.697159565580618e-05, |
| "loss": 0.4435, |
| "step": 412 |
| }, |
| { |
| "epoch": 0.46535211267605636, |
| "grad_norm": 0.42067638203215985, |
| "learning_rate": 4.695071010860485e-05, |
| "loss": 0.4363, |
| "step": 413 |
| }, |
| { |
| "epoch": 0.4664788732394366, |
| "grad_norm": 0.48630981183960437, |
| "learning_rate": 4.6929824561403515e-05, |
| "loss": 0.4577, |
| "step": 414 |
| }, |
| { |
| "epoch": 0.4676056338028169, |
| "grad_norm": 0.36522530679978066, |
| "learning_rate": 4.6908939014202175e-05, |
| "loss": 0.4353, |
| "step": 415 |
| }, |
| { |
| "epoch": 0.4687323943661972, |
| "grad_norm": 0.5227222722102871, |
| "learning_rate": 4.6888053467000835e-05, |
| "loss": 0.4226, |
| "step": 416 |
| }, |
| { |
| "epoch": 0.46985915492957747, |
| "grad_norm": 0.5069277128907972, |
| "learning_rate": 4.6867167919799495e-05, |
| "loss": 0.4316, |
| "step": 417 |
| }, |
| { |
| "epoch": 0.47098591549295776, |
| "grad_norm": 0.484118433212617, |
| "learning_rate": 4.684628237259817e-05, |
| "loss": 0.4224, |
| "step": 418 |
| }, |
| { |
| "epoch": 0.47211267605633805, |
| "grad_norm": 0.6464454503406327, |
| "learning_rate": 4.682539682539683e-05, |
| "loss": 0.4267, |
| "step": 419 |
| }, |
| { |
| "epoch": 0.4732394366197183, |
| "grad_norm": 0.3914137430386243, |
| "learning_rate": 4.680451127819549e-05, |
| "loss": 0.4344, |
| "step": 420 |
| }, |
| { |
| "epoch": 0.4743661971830986, |
| "grad_norm": 0.6288927819005267, |
| "learning_rate": 4.678362573099415e-05, |
| "loss": 0.4471, |
| "step": 421 |
| }, |
| { |
| "epoch": 0.4754929577464789, |
| "grad_norm": 0.42140273410218787, |
| "learning_rate": 4.676274018379282e-05, |
| "loss": 0.4376, |
| "step": 422 |
| }, |
| { |
| "epoch": 0.47661971830985916, |
| "grad_norm": 0.5218529452733929, |
| "learning_rate": 4.674185463659148e-05, |
| "loss": 0.4329, |
| "step": 423 |
| }, |
| { |
| "epoch": 0.47774647887323946, |
| "grad_norm": 0.4462368667411693, |
| "learning_rate": 4.672096908939014e-05, |
| "loss": 0.4427, |
| "step": 424 |
| }, |
| { |
| "epoch": 0.4788732394366197, |
| "grad_norm": 0.41668970796138727, |
| "learning_rate": 4.67000835421888e-05, |
| "loss": 0.4405, |
| "step": 425 |
| }, |
| { |
| "epoch": 0.48, |
| "grad_norm": 0.3831817162113262, |
| "learning_rate": 4.6679197994987476e-05, |
| "loss": 0.4284, |
| "step": 426 |
| }, |
| { |
| "epoch": 0.4811267605633803, |
| "grad_norm": 0.4229704513884789, |
| "learning_rate": 4.6658312447786136e-05, |
| "loss": 0.4454, |
| "step": 427 |
| }, |
| { |
| "epoch": 0.48225352112676056, |
| "grad_norm": 0.48032056597692396, |
| "learning_rate": 4.6637426900584796e-05, |
| "loss": 0.4377, |
| "step": 428 |
| }, |
| { |
| "epoch": 0.48338028169014086, |
| "grad_norm": 0.3352986960279401, |
| "learning_rate": 4.6616541353383456e-05, |
| "loss": 0.4259, |
| "step": 429 |
| }, |
| { |
| "epoch": 0.48450704225352115, |
| "grad_norm": 0.4131630884201645, |
| "learning_rate": 4.659565580618213e-05, |
| "loss": 0.4317, |
| "step": 430 |
| }, |
| { |
| "epoch": 0.4856338028169014, |
| "grad_norm": 0.45751748634481637, |
| "learning_rate": 4.657477025898079e-05, |
| "loss": 0.4405, |
| "step": 431 |
| }, |
| { |
| "epoch": 0.4867605633802817, |
| "grad_norm": 0.4139862920321763, |
| "learning_rate": 4.655388471177945e-05, |
| "loss": 0.4353, |
| "step": 432 |
| }, |
| { |
| "epoch": 0.48788732394366197, |
| "grad_norm": 0.43396018489841076, |
| "learning_rate": 4.653299916457811e-05, |
| "loss": 0.4182, |
| "step": 433 |
| }, |
| { |
| "epoch": 0.48901408450704226, |
| "grad_norm": 0.4406093388366263, |
| "learning_rate": 4.6512113617376776e-05, |
| "loss": 0.4457, |
| "step": 434 |
| }, |
| { |
| "epoch": 0.49014084507042255, |
| "grad_norm": 0.43843357456964716, |
| "learning_rate": 4.649122807017544e-05, |
| "loss": 0.4339, |
| "step": 435 |
| }, |
| { |
| "epoch": 0.49126760563380284, |
| "grad_norm": 0.3239492046043941, |
| "learning_rate": 4.64703425229741e-05, |
| "loss": 0.4134, |
| "step": 436 |
| }, |
| { |
| "epoch": 0.4923943661971831, |
| "grad_norm": 0.40897075153266427, |
| "learning_rate": 4.644945697577276e-05, |
| "loss": 0.4339, |
| "step": 437 |
| }, |
| { |
| "epoch": 0.49352112676056337, |
| "grad_norm": 0.3331343883758221, |
| "learning_rate": 4.642857142857143e-05, |
| "loss": 0.409, |
| "step": 438 |
| }, |
| { |
| "epoch": 0.49464788732394366, |
| "grad_norm": 0.3994719175224885, |
| "learning_rate": 4.6407685881370096e-05, |
| "loss": 0.4189, |
| "step": 439 |
| }, |
| { |
| "epoch": 0.49577464788732395, |
| "grad_norm": 0.387575870558901, |
| "learning_rate": 4.6386800334168756e-05, |
| "loss": 0.4342, |
| "step": 440 |
| }, |
| { |
| "epoch": 0.49690140845070424, |
| "grad_norm": 0.38418921533926903, |
| "learning_rate": 4.6365914786967416e-05, |
| "loss": 0.4294, |
| "step": 441 |
| }, |
| { |
| "epoch": 0.49802816901408453, |
| "grad_norm": 0.515380262957802, |
| "learning_rate": 4.634502923976608e-05, |
| "loss": 0.4215, |
| "step": 442 |
| }, |
| { |
| "epoch": 0.49915492957746477, |
| "grad_norm": 0.3892122015570572, |
| "learning_rate": 4.632414369256475e-05, |
| "loss": 0.436, |
| "step": 443 |
| }, |
| { |
| "epoch": 0.5002816901408451, |
| "grad_norm": 0.43122533906327076, |
| "learning_rate": 4.630325814536341e-05, |
| "loss": 0.4174, |
| "step": 444 |
| }, |
| { |
| "epoch": 0.5014084507042254, |
| "grad_norm": 0.3825183575537722, |
| "learning_rate": 4.628237259816207e-05, |
| "loss": 0.4204, |
| "step": 445 |
| }, |
| { |
| "epoch": 0.5025352112676056, |
| "grad_norm": 0.3951031876799554, |
| "learning_rate": 4.6261487050960736e-05, |
| "loss": 0.4524, |
| "step": 446 |
| }, |
| { |
| "epoch": 0.5036619718309859, |
| "grad_norm": 0.3993720699665561, |
| "learning_rate": 4.62406015037594e-05, |
| "loss": 0.4535, |
| "step": 447 |
| }, |
| { |
| "epoch": 0.5047887323943662, |
| "grad_norm": 0.3702213447503512, |
| "learning_rate": 4.621971595655806e-05, |
| "loss": 0.4467, |
| "step": 448 |
| }, |
| { |
| "epoch": 0.5059154929577465, |
| "grad_norm": 0.42987431158793665, |
| "learning_rate": 4.619883040935672e-05, |
| "loss": 0.4491, |
| "step": 449 |
| }, |
| { |
| "epoch": 0.5070422535211268, |
| "grad_norm": 0.39840069823036856, |
| "learning_rate": 4.617794486215539e-05, |
| "loss": 0.4452, |
| "step": 450 |
| }, |
| { |
| "epoch": 0.508169014084507, |
| "grad_norm": 0.3774377587435373, |
| "learning_rate": 4.6157059314954057e-05, |
| "loss": 0.4151, |
| "step": 451 |
| }, |
| { |
| "epoch": 0.5092957746478873, |
| "grad_norm": 0.32586659282974617, |
| "learning_rate": 4.6136173767752717e-05, |
| "loss": 0.4047, |
| "step": 452 |
| }, |
| { |
| "epoch": 0.5104225352112676, |
| "grad_norm": 0.370312046890077, |
| "learning_rate": 4.6115288220551377e-05, |
| "loss": 0.4163, |
| "step": 453 |
| }, |
| { |
| "epoch": 0.5115492957746479, |
| "grad_norm": 0.3819499089937181, |
| "learning_rate": 4.609440267335004e-05, |
| "loss": 0.4144, |
| "step": 454 |
| }, |
| { |
| "epoch": 0.5126760563380282, |
| "grad_norm": 0.43036152053924126, |
| "learning_rate": 4.607351712614871e-05, |
| "loss": 0.4317, |
| "step": 455 |
| }, |
| { |
| "epoch": 0.5138028169014085, |
| "grad_norm": 0.5255656991540114, |
| "learning_rate": 4.605263157894737e-05, |
| "loss": 0.4429, |
| "step": 456 |
| }, |
| { |
| "epoch": 0.5149295774647887, |
| "grad_norm": 0.39606254003461006, |
| "learning_rate": 4.603174603174603e-05, |
| "loss": 0.4415, |
| "step": 457 |
| }, |
| { |
| "epoch": 0.516056338028169, |
| "grad_norm": 0.4396753982202345, |
| "learning_rate": 4.60108604845447e-05, |
| "loss": 0.4216, |
| "step": 458 |
| }, |
| { |
| "epoch": 0.5171830985915493, |
| "grad_norm": 0.42929853762727704, |
| "learning_rate": 4.5989974937343364e-05, |
| "loss": 0.4164, |
| "step": 459 |
| }, |
| { |
| "epoch": 0.5183098591549296, |
| "grad_norm": 0.43170237614681684, |
| "learning_rate": 4.5969089390142024e-05, |
| "loss": 0.4448, |
| "step": 460 |
| }, |
| { |
| "epoch": 0.5194366197183099, |
| "grad_norm": 0.48778911427761545, |
| "learning_rate": 4.5948203842940684e-05, |
| "loss": 0.432, |
| "step": 461 |
| }, |
| { |
| "epoch": 0.5205633802816901, |
| "grad_norm": 0.4312484290328229, |
| "learning_rate": 4.592731829573935e-05, |
| "loss": 0.4354, |
| "step": 462 |
| }, |
| { |
| "epoch": 0.5216901408450704, |
| "grad_norm": 0.41655513515023973, |
| "learning_rate": 4.590643274853802e-05, |
| "loss": 0.4378, |
| "step": 463 |
| }, |
| { |
| "epoch": 0.5228169014084507, |
| "grad_norm": 0.535031469710637, |
| "learning_rate": 4.588554720133668e-05, |
| "loss": 0.456, |
| "step": 464 |
| }, |
| { |
| "epoch": 0.523943661971831, |
| "grad_norm": 0.4089281972043279, |
| "learning_rate": 4.586466165413534e-05, |
| "loss": 0.4412, |
| "step": 465 |
| }, |
| { |
| "epoch": 0.5250704225352113, |
| "grad_norm": 0.49326718973583, |
| "learning_rate": 4.5843776106934004e-05, |
| "loss": 0.4505, |
| "step": 466 |
| }, |
| { |
| "epoch": 0.5261971830985915, |
| "grad_norm": 0.4220477561910598, |
| "learning_rate": 4.582289055973267e-05, |
| "loss": 0.4277, |
| "step": 467 |
| }, |
| { |
| "epoch": 0.5273239436619719, |
| "grad_norm": 0.46060462678182534, |
| "learning_rate": 4.580200501253133e-05, |
| "loss": 0.4593, |
| "step": 468 |
| }, |
| { |
| "epoch": 0.5284507042253521, |
| "grad_norm": 0.4370238774092618, |
| "learning_rate": 4.578111946532999e-05, |
| "loss": 0.4173, |
| "step": 469 |
| }, |
| { |
| "epoch": 0.5295774647887324, |
| "grad_norm": 0.4313253955555467, |
| "learning_rate": 4.576023391812866e-05, |
| "loss": 0.4289, |
| "step": 470 |
| }, |
| { |
| "epoch": 0.5307042253521127, |
| "grad_norm": 0.3539426843281356, |
| "learning_rate": 4.5739348370927324e-05, |
| "loss": 0.4154, |
| "step": 471 |
| }, |
| { |
| "epoch": 0.5318309859154929, |
| "grad_norm": 0.4487503536152153, |
| "learning_rate": 4.5718462823725984e-05, |
| "loss": 0.4304, |
| "step": 472 |
| }, |
| { |
| "epoch": 0.5329577464788733, |
| "grad_norm": 0.3698974487994367, |
| "learning_rate": 4.5697577276524644e-05, |
| "loss": 0.435, |
| "step": 473 |
| }, |
| { |
| "epoch": 0.5340845070422535, |
| "grad_norm": 0.41550377526255394, |
| "learning_rate": 4.567669172932331e-05, |
| "loss": 0.4502, |
| "step": 474 |
| }, |
| { |
| "epoch": 0.5352112676056338, |
| "grad_norm": 0.3800550727022986, |
| "learning_rate": 4.565580618212197e-05, |
| "loss": 0.427, |
| "step": 475 |
| }, |
| { |
| "epoch": 0.5363380281690141, |
| "grad_norm": 0.3988520807240111, |
| "learning_rate": 4.563492063492064e-05, |
| "loss": 0.4127, |
| "step": 476 |
| }, |
| { |
| "epoch": 0.5374647887323943, |
| "grad_norm": 0.4376872891197101, |
| "learning_rate": 4.56140350877193e-05, |
| "loss": 0.4291, |
| "step": 477 |
| }, |
| { |
| "epoch": 0.5385915492957747, |
| "grad_norm": 0.4572428687430911, |
| "learning_rate": 4.5593149540517964e-05, |
| "loss": 0.4216, |
| "step": 478 |
| }, |
| { |
| "epoch": 0.5397183098591549, |
| "grad_norm": 0.49237250245493225, |
| "learning_rate": 4.5572263993316624e-05, |
| "loss": 0.4511, |
| "step": 479 |
| }, |
| { |
| "epoch": 0.5408450704225352, |
| "grad_norm": 0.4082354204406026, |
| "learning_rate": 4.555137844611529e-05, |
| "loss": 0.4344, |
| "step": 480 |
| }, |
| { |
| "epoch": 0.5419718309859155, |
| "grad_norm": 0.5330130407437706, |
| "learning_rate": 4.553049289891395e-05, |
| "loss": 0.4268, |
| "step": 481 |
| }, |
| { |
| "epoch": 0.5430985915492957, |
| "grad_norm": 0.3877512017370143, |
| "learning_rate": 4.550960735171262e-05, |
| "loss": 0.4339, |
| "step": 482 |
| }, |
| { |
| "epoch": 0.5442253521126761, |
| "grad_norm": 0.496803144386499, |
| "learning_rate": 4.548872180451128e-05, |
| "loss": 0.4265, |
| "step": 483 |
| }, |
| { |
| "epoch": 0.5453521126760563, |
| "grad_norm": 0.3924339898963181, |
| "learning_rate": 4.5467836257309945e-05, |
| "loss": 0.4316, |
| "step": 484 |
| }, |
| { |
| "epoch": 0.5464788732394367, |
| "grad_norm": 0.5523374929860176, |
| "learning_rate": 4.5446950710108605e-05, |
| "loss": 0.4374, |
| "step": 485 |
| }, |
| { |
| "epoch": 0.5476056338028169, |
| "grad_norm": 0.39714016214787484, |
| "learning_rate": 4.542606516290727e-05, |
| "loss": 0.4141, |
| "step": 486 |
| }, |
| { |
| "epoch": 0.5487323943661971, |
| "grad_norm": 0.4672623284669598, |
| "learning_rate": 4.540517961570593e-05, |
| "loss": 0.4338, |
| "step": 487 |
| }, |
| { |
| "epoch": 0.5498591549295775, |
| "grad_norm": 0.45532761050056775, |
| "learning_rate": 4.53842940685046e-05, |
| "loss": 0.4282, |
| "step": 488 |
| }, |
| { |
| "epoch": 0.5509859154929577, |
| "grad_norm": 0.3512267218354597, |
| "learning_rate": 4.536340852130326e-05, |
| "loss": 0.4262, |
| "step": 489 |
| }, |
| { |
| "epoch": 0.5521126760563381, |
| "grad_norm": 0.5344471882676834, |
| "learning_rate": 4.5342522974101925e-05, |
| "loss": 0.4214, |
| "step": 490 |
| }, |
| { |
| "epoch": 0.5532394366197183, |
| "grad_norm": 0.40804809863465386, |
| "learning_rate": 4.5321637426900585e-05, |
| "loss": 0.4176, |
| "step": 491 |
| }, |
| { |
| "epoch": 0.5543661971830985, |
| "grad_norm": 0.5026100013021888, |
| "learning_rate": 4.530075187969925e-05, |
| "loss": 0.4435, |
| "step": 492 |
| }, |
| { |
| "epoch": 0.5554929577464789, |
| "grad_norm": 0.4298804130144407, |
| "learning_rate": 4.527986633249791e-05, |
| "loss": 0.42, |
| "step": 493 |
| }, |
| { |
| "epoch": 0.5566197183098591, |
| "grad_norm": 0.4450683590913135, |
| "learning_rate": 4.525898078529658e-05, |
| "loss": 0.4361, |
| "step": 494 |
| }, |
| { |
| "epoch": 0.5577464788732395, |
| "grad_norm": 0.5168808901708248, |
| "learning_rate": 4.523809523809524e-05, |
| "loss": 0.4044, |
| "step": 495 |
| }, |
| { |
| "epoch": 0.5588732394366197, |
| "grad_norm": 0.3796230305562614, |
| "learning_rate": 4.5217209690893905e-05, |
| "loss": 0.3923, |
| "step": 496 |
| }, |
| { |
| "epoch": 0.56, |
| "grad_norm": 0.494439451537962, |
| "learning_rate": 4.5196324143692565e-05, |
| "loss": 0.4134, |
| "step": 497 |
| }, |
| { |
| "epoch": 0.5611267605633803, |
| "grad_norm": 0.38887094714361475, |
| "learning_rate": 4.517543859649123e-05, |
| "loss": 0.4277, |
| "step": 498 |
| }, |
| { |
| "epoch": 0.5622535211267605, |
| "grad_norm": 0.4771133935931392, |
| "learning_rate": 4.515455304928989e-05, |
| "loss": 0.4332, |
| "step": 499 |
| }, |
| { |
| "epoch": 0.5633802816901409, |
| "grad_norm": 0.5833210814037667, |
| "learning_rate": 4.513366750208856e-05, |
| "loss": 0.4479, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.5645070422535211, |
| "grad_norm": 0.4650742185978965, |
| "learning_rate": 4.511278195488722e-05, |
| "loss": 0.4276, |
| "step": 501 |
| }, |
| { |
| "epoch": 0.5656338028169015, |
| "grad_norm": 0.7649017257631372, |
| "learning_rate": 4.5091896407685885e-05, |
| "loss": 0.4271, |
| "step": 502 |
| }, |
| { |
| "epoch": 0.5667605633802817, |
| "grad_norm": 0.40039301536077476, |
| "learning_rate": 4.5071010860484545e-05, |
| "loss": 0.4291, |
| "step": 503 |
| }, |
| { |
| "epoch": 0.5678873239436619, |
| "grad_norm": 0.6664943828112884, |
| "learning_rate": 4.505012531328321e-05, |
| "loss": 0.4256, |
| "step": 504 |
| }, |
| { |
| "epoch": 0.5690140845070423, |
| "grad_norm": 0.4303326479225263, |
| "learning_rate": 4.502923976608187e-05, |
| "loss": 0.4366, |
| "step": 505 |
| }, |
| { |
| "epoch": 0.5701408450704225, |
| "grad_norm": 0.48633413358954836, |
| "learning_rate": 4.500835421888054e-05, |
| "loss": 0.4264, |
| "step": 506 |
| }, |
| { |
| "epoch": 0.5712676056338029, |
| "grad_norm": 0.48788986503015436, |
| "learning_rate": 4.49874686716792e-05, |
| "loss": 0.4094, |
| "step": 507 |
| }, |
| { |
| "epoch": 0.5723943661971831, |
| "grad_norm": 0.44298895531473165, |
| "learning_rate": 4.4966583124477866e-05, |
| "loss": 0.4142, |
| "step": 508 |
| }, |
| { |
| "epoch": 0.5735211267605633, |
| "grad_norm": 0.42806203997959974, |
| "learning_rate": 4.4945697577276526e-05, |
| "loss": 0.4358, |
| "step": 509 |
| }, |
| { |
| "epoch": 0.5746478873239437, |
| "grad_norm": 0.43778101558906973, |
| "learning_rate": 4.492481203007519e-05, |
| "loss": 0.4124, |
| "step": 510 |
| }, |
| { |
| "epoch": 0.5757746478873239, |
| "grad_norm": 0.3604903511146798, |
| "learning_rate": 4.490392648287385e-05, |
| "loss": 0.4039, |
| "step": 511 |
| }, |
| { |
| "epoch": 0.5769014084507043, |
| "grad_norm": 2.205061903497407, |
| "learning_rate": 4.488304093567251e-05, |
| "loss": 0.4032, |
| "step": 512 |
| }, |
| { |
| "epoch": 0.5780281690140845, |
| "grad_norm": 0.5118994703596127, |
| "learning_rate": 4.486215538847118e-05, |
| "loss": 0.4059, |
| "step": 513 |
| }, |
| { |
| "epoch": 0.5791549295774648, |
| "grad_norm": 0.40456548089618005, |
| "learning_rate": 4.4841269841269846e-05, |
| "loss": 0.4197, |
| "step": 514 |
| }, |
| { |
| "epoch": 0.5802816901408451, |
| "grad_norm": 0.4337732467996675, |
| "learning_rate": 4.4820384294068506e-05, |
| "loss": 0.4215, |
| "step": 515 |
| }, |
| { |
| "epoch": 0.5814084507042253, |
| "grad_norm": 0.37286043099281574, |
| "learning_rate": 4.4799498746867166e-05, |
| "loss": 0.4133, |
| "step": 516 |
| }, |
| { |
| "epoch": 0.5825352112676057, |
| "grad_norm": 0.4430381139696347, |
| "learning_rate": 4.477861319966583e-05, |
| "loss": 0.421, |
| "step": 517 |
| }, |
| { |
| "epoch": 0.5836619718309859, |
| "grad_norm": 0.38762202455986466, |
| "learning_rate": 4.47577276524645e-05, |
| "loss": 0.4464, |
| "step": 518 |
| }, |
| { |
| "epoch": 0.5847887323943662, |
| "grad_norm": 0.4254747897345159, |
| "learning_rate": 4.473684210526316e-05, |
| "loss": 0.4464, |
| "step": 519 |
| }, |
| { |
| "epoch": 0.5859154929577465, |
| "grad_norm": 0.36818503891060694, |
| "learning_rate": 4.471595655806182e-05, |
| "loss": 0.4359, |
| "step": 520 |
| }, |
| { |
| "epoch": 0.5870422535211267, |
| "grad_norm": 0.4396858568603212, |
| "learning_rate": 4.4695071010860486e-05, |
| "loss": 0.4524, |
| "step": 521 |
| }, |
| { |
| "epoch": 0.5881690140845071, |
| "grad_norm": 0.47575935108191253, |
| "learning_rate": 4.467418546365915e-05, |
| "loss": 0.4408, |
| "step": 522 |
| }, |
| { |
| "epoch": 0.5892957746478873, |
| "grad_norm": 0.4798203797254968, |
| "learning_rate": 4.465329991645781e-05, |
| "loss": 0.3955, |
| "step": 523 |
| }, |
| { |
| "epoch": 0.5904225352112676, |
| "grad_norm": 0.491404025492639, |
| "learning_rate": 4.463241436925647e-05, |
| "loss": 0.4347, |
| "step": 524 |
| }, |
| { |
| "epoch": 0.5915492957746479, |
| "grad_norm": 0.49123207445955547, |
| "learning_rate": 4.461152882205514e-05, |
| "loss": 0.4356, |
| "step": 525 |
| }, |
| { |
| "epoch": 0.5926760563380282, |
| "grad_norm": 0.6105485825942882, |
| "learning_rate": 4.4590643274853806e-05, |
| "loss": 0.409, |
| "step": 526 |
| }, |
| { |
| "epoch": 0.5938028169014085, |
| "grad_norm": 0.5207042930483079, |
| "learning_rate": 4.4569757727652466e-05, |
| "loss": 0.441, |
| "step": 527 |
| }, |
| { |
| "epoch": 0.5949295774647887, |
| "grad_norm": 0.5149266515759818, |
| "learning_rate": 4.4548872180451126e-05, |
| "loss": 0.4273, |
| "step": 528 |
| }, |
| { |
| "epoch": 0.596056338028169, |
| "grad_norm": 0.5477491737233843, |
| "learning_rate": 4.452798663324979e-05, |
| "loss": 0.4263, |
| "step": 529 |
| }, |
| { |
| "epoch": 0.5971830985915493, |
| "grad_norm": 0.468103294840018, |
| "learning_rate": 4.450710108604846e-05, |
| "loss": 0.4218, |
| "step": 530 |
| }, |
| { |
| "epoch": 0.5983098591549296, |
| "grad_norm": 0.45956460347799083, |
| "learning_rate": 4.448621553884712e-05, |
| "loss": 0.4334, |
| "step": 531 |
| }, |
| { |
| "epoch": 0.5994366197183099, |
| "grad_norm": 0.4111204934729319, |
| "learning_rate": 4.446532999164578e-05, |
| "loss": 0.4324, |
| "step": 532 |
| }, |
| { |
| "epoch": 0.6005633802816901, |
| "grad_norm": 0.36963074782643357, |
| "learning_rate": 4.4444444444444447e-05, |
| "loss": 0.431, |
| "step": 533 |
| }, |
| { |
| "epoch": 0.6016901408450704, |
| "grad_norm": 0.4104529558446069, |
| "learning_rate": 4.442355889724311e-05, |
| "loss": 0.4372, |
| "step": 534 |
| }, |
| { |
| "epoch": 0.6028169014084507, |
| "grad_norm": 0.37824181533897855, |
| "learning_rate": 4.440267335004177e-05, |
| "loss": 0.4626, |
| "step": 535 |
| }, |
| { |
| "epoch": 0.603943661971831, |
| "grad_norm": 0.3483670591865026, |
| "learning_rate": 4.438178780284043e-05, |
| "loss": 0.4034, |
| "step": 536 |
| }, |
| { |
| "epoch": 0.6050704225352113, |
| "grad_norm": 0.392492115921629, |
| "learning_rate": 4.43609022556391e-05, |
| "loss": 0.4165, |
| "step": 537 |
| }, |
| { |
| "epoch": 0.6061971830985915, |
| "grad_norm": 0.38568538328258095, |
| "learning_rate": 4.434001670843777e-05, |
| "loss": 0.427, |
| "step": 538 |
| }, |
| { |
| "epoch": 0.6073239436619718, |
| "grad_norm": 0.40601482709549314, |
| "learning_rate": 4.431913116123643e-05, |
| "loss": 0.4251, |
| "step": 539 |
| }, |
| { |
| "epoch": 0.6084507042253521, |
| "grad_norm": 0.4320403980156557, |
| "learning_rate": 4.429824561403509e-05, |
| "loss": 0.4463, |
| "step": 540 |
| }, |
| { |
| "epoch": 0.6095774647887324, |
| "grad_norm": 0.3739618657407569, |
| "learning_rate": 4.4277360066833754e-05, |
| "loss": 0.4003, |
| "step": 541 |
| }, |
| { |
| "epoch": 0.6107042253521127, |
| "grad_norm": 0.36832449593704425, |
| "learning_rate": 4.425647451963242e-05, |
| "loss": 0.4245, |
| "step": 542 |
| }, |
| { |
| "epoch": 0.611830985915493, |
| "grad_norm": 0.3614080383744093, |
| "learning_rate": 4.423558897243108e-05, |
| "loss": 0.4178, |
| "step": 543 |
| }, |
| { |
| "epoch": 0.6129577464788732, |
| "grad_norm": 0.37450999344316493, |
| "learning_rate": 4.421470342522974e-05, |
| "loss": 0.4145, |
| "step": 544 |
| }, |
| { |
| "epoch": 0.6140845070422535, |
| "grad_norm": 0.36930155146025684, |
| "learning_rate": 4.419381787802841e-05, |
| "loss": 0.4226, |
| "step": 545 |
| }, |
| { |
| "epoch": 0.6152112676056338, |
| "grad_norm": 0.37304790770081386, |
| "learning_rate": 4.4172932330827074e-05, |
| "loss": 0.4184, |
| "step": 546 |
| }, |
| { |
| "epoch": 0.6163380281690141, |
| "grad_norm": 0.37345149852575577, |
| "learning_rate": 4.4152046783625734e-05, |
| "loss": 0.4419, |
| "step": 547 |
| }, |
| { |
| "epoch": 0.6174647887323944, |
| "grad_norm": 0.3942758512347128, |
| "learning_rate": 4.4131161236424394e-05, |
| "loss": 0.4573, |
| "step": 548 |
| }, |
| { |
| "epoch": 0.6185915492957746, |
| "grad_norm": 0.3407591248518673, |
| "learning_rate": 4.411027568922306e-05, |
| "loss": 0.412, |
| "step": 549 |
| }, |
| { |
| "epoch": 0.6197183098591549, |
| "grad_norm": 0.4414476222318839, |
| "learning_rate": 4.408939014202173e-05, |
| "loss": 0.4467, |
| "step": 550 |
| }, |
| { |
| "epoch": 0.6208450704225352, |
| "grad_norm": 0.3927007732686169, |
| "learning_rate": 4.406850459482039e-05, |
| "loss": 0.43, |
| "step": 551 |
| }, |
| { |
| "epoch": 0.6219718309859155, |
| "grad_norm": 0.31673151571390507, |
| "learning_rate": 4.404761904761905e-05, |
| "loss": 0.3833, |
| "step": 552 |
| }, |
| { |
| "epoch": 0.6230985915492958, |
| "grad_norm": 0.34613960365197394, |
| "learning_rate": 4.402673350041771e-05, |
| "loss": 0.3923, |
| "step": 553 |
| }, |
| { |
| "epoch": 0.624225352112676, |
| "grad_norm": 0.3562786927865309, |
| "learning_rate": 4.400584795321638e-05, |
| "loss": 0.4188, |
| "step": 554 |
| }, |
| { |
| "epoch": 0.6253521126760564, |
| "grad_norm": 0.3559363238612658, |
| "learning_rate": 4.398496240601504e-05, |
| "loss": 0.4437, |
| "step": 555 |
| }, |
| { |
| "epoch": 0.6264788732394366, |
| "grad_norm": 0.3843902875225514, |
| "learning_rate": 4.39640768588137e-05, |
| "loss": 0.42, |
| "step": 556 |
| }, |
| { |
| "epoch": 0.6276056338028169, |
| "grad_norm": 0.36186469271967187, |
| "learning_rate": 4.394319131161236e-05, |
| "loss": 0.3991, |
| "step": 557 |
| }, |
| { |
| "epoch": 0.6287323943661972, |
| "grad_norm": 0.33224653105469515, |
| "learning_rate": 4.3922305764411034e-05, |
| "loss": 0.4303, |
| "step": 558 |
| }, |
| { |
| "epoch": 0.6298591549295774, |
| "grad_norm": 0.4442020175280569, |
| "learning_rate": 4.3901420217209694e-05, |
| "loss": 0.4325, |
| "step": 559 |
| }, |
| { |
| "epoch": 0.6309859154929578, |
| "grad_norm": 0.32939188578737433, |
| "learning_rate": 4.3880534670008354e-05, |
| "loss": 0.4145, |
| "step": 560 |
| }, |
| { |
| "epoch": 0.632112676056338, |
| "grad_norm": 0.3941484544311852, |
| "learning_rate": 4.3859649122807014e-05, |
| "loss": 0.4449, |
| "step": 561 |
| }, |
| { |
| "epoch": 0.6332394366197183, |
| "grad_norm": 0.37320706101610485, |
| "learning_rate": 4.383876357560569e-05, |
| "loss": 0.429, |
| "step": 562 |
| }, |
| { |
| "epoch": 0.6343661971830986, |
| "grad_norm": 0.37437865774917695, |
| "learning_rate": 4.381787802840435e-05, |
| "loss": 0.4185, |
| "step": 563 |
| }, |
| { |
| "epoch": 0.6354929577464788, |
| "grad_norm": 0.4286549189097408, |
| "learning_rate": 4.379699248120301e-05, |
| "loss": 0.4292, |
| "step": 564 |
| }, |
| { |
| "epoch": 0.6366197183098592, |
| "grad_norm": 0.35232103572447265, |
| "learning_rate": 4.377610693400167e-05, |
| "loss": 0.4129, |
| "step": 565 |
| }, |
| { |
| "epoch": 0.6377464788732394, |
| "grad_norm": 0.37846014546497503, |
| "learning_rate": 4.375522138680034e-05, |
| "loss": 0.4141, |
| "step": 566 |
| }, |
| { |
| "epoch": 0.6388732394366197, |
| "grad_norm": 0.4434692424607111, |
| "learning_rate": 4.3734335839599e-05, |
| "loss": 0.4075, |
| "step": 567 |
| }, |
| { |
| "epoch": 0.64, |
| "grad_norm": 0.37421587532396255, |
| "learning_rate": 4.371345029239766e-05, |
| "loss": 0.4087, |
| "step": 568 |
| }, |
| { |
| "epoch": 0.6411267605633802, |
| "grad_norm": 0.39080496222014216, |
| "learning_rate": 4.369256474519632e-05, |
| "loss": 0.4394, |
| "step": 569 |
| }, |
| { |
| "epoch": 0.6422535211267606, |
| "grad_norm": 0.3066071449286599, |
| "learning_rate": 4.367167919799499e-05, |
| "loss": 0.4259, |
| "step": 570 |
| }, |
| { |
| "epoch": 0.6433802816901408, |
| "grad_norm": 0.43110701849869826, |
| "learning_rate": 4.3650793650793655e-05, |
| "loss": 0.4267, |
| "step": 571 |
| }, |
| { |
| "epoch": 0.6445070422535212, |
| "grad_norm": 0.3124331941008899, |
| "learning_rate": 4.3629908103592315e-05, |
| "loss": 0.4092, |
| "step": 572 |
| }, |
| { |
| "epoch": 0.6456338028169014, |
| "grad_norm": 0.4571726835635694, |
| "learning_rate": 4.3609022556390975e-05, |
| "loss": 0.4033, |
| "step": 573 |
| }, |
| { |
| "epoch": 0.6467605633802817, |
| "grad_norm": 0.3705030567782246, |
| "learning_rate": 4.358813700918964e-05, |
| "loss": 0.4112, |
| "step": 574 |
| }, |
| { |
| "epoch": 0.647887323943662, |
| "grad_norm": 0.44415346849679094, |
| "learning_rate": 4.356725146198831e-05, |
| "loss": 0.4459, |
| "step": 575 |
| }, |
| { |
| "epoch": 0.6490140845070422, |
| "grad_norm": 0.4080778529925401, |
| "learning_rate": 4.354636591478697e-05, |
| "loss": 0.4, |
| "step": 576 |
| }, |
| { |
| "epoch": 0.6501408450704226, |
| "grad_norm": 0.4163294923487941, |
| "learning_rate": 4.352548036758563e-05, |
| "loss": 0.4256, |
| "step": 577 |
| }, |
| { |
| "epoch": 0.6512676056338028, |
| "grad_norm": 0.4200663678367453, |
| "learning_rate": 4.3504594820384295e-05, |
| "loss": 0.4517, |
| "step": 578 |
| }, |
| { |
| "epoch": 0.652394366197183, |
| "grad_norm": 0.42091769996589545, |
| "learning_rate": 4.348370927318296e-05, |
| "loss": 0.4177, |
| "step": 579 |
| }, |
| { |
| "epoch": 0.6535211267605634, |
| "grad_norm": 0.3307175111414965, |
| "learning_rate": 4.346282372598162e-05, |
| "loss": 0.4279, |
| "step": 580 |
| }, |
| { |
| "epoch": 0.6546478873239436, |
| "grad_norm": 0.4406406069874574, |
| "learning_rate": 4.344193817878028e-05, |
| "loss": 0.4478, |
| "step": 581 |
| }, |
| { |
| "epoch": 0.655774647887324, |
| "grad_norm": 0.37920787525299443, |
| "learning_rate": 4.342105263157895e-05, |
| "loss": 0.4318, |
| "step": 582 |
| }, |
| { |
| "epoch": 0.6569014084507042, |
| "grad_norm": 0.41822171166089017, |
| "learning_rate": 4.3400167084377615e-05, |
| "loss": 0.409, |
| "step": 583 |
| }, |
| { |
| "epoch": 0.6580281690140845, |
| "grad_norm": 0.3671624982481095, |
| "learning_rate": 4.3379281537176275e-05, |
| "loss": 0.3919, |
| "step": 584 |
| }, |
| { |
| "epoch": 0.6591549295774648, |
| "grad_norm": 0.4322278406739707, |
| "learning_rate": 4.3358395989974935e-05, |
| "loss": 0.4238, |
| "step": 585 |
| }, |
| { |
| "epoch": 0.660281690140845, |
| "grad_norm": 0.39790250618223566, |
| "learning_rate": 4.33375104427736e-05, |
| "loss": 0.4391, |
| "step": 586 |
| }, |
| { |
| "epoch": 0.6614084507042254, |
| "grad_norm": 0.4065653425921878, |
| "learning_rate": 4.331662489557227e-05, |
| "loss": 0.433, |
| "step": 587 |
| }, |
| { |
| "epoch": 0.6625352112676056, |
| "grad_norm": 0.38700056294166, |
| "learning_rate": 4.329573934837093e-05, |
| "loss": 0.419, |
| "step": 588 |
| }, |
| { |
| "epoch": 0.663661971830986, |
| "grad_norm": 0.387974246136351, |
| "learning_rate": 4.327485380116959e-05, |
| "loss": 0.4237, |
| "step": 589 |
| }, |
| { |
| "epoch": 0.6647887323943662, |
| "grad_norm": 0.3990357887007478, |
| "learning_rate": 4.3253968253968256e-05, |
| "loss": 0.4194, |
| "step": 590 |
| }, |
| { |
| "epoch": 0.6659154929577464, |
| "grad_norm": 0.3496417106380883, |
| "learning_rate": 4.323308270676692e-05, |
| "loss": 0.4079, |
| "step": 591 |
| }, |
| { |
| "epoch": 0.6670422535211268, |
| "grad_norm": 0.5495630885527686, |
| "learning_rate": 4.321219715956558e-05, |
| "loss": 0.4357, |
| "step": 592 |
| }, |
| { |
| "epoch": 0.668169014084507, |
| "grad_norm": 0.2900877904850997, |
| "learning_rate": 4.319131161236424e-05, |
| "loss": 0.4043, |
| "step": 593 |
| }, |
| { |
| "epoch": 0.6692957746478874, |
| "grad_norm": 0.5601474985977506, |
| "learning_rate": 4.317042606516291e-05, |
| "loss": 0.41, |
| "step": 594 |
| }, |
| { |
| "epoch": 0.6704225352112676, |
| "grad_norm": 0.3021224989529405, |
| "learning_rate": 4.3149540517961576e-05, |
| "loss": 0.408, |
| "step": 595 |
| }, |
| { |
| "epoch": 0.6715492957746478, |
| "grad_norm": 0.4829631218423333, |
| "learning_rate": 4.3128654970760236e-05, |
| "loss": 0.4386, |
| "step": 596 |
| }, |
| { |
| "epoch": 0.6726760563380282, |
| "grad_norm": 0.359916714114907, |
| "learning_rate": 4.3107769423558896e-05, |
| "loss": 0.3914, |
| "step": 597 |
| }, |
| { |
| "epoch": 0.6738028169014084, |
| "grad_norm": 0.4765698815625955, |
| "learning_rate": 4.308688387635756e-05, |
| "loss": 0.4005, |
| "step": 598 |
| }, |
| { |
| "epoch": 0.6749295774647888, |
| "grad_norm": 1.237364168501525, |
| "learning_rate": 4.306599832915623e-05, |
| "loss": 0.4228, |
| "step": 599 |
| }, |
| { |
| "epoch": 0.676056338028169, |
| "grad_norm": 0.5930283676015237, |
| "learning_rate": 4.304511278195489e-05, |
| "loss": 0.4148, |
| "step": 600 |
| }, |
| { |
| "epoch": 0.6771830985915493, |
| "grad_norm": 0.5209464944628275, |
| "learning_rate": 4.302422723475355e-05, |
| "loss": 0.4123, |
| "step": 601 |
| }, |
| { |
| "epoch": 0.6783098591549296, |
| "grad_norm": 0.5466948649482853, |
| "learning_rate": 4.3003341687552216e-05, |
| "loss": 0.4427, |
| "step": 602 |
| }, |
| { |
| "epoch": 0.6794366197183098, |
| "grad_norm": 0.45120228009864977, |
| "learning_rate": 4.298245614035088e-05, |
| "loss": 0.4303, |
| "step": 603 |
| }, |
| { |
| "epoch": 0.6805633802816902, |
| "grad_norm": 0.4505983118265598, |
| "learning_rate": 4.296157059314954e-05, |
| "loss": 0.4043, |
| "step": 604 |
| }, |
| { |
| "epoch": 0.6816901408450704, |
| "grad_norm": 0.3585135118397449, |
| "learning_rate": 4.29406850459482e-05, |
| "loss": 0.4061, |
| "step": 605 |
| }, |
| { |
| "epoch": 0.6828169014084507, |
| "grad_norm": 0.46174446600580643, |
| "learning_rate": 4.291979949874687e-05, |
| "loss": 0.4247, |
| "step": 606 |
| }, |
| { |
| "epoch": 0.683943661971831, |
| "grad_norm": 0.5222240839271706, |
| "learning_rate": 4.2898913951545536e-05, |
| "loss": 0.4256, |
| "step": 607 |
| }, |
| { |
| "epoch": 0.6850704225352112, |
| "grad_norm": 0.3614268309232858, |
| "learning_rate": 4.2878028404344196e-05, |
| "loss": 0.4367, |
| "step": 608 |
| }, |
| { |
| "epoch": 0.6861971830985916, |
| "grad_norm": 0.5036220549816727, |
| "learning_rate": 4.2857142857142856e-05, |
| "loss": 0.4162, |
| "step": 609 |
| }, |
| { |
| "epoch": 0.6873239436619718, |
| "grad_norm": 0.3363253080485147, |
| "learning_rate": 4.283625730994152e-05, |
| "loss": 0.4245, |
| "step": 610 |
| }, |
| { |
| "epoch": 0.6884507042253522, |
| "grad_norm": 0.46033573962506164, |
| "learning_rate": 4.281537176274018e-05, |
| "loss": 0.4215, |
| "step": 611 |
| }, |
| { |
| "epoch": 0.6895774647887324, |
| "grad_norm": 0.3247787059168572, |
| "learning_rate": 4.279448621553885e-05, |
| "loss": 0.419, |
| "step": 612 |
| }, |
| { |
| "epoch": 0.6907042253521126, |
| "grad_norm": 0.4447384588517809, |
| "learning_rate": 4.277360066833751e-05, |
| "loss": 0.4254, |
| "step": 613 |
| }, |
| { |
| "epoch": 0.691830985915493, |
| "grad_norm": 0.32317206065539833, |
| "learning_rate": 4.2752715121136177e-05, |
| "loss": 0.4352, |
| "step": 614 |
| }, |
| { |
| "epoch": 0.6929577464788732, |
| "grad_norm": 0.4461717956132611, |
| "learning_rate": 4.2731829573934837e-05, |
| "loss": 0.3882, |
| "step": 615 |
| }, |
| { |
| "epoch": 0.6940845070422536, |
| "grad_norm": 0.40128470765817387, |
| "learning_rate": 4.27109440267335e-05, |
| "loss": 0.4254, |
| "step": 616 |
| }, |
| { |
| "epoch": 0.6952112676056338, |
| "grad_norm": 0.40703672121281326, |
| "learning_rate": 4.269005847953216e-05, |
| "loss": 0.4026, |
| "step": 617 |
| }, |
| { |
| "epoch": 0.6963380281690141, |
| "grad_norm": 0.3542224954088699, |
| "learning_rate": 4.266917293233083e-05, |
| "loss": 0.41, |
| "step": 618 |
| }, |
| { |
| "epoch": 0.6974647887323944, |
| "grad_norm": 0.45578234504211723, |
| "learning_rate": 4.264828738512949e-05, |
| "loss": 0.41, |
| "step": 619 |
| }, |
| { |
| "epoch": 0.6985915492957746, |
| "grad_norm": 0.4080317444602256, |
| "learning_rate": 4.262740183792816e-05, |
| "loss": 0.4192, |
| "step": 620 |
| }, |
| { |
| "epoch": 0.699718309859155, |
| "grad_norm": 0.4274600706527524, |
| "learning_rate": 4.260651629072682e-05, |
| "loss": 0.4449, |
| "step": 621 |
| }, |
| { |
| "epoch": 0.7008450704225352, |
| "grad_norm": 0.4349579841624264, |
| "learning_rate": 4.2585630743525484e-05, |
| "loss": 0.4139, |
| "step": 622 |
| }, |
| { |
| "epoch": 0.7019718309859155, |
| "grad_norm": 0.370472320194136, |
| "learning_rate": 4.2564745196324144e-05, |
| "loss": 0.409, |
| "step": 623 |
| }, |
| { |
| "epoch": 0.7030985915492958, |
| "grad_norm": 0.3718077496564049, |
| "learning_rate": 4.254385964912281e-05, |
| "loss": 0.4175, |
| "step": 624 |
| }, |
| { |
| "epoch": 0.704225352112676, |
| "grad_norm": 0.4276658047382789, |
| "learning_rate": 4.252297410192147e-05, |
| "loss": 0.4424, |
| "step": 625 |
| }, |
| { |
| "epoch": 0.7053521126760564, |
| "grad_norm": 0.3194214007233317, |
| "learning_rate": 4.250208855472014e-05, |
| "loss": 0.4014, |
| "step": 626 |
| }, |
| { |
| "epoch": 0.7064788732394366, |
| "grad_norm": 0.48810927800348675, |
| "learning_rate": 4.24812030075188e-05, |
| "loss": 0.4408, |
| "step": 627 |
| }, |
| { |
| "epoch": 0.7076056338028169, |
| "grad_norm": 0.41652474245146415, |
| "learning_rate": 4.2460317460317464e-05, |
| "loss": 0.4301, |
| "step": 628 |
| }, |
| { |
| "epoch": 0.7087323943661972, |
| "grad_norm": 0.3835655312696598, |
| "learning_rate": 4.2439431913116124e-05, |
| "loss": 0.4206, |
| "step": 629 |
| }, |
| { |
| "epoch": 0.7098591549295775, |
| "grad_norm": 0.45662664167441197, |
| "learning_rate": 4.241854636591479e-05, |
| "loss": 0.4017, |
| "step": 630 |
| }, |
| { |
| "epoch": 0.7109859154929578, |
| "grad_norm": 0.42170162230275204, |
| "learning_rate": 4.239766081871345e-05, |
| "loss": 0.4262, |
| "step": 631 |
| }, |
| { |
| "epoch": 0.712112676056338, |
| "grad_norm": 0.5617019107854335, |
| "learning_rate": 4.237677527151212e-05, |
| "loss": 0.413, |
| "step": 632 |
| }, |
| { |
| "epoch": 0.7132394366197183, |
| "grad_norm": 0.44358045519069617, |
| "learning_rate": 4.235588972431078e-05, |
| "loss": 0.3937, |
| "step": 633 |
| }, |
| { |
| "epoch": 0.7143661971830986, |
| "grad_norm": 0.38692996860487894, |
| "learning_rate": 4.2335004177109444e-05, |
| "loss": 0.4267, |
| "step": 634 |
| }, |
| { |
| "epoch": 0.7154929577464789, |
| "grad_norm": 0.4348368744099167, |
| "learning_rate": 4.2314118629908104e-05, |
| "loss": 0.4096, |
| "step": 635 |
| }, |
| { |
| "epoch": 0.7166197183098592, |
| "grad_norm": 0.3630151334921331, |
| "learning_rate": 4.229323308270677e-05, |
| "loss": 0.4127, |
| "step": 636 |
| }, |
| { |
| "epoch": 0.7177464788732394, |
| "grad_norm": 0.34479478722909607, |
| "learning_rate": 4.227234753550543e-05, |
| "loss": 0.4125, |
| "step": 637 |
| }, |
| { |
| "epoch": 0.7188732394366197, |
| "grad_norm": 0.33161591976676635, |
| "learning_rate": 4.22514619883041e-05, |
| "loss": 0.4234, |
| "step": 638 |
| }, |
| { |
| "epoch": 0.72, |
| "grad_norm": 0.35947364399414766, |
| "learning_rate": 4.223057644110276e-05, |
| "loss": 0.4, |
| "step": 639 |
| }, |
| { |
| "epoch": 0.7211267605633803, |
| "grad_norm": 0.3871104208671629, |
| "learning_rate": 4.2209690893901424e-05, |
| "loss": 0.4337, |
| "step": 640 |
| }, |
| { |
| "epoch": 0.7222535211267606, |
| "grad_norm": 0.42153964326186993, |
| "learning_rate": 4.2188805346700084e-05, |
| "loss": 0.4334, |
| "step": 641 |
| }, |
| { |
| "epoch": 0.7233802816901408, |
| "grad_norm": 0.34333286440381905, |
| "learning_rate": 4.216791979949875e-05, |
| "loss": 0.4161, |
| "step": 642 |
| }, |
| { |
| "epoch": 0.7245070422535211, |
| "grad_norm": 0.37528246725660674, |
| "learning_rate": 4.214703425229741e-05, |
| "loss": 0.4154, |
| "step": 643 |
| }, |
| { |
| "epoch": 0.7256338028169014, |
| "grad_norm": 0.390514048224168, |
| "learning_rate": 4.212614870509608e-05, |
| "loss": 0.4154, |
| "step": 644 |
| }, |
| { |
| "epoch": 0.7267605633802817, |
| "grad_norm": 0.3000205982909856, |
| "learning_rate": 4.210526315789474e-05, |
| "loss": 0.4087, |
| "step": 645 |
| }, |
| { |
| "epoch": 0.727887323943662, |
| "grad_norm": 0.3672658159191762, |
| "learning_rate": 4.2084377610693405e-05, |
| "loss": 0.4308, |
| "step": 646 |
| }, |
| { |
| "epoch": 0.7290140845070423, |
| "grad_norm": 0.41532606867614813, |
| "learning_rate": 4.2063492063492065e-05, |
| "loss": 0.4084, |
| "step": 647 |
| }, |
| { |
| "epoch": 0.7301408450704225, |
| "grad_norm": 0.35732639249142395, |
| "learning_rate": 4.2042606516290725e-05, |
| "loss": 0.4204, |
| "step": 648 |
| }, |
| { |
| "epoch": 0.7312676056338028, |
| "grad_norm": 0.3849385876548144, |
| "learning_rate": 4.202172096908939e-05, |
| "loss": 0.4281, |
| "step": 649 |
| }, |
| { |
| "epoch": 0.7323943661971831, |
| "grad_norm": 0.38047640795559845, |
| "learning_rate": 4.200083542188806e-05, |
| "loss": 0.4059, |
| "step": 650 |
| }, |
| { |
| "epoch": 0.7335211267605634, |
| "grad_norm": 0.3837923395192255, |
| "learning_rate": 4.197994987468672e-05, |
| "loss": 0.4313, |
| "step": 651 |
| }, |
| { |
| "epoch": 0.7346478873239437, |
| "grad_norm": 0.3774699264614354, |
| "learning_rate": 4.195906432748538e-05, |
| "loss": 0.3941, |
| "step": 652 |
| }, |
| { |
| "epoch": 0.7357746478873239, |
| "grad_norm": 0.2985838697242696, |
| "learning_rate": 4.1938178780284045e-05, |
| "loss": 0.4153, |
| "step": 653 |
| }, |
| { |
| "epoch": 0.7369014084507042, |
| "grad_norm": 0.38164814201138764, |
| "learning_rate": 4.191729323308271e-05, |
| "loss": 0.4143, |
| "step": 654 |
| }, |
| { |
| "epoch": 0.7380281690140845, |
| "grad_norm": 0.3620891871835965, |
| "learning_rate": 4.189640768588137e-05, |
| "loss": 0.4231, |
| "step": 655 |
| }, |
| { |
| "epoch": 0.7391549295774648, |
| "grad_norm": 0.3834560816016984, |
| "learning_rate": 4.187552213868003e-05, |
| "loss": 0.392, |
| "step": 656 |
| }, |
| { |
| "epoch": 0.7402816901408451, |
| "grad_norm": 0.40285992655625297, |
| "learning_rate": 4.18546365914787e-05, |
| "loss": 0.4297, |
| "step": 657 |
| }, |
| { |
| "epoch": 0.7414084507042253, |
| "grad_norm": 0.4044847340584241, |
| "learning_rate": 4.1833751044277365e-05, |
| "loss": 0.4161, |
| "step": 658 |
| }, |
| { |
| "epoch": 0.7425352112676057, |
| "grad_norm": 0.4195935788052179, |
| "learning_rate": 4.1812865497076025e-05, |
| "loss": 0.4137, |
| "step": 659 |
| }, |
| { |
| "epoch": 0.7436619718309859, |
| "grad_norm": 0.3665940252411504, |
| "learning_rate": 4.1791979949874685e-05, |
| "loss": 0.4427, |
| "step": 660 |
| }, |
| { |
| "epoch": 0.7447887323943662, |
| "grad_norm": 0.49282768353460193, |
| "learning_rate": 4.177109440267335e-05, |
| "loss": 0.3939, |
| "step": 661 |
| }, |
| { |
| "epoch": 0.7459154929577465, |
| "grad_norm": 0.3789428349328341, |
| "learning_rate": 4.175020885547202e-05, |
| "loss": 0.3978, |
| "step": 662 |
| }, |
| { |
| "epoch": 0.7470422535211267, |
| "grad_norm": 0.4423781388327986, |
| "learning_rate": 4.172932330827068e-05, |
| "loss": 0.423, |
| "step": 663 |
| }, |
| { |
| "epoch": 0.7481690140845071, |
| "grad_norm": 0.38796006159727037, |
| "learning_rate": 4.170843776106934e-05, |
| "loss": 0.4138, |
| "step": 664 |
| }, |
| { |
| "epoch": 0.7492957746478873, |
| "grad_norm": 0.34306544342853107, |
| "learning_rate": 4.1687552213868005e-05, |
| "loss": 0.3977, |
| "step": 665 |
| }, |
| { |
| "epoch": 0.7504225352112676, |
| "grad_norm": 0.4288434223869902, |
| "learning_rate": 4.166666666666667e-05, |
| "loss": 0.4003, |
| "step": 666 |
| }, |
| { |
| "epoch": 0.7515492957746479, |
| "grad_norm": 0.4006094181432751, |
| "learning_rate": 4.164578111946533e-05, |
| "loss": 0.4048, |
| "step": 667 |
| }, |
| { |
| "epoch": 0.7526760563380281, |
| "grad_norm": 0.40733982774726674, |
| "learning_rate": 4.162489557226399e-05, |
| "loss": 0.3939, |
| "step": 668 |
| }, |
| { |
| "epoch": 0.7538028169014085, |
| "grad_norm": 0.4063549027527979, |
| "learning_rate": 4.160401002506266e-05, |
| "loss": 0.4223, |
| "step": 669 |
| }, |
| { |
| "epoch": 0.7549295774647887, |
| "grad_norm": 0.45687019171833976, |
| "learning_rate": 4.1583124477861326e-05, |
| "loss": 0.4185, |
| "step": 670 |
| }, |
| { |
| "epoch": 0.756056338028169, |
| "grad_norm": 0.36681775649393633, |
| "learning_rate": 4.1562238930659986e-05, |
| "loss": 0.4083, |
| "step": 671 |
| }, |
| { |
| "epoch": 0.7571830985915493, |
| "grad_norm": 0.5537705015482887, |
| "learning_rate": 4.1541353383458646e-05, |
| "loss": 0.4225, |
| "step": 672 |
| }, |
| { |
| "epoch": 0.7583098591549295, |
| "grad_norm": 0.4108896243173496, |
| "learning_rate": 4.152046783625731e-05, |
| "loss": 0.3993, |
| "step": 673 |
| }, |
| { |
| "epoch": 0.7594366197183099, |
| "grad_norm": 0.4180315200055127, |
| "learning_rate": 4.149958228905598e-05, |
| "loss": 0.4069, |
| "step": 674 |
| }, |
| { |
| "epoch": 0.7605633802816901, |
| "grad_norm": 0.43974251385905005, |
| "learning_rate": 4.147869674185464e-05, |
| "loss": 0.4036, |
| "step": 675 |
| }, |
| { |
| "epoch": 0.7616901408450705, |
| "grad_norm": 0.46133112591194336, |
| "learning_rate": 4.14578111946533e-05, |
| "loss": 0.4151, |
| "step": 676 |
| }, |
| { |
| "epoch": 0.7628169014084507, |
| "grad_norm": 0.4285557758637548, |
| "learning_rate": 4.1436925647451966e-05, |
| "loss": 0.4091, |
| "step": 677 |
| }, |
| { |
| "epoch": 0.763943661971831, |
| "grad_norm": 0.45188384956398886, |
| "learning_rate": 4.141604010025063e-05, |
| "loss": 0.4013, |
| "step": 678 |
| }, |
| { |
| "epoch": 0.7650704225352113, |
| "grad_norm": 0.378944363878999, |
| "learning_rate": 4.139515455304929e-05, |
| "loss": 0.3954, |
| "step": 679 |
| }, |
| { |
| "epoch": 0.7661971830985915, |
| "grad_norm": 0.4652707802166915, |
| "learning_rate": 4.137426900584795e-05, |
| "loss": 0.4059, |
| "step": 680 |
| }, |
| { |
| "epoch": 0.7673239436619719, |
| "grad_norm": 0.3315608943628699, |
| "learning_rate": 4.135338345864662e-05, |
| "loss": 0.4301, |
| "step": 681 |
| }, |
| { |
| "epoch": 0.7684507042253521, |
| "grad_norm": 0.3764928938013357, |
| "learning_rate": 4.1332497911445286e-05, |
| "loss": 0.4014, |
| "step": 682 |
| }, |
| { |
| "epoch": 0.7695774647887323, |
| "grad_norm": 0.28199550514715443, |
| "learning_rate": 4.1311612364243946e-05, |
| "loss": 0.3836, |
| "step": 683 |
| }, |
| { |
| "epoch": 0.7707042253521127, |
| "grad_norm": 0.3754349031218491, |
| "learning_rate": 4.1290726817042606e-05, |
| "loss": 0.4017, |
| "step": 684 |
| }, |
| { |
| "epoch": 0.7718309859154929, |
| "grad_norm": 0.29913461312362266, |
| "learning_rate": 4.126984126984127e-05, |
| "loss": 0.384, |
| "step": 685 |
| }, |
| { |
| "epoch": 0.7729577464788733, |
| "grad_norm": 0.45858396872072255, |
| "learning_rate": 4.124895572263994e-05, |
| "loss": 0.4308, |
| "step": 686 |
| }, |
| { |
| "epoch": 0.7740845070422535, |
| "grad_norm": 0.3412505295746519, |
| "learning_rate": 4.12280701754386e-05, |
| "loss": 0.4168, |
| "step": 687 |
| }, |
| { |
| "epoch": 0.7752112676056339, |
| "grad_norm": 0.3858887385455696, |
| "learning_rate": 4.120718462823726e-05, |
| "loss": 0.406, |
| "step": 688 |
| }, |
| { |
| "epoch": 0.7763380281690141, |
| "grad_norm": 0.3345909319603766, |
| "learning_rate": 4.118629908103592e-05, |
| "loss": 0.3944, |
| "step": 689 |
| }, |
| { |
| "epoch": 0.7774647887323943, |
| "grad_norm": 0.40269397288356046, |
| "learning_rate": 4.116541353383459e-05, |
| "loss": 0.405, |
| "step": 690 |
| }, |
| { |
| "epoch": 0.7785915492957747, |
| "grad_norm": 0.3737994454866607, |
| "learning_rate": 4.114452798663325e-05, |
| "loss": 0.4206, |
| "step": 691 |
| }, |
| { |
| "epoch": 0.7797183098591549, |
| "grad_norm": 0.45903125849438714, |
| "learning_rate": 4.112364243943191e-05, |
| "loss": 0.4065, |
| "step": 692 |
| }, |
| { |
| "epoch": 0.7808450704225353, |
| "grad_norm": 0.4034087006226384, |
| "learning_rate": 4.110275689223057e-05, |
| "loss": 0.4383, |
| "step": 693 |
| }, |
| { |
| "epoch": 0.7819718309859155, |
| "grad_norm": 0.4349274997587744, |
| "learning_rate": 4.1081871345029247e-05, |
| "loss": 0.42, |
| "step": 694 |
| }, |
| { |
| "epoch": 0.7830985915492957, |
| "grad_norm": 0.3930297481138719, |
| "learning_rate": 4.1060985797827907e-05, |
| "loss": 0.4071, |
| "step": 695 |
| }, |
| { |
| "epoch": 0.7842253521126761, |
| "grad_norm": 0.5192874311515382, |
| "learning_rate": 4.1040100250626567e-05, |
| "loss": 0.4117, |
| "step": 696 |
| }, |
| { |
| "epoch": 0.7853521126760563, |
| "grad_norm": 0.38351615215668994, |
| "learning_rate": 4.1019214703425227e-05, |
| "loss": 0.3994, |
| "step": 697 |
| }, |
| { |
| "epoch": 0.7864788732394367, |
| "grad_norm": 0.4572728634907756, |
| "learning_rate": 4.09983291562239e-05, |
| "loss": 0.3895, |
| "step": 698 |
| }, |
| { |
| "epoch": 0.7876056338028169, |
| "grad_norm": 0.46697618002820673, |
| "learning_rate": 4.097744360902256e-05, |
| "loss": 0.4148, |
| "step": 699 |
| }, |
| { |
| "epoch": 0.7887323943661971, |
| "grad_norm": 0.45965026461144826, |
| "learning_rate": 4.095655806182122e-05, |
| "loss": 0.4156, |
| "step": 700 |
| }, |
| { |
| "epoch": 0.7898591549295775, |
| "grad_norm": 0.4093107614744624, |
| "learning_rate": 4.093567251461988e-05, |
| "loss": 0.4292, |
| "step": 701 |
| }, |
| { |
| "epoch": 0.7909859154929577, |
| "grad_norm": 0.4486831843348481, |
| "learning_rate": 4.0914786967418554e-05, |
| "loss": 0.4073, |
| "step": 702 |
| }, |
| { |
| "epoch": 0.7921126760563381, |
| "grad_norm": 0.48346726109054283, |
| "learning_rate": 4.0893901420217214e-05, |
| "loss": 0.4462, |
| "step": 703 |
| }, |
| { |
| "epoch": 0.7932394366197183, |
| "grad_norm": 0.4946289570640352, |
| "learning_rate": 4.0873015873015874e-05, |
| "loss": 0.4141, |
| "step": 704 |
| }, |
| { |
| "epoch": 0.7943661971830986, |
| "grad_norm": 0.43699923683252617, |
| "learning_rate": 4.0852130325814534e-05, |
| "loss": 0.4139, |
| "step": 705 |
| }, |
| { |
| "epoch": 0.7954929577464789, |
| "grad_norm": 0.447209015951833, |
| "learning_rate": 4.08312447786132e-05, |
| "loss": 0.4063, |
| "step": 706 |
| }, |
| { |
| "epoch": 0.7966197183098591, |
| "grad_norm": 0.41188678028975295, |
| "learning_rate": 4.081035923141187e-05, |
| "loss": 0.4195, |
| "step": 707 |
| }, |
| { |
| "epoch": 0.7977464788732395, |
| "grad_norm": 0.4360319548470077, |
| "learning_rate": 4.078947368421053e-05, |
| "loss": 0.4181, |
| "step": 708 |
| }, |
| { |
| "epoch": 0.7988732394366197, |
| "grad_norm": 0.44710924458871487, |
| "learning_rate": 4.076858813700919e-05, |
| "loss": 0.4165, |
| "step": 709 |
| }, |
| { |
| "epoch": 0.8, |
| "grad_norm": 0.35901189271573075, |
| "learning_rate": 4.0747702589807854e-05, |
| "loss": 0.4042, |
| "step": 710 |
| }, |
| { |
| "epoch": 0.8011267605633803, |
| "grad_norm": 0.37421550134735554, |
| "learning_rate": 4.072681704260652e-05, |
| "loss": 0.4133, |
| "step": 711 |
| }, |
| { |
| "epoch": 0.8022535211267605, |
| "grad_norm": 0.38518246190045313, |
| "learning_rate": 4.070593149540518e-05, |
| "loss": 0.4252, |
| "step": 712 |
| }, |
| { |
| "epoch": 0.8033802816901409, |
| "grad_norm": 0.35316967468640464, |
| "learning_rate": 4.068504594820384e-05, |
| "loss": 0.3921, |
| "step": 713 |
| }, |
| { |
| "epoch": 0.8045070422535211, |
| "grad_norm": 0.45881821126358313, |
| "learning_rate": 4.066416040100251e-05, |
| "loss": 0.4247, |
| "step": 714 |
| }, |
| { |
| "epoch": 0.8056338028169014, |
| "grad_norm": 0.35069913984167855, |
| "learning_rate": 4.0643274853801174e-05, |
| "loss": 0.4269, |
| "step": 715 |
| }, |
| { |
| "epoch": 0.8067605633802817, |
| "grad_norm": 0.3835410130576258, |
| "learning_rate": 4.0622389306599834e-05, |
| "loss": 0.3898, |
| "step": 716 |
| }, |
| { |
| "epoch": 0.8078873239436619, |
| "grad_norm": 0.4704560512938768, |
| "learning_rate": 4.0601503759398494e-05, |
| "loss": 0.4128, |
| "step": 717 |
| }, |
| { |
| "epoch": 0.8090140845070423, |
| "grad_norm": 0.36196175261639196, |
| "learning_rate": 4.058061821219716e-05, |
| "loss": 0.3914, |
| "step": 718 |
| }, |
| { |
| "epoch": 0.8101408450704225, |
| "grad_norm": 0.5037656792609231, |
| "learning_rate": 4.055973266499583e-05, |
| "loss": 0.4097, |
| "step": 719 |
| }, |
| { |
| "epoch": 0.8112676056338028, |
| "grad_norm": 0.4524163850817384, |
| "learning_rate": 4.053884711779449e-05, |
| "loss": 0.4108, |
| "step": 720 |
| }, |
| { |
| "epoch": 0.8123943661971831, |
| "grad_norm": 0.4349260980067756, |
| "learning_rate": 4.051796157059315e-05, |
| "loss": 0.4144, |
| "step": 721 |
| }, |
| { |
| "epoch": 0.8135211267605634, |
| "grad_norm": 0.5243382584421026, |
| "learning_rate": 4.0497076023391814e-05, |
| "loss": 0.4142, |
| "step": 722 |
| }, |
| { |
| "epoch": 0.8146478873239437, |
| "grad_norm": 0.391790515054285, |
| "learning_rate": 4.047619047619048e-05, |
| "loss": 0.4012, |
| "step": 723 |
| }, |
| { |
| "epoch": 0.8157746478873239, |
| "grad_norm": 0.48320887589903105, |
| "learning_rate": 4.045530492898914e-05, |
| "loss": 0.4117, |
| "step": 724 |
| }, |
| { |
| "epoch": 0.8169014084507042, |
| "grad_norm": 0.41303488238708935, |
| "learning_rate": 4.04344193817878e-05, |
| "loss": 0.3809, |
| "step": 725 |
| }, |
| { |
| "epoch": 0.8180281690140845, |
| "grad_norm": 0.4348424868731927, |
| "learning_rate": 4.041353383458647e-05, |
| "loss": 0.4766, |
| "step": 726 |
| }, |
| { |
| "epoch": 0.8191549295774648, |
| "grad_norm": 0.37121791299850376, |
| "learning_rate": 4.0392648287385135e-05, |
| "loss": 0.394, |
| "step": 727 |
| }, |
| { |
| "epoch": 0.8202816901408451, |
| "grad_norm": 0.4798401011692672, |
| "learning_rate": 4.0371762740183795e-05, |
| "loss": 0.4227, |
| "step": 728 |
| }, |
| { |
| "epoch": 0.8214084507042253, |
| "grad_norm": 0.39082215709390694, |
| "learning_rate": 4.0350877192982455e-05, |
| "loss": 0.4209, |
| "step": 729 |
| }, |
| { |
| "epoch": 0.8225352112676056, |
| "grad_norm": 0.43375941551110303, |
| "learning_rate": 4.032999164578112e-05, |
| "loss": 0.4225, |
| "step": 730 |
| }, |
| { |
| "epoch": 0.8236619718309859, |
| "grad_norm": 0.3237228830696122, |
| "learning_rate": 4.030910609857979e-05, |
| "loss": 0.3872, |
| "step": 731 |
| }, |
| { |
| "epoch": 0.8247887323943662, |
| "grad_norm": 0.34724647024342176, |
| "learning_rate": 4.028822055137845e-05, |
| "loss": 0.4032, |
| "step": 732 |
| }, |
| { |
| "epoch": 0.8259154929577465, |
| "grad_norm": 0.31407638077523553, |
| "learning_rate": 4.026733500417711e-05, |
| "loss": 0.3883, |
| "step": 733 |
| }, |
| { |
| "epoch": 0.8270422535211268, |
| "grad_norm": 0.3613395057755356, |
| "learning_rate": 4.0246449456975775e-05, |
| "loss": 0.4154, |
| "step": 734 |
| }, |
| { |
| "epoch": 0.828169014084507, |
| "grad_norm": 0.32893242131018746, |
| "learning_rate": 4.022556390977444e-05, |
| "loss": 0.425, |
| "step": 735 |
| }, |
| { |
| "epoch": 0.8292957746478873, |
| "grad_norm": 0.3969348591140389, |
| "learning_rate": 4.02046783625731e-05, |
| "loss": 0.4305, |
| "step": 736 |
| }, |
| { |
| "epoch": 0.8304225352112676, |
| "grad_norm": 0.3481060320315369, |
| "learning_rate": 4.018379281537176e-05, |
| "loss": 0.3991, |
| "step": 737 |
| }, |
| { |
| "epoch": 0.8315492957746479, |
| "grad_norm": 0.31650648249000474, |
| "learning_rate": 4.016290726817043e-05, |
| "loss": 0.4033, |
| "step": 738 |
| }, |
| { |
| "epoch": 0.8326760563380282, |
| "grad_norm": 0.3965538972614481, |
| "learning_rate": 4.0142021720969095e-05, |
| "loss": 0.3981, |
| "step": 739 |
| }, |
| { |
| "epoch": 0.8338028169014085, |
| "grad_norm": 0.3406795949884074, |
| "learning_rate": 4.0121136173767755e-05, |
| "loss": 0.3969, |
| "step": 740 |
| }, |
| { |
| "epoch": 0.8349295774647887, |
| "grad_norm": 0.3635583402480603, |
| "learning_rate": 4.0100250626566415e-05, |
| "loss": 0.4061, |
| "step": 741 |
| }, |
| { |
| "epoch": 0.836056338028169, |
| "grad_norm": 0.40519489587384744, |
| "learning_rate": 4.007936507936508e-05, |
| "loss": 0.4143, |
| "step": 742 |
| }, |
| { |
| "epoch": 0.8371830985915493, |
| "grad_norm": 0.3593266084385044, |
| "learning_rate": 4.005847953216375e-05, |
| "loss": 0.3991, |
| "step": 743 |
| }, |
| { |
| "epoch": 0.8383098591549296, |
| "grad_norm": 0.34564675037719295, |
| "learning_rate": 4.003759398496241e-05, |
| "loss": 0.4058, |
| "step": 744 |
| }, |
| { |
| "epoch": 0.8394366197183099, |
| "grad_norm": 0.3889580008045617, |
| "learning_rate": 4.001670843776107e-05, |
| "loss": 0.4357, |
| "step": 745 |
| }, |
| { |
| "epoch": 0.8405633802816901, |
| "grad_norm": 0.3076419722480114, |
| "learning_rate": 3.9995822890559735e-05, |
| "loss": 0.4215, |
| "step": 746 |
| }, |
| { |
| "epoch": 0.8416901408450704, |
| "grad_norm": 0.3953958807659964, |
| "learning_rate": 3.9974937343358395e-05, |
| "loss": 0.4282, |
| "step": 747 |
| }, |
| { |
| "epoch": 0.8428169014084507, |
| "grad_norm": 0.3510246787976059, |
| "learning_rate": 3.995405179615706e-05, |
| "loss": 0.405, |
| "step": 748 |
| }, |
| { |
| "epoch": 0.843943661971831, |
| "grad_norm": 0.3904858752741403, |
| "learning_rate": 3.993316624895572e-05, |
| "loss": 0.4017, |
| "step": 749 |
| }, |
| { |
| "epoch": 0.8450704225352113, |
| "grad_norm": 0.3466183465990686, |
| "learning_rate": 3.991228070175439e-05, |
| "loss": 0.4063, |
| "step": 750 |
| }, |
| { |
| "epoch": 0.8461971830985916, |
| "grad_norm": 0.4145982550809233, |
| "learning_rate": 3.989139515455305e-05, |
| "loss": 0.3943, |
| "step": 751 |
| }, |
| { |
| "epoch": 0.8473239436619718, |
| "grad_norm": 0.3056074309746521, |
| "learning_rate": 3.9870509607351716e-05, |
| "loss": 0.4096, |
| "step": 752 |
| }, |
| { |
| "epoch": 0.8484507042253521, |
| "grad_norm": 0.40235174127762036, |
| "learning_rate": 3.9849624060150376e-05, |
| "loss": 0.4344, |
| "step": 753 |
| }, |
| { |
| "epoch": 0.8495774647887324, |
| "grad_norm": 0.3331530224010519, |
| "learning_rate": 3.982873851294904e-05, |
| "loss": 0.4019, |
| "step": 754 |
| }, |
| { |
| "epoch": 0.8507042253521127, |
| "grad_norm": 0.3395688761807231, |
| "learning_rate": 3.98078529657477e-05, |
| "loss": 0.4223, |
| "step": 755 |
| }, |
| { |
| "epoch": 0.851830985915493, |
| "grad_norm": 0.31344411916177906, |
| "learning_rate": 3.978696741854637e-05, |
| "loss": 0.41, |
| "step": 756 |
| }, |
| { |
| "epoch": 0.8529577464788732, |
| "grad_norm": 0.31052339494796105, |
| "learning_rate": 3.976608187134503e-05, |
| "loss": 0.3863, |
| "step": 757 |
| }, |
| { |
| "epoch": 0.8540845070422535, |
| "grad_norm": 0.3505115182352671, |
| "learning_rate": 3.9745196324143696e-05, |
| "loss": 0.4043, |
| "step": 758 |
| }, |
| { |
| "epoch": 0.8552112676056338, |
| "grad_norm": 0.2989368214428119, |
| "learning_rate": 3.9724310776942356e-05, |
| "loss": 0.4027, |
| "step": 759 |
| }, |
| { |
| "epoch": 0.856338028169014, |
| "grad_norm": 0.3583425560695883, |
| "learning_rate": 3.970342522974102e-05, |
| "loss": 0.4232, |
| "step": 760 |
| }, |
| { |
| "epoch": 0.8574647887323944, |
| "grad_norm": 0.4429332714250863, |
| "learning_rate": 3.968253968253968e-05, |
| "loss": 0.4031, |
| "step": 761 |
| }, |
| { |
| "epoch": 0.8585915492957746, |
| "grad_norm": 0.34496606200669994, |
| "learning_rate": 3.966165413533835e-05, |
| "loss": 0.388, |
| "step": 762 |
| }, |
| { |
| "epoch": 0.859718309859155, |
| "grad_norm": 0.34447312913304845, |
| "learning_rate": 3.964076858813701e-05, |
| "loss": 0.4015, |
| "step": 763 |
| }, |
| { |
| "epoch": 0.8608450704225352, |
| "grad_norm": 0.4109964850785612, |
| "learning_rate": 3.9619883040935676e-05, |
| "loss": 0.399, |
| "step": 764 |
| }, |
| { |
| "epoch": 0.8619718309859155, |
| "grad_norm": 0.34614031909152393, |
| "learning_rate": 3.9598997493734336e-05, |
| "loss": 0.4203, |
| "step": 765 |
| }, |
| { |
| "epoch": 0.8630985915492958, |
| "grad_norm": 0.3786215940986347, |
| "learning_rate": 3.9578111946533e-05, |
| "loss": 0.4048, |
| "step": 766 |
| }, |
| { |
| "epoch": 0.864225352112676, |
| "grad_norm": 0.3485285199072157, |
| "learning_rate": 3.955722639933166e-05, |
| "loss": 0.3923, |
| "step": 767 |
| }, |
| { |
| "epoch": 0.8653521126760564, |
| "grad_norm": 0.32117325622732684, |
| "learning_rate": 3.953634085213033e-05, |
| "loss": 0.396, |
| "step": 768 |
| }, |
| { |
| "epoch": 0.8664788732394366, |
| "grad_norm": 0.3288721214913486, |
| "learning_rate": 3.951545530492899e-05, |
| "loss": 0.4073, |
| "step": 769 |
| }, |
| { |
| "epoch": 0.8676056338028169, |
| "grad_norm": 0.3553541184041968, |
| "learning_rate": 3.9494569757727656e-05, |
| "loss": 0.3791, |
| "step": 770 |
| }, |
| { |
| "epoch": 0.8687323943661972, |
| "grad_norm": 0.36597469077692873, |
| "learning_rate": 3.9473684210526316e-05, |
| "loss": 0.3823, |
| "step": 771 |
| }, |
| { |
| "epoch": 0.8698591549295774, |
| "grad_norm": 0.36679352760459877, |
| "learning_rate": 3.945279866332498e-05, |
| "loss": 0.3898, |
| "step": 772 |
| }, |
| { |
| "epoch": 0.8709859154929578, |
| "grad_norm": 0.3862878116970243, |
| "learning_rate": 3.943191311612364e-05, |
| "loss": 0.4099, |
| "step": 773 |
| }, |
| { |
| "epoch": 0.872112676056338, |
| "grad_norm": 0.34475847881480404, |
| "learning_rate": 3.941102756892231e-05, |
| "loss": 0.4084, |
| "step": 774 |
| }, |
| { |
| "epoch": 0.8732394366197183, |
| "grad_norm": 0.45638654676684215, |
| "learning_rate": 3.939014202172097e-05, |
| "loss": 0.3986, |
| "step": 775 |
| }, |
| { |
| "epoch": 0.8743661971830986, |
| "grad_norm": 0.35807833510998827, |
| "learning_rate": 3.9369256474519637e-05, |
| "loss": 0.4081, |
| "step": 776 |
| }, |
| { |
| "epoch": 0.8754929577464788, |
| "grad_norm": 0.5409597238956879, |
| "learning_rate": 3.9348370927318297e-05, |
| "loss": 0.4147, |
| "step": 777 |
| }, |
| { |
| "epoch": 0.8766197183098592, |
| "grad_norm": 0.3789429877677125, |
| "learning_rate": 3.932748538011696e-05, |
| "loss": 0.4195, |
| "step": 778 |
| }, |
| { |
| "epoch": 0.8777464788732394, |
| "grad_norm": 0.3709352534507574, |
| "learning_rate": 3.930659983291562e-05, |
| "loss": 0.3851, |
| "step": 779 |
| }, |
| { |
| "epoch": 0.8788732394366198, |
| "grad_norm": 0.45932668818057254, |
| "learning_rate": 3.928571428571429e-05, |
| "loss": 0.4018, |
| "step": 780 |
| }, |
| { |
| "epoch": 0.88, |
| "grad_norm": 0.3189973459204549, |
| "learning_rate": 3.926482873851295e-05, |
| "loss": 0.4321, |
| "step": 781 |
| }, |
| { |
| "epoch": 0.8811267605633802, |
| "grad_norm": 0.5214079758077962, |
| "learning_rate": 3.924394319131162e-05, |
| "loss": 0.4237, |
| "step": 782 |
| }, |
| { |
| "epoch": 0.8822535211267606, |
| "grad_norm": 0.42449649095885933, |
| "learning_rate": 3.922305764411028e-05, |
| "loss": 0.4028, |
| "step": 783 |
| }, |
| { |
| "epoch": 0.8833802816901408, |
| "grad_norm": 0.42873753000780335, |
| "learning_rate": 3.920217209690894e-05, |
| "loss": 0.3954, |
| "step": 784 |
| }, |
| { |
| "epoch": 0.8845070422535212, |
| "grad_norm": 0.49566264876473903, |
| "learning_rate": 3.9181286549707604e-05, |
| "loss": 0.3963, |
| "step": 785 |
| }, |
| { |
| "epoch": 0.8856338028169014, |
| "grad_norm": 0.2995262123704027, |
| "learning_rate": 3.916040100250627e-05, |
| "loss": 0.3963, |
| "step": 786 |
| }, |
| { |
| "epoch": 0.8867605633802816, |
| "grad_norm": 0.4903480183321258, |
| "learning_rate": 3.913951545530493e-05, |
| "loss": 0.4457, |
| "step": 787 |
| }, |
| { |
| "epoch": 0.887887323943662, |
| "grad_norm": 0.46991879985880375, |
| "learning_rate": 3.911862990810359e-05, |
| "loss": 0.427, |
| "step": 788 |
| }, |
| { |
| "epoch": 0.8890140845070422, |
| "grad_norm": 0.3134823915048358, |
| "learning_rate": 3.909774436090226e-05, |
| "loss": 0.3867, |
| "step": 789 |
| }, |
| { |
| "epoch": 0.8901408450704226, |
| "grad_norm": 0.5545602314022595, |
| "learning_rate": 3.9076858813700924e-05, |
| "loss": 0.4171, |
| "step": 790 |
| }, |
| { |
| "epoch": 0.8912676056338028, |
| "grad_norm": 0.41881667647515214, |
| "learning_rate": 3.9055973266499584e-05, |
| "loss": 0.4053, |
| "step": 791 |
| }, |
| { |
| "epoch": 0.8923943661971832, |
| "grad_norm": 0.44463249533252486, |
| "learning_rate": 3.9035087719298244e-05, |
| "loss": 0.405, |
| "step": 792 |
| }, |
| { |
| "epoch": 0.8935211267605634, |
| "grad_norm": 0.4890949744923107, |
| "learning_rate": 3.901420217209691e-05, |
| "loss": 0.4142, |
| "step": 793 |
| }, |
| { |
| "epoch": 0.8946478873239436, |
| "grad_norm": 0.38382002153711203, |
| "learning_rate": 3.899331662489558e-05, |
| "loss": 0.3991, |
| "step": 794 |
| }, |
| { |
| "epoch": 0.895774647887324, |
| "grad_norm": 0.41032789060356845, |
| "learning_rate": 3.897243107769424e-05, |
| "loss": 0.4121, |
| "step": 795 |
| }, |
| { |
| "epoch": 0.8969014084507042, |
| "grad_norm": 0.44242188192656345, |
| "learning_rate": 3.89515455304929e-05, |
| "loss": 0.412, |
| "step": 796 |
| }, |
| { |
| "epoch": 0.8980281690140846, |
| "grad_norm": 0.4999281545425411, |
| "learning_rate": 3.8930659983291564e-05, |
| "loss": 0.4168, |
| "step": 797 |
| }, |
| { |
| "epoch": 0.8991549295774648, |
| "grad_norm": 0.38598639385702344, |
| "learning_rate": 3.890977443609023e-05, |
| "loss": 0.4099, |
| "step": 798 |
| }, |
| { |
| "epoch": 0.900281690140845, |
| "grad_norm": 0.34803726788055417, |
| "learning_rate": 3.888888888888889e-05, |
| "loss": 0.4274, |
| "step": 799 |
| }, |
| { |
| "epoch": 0.9014084507042254, |
| "grad_norm": 0.3249736560218246, |
| "learning_rate": 3.886800334168755e-05, |
| "loss": 0.3987, |
| "step": 800 |
| }, |
| { |
| "epoch": 0.9025352112676056, |
| "grad_norm": 0.37525912151977125, |
| "learning_rate": 3.884711779448622e-05, |
| "loss": 0.4337, |
| "step": 801 |
| }, |
| { |
| "epoch": 0.903661971830986, |
| "grad_norm": 0.3450630535655991, |
| "learning_rate": 3.8826232247284884e-05, |
| "loss": 0.409, |
| "step": 802 |
| }, |
| { |
| "epoch": 0.9047887323943662, |
| "grad_norm": 0.41490433438630725, |
| "learning_rate": 3.8805346700083544e-05, |
| "loss": 0.4196, |
| "step": 803 |
| }, |
| { |
| "epoch": 0.9059154929577464, |
| "grad_norm": 0.3918643716520213, |
| "learning_rate": 3.8784461152882204e-05, |
| "loss": 0.4178, |
| "step": 804 |
| }, |
| { |
| "epoch": 0.9070422535211268, |
| "grad_norm": 0.3031248324523258, |
| "learning_rate": 3.876357560568087e-05, |
| "loss": 0.3882, |
| "step": 805 |
| }, |
| { |
| "epoch": 0.908169014084507, |
| "grad_norm": 0.3612974091721475, |
| "learning_rate": 3.874269005847954e-05, |
| "loss": 0.3831, |
| "step": 806 |
| }, |
| { |
| "epoch": 0.9092957746478874, |
| "grad_norm": 0.3356819962030845, |
| "learning_rate": 3.87218045112782e-05, |
| "loss": 0.4013, |
| "step": 807 |
| }, |
| { |
| "epoch": 0.9104225352112676, |
| "grad_norm": 0.34249347780367584, |
| "learning_rate": 3.870091896407686e-05, |
| "loss": 0.4033, |
| "step": 808 |
| }, |
| { |
| "epoch": 0.9115492957746479, |
| "grad_norm": 0.34376175177747587, |
| "learning_rate": 3.8680033416875525e-05, |
| "loss": 0.3801, |
| "step": 809 |
| }, |
| { |
| "epoch": 0.9126760563380282, |
| "grad_norm": 0.4014809133874393, |
| "learning_rate": 3.865914786967419e-05, |
| "loss": 0.4096, |
| "step": 810 |
| }, |
| { |
| "epoch": 0.9138028169014084, |
| "grad_norm": 0.39950046560062835, |
| "learning_rate": 3.863826232247285e-05, |
| "loss": 0.3909, |
| "step": 811 |
| }, |
| { |
| "epoch": 0.9149295774647888, |
| "grad_norm": 0.3209448071853923, |
| "learning_rate": 3.861737677527151e-05, |
| "loss": 0.3981, |
| "step": 812 |
| }, |
| { |
| "epoch": 0.916056338028169, |
| "grad_norm": 0.38532834965240903, |
| "learning_rate": 3.859649122807018e-05, |
| "loss": 0.3903, |
| "step": 813 |
| }, |
| { |
| "epoch": 0.9171830985915493, |
| "grad_norm": 0.34248793319108745, |
| "learning_rate": 3.8575605680868845e-05, |
| "loss": 0.4094, |
| "step": 814 |
| }, |
| { |
| "epoch": 0.9183098591549296, |
| "grad_norm": 0.3360813492097211, |
| "learning_rate": 3.8554720133667505e-05, |
| "loss": 0.4159, |
| "step": 815 |
| }, |
| { |
| "epoch": 0.9194366197183098, |
| "grad_norm": 0.34850592514190076, |
| "learning_rate": 3.8533834586466165e-05, |
| "loss": 0.4025, |
| "step": 816 |
| }, |
| { |
| "epoch": 0.9205633802816902, |
| "grad_norm": 0.3708447582880061, |
| "learning_rate": 3.851294903926483e-05, |
| "loss": 0.4182, |
| "step": 817 |
| }, |
| { |
| "epoch": 0.9216901408450704, |
| "grad_norm": 0.4013945982859129, |
| "learning_rate": 3.84920634920635e-05, |
| "loss": 0.4198, |
| "step": 818 |
| }, |
| { |
| "epoch": 0.9228169014084507, |
| "grad_norm": 0.35166439502151015, |
| "learning_rate": 3.847117794486216e-05, |
| "loss": 0.4127, |
| "step": 819 |
| }, |
| { |
| "epoch": 0.923943661971831, |
| "grad_norm": 0.4182281253626696, |
| "learning_rate": 3.845029239766082e-05, |
| "loss": 0.4213, |
| "step": 820 |
| }, |
| { |
| "epoch": 0.9250704225352113, |
| "grad_norm": 0.45579640979574715, |
| "learning_rate": 3.8429406850459485e-05, |
| "loss": 0.4104, |
| "step": 821 |
| }, |
| { |
| "epoch": 0.9261971830985916, |
| "grad_norm": 0.3339252060540916, |
| "learning_rate": 3.840852130325815e-05, |
| "loss": 0.4067, |
| "step": 822 |
| }, |
| { |
| "epoch": 0.9273239436619718, |
| "grad_norm": 0.4981318249725421, |
| "learning_rate": 3.838763575605681e-05, |
| "loss": 0.3988, |
| "step": 823 |
| }, |
| { |
| "epoch": 0.9284507042253521, |
| "grad_norm": 0.3792710144886669, |
| "learning_rate": 3.836675020885547e-05, |
| "loss": 0.3925, |
| "step": 824 |
| }, |
| { |
| "epoch": 0.9295774647887324, |
| "grad_norm": 0.46368311223715153, |
| "learning_rate": 3.834586466165413e-05, |
| "loss": 0.4222, |
| "step": 825 |
| }, |
| { |
| "epoch": 0.9307042253521127, |
| "grad_norm": 0.362841062246029, |
| "learning_rate": 3.8324979114452805e-05, |
| "loss": 0.3936, |
| "step": 826 |
| }, |
| { |
| "epoch": 0.931830985915493, |
| "grad_norm": 0.43378036177185536, |
| "learning_rate": 3.8304093567251465e-05, |
| "loss": 0.3861, |
| "step": 827 |
| }, |
| { |
| "epoch": 0.9329577464788732, |
| "grad_norm": 0.33713557262630695, |
| "learning_rate": 3.8283208020050125e-05, |
| "loss": 0.4063, |
| "step": 828 |
| }, |
| { |
| "epoch": 0.9340845070422535, |
| "grad_norm": 0.45656551755730873, |
| "learning_rate": 3.8262322472848785e-05, |
| "loss": 0.4202, |
| "step": 829 |
| }, |
| { |
| "epoch": 0.9352112676056338, |
| "grad_norm": 0.4351376754181724, |
| "learning_rate": 3.824143692564746e-05, |
| "loss": 0.3832, |
| "step": 830 |
| }, |
| { |
| "epoch": 0.9363380281690141, |
| "grad_norm": 0.3703220173375687, |
| "learning_rate": 3.822055137844612e-05, |
| "loss": 0.406, |
| "step": 831 |
| }, |
| { |
| "epoch": 0.9374647887323944, |
| "grad_norm": 0.44651146569953, |
| "learning_rate": 3.819966583124478e-05, |
| "loss": 0.4045, |
| "step": 832 |
| }, |
| { |
| "epoch": 0.9385915492957746, |
| "grad_norm": 0.4189212862088426, |
| "learning_rate": 3.817878028404344e-05, |
| "loss": 0.4244, |
| "step": 833 |
| }, |
| { |
| "epoch": 0.9397183098591549, |
| "grad_norm": 0.45820610740313783, |
| "learning_rate": 3.815789473684211e-05, |
| "loss": 0.4163, |
| "step": 834 |
| }, |
| { |
| "epoch": 0.9408450704225352, |
| "grad_norm": 0.380217918734662, |
| "learning_rate": 3.813700918964077e-05, |
| "loss": 0.4205, |
| "step": 835 |
| }, |
| { |
| "epoch": 0.9419718309859155, |
| "grad_norm": 0.407312064783555, |
| "learning_rate": 3.811612364243943e-05, |
| "loss": 0.3974, |
| "step": 836 |
| }, |
| { |
| "epoch": 0.9430985915492958, |
| "grad_norm": 0.35017334450149956, |
| "learning_rate": 3.809523809523809e-05, |
| "loss": 0.4092, |
| "step": 837 |
| }, |
| { |
| "epoch": 0.9442253521126761, |
| "grad_norm": 0.38343367952612833, |
| "learning_rate": 3.8074352548036766e-05, |
| "loss": 0.3923, |
| "step": 838 |
| }, |
| { |
| "epoch": 0.9453521126760563, |
| "grad_norm": 0.3889582171194483, |
| "learning_rate": 3.8053467000835426e-05, |
| "loss": 0.4173, |
| "step": 839 |
| }, |
| { |
| "epoch": 0.9464788732394366, |
| "grad_norm": 0.45980181954652527, |
| "learning_rate": 3.8032581453634086e-05, |
| "loss": 0.422, |
| "step": 840 |
| }, |
| { |
| "epoch": 0.9476056338028169, |
| "grad_norm": 0.3297731615524771, |
| "learning_rate": 3.8011695906432746e-05, |
| "loss": 0.404, |
| "step": 841 |
| }, |
| { |
| "epoch": 0.9487323943661972, |
| "grad_norm": 0.38624507916793943, |
| "learning_rate": 3.799081035923141e-05, |
| "loss": 0.4159, |
| "step": 842 |
| }, |
| { |
| "epoch": 0.9498591549295775, |
| "grad_norm": 0.32519279849185767, |
| "learning_rate": 3.796992481203008e-05, |
| "loss": 0.4072, |
| "step": 843 |
| }, |
| { |
| "epoch": 0.9509859154929577, |
| "grad_norm": 0.42925411014050585, |
| "learning_rate": 3.794903926482874e-05, |
| "loss": 0.4143, |
| "step": 844 |
| }, |
| { |
| "epoch": 0.952112676056338, |
| "grad_norm": 0.31153163118316113, |
| "learning_rate": 3.79281537176274e-05, |
| "loss": 0.4076, |
| "step": 845 |
| }, |
| { |
| "epoch": 0.9532394366197183, |
| "grad_norm": 0.43824860641584196, |
| "learning_rate": 3.7907268170426066e-05, |
| "loss": 0.3763, |
| "step": 846 |
| }, |
| { |
| "epoch": 0.9543661971830986, |
| "grad_norm": 0.31884786049993985, |
| "learning_rate": 3.788638262322473e-05, |
| "loss": 0.4044, |
| "step": 847 |
| }, |
| { |
| "epoch": 0.9554929577464789, |
| "grad_norm": 0.4107221881754472, |
| "learning_rate": 3.786549707602339e-05, |
| "loss": 0.4054, |
| "step": 848 |
| }, |
| { |
| "epoch": 0.9566197183098591, |
| "grad_norm": 0.34924994082934024, |
| "learning_rate": 3.784461152882205e-05, |
| "loss": 0.3931, |
| "step": 849 |
| }, |
| { |
| "epoch": 0.9577464788732394, |
| "grad_norm": 0.3864084211943787, |
| "learning_rate": 3.782372598162072e-05, |
| "loss": 0.4181, |
| "step": 850 |
| }, |
| { |
| "epoch": 0.9588732394366197, |
| "grad_norm": 0.31475807218831736, |
| "learning_rate": 3.7802840434419386e-05, |
| "loss": 0.3796, |
| "step": 851 |
| }, |
| { |
| "epoch": 0.96, |
| "grad_norm": 0.3679848297154997, |
| "learning_rate": 3.7781954887218046e-05, |
| "loss": 0.3829, |
| "step": 852 |
| }, |
| { |
| "epoch": 0.9611267605633803, |
| "grad_norm": 0.2816290223191142, |
| "learning_rate": 3.7761069340016706e-05, |
| "loss": 0.3892, |
| "step": 853 |
| }, |
| { |
| "epoch": 0.9622535211267605, |
| "grad_norm": 0.4296998369649018, |
| "learning_rate": 3.774018379281537e-05, |
| "loss": 0.3983, |
| "step": 854 |
| }, |
| { |
| "epoch": 0.9633802816901409, |
| "grad_norm": 0.2973109212493355, |
| "learning_rate": 3.771929824561404e-05, |
| "loss": 0.3917, |
| "step": 855 |
| }, |
| { |
| "epoch": 0.9645070422535211, |
| "grad_norm": 0.41637877482516356, |
| "learning_rate": 3.76984126984127e-05, |
| "loss": 0.4151, |
| "step": 856 |
| }, |
| { |
| "epoch": 0.9656338028169014, |
| "grad_norm": 0.4014759551588234, |
| "learning_rate": 3.767752715121136e-05, |
| "loss": 0.4026, |
| "step": 857 |
| }, |
| { |
| "epoch": 0.9667605633802817, |
| "grad_norm": 0.4896006055867873, |
| "learning_rate": 3.7656641604010026e-05, |
| "loss": 0.4078, |
| "step": 858 |
| }, |
| { |
| "epoch": 0.967887323943662, |
| "grad_norm": 0.3818461047798166, |
| "learning_rate": 3.763575605680869e-05, |
| "loss": 0.3896, |
| "step": 859 |
| }, |
| { |
| "epoch": 0.9690140845070423, |
| "grad_norm": 0.4857861818312063, |
| "learning_rate": 3.761487050960735e-05, |
| "loss": 0.4065, |
| "step": 860 |
| }, |
| { |
| "epoch": 0.9701408450704225, |
| "grad_norm": 0.3852300916675479, |
| "learning_rate": 3.759398496240601e-05, |
| "loss": 0.4376, |
| "step": 861 |
| }, |
| { |
| "epoch": 0.9712676056338028, |
| "grad_norm": 0.40146277023613025, |
| "learning_rate": 3.757309941520468e-05, |
| "loss": 0.3928, |
| "step": 862 |
| }, |
| { |
| "epoch": 0.9723943661971831, |
| "grad_norm": 0.3914507746385292, |
| "learning_rate": 3.755221386800335e-05, |
| "loss": 0.4047, |
| "step": 863 |
| }, |
| { |
| "epoch": 0.9735211267605633, |
| "grad_norm": 0.382869425291017, |
| "learning_rate": 3.753132832080201e-05, |
| "loss": 0.393, |
| "step": 864 |
| }, |
| { |
| "epoch": 0.9746478873239437, |
| "grad_norm": 0.358535369989854, |
| "learning_rate": 3.751044277360067e-05, |
| "loss": 0.447, |
| "step": 865 |
| }, |
| { |
| "epoch": 0.9757746478873239, |
| "grad_norm": 0.38266539367069063, |
| "learning_rate": 3.7489557226399333e-05, |
| "loss": 0.4214, |
| "step": 866 |
| }, |
| { |
| "epoch": 0.9769014084507043, |
| "grad_norm": 0.3417675891619715, |
| "learning_rate": 3.7468671679198e-05, |
| "loss": 0.414, |
| "step": 867 |
| }, |
| { |
| "epoch": 0.9780281690140845, |
| "grad_norm": 0.40431141770165596, |
| "learning_rate": 3.744778613199666e-05, |
| "loss": 0.3971, |
| "step": 868 |
| }, |
| { |
| "epoch": 0.9791549295774647, |
| "grad_norm": 0.3604611534772216, |
| "learning_rate": 3.742690058479532e-05, |
| "loss": 0.3979, |
| "step": 869 |
| }, |
| { |
| "epoch": 0.9802816901408451, |
| "grad_norm": 0.45047472284441686, |
| "learning_rate": 3.740601503759399e-05, |
| "loss": 0.4047, |
| "step": 870 |
| }, |
| { |
| "epoch": 0.9814084507042253, |
| "grad_norm": 0.4281416434513922, |
| "learning_rate": 3.7385129490392654e-05, |
| "loss": 0.43, |
| "step": 871 |
| }, |
| { |
| "epoch": 0.9825352112676057, |
| "grad_norm": 0.4839616963151465, |
| "learning_rate": 3.7364243943191314e-05, |
| "loss": 0.4165, |
| "step": 872 |
| }, |
| { |
| "epoch": 0.9836619718309859, |
| "grad_norm": 0.3517165408652703, |
| "learning_rate": 3.7343358395989974e-05, |
| "loss": 0.4066, |
| "step": 873 |
| }, |
| { |
| "epoch": 0.9847887323943662, |
| "grad_norm": 0.4233470102370198, |
| "learning_rate": 3.732247284878864e-05, |
| "loss": 0.4214, |
| "step": 874 |
| }, |
| { |
| "epoch": 0.9859154929577465, |
| "grad_norm": 0.38852278302910803, |
| "learning_rate": 3.730158730158731e-05, |
| "loss": 0.4253, |
| "step": 875 |
| }, |
| { |
| "epoch": 0.9870422535211267, |
| "grad_norm": 0.373573815310203, |
| "learning_rate": 3.728070175438597e-05, |
| "loss": 0.4159, |
| "step": 876 |
| }, |
| { |
| "epoch": 0.9881690140845071, |
| "grad_norm": 0.3748762280861789, |
| "learning_rate": 3.725981620718463e-05, |
| "loss": 0.4303, |
| "step": 877 |
| }, |
| { |
| "epoch": 0.9892957746478873, |
| "grad_norm": 0.4530223205377112, |
| "learning_rate": 3.7238930659983294e-05, |
| "loss": 0.4173, |
| "step": 878 |
| }, |
| { |
| "epoch": 0.9904225352112676, |
| "grad_norm": 0.32117261713447776, |
| "learning_rate": 3.721804511278196e-05, |
| "loss": 0.4012, |
| "step": 879 |
| }, |
| { |
| "epoch": 0.9915492957746479, |
| "grad_norm": 0.47606239155488134, |
| "learning_rate": 3.719715956558062e-05, |
| "loss": 0.3991, |
| "step": 880 |
| }, |
| { |
| "epoch": 0.9926760563380281, |
| "grad_norm": 0.4081652325703798, |
| "learning_rate": 3.717627401837928e-05, |
| "loss": 0.4031, |
| "step": 881 |
| }, |
| { |
| "epoch": 0.9938028169014085, |
| "grad_norm": 0.3323321538622951, |
| "learning_rate": 3.715538847117795e-05, |
| "loss": 0.4102, |
| "step": 882 |
| }, |
| { |
| "epoch": 0.9949295774647887, |
| "grad_norm": 0.45502275344767745, |
| "learning_rate": 3.713450292397661e-05, |
| "loss": 0.3854, |
| "step": 883 |
| }, |
| { |
| "epoch": 0.9960563380281691, |
| "grad_norm": 0.3425952963375361, |
| "learning_rate": 3.7113617376775274e-05, |
| "loss": 0.4216, |
| "step": 884 |
| }, |
| { |
| "epoch": 0.9971830985915493, |
| "grad_norm": 0.33252625920951534, |
| "learning_rate": 3.7092731829573934e-05, |
| "loss": 0.4233, |
| "step": 885 |
| }, |
| { |
| "epoch": 0.9983098591549295, |
| "grad_norm": 0.3060932623255807, |
| "learning_rate": 3.70718462823726e-05, |
| "loss": 0.4147, |
| "step": 886 |
| }, |
| { |
| "epoch": 0.9994366197183099, |
| "grad_norm": 0.34051331038310045, |
| "learning_rate": 3.705096073517126e-05, |
| "loss": 0.402, |
| "step": 887 |
| }, |
| { |
| "epoch": 1.0, |
| "grad_norm": 0.34051331038310045, |
| "learning_rate": 3.703007518796993e-05, |
| "loss": 0.4178, |
| "step": 888 |
| }, |
| { |
| "epoch": 1.0011267605633802, |
| "grad_norm": 0.45164933939808577, |
| "learning_rate": 3.700918964076859e-05, |
| "loss": 0.3601, |
| "step": 889 |
| }, |
| { |
| "epoch": 1.0022535211267605, |
| "grad_norm": 0.3324150998169332, |
| "learning_rate": 3.6988304093567254e-05, |
| "loss": 0.3333, |
| "step": 890 |
| }, |
| { |
| "epoch": 1.003380281690141, |
| "grad_norm": 0.37670491841487863, |
| "learning_rate": 3.6967418546365914e-05, |
| "loss": 0.3264, |
| "step": 891 |
| }, |
| { |
| "epoch": 1.0045070422535212, |
| "grad_norm": 0.35763553244923446, |
| "learning_rate": 3.694653299916458e-05, |
| "loss": 0.341, |
| "step": 892 |
| }, |
| { |
| "epoch": 1.0056338028169014, |
| "grad_norm": 0.3519446554726191, |
| "learning_rate": 3.692564745196324e-05, |
| "loss": 0.3544, |
| "step": 893 |
| }, |
| { |
| "epoch": 1.0067605633802816, |
| "grad_norm": 0.3247266894304795, |
| "learning_rate": 3.690476190476191e-05, |
| "loss": 0.3539, |
| "step": 894 |
| }, |
| { |
| "epoch": 1.0078873239436619, |
| "grad_norm": 0.39108519251934243, |
| "learning_rate": 3.688387635756057e-05, |
| "loss": 0.3298, |
| "step": 895 |
| }, |
| { |
| "epoch": 1.0090140845070423, |
| "grad_norm": 0.3865567028361942, |
| "learning_rate": 3.6862990810359235e-05, |
| "loss": 0.3355, |
| "step": 896 |
| }, |
| { |
| "epoch": 1.0101408450704226, |
| "grad_norm": 0.3681661446703723, |
| "learning_rate": 3.6842105263157895e-05, |
| "loss": 0.3686, |
| "step": 897 |
| }, |
| { |
| "epoch": 1.0112676056338028, |
| "grad_norm": 0.416773726387342, |
| "learning_rate": 3.682121971595656e-05, |
| "loss": 0.3456, |
| "step": 898 |
| }, |
| { |
| "epoch": 1.012394366197183, |
| "grad_norm": 0.38573812250069184, |
| "learning_rate": 3.680033416875522e-05, |
| "loss": 0.3264, |
| "step": 899 |
| }, |
| { |
| "epoch": 1.0135211267605633, |
| "grad_norm": 0.32759729545292754, |
| "learning_rate": 3.677944862155389e-05, |
| "loss": 0.3504, |
| "step": 900 |
| }, |
| { |
| "epoch": 1.0146478873239437, |
| "grad_norm": 0.4785684697978304, |
| "learning_rate": 3.675856307435255e-05, |
| "loss": 0.3545, |
| "step": 901 |
| }, |
| { |
| "epoch": 1.015774647887324, |
| "grad_norm": 0.4082888514252489, |
| "learning_rate": 3.6737677527151215e-05, |
| "loss": 0.3428, |
| "step": 902 |
| }, |
| { |
| "epoch": 1.0169014084507042, |
| "grad_norm": 0.44634616820631956, |
| "learning_rate": 3.6716791979949875e-05, |
| "loss": 0.337, |
| "step": 903 |
| }, |
| { |
| "epoch": 1.0180281690140844, |
| "grad_norm": 0.3283715545248377, |
| "learning_rate": 3.669590643274854e-05, |
| "loss": 0.3533, |
| "step": 904 |
| }, |
| { |
| "epoch": 1.019154929577465, |
| "grad_norm": 0.46773983776072087, |
| "learning_rate": 3.66750208855472e-05, |
| "loss": 0.3576, |
| "step": 905 |
| }, |
| { |
| "epoch": 1.0202816901408451, |
| "grad_norm": 0.32934659204004435, |
| "learning_rate": 3.665413533834587e-05, |
| "loss": 0.3521, |
| "step": 906 |
| }, |
| { |
| "epoch": 1.0214084507042254, |
| "grad_norm": 0.4097291624527684, |
| "learning_rate": 3.663324979114453e-05, |
| "loss": 0.339, |
| "step": 907 |
| }, |
| { |
| "epoch": 1.0225352112676056, |
| "grad_norm": 0.36518890704682266, |
| "learning_rate": 3.6612364243943195e-05, |
| "loss": 0.3214, |
| "step": 908 |
| }, |
| { |
| "epoch": 1.0236619718309858, |
| "grad_norm": 0.37007347409989166, |
| "learning_rate": 3.6591478696741855e-05, |
| "loss": 0.3511, |
| "step": 909 |
| }, |
| { |
| "epoch": 1.0247887323943663, |
| "grad_norm": 0.39681642474464907, |
| "learning_rate": 3.657059314954052e-05, |
| "loss": 0.3384, |
| "step": 910 |
| }, |
| { |
| "epoch": 1.0259154929577465, |
| "grad_norm": 0.40990269096333465, |
| "learning_rate": 3.654970760233918e-05, |
| "loss": 0.3646, |
| "step": 911 |
| }, |
| { |
| "epoch": 1.0270422535211268, |
| "grad_norm": 0.31420500028287357, |
| "learning_rate": 3.652882205513785e-05, |
| "loss": 0.3197, |
| "step": 912 |
| }, |
| { |
| "epoch": 1.028169014084507, |
| "grad_norm": 0.4022121307134004, |
| "learning_rate": 3.650793650793651e-05, |
| "loss": 0.3697, |
| "step": 913 |
| }, |
| { |
| "epoch": 1.0292957746478872, |
| "grad_norm": 0.3436605082848157, |
| "learning_rate": 3.6487050960735175e-05, |
| "loss": 0.3484, |
| "step": 914 |
| }, |
| { |
| "epoch": 1.0304225352112677, |
| "grad_norm": 0.3464021769519033, |
| "learning_rate": 3.6466165413533835e-05, |
| "loss": 0.3479, |
| "step": 915 |
| }, |
| { |
| "epoch": 1.031549295774648, |
| "grad_norm": 0.3138471218213921, |
| "learning_rate": 3.64452798663325e-05, |
| "loss": 0.3288, |
| "step": 916 |
| }, |
| { |
| "epoch": 1.0326760563380282, |
| "grad_norm": 0.34546317527627096, |
| "learning_rate": 3.642439431913116e-05, |
| "loss": 0.3397, |
| "step": 917 |
| }, |
| { |
| "epoch": 1.0338028169014084, |
| "grad_norm": 0.28791110851255375, |
| "learning_rate": 3.640350877192983e-05, |
| "loss": 0.337, |
| "step": 918 |
| }, |
| { |
| "epoch": 1.0349295774647886, |
| "grad_norm": 0.35902903221755456, |
| "learning_rate": 3.638262322472849e-05, |
| "loss": 0.3469, |
| "step": 919 |
| }, |
| { |
| "epoch": 1.036056338028169, |
| "grad_norm": 0.31866269461936547, |
| "learning_rate": 3.636173767752715e-05, |
| "loss": 0.3597, |
| "step": 920 |
| }, |
| { |
| "epoch": 1.0371830985915493, |
| "grad_norm": 0.29209923379950026, |
| "learning_rate": 3.6340852130325816e-05, |
| "loss": 0.3268, |
| "step": 921 |
| }, |
| { |
| "epoch": 1.0383098591549296, |
| "grad_norm": 0.311877341299532, |
| "learning_rate": 3.631996658312448e-05, |
| "loss": 0.339, |
| "step": 922 |
| }, |
| { |
| "epoch": 1.0394366197183098, |
| "grad_norm": 0.3592110411387359, |
| "learning_rate": 3.629908103592314e-05, |
| "loss": 0.3421, |
| "step": 923 |
| }, |
| { |
| "epoch": 1.04056338028169, |
| "grad_norm": 0.2874188388571024, |
| "learning_rate": 3.62781954887218e-05, |
| "loss": 0.3659, |
| "step": 924 |
| }, |
| { |
| "epoch": 1.0416901408450705, |
| "grad_norm": 0.3493121303234514, |
| "learning_rate": 3.625730994152047e-05, |
| "loss": 0.3742, |
| "step": 925 |
| }, |
| { |
| "epoch": 1.0428169014084507, |
| "grad_norm": 0.34023873349599343, |
| "learning_rate": 3.6236424394319136e-05, |
| "loss": 0.3526, |
| "step": 926 |
| }, |
| { |
| "epoch": 1.043943661971831, |
| "grad_norm": 0.30697286116775674, |
| "learning_rate": 3.6215538847117796e-05, |
| "loss": 0.3617, |
| "step": 927 |
| }, |
| { |
| "epoch": 1.0450704225352112, |
| "grad_norm": 0.3608790433993316, |
| "learning_rate": 3.6194653299916456e-05, |
| "loss": 0.333, |
| "step": 928 |
| }, |
| { |
| "epoch": 1.0461971830985914, |
| "grad_norm": 0.38038596969857236, |
| "learning_rate": 3.617376775271512e-05, |
| "loss": 0.337, |
| "step": 929 |
| }, |
| { |
| "epoch": 1.047323943661972, |
| "grad_norm": 0.31211340626839723, |
| "learning_rate": 3.615288220551379e-05, |
| "loss": 0.352, |
| "step": 930 |
| }, |
| { |
| "epoch": 1.0484507042253521, |
| "grad_norm": 0.3365065293288928, |
| "learning_rate": 3.613199665831245e-05, |
| "loss": 0.3533, |
| "step": 931 |
| }, |
| { |
| "epoch": 1.0495774647887324, |
| "grad_norm": 0.3197898561220735, |
| "learning_rate": 3.611111111111111e-05, |
| "loss": 0.3401, |
| "step": 932 |
| }, |
| { |
| "epoch": 1.0507042253521126, |
| "grad_norm": 0.30282863716780495, |
| "learning_rate": 3.6090225563909776e-05, |
| "loss": 0.3721, |
| "step": 933 |
| }, |
| { |
| "epoch": 1.051830985915493, |
| "grad_norm": 0.3817497816730595, |
| "learning_rate": 3.606934001670844e-05, |
| "loss": 0.3355, |
| "step": 934 |
| }, |
| { |
| "epoch": 1.0529577464788733, |
| "grad_norm": 0.2633891541035518, |
| "learning_rate": 3.60484544695071e-05, |
| "loss": 0.3253, |
| "step": 935 |
| }, |
| { |
| "epoch": 1.0540845070422535, |
| "grad_norm": 0.3546137378241751, |
| "learning_rate": 3.602756892230576e-05, |
| "loss": 0.3592, |
| "step": 936 |
| }, |
| { |
| "epoch": 1.0552112676056338, |
| "grad_norm": 0.3192527668437388, |
| "learning_rate": 3.600668337510443e-05, |
| "loss": 0.3628, |
| "step": 937 |
| }, |
| { |
| "epoch": 1.056338028169014, |
| "grad_norm": 0.2941649387397572, |
| "learning_rate": 3.5985797827903096e-05, |
| "loss": 0.3479, |
| "step": 938 |
| }, |
| { |
| "epoch": 1.0574647887323945, |
| "grad_norm": 0.31290100122573195, |
| "learning_rate": 3.5964912280701756e-05, |
| "loss": 0.3252, |
| "step": 939 |
| }, |
| { |
| "epoch": 1.0585915492957747, |
| "grad_norm": 0.3320108281653657, |
| "learning_rate": 3.5944026733500416e-05, |
| "loss": 0.3572, |
| "step": 940 |
| }, |
| { |
| "epoch": 1.059718309859155, |
| "grad_norm": 0.27602331372293537, |
| "learning_rate": 3.592314118629908e-05, |
| "loss": 0.3621, |
| "step": 941 |
| }, |
| { |
| "epoch": 1.0608450704225352, |
| "grad_norm": 0.36796858266701027, |
| "learning_rate": 3.590225563909775e-05, |
| "loss": 0.3487, |
| "step": 942 |
| }, |
| { |
| "epoch": 1.0619718309859154, |
| "grad_norm": 0.36233861800684203, |
| "learning_rate": 3.588137009189641e-05, |
| "loss": 0.3419, |
| "step": 943 |
| }, |
| { |
| "epoch": 1.0630985915492959, |
| "grad_norm": 0.32452120940977225, |
| "learning_rate": 3.586048454469507e-05, |
| "loss": 0.3267, |
| "step": 944 |
| }, |
| { |
| "epoch": 1.064225352112676, |
| "grad_norm": 0.39936202937932747, |
| "learning_rate": 3.583959899749374e-05, |
| "loss": 0.3688, |
| "step": 945 |
| }, |
| { |
| "epoch": 1.0653521126760563, |
| "grad_norm": 0.32428851970148864, |
| "learning_rate": 3.5818713450292403e-05, |
| "loss": 0.356, |
| "step": 946 |
| }, |
| { |
| "epoch": 1.0664788732394366, |
| "grad_norm": 0.38698310508892947, |
| "learning_rate": 3.5797827903091063e-05, |
| "loss": 0.3671, |
| "step": 947 |
| }, |
| { |
| "epoch": 1.0676056338028168, |
| "grad_norm": 0.3994091204255308, |
| "learning_rate": 3.5776942355889723e-05, |
| "loss": 0.3308, |
| "step": 948 |
| }, |
| { |
| "epoch": 1.0687323943661973, |
| "grad_norm": 0.3570173882044036, |
| "learning_rate": 3.575605680868839e-05, |
| "loss": 0.3278, |
| "step": 949 |
| }, |
| { |
| "epoch": 1.0698591549295775, |
| "grad_norm": 0.3984032734080762, |
| "learning_rate": 3.573517126148706e-05, |
| "loss": 0.3364, |
| "step": 950 |
| }, |
| { |
| "epoch": 1.0709859154929577, |
| "grad_norm": 0.41969619013730747, |
| "learning_rate": 3.571428571428572e-05, |
| "loss": 0.352, |
| "step": 951 |
| }, |
| { |
| "epoch": 1.072112676056338, |
| "grad_norm": 0.3103306898949888, |
| "learning_rate": 3.569340016708438e-05, |
| "loss": 0.3655, |
| "step": 952 |
| }, |
| { |
| "epoch": 1.0732394366197182, |
| "grad_norm": 0.4202159085269893, |
| "learning_rate": 3.5672514619883044e-05, |
| "loss": 0.3619, |
| "step": 953 |
| }, |
| { |
| "epoch": 1.0743661971830987, |
| "grad_norm": 0.3390271655464648, |
| "learning_rate": 3.565162907268171e-05, |
| "loss": 0.3374, |
| "step": 954 |
| }, |
| { |
| "epoch": 1.075492957746479, |
| "grad_norm": 0.3979837332740325, |
| "learning_rate": 3.563074352548037e-05, |
| "loss": 0.3422, |
| "step": 955 |
| }, |
| { |
| "epoch": 1.0766197183098591, |
| "grad_norm": 0.34921767225311867, |
| "learning_rate": 3.560985797827903e-05, |
| "loss": 0.3291, |
| "step": 956 |
| }, |
| { |
| "epoch": 1.0777464788732394, |
| "grad_norm": 0.35651371731998577, |
| "learning_rate": 3.55889724310777e-05, |
| "loss": 0.3386, |
| "step": 957 |
| }, |
| { |
| "epoch": 1.0788732394366196, |
| "grad_norm": 0.288890974136275, |
| "learning_rate": 3.5568086883876364e-05, |
| "loss": 0.3284, |
| "step": 958 |
| }, |
| { |
| "epoch": 1.08, |
| "grad_norm": 0.3038814223753594, |
| "learning_rate": 3.5547201336675024e-05, |
| "loss": 0.3581, |
| "step": 959 |
| }, |
| { |
| "epoch": 1.0811267605633803, |
| "grad_norm": 0.3764557737078917, |
| "learning_rate": 3.5526315789473684e-05, |
| "loss": 0.3523, |
| "step": 960 |
| }, |
| { |
| "epoch": 1.0822535211267605, |
| "grad_norm": 0.32601737650410584, |
| "learning_rate": 3.5505430242272344e-05, |
| "loss": 0.3638, |
| "step": 961 |
| }, |
| { |
| "epoch": 1.0833802816901408, |
| "grad_norm": 0.40463015180513584, |
| "learning_rate": 3.548454469507102e-05, |
| "loss": 0.3682, |
| "step": 962 |
| }, |
| { |
| "epoch": 1.084507042253521, |
| "grad_norm": 0.27853954158074656, |
| "learning_rate": 3.546365914786968e-05, |
| "loss": 0.3341, |
| "step": 963 |
| }, |
| { |
| "epoch": 1.0856338028169015, |
| "grad_norm": 0.4034672865399672, |
| "learning_rate": 3.544277360066834e-05, |
| "loss": 0.3625, |
| "step": 964 |
| }, |
| { |
| "epoch": 1.0867605633802817, |
| "grad_norm": 0.2958763421888676, |
| "learning_rate": 3.5421888053467e-05, |
| "loss": 0.3645, |
| "step": 965 |
| }, |
| { |
| "epoch": 1.087887323943662, |
| "grad_norm": 0.3589284532436751, |
| "learning_rate": 3.540100250626567e-05, |
| "loss": 0.3535, |
| "step": 966 |
| }, |
| { |
| "epoch": 1.0890140845070422, |
| "grad_norm": 0.38048224390427376, |
| "learning_rate": 3.538011695906433e-05, |
| "loss": 0.3306, |
| "step": 967 |
| }, |
| { |
| "epoch": 1.0901408450704226, |
| "grad_norm": 0.34312664242838625, |
| "learning_rate": 3.535923141186299e-05, |
| "loss": 0.3486, |
| "step": 968 |
| }, |
| { |
| "epoch": 1.0912676056338029, |
| "grad_norm": 0.371785849240145, |
| "learning_rate": 3.533834586466165e-05, |
| "loss": 0.3468, |
| "step": 969 |
| }, |
| { |
| "epoch": 1.092394366197183, |
| "grad_norm": 0.40756311653422916, |
| "learning_rate": 3.5317460317460324e-05, |
| "loss": 0.3481, |
| "step": 970 |
| }, |
| { |
| "epoch": 1.0935211267605633, |
| "grad_norm": 0.3644784761338962, |
| "learning_rate": 3.5296574770258984e-05, |
| "loss": 0.3556, |
| "step": 971 |
| }, |
| { |
| "epoch": 1.0946478873239436, |
| "grad_norm": 0.36577915822034535, |
| "learning_rate": 3.5275689223057644e-05, |
| "loss": 0.3482, |
| "step": 972 |
| }, |
| { |
| "epoch": 1.095774647887324, |
| "grad_norm": 0.35163451346027164, |
| "learning_rate": 3.5254803675856304e-05, |
| "loss": 0.3384, |
| "step": 973 |
| }, |
| { |
| "epoch": 1.0969014084507043, |
| "grad_norm": 0.33307728406450554, |
| "learning_rate": 3.523391812865498e-05, |
| "loss": 0.3325, |
| "step": 974 |
| }, |
| { |
| "epoch": 1.0980281690140845, |
| "grad_norm": 0.3623939639719022, |
| "learning_rate": 3.521303258145364e-05, |
| "loss": 0.3346, |
| "step": 975 |
| }, |
| { |
| "epoch": 1.0991549295774647, |
| "grad_norm": 0.3236939339415892, |
| "learning_rate": 3.51921470342523e-05, |
| "loss": 0.335, |
| "step": 976 |
| }, |
| { |
| "epoch": 1.100281690140845, |
| "grad_norm": 0.30628231696632063, |
| "learning_rate": 3.517126148705096e-05, |
| "loss": 0.3704, |
| "step": 977 |
| }, |
| { |
| "epoch": 1.1014084507042254, |
| "grad_norm": 0.3438025646505993, |
| "learning_rate": 3.5150375939849625e-05, |
| "loss": 0.3474, |
| "step": 978 |
| }, |
| { |
| "epoch": 1.1025352112676057, |
| "grad_norm": 0.31351680352585487, |
| "learning_rate": 3.512949039264829e-05, |
| "loss": 0.3275, |
| "step": 979 |
| }, |
| { |
| "epoch": 1.103661971830986, |
| "grad_norm": 0.3152564819386366, |
| "learning_rate": 3.510860484544695e-05, |
| "loss": 0.3448, |
| "step": 980 |
| }, |
| { |
| "epoch": 1.1047887323943661, |
| "grad_norm": 0.30276251407844496, |
| "learning_rate": 3.508771929824561e-05, |
| "loss": 0.3397, |
| "step": 981 |
| }, |
| { |
| "epoch": 1.1059154929577464, |
| "grad_norm": 0.33042618325454126, |
| "learning_rate": 3.506683375104428e-05, |
| "loss": 0.3323, |
| "step": 982 |
| }, |
| { |
| "epoch": 1.1070422535211268, |
| "grad_norm": 0.32059827191068474, |
| "learning_rate": 3.5045948203842945e-05, |
| "loss": 0.3745, |
| "step": 983 |
| }, |
| { |
| "epoch": 1.108169014084507, |
| "grad_norm": 0.3571444063980653, |
| "learning_rate": 3.5025062656641605e-05, |
| "loss": 0.3448, |
| "step": 984 |
| }, |
| { |
| "epoch": 1.1092957746478873, |
| "grad_norm": 0.30726538847486445, |
| "learning_rate": 3.5004177109440265e-05, |
| "loss": 0.3346, |
| "step": 985 |
| }, |
| { |
| "epoch": 1.1104225352112675, |
| "grad_norm": 0.3626977765380658, |
| "learning_rate": 3.498329156223893e-05, |
| "loss": 0.3387, |
| "step": 986 |
| }, |
| { |
| "epoch": 1.111549295774648, |
| "grad_norm": 0.3393776360689493, |
| "learning_rate": 3.49624060150376e-05, |
| "loss": 0.3276, |
| "step": 987 |
| }, |
| { |
| "epoch": 1.1126760563380282, |
| "grad_norm": 0.2927132796531137, |
| "learning_rate": 3.494152046783626e-05, |
| "loss": 0.3182, |
| "step": 988 |
| }, |
| { |
| "epoch": 1.1138028169014085, |
| "grad_norm": 0.362124017997493, |
| "learning_rate": 3.492063492063492e-05, |
| "loss": 0.3504, |
| "step": 989 |
| }, |
| { |
| "epoch": 1.1149295774647887, |
| "grad_norm": 0.31528677471051464, |
| "learning_rate": 3.4899749373433585e-05, |
| "loss": 0.3269, |
| "step": 990 |
| }, |
| { |
| "epoch": 1.116056338028169, |
| "grad_norm": 0.3118000729629574, |
| "learning_rate": 3.487886382623225e-05, |
| "loss": 0.3547, |
| "step": 991 |
| }, |
| { |
| "epoch": 1.1171830985915494, |
| "grad_norm": 0.3687927756216515, |
| "learning_rate": 3.485797827903091e-05, |
| "loss": 0.3452, |
| "step": 992 |
| }, |
| { |
| "epoch": 1.1183098591549296, |
| "grad_norm": 0.31161259789155016, |
| "learning_rate": 3.483709273182957e-05, |
| "loss": 0.3521, |
| "step": 993 |
| }, |
| { |
| "epoch": 1.1194366197183099, |
| "grad_norm": 0.3234835784243064, |
| "learning_rate": 3.481620718462824e-05, |
| "loss": 0.3464, |
| "step": 994 |
| }, |
| { |
| "epoch": 1.1205633802816901, |
| "grad_norm": 0.3088864882657699, |
| "learning_rate": 3.4795321637426905e-05, |
| "loss": 0.3542, |
| "step": 995 |
| }, |
| { |
| "epoch": 1.1216901408450703, |
| "grad_norm": 0.25277381440828456, |
| "learning_rate": 3.4774436090225565e-05, |
| "loss": 0.3619, |
| "step": 996 |
| }, |
| { |
| "epoch": 1.1228169014084508, |
| "grad_norm": 0.3160344031005969, |
| "learning_rate": 3.4753550543024225e-05, |
| "loss": 0.3474, |
| "step": 997 |
| }, |
| { |
| "epoch": 1.123943661971831, |
| "grad_norm": 0.301588939897409, |
| "learning_rate": 3.473266499582289e-05, |
| "loss": 0.3503, |
| "step": 998 |
| }, |
| { |
| "epoch": 1.1250704225352113, |
| "grad_norm": 0.28935381172209435, |
| "learning_rate": 3.471177944862156e-05, |
| "loss": 0.3575, |
| "step": 999 |
| }, |
| { |
| "epoch": 1.1261971830985915, |
| "grad_norm": 0.31640598782618606, |
| "learning_rate": 3.469089390142022e-05, |
| "loss": 0.3503, |
| "step": 1000 |
| }, |
| { |
| "epoch": 1.1273239436619718, |
| "grad_norm": 0.3002177232042162, |
| "learning_rate": 3.467000835421888e-05, |
| "loss": 0.3347, |
| "step": 1001 |
| }, |
| { |
| "epoch": 1.1284507042253522, |
| "grad_norm": 0.3179693477403671, |
| "learning_rate": 3.4649122807017546e-05, |
| "loss": 0.352, |
| "step": 1002 |
| }, |
| { |
| "epoch": 1.1295774647887324, |
| "grad_norm": 0.5729433235749711, |
| "learning_rate": 3.462823725981621e-05, |
| "loss": 0.344, |
| "step": 1003 |
| }, |
| { |
| "epoch": 1.1307042253521127, |
| "grad_norm": 0.3239148096356237, |
| "learning_rate": 3.460735171261487e-05, |
| "loss": 0.3406, |
| "step": 1004 |
| }, |
| { |
| "epoch": 1.131830985915493, |
| "grad_norm": 0.36993062213931016, |
| "learning_rate": 3.458646616541353e-05, |
| "loss": 0.3519, |
| "step": 1005 |
| }, |
| { |
| "epoch": 1.1329577464788732, |
| "grad_norm": 0.36413925386071816, |
| "learning_rate": 3.45655806182122e-05, |
| "loss": 0.3613, |
| "step": 1006 |
| }, |
| { |
| "epoch": 1.1340845070422536, |
| "grad_norm": 0.29549142844737325, |
| "learning_rate": 3.4544695071010866e-05, |
| "loss": 0.3378, |
| "step": 1007 |
| }, |
| { |
| "epoch": 1.1352112676056338, |
| "grad_norm": 0.3460169384070374, |
| "learning_rate": 3.4523809523809526e-05, |
| "loss": 0.3584, |
| "step": 1008 |
| }, |
| { |
| "epoch": 1.136338028169014, |
| "grad_norm": 0.34099155583527796, |
| "learning_rate": 3.4502923976608186e-05, |
| "loss": 0.3475, |
| "step": 1009 |
| }, |
| { |
| "epoch": 1.1374647887323943, |
| "grad_norm": 0.2889062780758684, |
| "learning_rate": 3.448203842940685e-05, |
| "loss": 0.35, |
| "step": 1010 |
| }, |
| { |
| "epoch": 1.1385915492957746, |
| "grad_norm": 0.3352673980415479, |
| "learning_rate": 3.446115288220552e-05, |
| "loss": 0.3297, |
| "step": 1011 |
| }, |
| { |
| "epoch": 1.139718309859155, |
| "grad_norm": 0.32690036991244015, |
| "learning_rate": 3.444026733500418e-05, |
| "loss": 0.3415, |
| "step": 1012 |
| }, |
| { |
| "epoch": 1.1408450704225352, |
| "grad_norm": 0.3570171231337064, |
| "learning_rate": 3.441938178780284e-05, |
| "loss": 0.3472, |
| "step": 1013 |
| }, |
| { |
| "epoch": 1.1419718309859155, |
| "grad_norm": 0.3001542919735517, |
| "learning_rate": 3.4398496240601506e-05, |
| "loss": 0.3363, |
| "step": 1014 |
| }, |
| { |
| "epoch": 1.1430985915492957, |
| "grad_norm": 0.332950416261565, |
| "learning_rate": 3.437761069340017e-05, |
| "loss": 0.3411, |
| "step": 1015 |
| }, |
| { |
| "epoch": 1.144225352112676, |
| "grad_norm": 0.34943372991524113, |
| "learning_rate": 3.435672514619883e-05, |
| "loss": 0.3434, |
| "step": 1016 |
| }, |
| { |
| "epoch": 1.1453521126760564, |
| "grad_norm": 0.4687340030778913, |
| "learning_rate": 3.433583959899749e-05, |
| "loss": 0.3199, |
| "step": 1017 |
| }, |
| { |
| "epoch": 1.1464788732394366, |
| "grad_norm": 0.32490609856126923, |
| "learning_rate": 3.431495405179616e-05, |
| "loss": 0.3498, |
| "step": 1018 |
| }, |
| { |
| "epoch": 1.1476056338028169, |
| "grad_norm": 0.311611088242007, |
| "learning_rate": 3.429406850459482e-05, |
| "loss": 0.3575, |
| "step": 1019 |
| }, |
| { |
| "epoch": 1.1487323943661971, |
| "grad_norm": 0.2990062423503099, |
| "learning_rate": 3.4273182957393486e-05, |
| "loss": 0.3373, |
| "step": 1020 |
| }, |
| { |
| "epoch": 1.1498591549295774, |
| "grad_norm": 0.38353184002896906, |
| "learning_rate": 3.4252297410192146e-05, |
| "loss": 0.3483, |
| "step": 1021 |
| }, |
| { |
| "epoch": 1.1509859154929578, |
| "grad_norm": 0.34451561538155556, |
| "learning_rate": 3.423141186299081e-05, |
| "loss": 0.3719, |
| "step": 1022 |
| }, |
| { |
| "epoch": 1.152112676056338, |
| "grad_norm": 0.3358121138275392, |
| "learning_rate": 3.421052631578947e-05, |
| "loss": 0.3362, |
| "step": 1023 |
| }, |
| { |
| "epoch": 1.1532394366197183, |
| "grad_norm": 0.34065054887892876, |
| "learning_rate": 3.418964076858814e-05, |
| "loss": 0.3565, |
| "step": 1024 |
| }, |
| { |
| "epoch": 1.1543661971830985, |
| "grad_norm": 0.27997379236251685, |
| "learning_rate": 3.41687552213868e-05, |
| "loss": 0.3403, |
| "step": 1025 |
| }, |
| { |
| "epoch": 1.1554929577464788, |
| "grad_norm": 0.30141207345682136, |
| "learning_rate": 3.414786967418547e-05, |
| "loss": 0.3471, |
| "step": 1026 |
| }, |
| { |
| "epoch": 1.1566197183098592, |
| "grad_norm": 0.337034300756015, |
| "learning_rate": 3.412698412698413e-05, |
| "loss": 0.3615, |
| "step": 1027 |
| }, |
| { |
| "epoch": 1.1577464788732394, |
| "grad_norm": 0.30674246250268733, |
| "learning_rate": 3.4106098579782793e-05, |
| "loss": 0.3462, |
| "step": 1028 |
| }, |
| { |
| "epoch": 1.1588732394366197, |
| "grad_norm": 0.3173684428076217, |
| "learning_rate": 3.4085213032581453e-05, |
| "loss": 0.3516, |
| "step": 1029 |
| }, |
| { |
| "epoch": 1.16, |
| "grad_norm": 0.37769826101860116, |
| "learning_rate": 3.406432748538012e-05, |
| "loss": 0.3524, |
| "step": 1030 |
| }, |
| { |
| "epoch": 1.1611267605633804, |
| "grad_norm": 0.28073869279700486, |
| "learning_rate": 3.404344193817878e-05, |
| "loss": 0.3531, |
| "step": 1031 |
| }, |
| { |
| "epoch": 1.1622535211267606, |
| "grad_norm": 0.30120610684344723, |
| "learning_rate": 3.402255639097745e-05, |
| "loss": 0.3446, |
| "step": 1032 |
| }, |
| { |
| "epoch": 1.1633802816901408, |
| "grad_norm": 0.35522297671980546, |
| "learning_rate": 3.400167084377611e-05, |
| "loss": 0.3458, |
| "step": 1033 |
| }, |
| { |
| "epoch": 1.164507042253521, |
| "grad_norm": 0.2838997521983254, |
| "learning_rate": 3.3980785296574774e-05, |
| "loss": 0.371, |
| "step": 1034 |
| }, |
| { |
| "epoch": 1.1656338028169013, |
| "grad_norm": 0.3611036407312474, |
| "learning_rate": 3.3959899749373434e-05, |
| "loss": 0.334, |
| "step": 1035 |
| }, |
| { |
| "epoch": 1.1667605633802818, |
| "grad_norm": 0.2950730591608434, |
| "learning_rate": 3.39390142021721e-05, |
| "loss": 0.3434, |
| "step": 1036 |
| }, |
| { |
| "epoch": 1.167887323943662, |
| "grad_norm": 0.28270863416618186, |
| "learning_rate": 3.391812865497076e-05, |
| "loss": 0.35, |
| "step": 1037 |
| }, |
| { |
| "epoch": 1.1690140845070423, |
| "grad_norm": 0.3661793145277532, |
| "learning_rate": 3.389724310776943e-05, |
| "loss": 0.3614, |
| "step": 1038 |
| }, |
| { |
| "epoch": 1.1701408450704225, |
| "grad_norm": 0.31362783654640425, |
| "learning_rate": 3.387635756056809e-05, |
| "loss": 0.3499, |
| "step": 1039 |
| }, |
| { |
| "epoch": 1.1712676056338027, |
| "grad_norm": 0.2971686710050231, |
| "learning_rate": 3.3855472013366754e-05, |
| "loss": 0.3569, |
| "step": 1040 |
| }, |
| { |
| "epoch": 1.1723943661971832, |
| "grad_norm": 0.352731153270978, |
| "learning_rate": 3.3834586466165414e-05, |
| "loss": 0.3524, |
| "step": 1041 |
| }, |
| { |
| "epoch": 1.1735211267605634, |
| "grad_norm": 0.2936538359382206, |
| "learning_rate": 3.381370091896408e-05, |
| "loss": 0.3376, |
| "step": 1042 |
| }, |
| { |
| "epoch": 1.1746478873239437, |
| "grad_norm": 0.3330092908628216, |
| "learning_rate": 3.379281537176274e-05, |
| "loss": 0.3464, |
| "step": 1043 |
| }, |
| { |
| "epoch": 1.1757746478873239, |
| "grad_norm": 0.30245046256286734, |
| "learning_rate": 3.377192982456141e-05, |
| "loss": 0.3455, |
| "step": 1044 |
| }, |
| { |
| "epoch": 1.1769014084507043, |
| "grad_norm": 0.3760828172052888, |
| "learning_rate": 3.375104427736007e-05, |
| "loss": 0.3364, |
| "step": 1045 |
| }, |
| { |
| "epoch": 1.1780281690140846, |
| "grad_norm": 0.30918561156039337, |
| "learning_rate": 3.3730158730158734e-05, |
| "loss": 0.339, |
| "step": 1046 |
| }, |
| { |
| "epoch": 1.1791549295774648, |
| "grad_norm": 0.334820663107568, |
| "learning_rate": 3.3709273182957394e-05, |
| "loss": 0.3452, |
| "step": 1047 |
| }, |
| { |
| "epoch": 1.180281690140845, |
| "grad_norm": 0.3235665593969472, |
| "learning_rate": 3.368838763575606e-05, |
| "loss": 0.353, |
| "step": 1048 |
| }, |
| { |
| "epoch": 1.1814084507042253, |
| "grad_norm": 0.3683834175550552, |
| "learning_rate": 3.366750208855472e-05, |
| "loss": 0.353, |
| "step": 1049 |
| }, |
| { |
| "epoch": 1.1825352112676057, |
| "grad_norm": 0.30202094720462735, |
| "learning_rate": 3.364661654135339e-05, |
| "loss": 0.3319, |
| "step": 1050 |
| }, |
| { |
| "epoch": 1.183661971830986, |
| "grad_norm": 0.3240517238787127, |
| "learning_rate": 3.362573099415205e-05, |
| "loss": 0.3328, |
| "step": 1051 |
| }, |
| { |
| "epoch": 1.1847887323943662, |
| "grad_norm": 0.35263531537635145, |
| "learning_rate": 3.3604845446950714e-05, |
| "loss": 0.3475, |
| "step": 1052 |
| }, |
| { |
| "epoch": 1.1859154929577465, |
| "grad_norm": 0.30604588229024526, |
| "learning_rate": 3.3583959899749374e-05, |
| "loss": 0.334, |
| "step": 1053 |
| }, |
| { |
| "epoch": 1.1870422535211267, |
| "grad_norm": 0.3464586434956338, |
| "learning_rate": 3.356307435254804e-05, |
| "loss": 0.3412, |
| "step": 1054 |
| }, |
| { |
| "epoch": 1.1881690140845071, |
| "grad_norm": 0.32135691756390916, |
| "learning_rate": 3.35421888053467e-05, |
| "loss": 0.361, |
| "step": 1055 |
| }, |
| { |
| "epoch": 1.1892957746478874, |
| "grad_norm": 0.28987551299344433, |
| "learning_rate": 3.352130325814536e-05, |
| "loss": 0.3351, |
| "step": 1056 |
| }, |
| { |
| "epoch": 1.1904225352112676, |
| "grad_norm": 0.3919076009259972, |
| "learning_rate": 3.350041771094403e-05, |
| "loss": 0.3589, |
| "step": 1057 |
| }, |
| { |
| "epoch": 1.1915492957746479, |
| "grad_norm": 0.275569997096992, |
| "learning_rate": 3.3479532163742695e-05, |
| "loss": 0.3489, |
| "step": 1058 |
| }, |
| { |
| "epoch": 1.192676056338028, |
| "grad_norm": 0.3382553487679515, |
| "learning_rate": 3.3458646616541355e-05, |
| "loss": 0.3549, |
| "step": 1059 |
| }, |
| { |
| "epoch": 1.1938028169014085, |
| "grad_norm": 0.33618071363176094, |
| "learning_rate": 3.3437761069340015e-05, |
| "loss": 0.3319, |
| "step": 1060 |
| }, |
| { |
| "epoch": 1.1949295774647888, |
| "grad_norm": 0.3035047482300514, |
| "learning_rate": 3.341687552213868e-05, |
| "loss": 0.3385, |
| "step": 1061 |
| }, |
| { |
| "epoch": 1.196056338028169, |
| "grad_norm": 0.27465118295033303, |
| "learning_rate": 3.339598997493735e-05, |
| "loss": 0.3356, |
| "step": 1062 |
| }, |
| { |
| "epoch": 1.1971830985915493, |
| "grad_norm": 0.3427740400251671, |
| "learning_rate": 3.337510442773601e-05, |
| "loss": 0.3359, |
| "step": 1063 |
| }, |
| { |
| "epoch": 1.1983098591549295, |
| "grad_norm": 0.3228672170869719, |
| "learning_rate": 3.335421888053467e-05, |
| "loss": 0.344, |
| "step": 1064 |
| }, |
| { |
| "epoch": 1.19943661971831, |
| "grad_norm": 0.3162251658775707, |
| "learning_rate": 3.3333333333333335e-05, |
| "loss": 0.3395, |
| "step": 1065 |
| }, |
| { |
| "epoch": 1.2005633802816902, |
| "grad_norm": 0.36784522815336534, |
| "learning_rate": 3.3312447786132e-05, |
| "loss": 0.3539, |
| "step": 1066 |
| }, |
| { |
| "epoch": 1.2016901408450704, |
| "grad_norm": 0.3498666413543401, |
| "learning_rate": 3.329156223893066e-05, |
| "loss": 0.3411, |
| "step": 1067 |
| }, |
| { |
| "epoch": 1.2028169014084507, |
| "grad_norm": 0.40603387109599365, |
| "learning_rate": 3.327067669172932e-05, |
| "loss": 0.339, |
| "step": 1068 |
| }, |
| { |
| "epoch": 1.203943661971831, |
| "grad_norm": 0.28466237503238295, |
| "learning_rate": 3.324979114452799e-05, |
| "loss": 0.3374, |
| "step": 1069 |
| }, |
| { |
| "epoch": 1.2050704225352113, |
| "grad_norm": 0.43643291002776546, |
| "learning_rate": 3.3228905597326655e-05, |
| "loss": 0.3421, |
| "step": 1070 |
| }, |
| { |
| "epoch": 1.2061971830985916, |
| "grad_norm": 0.3209330822309557, |
| "learning_rate": 3.3208020050125315e-05, |
| "loss": 0.3263, |
| "step": 1071 |
| }, |
| { |
| "epoch": 1.2073239436619718, |
| "grad_norm": 0.3170979690800217, |
| "learning_rate": 3.3187134502923975e-05, |
| "loss": 0.349, |
| "step": 1072 |
| }, |
| { |
| "epoch": 1.208450704225352, |
| "grad_norm": 0.29620537314071277, |
| "learning_rate": 3.316624895572264e-05, |
| "loss": 0.3415, |
| "step": 1073 |
| }, |
| { |
| "epoch": 1.2095774647887323, |
| "grad_norm": 0.2814235048086694, |
| "learning_rate": 3.314536340852131e-05, |
| "loss": 0.3533, |
| "step": 1074 |
| }, |
| { |
| "epoch": 1.2107042253521128, |
| "grad_norm": 0.30429985408344573, |
| "learning_rate": 3.312447786131997e-05, |
| "loss": 0.3403, |
| "step": 1075 |
| }, |
| { |
| "epoch": 1.211830985915493, |
| "grad_norm": 0.2776747142048821, |
| "learning_rate": 3.310359231411863e-05, |
| "loss": 0.3489, |
| "step": 1076 |
| }, |
| { |
| "epoch": 1.2129577464788732, |
| "grad_norm": 0.3117922804791841, |
| "learning_rate": 3.3082706766917295e-05, |
| "loss": 0.3589, |
| "step": 1077 |
| }, |
| { |
| "epoch": 1.2140845070422535, |
| "grad_norm": 0.28206225975246074, |
| "learning_rate": 3.306182121971596e-05, |
| "loss": 0.3219, |
| "step": 1078 |
| }, |
| { |
| "epoch": 1.2152112676056337, |
| "grad_norm": 0.3339551879612691, |
| "learning_rate": 3.304093567251462e-05, |
| "loss": 0.3487, |
| "step": 1079 |
| }, |
| { |
| "epoch": 1.2163380281690142, |
| "grad_norm": 0.2470821047506818, |
| "learning_rate": 3.302005012531328e-05, |
| "loss": 0.3293, |
| "step": 1080 |
| }, |
| { |
| "epoch": 1.2174647887323944, |
| "grad_norm": 0.31331363910902593, |
| "learning_rate": 3.299916457811195e-05, |
| "loss": 0.3384, |
| "step": 1081 |
| }, |
| { |
| "epoch": 1.2185915492957746, |
| "grad_norm": 0.3173999509165457, |
| "learning_rate": 3.2978279030910616e-05, |
| "loss": 0.3519, |
| "step": 1082 |
| }, |
| { |
| "epoch": 1.2197183098591549, |
| "grad_norm": 0.3472442907380024, |
| "learning_rate": 3.2957393483709276e-05, |
| "loss": 0.3453, |
| "step": 1083 |
| }, |
| { |
| "epoch": 1.220845070422535, |
| "grad_norm": 0.31667306436662185, |
| "learning_rate": 3.2936507936507936e-05, |
| "loss": 0.3632, |
| "step": 1084 |
| }, |
| { |
| "epoch": 1.2219718309859156, |
| "grad_norm": 0.3633369367271794, |
| "learning_rate": 3.29156223893066e-05, |
| "loss": 0.3346, |
| "step": 1085 |
| }, |
| { |
| "epoch": 1.2230985915492958, |
| "grad_norm": 0.2735176852696013, |
| "learning_rate": 3.289473684210527e-05, |
| "loss": 0.3561, |
| "step": 1086 |
| }, |
| { |
| "epoch": 1.224225352112676, |
| "grad_norm": 0.3880089164415539, |
| "learning_rate": 3.287385129490393e-05, |
| "loss": 0.3209, |
| "step": 1087 |
| }, |
| { |
| "epoch": 1.2253521126760563, |
| "grad_norm": 0.26906175661116216, |
| "learning_rate": 3.285296574770259e-05, |
| "loss": 0.3465, |
| "step": 1088 |
| }, |
| { |
| "epoch": 1.2264788732394365, |
| "grad_norm": 0.3284498960738571, |
| "learning_rate": 3.2832080200501256e-05, |
| "loss": 0.3457, |
| "step": 1089 |
| }, |
| { |
| "epoch": 1.227605633802817, |
| "grad_norm": 0.3667097517230621, |
| "learning_rate": 3.281119465329992e-05, |
| "loss": 0.3605, |
| "step": 1090 |
| }, |
| { |
| "epoch": 1.2287323943661972, |
| "grad_norm": 0.28752992668073607, |
| "learning_rate": 3.279030910609858e-05, |
| "loss": 0.3635, |
| "step": 1091 |
| }, |
| { |
| "epoch": 1.2298591549295774, |
| "grad_norm": 0.3946314540545924, |
| "learning_rate": 3.276942355889724e-05, |
| "loss": 0.3493, |
| "step": 1092 |
| }, |
| { |
| "epoch": 1.2309859154929577, |
| "grad_norm": 0.2789906483434097, |
| "learning_rate": 3.274853801169591e-05, |
| "loss": 0.3347, |
| "step": 1093 |
| }, |
| { |
| "epoch": 1.2321126760563381, |
| "grad_norm": 0.3225093109594014, |
| "learning_rate": 3.2727652464494576e-05, |
| "loss": 0.341, |
| "step": 1094 |
| }, |
| { |
| "epoch": 1.2332394366197184, |
| "grad_norm": 0.32862283959211835, |
| "learning_rate": 3.2706766917293236e-05, |
| "loss": 0.3448, |
| "step": 1095 |
| }, |
| { |
| "epoch": 1.2343661971830986, |
| "grad_norm": 0.30766363411442, |
| "learning_rate": 3.2685881370091896e-05, |
| "loss": 0.345, |
| "step": 1096 |
| }, |
| { |
| "epoch": 1.2354929577464788, |
| "grad_norm": 0.33495055865479145, |
| "learning_rate": 3.2664995822890556e-05, |
| "loss": 0.3482, |
| "step": 1097 |
| }, |
| { |
| "epoch": 1.236619718309859, |
| "grad_norm": 0.4100596803077402, |
| "learning_rate": 3.264411027568923e-05, |
| "loss": 0.3776, |
| "step": 1098 |
| }, |
| { |
| "epoch": 1.2377464788732395, |
| "grad_norm": 0.29849674067446036, |
| "learning_rate": 3.262322472848789e-05, |
| "loss": 0.3478, |
| "step": 1099 |
| }, |
| { |
| "epoch": 1.2388732394366198, |
| "grad_norm": 0.4752815995738534, |
| "learning_rate": 3.260233918128655e-05, |
| "loss": 0.3413, |
| "step": 1100 |
| }, |
| { |
| "epoch": 1.24, |
| "grad_norm": 0.2841451269993109, |
| "learning_rate": 3.258145363408521e-05, |
| "loss": 0.3311, |
| "step": 1101 |
| }, |
| { |
| "epoch": 1.2411267605633802, |
| "grad_norm": 0.43287277582944, |
| "learning_rate": 3.256056808688388e-05, |
| "loss": 0.3404, |
| "step": 1102 |
| }, |
| { |
| "epoch": 1.2422535211267607, |
| "grad_norm": 0.27842035168195645, |
| "learning_rate": 3.253968253968254e-05, |
| "loss": 0.3364, |
| "step": 1103 |
| }, |
| { |
| "epoch": 1.243380281690141, |
| "grad_norm": 0.3580502578234584, |
| "learning_rate": 3.25187969924812e-05, |
| "loss": 0.3304, |
| "step": 1104 |
| }, |
| { |
| "epoch": 1.2445070422535212, |
| "grad_norm": 0.33254398767744164, |
| "learning_rate": 3.249791144527986e-05, |
| "loss": 0.3371, |
| "step": 1105 |
| }, |
| { |
| "epoch": 1.2456338028169014, |
| "grad_norm": 0.32142884321075016, |
| "learning_rate": 3.247702589807854e-05, |
| "loss": 0.3604, |
| "step": 1106 |
| }, |
| { |
| "epoch": 1.2467605633802816, |
| "grad_norm": 0.39048564971156885, |
| "learning_rate": 3.24561403508772e-05, |
| "loss": 0.3665, |
| "step": 1107 |
| }, |
| { |
| "epoch": 1.247887323943662, |
| "grad_norm": 0.47432868715894116, |
| "learning_rate": 3.243525480367586e-05, |
| "loss": 0.3379, |
| "step": 1108 |
| }, |
| { |
| "epoch": 1.2490140845070423, |
| "grad_norm": 0.5373426510405537, |
| "learning_rate": 3.241436925647452e-05, |
| "loss": 0.352, |
| "step": 1109 |
| }, |
| { |
| "epoch": 1.2501408450704226, |
| "grad_norm": 0.28347871588612267, |
| "learning_rate": 3.239348370927319e-05, |
| "loss": 0.3209, |
| "step": 1110 |
| }, |
| { |
| "epoch": 1.2512676056338028, |
| "grad_norm": 0.39484792243555067, |
| "learning_rate": 3.237259816207185e-05, |
| "loss": 0.3388, |
| "step": 1111 |
| }, |
| { |
| "epoch": 1.252394366197183, |
| "grad_norm": 0.3107741346812873, |
| "learning_rate": 3.235171261487051e-05, |
| "loss": 0.3366, |
| "step": 1112 |
| }, |
| { |
| "epoch": 1.2535211267605635, |
| "grad_norm": 0.3898314974601013, |
| "learning_rate": 3.233082706766917e-05, |
| "loss": 0.3604, |
| "step": 1113 |
| }, |
| { |
| "epoch": 1.2546478873239437, |
| "grad_norm": 0.3408581558334333, |
| "learning_rate": 3.230994152046784e-05, |
| "loss": 0.3492, |
| "step": 1114 |
| }, |
| { |
| "epoch": 1.255774647887324, |
| "grad_norm": 0.3618348938509109, |
| "learning_rate": 3.2289055973266504e-05, |
| "loss": 0.3617, |
| "step": 1115 |
| }, |
| { |
| "epoch": 1.2569014084507042, |
| "grad_norm": 0.2817036619413573, |
| "learning_rate": 3.2268170426065164e-05, |
| "loss": 0.3517, |
| "step": 1116 |
| }, |
| { |
| "epoch": 1.2580281690140844, |
| "grad_norm": 0.3070164964104573, |
| "learning_rate": 3.2247284878863824e-05, |
| "loss": 0.3683, |
| "step": 1117 |
| }, |
| { |
| "epoch": 1.2591549295774649, |
| "grad_norm": 0.31552633114716866, |
| "learning_rate": 3.222639933166249e-05, |
| "loss": 0.3657, |
| "step": 1118 |
| }, |
| { |
| "epoch": 1.2602816901408451, |
| "grad_norm": 0.3175500946772457, |
| "learning_rate": 3.220551378446116e-05, |
| "loss": 0.3625, |
| "step": 1119 |
| }, |
| { |
| "epoch": 1.2614084507042254, |
| "grad_norm": 0.28442849784940555, |
| "learning_rate": 3.218462823725982e-05, |
| "loss": 0.3461, |
| "step": 1120 |
| }, |
| { |
| "epoch": 1.2625352112676056, |
| "grad_norm": 0.3041046845176297, |
| "learning_rate": 3.216374269005848e-05, |
| "loss": 0.3325, |
| "step": 1121 |
| }, |
| { |
| "epoch": 1.2636619718309858, |
| "grad_norm": 0.3065101535698693, |
| "learning_rate": 3.2142857142857144e-05, |
| "loss": 0.3328, |
| "step": 1122 |
| }, |
| { |
| "epoch": 1.2647887323943663, |
| "grad_norm": 0.32118291745639427, |
| "learning_rate": 3.212197159565581e-05, |
| "loss": 0.3516, |
| "step": 1123 |
| }, |
| { |
| "epoch": 1.2659154929577465, |
| "grad_norm": 0.2706365873502233, |
| "learning_rate": 3.210108604845447e-05, |
| "loss": 0.3402, |
| "step": 1124 |
| }, |
| { |
| "epoch": 1.2670422535211268, |
| "grad_norm": 0.3395123585654144, |
| "learning_rate": 3.208020050125313e-05, |
| "loss": 0.3488, |
| "step": 1125 |
| }, |
| { |
| "epoch": 1.268169014084507, |
| "grad_norm": 0.3192115703388621, |
| "learning_rate": 3.20593149540518e-05, |
| "loss": 0.3321, |
| "step": 1126 |
| }, |
| { |
| "epoch": 1.2692957746478872, |
| "grad_norm": 0.2748915319828329, |
| "learning_rate": 3.2038429406850464e-05, |
| "loss": 0.36, |
| "step": 1127 |
| }, |
| { |
| "epoch": 1.2704225352112677, |
| "grad_norm": 0.33790604735267227, |
| "learning_rate": 3.2017543859649124e-05, |
| "loss": 0.359, |
| "step": 1128 |
| }, |
| { |
| "epoch": 1.271549295774648, |
| "grad_norm": 0.28636007427713034, |
| "learning_rate": 3.1996658312447784e-05, |
| "loss": 0.3697, |
| "step": 1129 |
| }, |
| { |
| "epoch": 1.2726760563380282, |
| "grad_norm": 0.3147026101699195, |
| "learning_rate": 3.197577276524645e-05, |
| "loss": 0.3566, |
| "step": 1130 |
| }, |
| { |
| "epoch": 1.2738028169014084, |
| "grad_norm": 0.2939860946022757, |
| "learning_rate": 3.195488721804512e-05, |
| "loss": 0.3308, |
| "step": 1131 |
| }, |
| { |
| "epoch": 1.2749295774647886, |
| "grad_norm": 0.29802348283544033, |
| "learning_rate": 3.193400167084378e-05, |
| "loss": 0.3563, |
| "step": 1132 |
| }, |
| { |
| "epoch": 1.276056338028169, |
| "grad_norm": 0.29506623728971354, |
| "learning_rate": 3.191311612364244e-05, |
| "loss": 0.3337, |
| "step": 1133 |
| }, |
| { |
| "epoch": 1.2771830985915493, |
| "grad_norm": 0.2822855549021315, |
| "learning_rate": 3.1892230576441104e-05, |
| "loss": 0.3489, |
| "step": 1134 |
| }, |
| { |
| "epoch": 1.2783098591549296, |
| "grad_norm": 0.28426720383589205, |
| "learning_rate": 3.187134502923977e-05, |
| "loss": 0.3338, |
| "step": 1135 |
| }, |
| { |
| "epoch": 1.2794366197183098, |
| "grad_norm": 0.24335968053060297, |
| "learning_rate": 3.185045948203843e-05, |
| "loss": 0.3586, |
| "step": 1136 |
| }, |
| { |
| "epoch": 1.28056338028169, |
| "grad_norm": 0.34492248613575655, |
| "learning_rate": 3.182957393483709e-05, |
| "loss": 0.3643, |
| "step": 1137 |
| }, |
| { |
| "epoch": 1.2816901408450705, |
| "grad_norm": 0.28163190816338013, |
| "learning_rate": 3.180868838763576e-05, |
| "loss": 0.3551, |
| "step": 1138 |
| }, |
| { |
| "epoch": 1.2828169014084507, |
| "grad_norm": 0.3038306599947534, |
| "learning_rate": 3.1787802840434425e-05, |
| "loss": 0.3261, |
| "step": 1139 |
| }, |
| { |
| "epoch": 1.283943661971831, |
| "grad_norm": 0.27177700302131724, |
| "learning_rate": 3.1766917293233085e-05, |
| "loss": 0.3351, |
| "step": 1140 |
| }, |
| { |
| "epoch": 1.2850704225352112, |
| "grad_norm": 0.26269138228106, |
| "learning_rate": 3.1746031746031745e-05, |
| "loss": 0.3549, |
| "step": 1141 |
| }, |
| { |
| "epoch": 1.2861971830985914, |
| "grad_norm": 0.2913648915223674, |
| "learning_rate": 3.172514619883041e-05, |
| "loss": 0.3436, |
| "step": 1142 |
| }, |
| { |
| "epoch": 1.287323943661972, |
| "grad_norm": 0.27276203661405635, |
| "learning_rate": 3.170426065162908e-05, |
| "loss": 0.3368, |
| "step": 1143 |
| }, |
| { |
| "epoch": 1.2884507042253521, |
| "grad_norm": 0.356736914318009, |
| "learning_rate": 3.168337510442774e-05, |
| "loss": 0.3465, |
| "step": 1144 |
| }, |
| { |
| "epoch": 1.2895774647887324, |
| "grad_norm": 0.28475878519907033, |
| "learning_rate": 3.16624895572264e-05, |
| "loss": 0.3569, |
| "step": 1145 |
| }, |
| { |
| "epoch": 1.2907042253521126, |
| "grad_norm": 0.3514003335437731, |
| "learning_rate": 3.1641604010025065e-05, |
| "loss": 0.33, |
| "step": 1146 |
| }, |
| { |
| "epoch": 1.2918309859154928, |
| "grad_norm": 0.3458744624264682, |
| "learning_rate": 3.162071846282373e-05, |
| "loss": 0.3775, |
| "step": 1147 |
| }, |
| { |
| "epoch": 1.2929577464788733, |
| "grad_norm": 0.31156956928774027, |
| "learning_rate": 3.159983291562239e-05, |
| "loss": 0.3355, |
| "step": 1148 |
| }, |
| { |
| "epoch": 1.2940845070422535, |
| "grad_norm": 0.2790592740208634, |
| "learning_rate": 3.157894736842105e-05, |
| "loss": 0.3183, |
| "step": 1149 |
| }, |
| { |
| "epoch": 1.2952112676056338, |
| "grad_norm": 0.31894750285146517, |
| "learning_rate": 3.155806182121972e-05, |
| "loss": 0.3424, |
| "step": 1150 |
| }, |
| { |
| "epoch": 1.2963380281690142, |
| "grad_norm": 0.2653718691621682, |
| "learning_rate": 3.1537176274018385e-05, |
| "loss": 0.3448, |
| "step": 1151 |
| }, |
| { |
| "epoch": 1.2974647887323942, |
| "grad_norm": 0.30262000464687844, |
| "learning_rate": 3.1516290726817045e-05, |
| "loss": 0.3321, |
| "step": 1152 |
| }, |
| { |
| "epoch": 1.2985915492957747, |
| "grad_norm": 0.3287674540594305, |
| "learning_rate": 3.1495405179615705e-05, |
| "loss": 0.3368, |
| "step": 1153 |
| }, |
| { |
| "epoch": 1.299718309859155, |
| "grad_norm": 0.2944767444308635, |
| "learning_rate": 3.147451963241437e-05, |
| "loss": 0.3563, |
| "step": 1154 |
| }, |
| { |
| "epoch": 1.3008450704225352, |
| "grad_norm": 0.30510447934543955, |
| "learning_rate": 3.145363408521303e-05, |
| "loss": 0.3464, |
| "step": 1155 |
| }, |
| { |
| "epoch": 1.3019718309859156, |
| "grad_norm": 0.3375919433957859, |
| "learning_rate": 3.14327485380117e-05, |
| "loss": 0.3423, |
| "step": 1156 |
| }, |
| { |
| "epoch": 1.3030985915492956, |
| "grad_norm": 0.35168934931086815, |
| "learning_rate": 3.141186299081036e-05, |
| "loss": 0.3545, |
| "step": 1157 |
| }, |
| { |
| "epoch": 1.304225352112676, |
| "grad_norm": 0.3677607944955644, |
| "learning_rate": 3.1390977443609025e-05, |
| "loss": 0.3654, |
| "step": 1158 |
| }, |
| { |
| "epoch": 1.3053521126760563, |
| "grad_norm": 0.35260259876267547, |
| "learning_rate": 3.1370091896407685e-05, |
| "loss": 0.3486, |
| "step": 1159 |
| }, |
| { |
| "epoch": 1.3064788732394366, |
| "grad_norm": 0.35691937485605885, |
| "learning_rate": 3.134920634920635e-05, |
| "loss": 0.3265, |
| "step": 1160 |
| }, |
| { |
| "epoch": 1.307605633802817, |
| "grad_norm": 0.3512055814657964, |
| "learning_rate": 3.132832080200501e-05, |
| "loss": 0.366, |
| "step": 1161 |
| }, |
| { |
| "epoch": 1.3087323943661973, |
| "grad_norm": 0.42092551860734834, |
| "learning_rate": 3.130743525480368e-05, |
| "loss": 0.3159, |
| "step": 1162 |
| }, |
| { |
| "epoch": 1.3098591549295775, |
| "grad_norm": 0.3699923682926425, |
| "learning_rate": 3.128654970760234e-05, |
| "loss": 0.3393, |
| "step": 1163 |
| }, |
| { |
| "epoch": 1.3109859154929577, |
| "grad_norm": 0.3399828357531795, |
| "learning_rate": 3.1265664160401006e-05, |
| "loss": 0.3457, |
| "step": 1164 |
| }, |
| { |
| "epoch": 1.312112676056338, |
| "grad_norm": 0.4585972251711265, |
| "learning_rate": 3.1244778613199666e-05, |
| "loss": 0.3458, |
| "step": 1165 |
| }, |
| { |
| "epoch": 1.3132394366197184, |
| "grad_norm": 0.34287449191225977, |
| "learning_rate": 3.122389306599833e-05, |
| "loss": 0.3667, |
| "step": 1166 |
| }, |
| { |
| "epoch": 1.3143661971830987, |
| "grad_norm": 0.4194731523616112, |
| "learning_rate": 3.120300751879699e-05, |
| "loss": 0.3601, |
| "step": 1167 |
| }, |
| { |
| "epoch": 1.315492957746479, |
| "grad_norm": 0.3294653640015108, |
| "learning_rate": 3.118212197159566e-05, |
| "loss": 0.3568, |
| "step": 1168 |
| }, |
| { |
| "epoch": 1.3166197183098591, |
| "grad_norm": 0.47350063514750734, |
| "learning_rate": 3.116123642439432e-05, |
| "loss": 0.3477, |
| "step": 1169 |
| }, |
| { |
| "epoch": 1.3177464788732394, |
| "grad_norm": 0.280534203327362, |
| "learning_rate": 3.1140350877192986e-05, |
| "loss": 0.3583, |
| "step": 1170 |
| }, |
| { |
| "epoch": 1.3188732394366198, |
| "grad_norm": 0.49437365334095373, |
| "learning_rate": 3.1119465329991646e-05, |
| "loss": 0.3516, |
| "step": 1171 |
| }, |
| { |
| "epoch": 1.32, |
| "grad_norm": 0.3016444543070627, |
| "learning_rate": 3.109857978279031e-05, |
| "loss": 0.3479, |
| "step": 1172 |
| }, |
| { |
| "epoch": 1.3211267605633803, |
| "grad_norm": 0.36752735629356575, |
| "learning_rate": 3.107769423558897e-05, |
| "loss": 0.3776, |
| "step": 1173 |
| }, |
| { |
| "epoch": 1.3222535211267605, |
| "grad_norm": 0.2998746868582936, |
| "learning_rate": 3.105680868838764e-05, |
| "loss": 0.3319, |
| "step": 1174 |
| }, |
| { |
| "epoch": 1.3233802816901408, |
| "grad_norm": 0.4052736495459436, |
| "learning_rate": 3.10359231411863e-05, |
| "loss": 0.3318, |
| "step": 1175 |
| }, |
| { |
| "epoch": 1.3245070422535212, |
| "grad_norm": 0.29620311313799436, |
| "learning_rate": 3.1015037593984966e-05, |
| "loss": 0.3327, |
| "step": 1176 |
| }, |
| { |
| "epoch": 1.3256338028169015, |
| "grad_norm": 0.35940054984699693, |
| "learning_rate": 3.0994152046783626e-05, |
| "loss": 0.3601, |
| "step": 1177 |
| }, |
| { |
| "epoch": 1.3267605633802817, |
| "grad_norm": 0.2998670016480465, |
| "learning_rate": 3.097326649958229e-05, |
| "loss": 0.3358, |
| "step": 1178 |
| }, |
| { |
| "epoch": 1.327887323943662, |
| "grad_norm": 0.2613240139906125, |
| "learning_rate": 3.095238095238095e-05, |
| "loss": 0.3528, |
| "step": 1179 |
| }, |
| { |
| "epoch": 1.3290140845070422, |
| "grad_norm": 0.312707479713965, |
| "learning_rate": 3.093149540517962e-05, |
| "loss": 0.3432, |
| "step": 1180 |
| }, |
| { |
| "epoch": 1.3301408450704226, |
| "grad_norm": 0.2952059364485859, |
| "learning_rate": 3.091060985797828e-05, |
| "loss": 0.3591, |
| "step": 1181 |
| }, |
| { |
| "epoch": 1.3312676056338029, |
| "grad_norm": 0.30308204907758685, |
| "learning_rate": 3.0889724310776946e-05, |
| "loss": 0.3544, |
| "step": 1182 |
| }, |
| { |
| "epoch": 1.332394366197183, |
| "grad_norm": 0.28585642966780267, |
| "learning_rate": 3.0868838763575606e-05, |
| "loss": 0.3189, |
| "step": 1183 |
| }, |
| { |
| "epoch": 1.3335211267605633, |
| "grad_norm": 0.2726099821036104, |
| "learning_rate": 3.084795321637427e-05, |
| "loss": 0.3381, |
| "step": 1184 |
| }, |
| { |
| "epoch": 1.3346478873239436, |
| "grad_norm": 0.32729439406058286, |
| "learning_rate": 3.082706766917293e-05, |
| "loss": 0.337, |
| "step": 1185 |
| }, |
| { |
| "epoch": 1.335774647887324, |
| "grad_norm": 0.30702486116129707, |
| "learning_rate": 3.08061821219716e-05, |
| "loss": 0.3469, |
| "step": 1186 |
| }, |
| { |
| "epoch": 1.3369014084507043, |
| "grad_norm": 0.2919163761992772, |
| "learning_rate": 3.078529657477026e-05, |
| "loss": 0.3614, |
| "step": 1187 |
| }, |
| { |
| "epoch": 1.3380281690140845, |
| "grad_norm": 0.3362453020110343, |
| "learning_rate": 3.076441102756893e-05, |
| "loss": 0.3506, |
| "step": 1188 |
| }, |
| { |
| "epoch": 1.3391549295774647, |
| "grad_norm": 0.3726011350376432, |
| "learning_rate": 3.074352548036759e-05, |
| "loss": 0.3604, |
| "step": 1189 |
| }, |
| { |
| "epoch": 1.340281690140845, |
| "grad_norm": 0.4541280728678603, |
| "learning_rate": 3.0722639933166253e-05, |
| "loss": 0.3764, |
| "step": 1190 |
| }, |
| { |
| "epoch": 1.3414084507042254, |
| "grad_norm": 1.3936880238257683, |
| "learning_rate": 3.0701754385964913e-05, |
| "loss": 0.3615, |
| "step": 1191 |
| }, |
| { |
| "epoch": 1.3425352112676057, |
| "grad_norm": 0.4258102093027606, |
| "learning_rate": 3.068086883876357e-05, |
| "loss": 0.3543, |
| "step": 1192 |
| }, |
| { |
| "epoch": 1.343661971830986, |
| "grad_norm": 0.3575350282116202, |
| "learning_rate": 3.065998329156224e-05, |
| "loss": 0.3367, |
| "step": 1193 |
| }, |
| { |
| "epoch": 1.3447887323943661, |
| "grad_norm": 0.32348813463671583, |
| "learning_rate": 3.063909774436091e-05, |
| "loss": 0.3254, |
| "step": 1194 |
| }, |
| { |
| "epoch": 1.3459154929577464, |
| "grad_norm": 0.2620930397025272, |
| "learning_rate": 3.061821219715957e-05, |
| "loss": 0.3286, |
| "step": 1195 |
| }, |
| { |
| "epoch": 1.3470422535211268, |
| "grad_norm": 0.4043629956242075, |
| "learning_rate": 3.059732664995823e-05, |
| "loss": 0.35, |
| "step": 1196 |
| }, |
| { |
| "epoch": 1.348169014084507, |
| "grad_norm": 0.3120790464669233, |
| "learning_rate": 3.0576441102756894e-05, |
| "loss": 0.3529, |
| "step": 1197 |
| }, |
| { |
| "epoch": 1.3492957746478873, |
| "grad_norm": 0.3456006649337656, |
| "learning_rate": 3.055555555555556e-05, |
| "loss": 0.3422, |
| "step": 1198 |
| }, |
| { |
| "epoch": 1.3504225352112675, |
| "grad_norm": 0.3203816325309061, |
| "learning_rate": 3.053467000835422e-05, |
| "loss": 0.3565, |
| "step": 1199 |
| }, |
| { |
| "epoch": 1.3515492957746478, |
| "grad_norm": 0.3196109116354312, |
| "learning_rate": 3.0513784461152884e-05, |
| "loss": 0.3525, |
| "step": 1200 |
| }, |
| { |
| "epoch": 1.3526760563380282, |
| "grad_norm": 0.3430290246706473, |
| "learning_rate": 3.0492898913951544e-05, |
| "loss": 0.333, |
| "step": 1201 |
| }, |
| { |
| "epoch": 1.3538028169014085, |
| "grad_norm": 0.3306918690981812, |
| "learning_rate": 3.0472013366750214e-05, |
| "loss": 0.3389, |
| "step": 1202 |
| }, |
| { |
| "epoch": 1.3549295774647887, |
| "grad_norm": 0.3451869621368286, |
| "learning_rate": 3.0451127819548874e-05, |
| "loss": 0.3397, |
| "step": 1203 |
| }, |
| { |
| "epoch": 1.356056338028169, |
| "grad_norm": 0.28806868402300256, |
| "learning_rate": 3.0430242272347537e-05, |
| "loss": 0.3501, |
| "step": 1204 |
| }, |
| { |
| "epoch": 1.3571830985915492, |
| "grad_norm": 0.30789548763765795, |
| "learning_rate": 3.0409356725146197e-05, |
| "loss": 0.3637, |
| "step": 1205 |
| }, |
| { |
| "epoch": 1.3583098591549296, |
| "grad_norm": 0.2714479894177003, |
| "learning_rate": 3.0388471177944867e-05, |
| "loss": 0.3519, |
| "step": 1206 |
| }, |
| { |
| "epoch": 1.3594366197183099, |
| "grad_norm": 0.3590544930015899, |
| "learning_rate": 3.0367585630743527e-05, |
| "loss": 0.3502, |
| "step": 1207 |
| }, |
| { |
| "epoch": 1.36056338028169, |
| "grad_norm": 0.3214469345004128, |
| "learning_rate": 3.034670008354219e-05, |
| "loss": 0.3236, |
| "step": 1208 |
| }, |
| { |
| "epoch": 1.3616901408450703, |
| "grad_norm": 0.3042782868325741, |
| "learning_rate": 3.032581453634085e-05, |
| "loss": 0.3452, |
| "step": 1209 |
| }, |
| { |
| "epoch": 1.3628169014084506, |
| "grad_norm": 0.33936594312067697, |
| "learning_rate": 3.030492898913952e-05, |
| "loss": 0.344, |
| "step": 1210 |
| }, |
| { |
| "epoch": 1.363943661971831, |
| "grad_norm": 0.3043390659124525, |
| "learning_rate": 3.028404344193818e-05, |
| "loss": 0.3575, |
| "step": 1211 |
| }, |
| { |
| "epoch": 1.3650704225352113, |
| "grad_norm": 0.3357631465279863, |
| "learning_rate": 3.0263157894736844e-05, |
| "loss": 0.3334, |
| "step": 1212 |
| }, |
| { |
| "epoch": 1.3661971830985915, |
| "grad_norm": 0.3177753566079832, |
| "learning_rate": 3.0242272347535504e-05, |
| "loss": 0.3585, |
| "step": 1213 |
| }, |
| { |
| "epoch": 1.367323943661972, |
| "grad_norm": 0.31936174408730955, |
| "learning_rate": 3.022138680033417e-05, |
| "loss": 0.327, |
| "step": 1214 |
| }, |
| { |
| "epoch": 1.368450704225352, |
| "grad_norm": 0.315057693985089, |
| "learning_rate": 3.0200501253132834e-05, |
| "loss": 0.3462, |
| "step": 1215 |
| }, |
| { |
| "epoch": 1.3695774647887324, |
| "grad_norm": 0.2681371385737636, |
| "learning_rate": 3.0179615705931498e-05, |
| "loss": 0.3207, |
| "step": 1216 |
| }, |
| { |
| "epoch": 1.3707042253521127, |
| "grad_norm": 0.29792906360473065, |
| "learning_rate": 3.0158730158730158e-05, |
| "loss": 0.3357, |
| "step": 1217 |
| }, |
| { |
| "epoch": 1.371830985915493, |
| "grad_norm": 0.3207461742321883, |
| "learning_rate": 3.0137844611528825e-05, |
| "loss": 0.3352, |
| "step": 1218 |
| }, |
| { |
| "epoch": 1.3729577464788734, |
| "grad_norm": 0.2783192420898195, |
| "learning_rate": 3.0116959064327488e-05, |
| "loss": 0.3451, |
| "step": 1219 |
| }, |
| { |
| "epoch": 1.3740845070422536, |
| "grad_norm": 0.32591368421816236, |
| "learning_rate": 3.009607351712615e-05, |
| "loss": 0.353, |
| "step": 1220 |
| }, |
| { |
| "epoch": 1.3752112676056338, |
| "grad_norm": 0.3066561562944395, |
| "learning_rate": 3.007518796992481e-05, |
| "loss": 0.3243, |
| "step": 1221 |
| }, |
| { |
| "epoch": 1.376338028169014, |
| "grad_norm": 2.2660560288471254, |
| "learning_rate": 3.0054302422723478e-05, |
| "loss": 0.4137, |
| "step": 1222 |
| }, |
| { |
| "epoch": 1.3774647887323943, |
| "grad_norm": 0.39210870348356686, |
| "learning_rate": 3.003341687552214e-05, |
| "loss": 0.348, |
| "step": 1223 |
| }, |
| { |
| "epoch": 1.3785915492957748, |
| "grad_norm": 0.2962128553173713, |
| "learning_rate": 3.0012531328320805e-05, |
| "loss": 0.3435, |
| "step": 1224 |
| }, |
| { |
| "epoch": 1.379718309859155, |
| "grad_norm": 0.33275477838598055, |
| "learning_rate": 2.9991645781119465e-05, |
| "loss": 0.3515, |
| "step": 1225 |
| }, |
| { |
| "epoch": 1.3808450704225352, |
| "grad_norm": 0.3352961753145922, |
| "learning_rate": 2.997076023391813e-05, |
| "loss": 0.3636, |
| "step": 1226 |
| }, |
| { |
| "epoch": 1.3819718309859155, |
| "grad_norm": 0.35096015184771046, |
| "learning_rate": 2.9949874686716795e-05, |
| "loss": 0.3648, |
| "step": 1227 |
| }, |
| { |
| "epoch": 1.3830985915492957, |
| "grad_norm": 0.3168355004880717, |
| "learning_rate": 2.9928989139515455e-05, |
| "loss": 0.3643, |
| "step": 1228 |
| }, |
| { |
| "epoch": 1.3842253521126762, |
| "grad_norm": 0.32634015086711105, |
| "learning_rate": 2.9908103592314118e-05, |
| "loss": 0.3492, |
| "step": 1229 |
| }, |
| { |
| "epoch": 1.3853521126760564, |
| "grad_norm": 0.33810275384931515, |
| "learning_rate": 2.9887218045112785e-05, |
| "loss": 0.3492, |
| "step": 1230 |
| }, |
| { |
| "epoch": 1.3864788732394366, |
| "grad_norm": 0.33464097541186544, |
| "learning_rate": 2.986633249791145e-05, |
| "loss": 0.3519, |
| "step": 1231 |
| }, |
| { |
| "epoch": 1.3876056338028169, |
| "grad_norm": 0.31725298321868456, |
| "learning_rate": 2.984544695071011e-05, |
| "loss": 0.3472, |
| "step": 1232 |
| }, |
| { |
| "epoch": 1.388732394366197, |
| "grad_norm": 0.30368942357679957, |
| "learning_rate": 2.9824561403508772e-05, |
| "loss": 0.3572, |
| "step": 1233 |
| }, |
| { |
| "epoch": 1.3898591549295776, |
| "grad_norm": 0.3137955769099765, |
| "learning_rate": 2.980367585630744e-05, |
| "loss": 0.366, |
| "step": 1234 |
| }, |
| { |
| "epoch": 1.3909859154929578, |
| "grad_norm": 0.32494170437175546, |
| "learning_rate": 2.9782790309106102e-05, |
| "loss": 0.3451, |
| "step": 1235 |
| }, |
| { |
| "epoch": 1.392112676056338, |
| "grad_norm": 0.289152507208471, |
| "learning_rate": 2.9761904761904762e-05, |
| "loss": 0.3453, |
| "step": 1236 |
| }, |
| { |
| "epoch": 1.3932394366197183, |
| "grad_norm": 0.30367923467257946, |
| "learning_rate": 2.9741019214703425e-05, |
| "loss": 0.3474, |
| "step": 1237 |
| }, |
| { |
| "epoch": 1.3943661971830985, |
| "grad_norm": 0.27456188122911507, |
| "learning_rate": 2.9720133667502092e-05, |
| "loss": 0.3472, |
| "step": 1238 |
| }, |
| { |
| "epoch": 1.395492957746479, |
| "grad_norm": 0.3338851968314126, |
| "learning_rate": 2.9699248120300755e-05, |
| "loss": 0.3536, |
| "step": 1239 |
| }, |
| { |
| "epoch": 1.3966197183098592, |
| "grad_norm": 0.2909887050565231, |
| "learning_rate": 2.9678362573099415e-05, |
| "loss": 0.3427, |
| "step": 1240 |
| }, |
| { |
| "epoch": 1.3977464788732394, |
| "grad_norm": 0.2808404219130732, |
| "learning_rate": 2.965747702589808e-05, |
| "loss": 0.3349, |
| "step": 1241 |
| }, |
| { |
| "epoch": 1.3988732394366197, |
| "grad_norm": 0.2974931938145654, |
| "learning_rate": 2.9636591478696746e-05, |
| "loss": 0.3258, |
| "step": 1242 |
| }, |
| { |
| "epoch": 1.4, |
| "grad_norm": 0.30446773483609063, |
| "learning_rate": 2.961570593149541e-05, |
| "loss": 0.3451, |
| "step": 1243 |
| }, |
| { |
| "epoch": 1.4011267605633804, |
| "grad_norm": 0.2629360082929095, |
| "learning_rate": 2.959482038429407e-05, |
| "loss": 0.3621, |
| "step": 1244 |
| }, |
| { |
| "epoch": 1.4022535211267606, |
| "grad_norm": 0.33634058178632664, |
| "learning_rate": 2.9573934837092732e-05, |
| "loss": 0.3265, |
| "step": 1245 |
| }, |
| { |
| "epoch": 1.4033802816901408, |
| "grad_norm": 0.25671340810425175, |
| "learning_rate": 2.95530492898914e-05, |
| "loss": 0.3596, |
| "step": 1246 |
| }, |
| { |
| "epoch": 1.404507042253521, |
| "grad_norm": 0.30371346506641733, |
| "learning_rate": 2.9532163742690062e-05, |
| "loss": 0.3373, |
| "step": 1247 |
| }, |
| { |
| "epoch": 1.4056338028169013, |
| "grad_norm": 0.33309340268956333, |
| "learning_rate": 2.9511278195488722e-05, |
| "loss": 0.3524, |
| "step": 1248 |
| }, |
| { |
| "epoch": 1.4067605633802818, |
| "grad_norm": 0.35312534002615037, |
| "learning_rate": 2.9490392648287386e-05, |
| "loss": 0.3554, |
| "step": 1249 |
| }, |
| { |
| "epoch": 1.407887323943662, |
| "grad_norm": 0.268431248391172, |
| "learning_rate": 2.9469507101086053e-05, |
| "loss": 0.3428, |
| "step": 1250 |
| }, |
| { |
| "epoch": 1.4090140845070422, |
| "grad_norm": 0.29984418244715005, |
| "learning_rate": 2.9448621553884716e-05, |
| "loss": 0.3434, |
| "step": 1251 |
| }, |
| { |
| "epoch": 1.4101408450704225, |
| "grad_norm": 0.3255596377490089, |
| "learning_rate": 2.9427736006683376e-05, |
| "loss": 0.3322, |
| "step": 1252 |
| }, |
| { |
| "epoch": 1.4112676056338027, |
| "grad_norm": 0.29044414091181564, |
| "learning_rate": 2.940685045948204e-05, |
| "loss": 0.3617, |
| "step": 1253 |
| }, |
| { |
| "epoch": 1.4123943661971832, |
| "grad_norm": 0.3137082885510621, |
| "learning_rate": 2.9385964912280706e-05, |
| "loss": 0.3359, |
| "step": 1254 |
| }, |
| { |
| "epoch": 1.4135211267605634, |
| "grad_norm": 0.3818804237490533, |
| "learning_rate": 2.9365079365079366e-05, |
| "loss": 0.353, |
| "step": 1255 |
| }, |
| { |
| "epoch": 1.4146478873239436, |
| "grad_norm": 0.30290564364536143, |
| "learning_rate": 2.934419381787803e-05, |
| "loss": 0.3236, |
| "step": 1256 |
| }, |
| { |
| "epoch": 1.4157746478873239, |
| "grad_norm": 0.32553490755642506, |
| "learning_rate": 2.9323308270676693e-05, |
| "loss": 0.3352, |
| "step": 1257 |
| }, |
| { |
| "epoch": 1.4169014084507041, |
| "grad_norm": 0.3678190946467779, |
| "learning_rate": 2.930242272347536e-05, |
| "loss": 0.3299, |
| "step": 1258 |
| }, |
| { |
| "epoch": 1.4180281690140846, |
| "grad_norm": 0.33853643840663594, |
| "learning_rate": 2.928153717627402e-05, |
| "loss": 0.3609, |
| "step": 1259 |
| }, |
| { |
| "epoch": 1.4191549295774648, |
| "grad_norm": 0.3450814972809518, |
| "learning_rate": 2.9260651629072683e-05, |
| "loss": 0.3628, |
| "step": 1260 |
| }, |
| { |
| "epoch": 1.420281690140845, |
| "grad_norm": 0.4342951041395845, |
| "learning_rate": 2.9239766081871346e-05, |
| "loss": 0.3575, |
| "step": 1261 |
| }, |
| { |
| "epoch": 1.4214084507042253, |
| "grad_norm": 0.28784037040527977, |
| "learning_rate": 2.9218880534670013e-05, |
| "loss": 0.3454, |
| "step": 1262 |
| }, |
| { |
| "epoch": 1.4225352112676055, |
| "grad_norm": 0.3702939745854903, |
| "learning_rate": 2.9197994987468673e-05, |
| "loss": 0.3366, |
| "step": 1263 |
| }, |
| { |
| "epoch": 1.423661971830986, |
| "grad_norm": 0.36297638741464655, |
| "learning_rate": 2.9177109440267336e-05, |
| "loss": 0.3436, |
| "step": 1264 |
| }, |
| { |
| "epoch": 1.4247887323943662, |
| "grad_norm": 0.2932884280851587, |
| "learning_rate": 2.9156223893066e-05, |
| "loss": 0.3375, |
| "step": 1265 |
| }, |
| { |
| "epoch": 1.4259154929577464, |
| "grad_norm": 0.40800534520034465, |
| "learning_rate": 2.9135338345864667e-05, |
| "loss": 0.3667, |
| "step": 1266 |
| }, |
| { |
| "epoch": 1.4270422535211267, |
| "grad_norm": 0.378230060871396, |
| "learning_rate": 2.9114452798663327e-05, |
| "loss": 0.3279, |
| "step": 1267 |
| }, |
| { |
| "epoch": 1.428169014084507, |
| "grad_norm": 0.34509394026907525, |
| "learning_rate": 2.909356725146199e-05, |
| "loss": 0.3475, |
| "step": 1268 |
| }, |
| { |
| "epoch": 1.4292957746478874, |
| "grad_norm": 0.4153322561933915, |
| "learning_rate": 2.907268170426065e-05, |
| "loss": 0.3438, |
| "step": 1269 |
| }, |
| { |
| "epoch": 1.4304225352112676, |
| "grad_norm": 0.27853930771661345, |
| "learning_rate": 2.905179615705932e-05, |
| "loss": 0.3363, |
| "step": 1270 |
| }, |
| { |
| "epoch": 1.4315492957746478, |
| "grad_norm": 0.32099780030598213, |
| "learning_rate": 2.903091060985798e-05, |
| "loss": 0.3376, |
| "step": 1271 |
| }, |
| { |
| "epoch": 1.4326760563380283, |
| "grad_norm": 0.37729012390993694, |
| "learning_rate": 2.9010025062656643e-05, |
| "loss": 0.3626, |
| "step": 1272 |
| }, |
| { |
| "epoch": 1.4338028169014083, |
| "grad_norm": 0.3412079603451582, |
| "learning_rate": 2.8989139515455303e-05, |
| "loss": 0.347, |
| "step": 1273 |
| }, |
| { |
| "epoch": 1.4349295774647888, |
| "grad_norm": 0.3165021784985063, |
| "learning_rate": 2.8968253968253974e-05, |
| "loss": 0.3464, |
| "step": 1274 |
| }, |
| { |
| "epoch": 1.436056338028169, |
| "grad_norm": 0.3591571483688786, |
| "learning_rate": 2.8947368421052634e-05, |
| "loss": 0.37, |
| "step": 1275 |
| }, |
| { |
| "epoch": 1.4371830985915492, |
| "grad_norm": 0.3562250029512977, |
| "learning_rate": 2.8926482873851297e-05, |
| "loss": 0.333, |
| "step": 1276 |
| }, |
| { |
| "epoch": 1.4383098591549297, |
| "grad_norm": 0.25149984510123385, |
| "learning_rate": 2.8905597326649957e-05, |
| "loss": 0.3468, |
| "step": 1277 |
| }, |
| { |
| "epoch": 1.43943661971831, |
| "grad_norm": 0.30250034342485427, |
| "learning_rate": 2.8884711779448627e-05, |
| "loss": 0.357, |
| "step": 1278 |
| }, |
| { |
| "epoch": 1.4405633802816902, |
| "grad_norm": 0.34875139847130576, |
| "learning_rate": 2.8863826232247287e-05, |
| "loss": 0.3625, |
| "step": 1279 |
| }, |
| { |
| "epoch": 1.4416901408450704, |
| "grad_norm": 0.30334645075958366, |
| "learning_rate": 2.884294068504595e-05, |
| "loss": 0.3253, |
| "step": 1280 |
| }, |
| { |
| "epoch": 1.4428169014084506, |
| "grad_norm": 0.31229266086668034, |
| "learning_rate": 2.882205513784461e-05, |
| "loss": 0.3534, |
| "step": 1281 |
| }, |
| { |
| "epoch": 1.443943661971831, |
| "grad_norm": 0.33041544401134926, |
| "learning_rate": 2.8801169590643277e-05, |
| "loss": 0.3352, |
| "step": 1282 |
| }, |
| { |
| "epoch": 1.4450704225352113, |
| "grad_norm": 0.34674132045981165, |
| "learning_rate": 2.878028404344194e-05, |
| "loss": 0.3311, |
| "step": 1283 |
| }, |
| { |
| "epoch": 1.4461971830985916, |
| "grad_norm": 0.26156829898934003, |
| "learning_rate": 2.8759398496240604e-05, |
| "loss": 0.3432, |
| "step": 1284 |
| }, |
| { |
| "epoch": 1.4473239436619718, |
| "grad_norm": 0.3136080304572592, |
| "learning_rate": 2.8738512949039264e-05, |
| "loss": 0.3569, |
| "step": 1285 |
| }, |
| { |
| "epoch": 1.448450704225352, |
| "grad_norm": 0.30710210539173416, |
| "learning_rate": 2.871762740183793e-05, |
| "loss": 0.3405, |
| "step": 1286 |
| }, |
| { |
| "epoch": 1.4495774647887325, |
| "grad_norm": 0.2894300410033628, |
| "learning_rate": 2.8696741854636594e-05, |
| "loss": 0.3423, |
| "step": 1287 |
| }, |
| { |
| "epoch": 1.4507042253521127, |
| "grad_norm": 0.29605019545996425, |
| "learning_rate": 2.8675856307435257e-05, |
| "loss": 0.3548, |
| "step": 1288 |
| }, |
| { |
| "epoch": 1.451830985915493, |
| "grad_norm": 0.33202235824154624, |
| "learning_rate": 2.8654970760233917e-05, |
| "loss": 0.3642, |
| "step": 1289 |
| }, |
| { |
| "epoch": 1.4529577464788732, |
| "grad_norm": 0.28873570052106284, |
| "learning_rate": 2.8634085213032584e-05, |
| "loss": 0.3352, |
| "step": 1290 |
| }, |
| { |
| "epoch": 1.4540845070422534, |
| "grad_norm": 0.32778904766572686, |
| "learning_rate": 2.8613199665831247e-05, |
| "loss": 0.345, |
| "step": 1291 |
| }, |
| { |
| "epoch": 1.455211267605634, |
| "grad_norm": 0.30770894411783706, |
| "learning_rate": 2.859231411862991e-05, |
| "loss": 0.3484, |
| "step": 1292 |
| }, |
| { |
| "epoch": 1.4563380281690141, |
| "grad_norm": 0.2833498740974589, |
| "learning_rate": 2.857142857142857e-05, |
| "loss": 0.352, |
| "step": 1293 |
| }, |
| { |
| "epoch": 1.4574647887323944, |
| "grad_norm": 0.31814781815089144, |
| "learning_rate": 2.8550543024227238e-05, |
| "loss": 0.3541, |
| "step": 1294 |
| }, |
| { |
| "epoch": 1.4585915492957746, |
| "grad_norm": 0.2770971173774878, |
| "learning_rate": 2.85296574770259e-05, |
| "loss": 0.3657, |
| "step": 1295 |
| }, |
| { |
| "epoch": 1.4597183098591549, |
| "grad_norm": 0.36066551582561196, |
| "learning_rate": 2.850877192982456e-05, |
| "loss": 0.3313, |
| "step": 1296 |
| }, |
| { |
| "epoch": 1.4608450704225353, |
| "grad_norm": 0.30952046262144167, |
| "learning_rate": 2.8487886382623224e-05, |
| "loss": 0.3514, |
| "step": 1297 |
| }, |
| { |
| "epoch": 1.4619718309859155, |
| "grad_norm": 0.2748856964533165, |
| "learning_rate": 2.846700083542189e-05, |
| "loss": 0.3573, |
| "step": 1298 |
| }, |
| { |
| "epoch": 1.4630985915492958, |
| "grad_norm": 0.35452544836499567, |
| "learning_rate": 2.8446115288220554e-05, |
| "loss": 0.3482, |
| "step": 1299 |
| }, |
| { |
| "epoch": 1.464225352112676, |
| "grad_norm": 0.2768770488408414, |
| "learning_rate": 2.8425229741019214e-05, |
| "loss": 0.3645, |
| "step": 1300 |
| }, |
| { |
| "epoch": 1.4653521126760563, |
| "grad_norm": 0.2827464532913941, |
| "learning_rate": 2.8404344193817878e-05, |
| "loss": 0.3351, |
| "step": 1301 |
| }, |
| { |
| "epoch": 1.4664788732394367, |
| "grad_norm": 0.37765012973788603, |
| "learning_rate": 2.8383458646616545e-05, |
| "loss": 0.357, |
| "step": 1302 |
| }, |
| { |
| "epoch": 1.467605633802817, |
| "grad_norm": 0.299044625170323, |
| "learning_rate": 2.8362573099415208e-05, |
| "loss": 0.3456, |
| "step": 1303 |
| }, |
| { |
| "epoch": 1.4687323943661972, |
| "grad_norm": 0.2753644903042144, |
| "learning_rate": 2.8341687552213868e-05, |
| "loss": 0.3496, |
| "step": 1304 |
| }, |
| { |
| "epoch": 1.4698591549295774, |
| "grad_norm": 0.3444703076328087, |
| "learning_rate": 2.832080200501253e-05, |
| "loss": 0.3258, |
| "step": 1305 |
| }, |
| { |
| "epoch": 1.4709859154929577, |
| "grad_norm": 0.2481634802989212, |
| "learning_rate": 2.8299916457811198e-05, |
| "loss": 0.3355, |
| "step": 1306 |
| }, |
| { |
| "epoch": 1.472112676056338, |
| "grad_norm": 0.319902114656331, |
| "learning_rate": 2.827903091060986e-05, |
| "loss": 0.323, |
| "step": 1307 |
| }, |
| { |
| "epoch": 1.4732394366197183, |
| "grad_norm": 0.2994178943222728, |
| "learning_rate": 2.825814536340852e-05, |
| "loss": 0.3484, |
| "step": 1308 |
| }, |
| { |
| "epoch": 1.4743661971830986, |
| "grad_norm": 0.30917647854179314, |
| "learning_rate": 2.8237259816207185e-05, |
| "loss": 0.345, |
| "step": 1309 |
| }, |
| { |
| "epoch": 1.4754929577464788, |
| "grad_norm": 0.2531647281154903, |
| "learning_rate": 2.821637426900585e-05, |
| "loss": 0.3287, |
| "step": 1310 |
| }, |
| { |
| "epoch": 1.476619718309859, |
| "grad_norm": 0.2866526733970423, |
| "learning_rate": 2.8195488721804515e-05, |
| "loss": 0.3373, |
| "step": 1311 |
| }, |
| { |
| "epoch": 1.4777464788732395, |
| "grad_norm": 0.3075467523111137, |
| "learning_rate": 2.8174603174603175e-05, |
| "loss": 0.3357, |
| "step": 1312 |
| }, |
| { |
| "epoch": 1.4788732394366197, |
| "grad_norm": 0.272832856249605, |
| "learning_rate": 2.815371762740184e-05, |
| "loss": 0.3433, |
| "step": 1313 |
| }, |
| { |
| "epoch": 1.48, |
| "grad_norm": 0.2729760741101419, |
| "learning_rate": 2.8132832080200505e-05, |
| "loss": 0.3382, |
| "step": 1314 |
| }, |
| { |
| "epoch": 1.4811267605633802, |
| "grad_norm": 0.28571235772706455, |
| "learning_rate": 2.811194653299917e-05, |
| "loss": 0.3425, |
| "step": 1315 |
| }, |
| { |
| "epoch": 1.4822535211267605, |
| "grad_norm": 0.2736751524988955, |
| "learning_rate": 2.809106098579783e-05, |
| "loss": 0.3207, |
| "step": 1316 |
| }, |
| { |
| "epoch": 1.483380281690141, |
| "grad_norm": 0.33812848916965643, |
| "learning_rate": 2.8070175438596492e-05, |
| "loss": 0.3468, |
| "step": 1317 |
| }, |
| { |
| "epoch": 1.4845070422535211, |
| "grad_norm": 0.2873317112935958, |
| "learning_rate": 2.804928989139516e-05, |
| "loss": 0.3486, |
| "step": 1318 |
| }, |
| { |
| "epoch": 1.4856338028169014, |
| "grad_norm": 0.27183076296760217, |
| "learning_rate": 2.8028404344193822e-05, |
| "loss": 0.3472, |
| "step": 1319 |
| }, |
| { |
| "epoch": 1.4867605633802816, |
| "grad_norm": 0.3445807209520114, |
| "learning_rate": 2.8007518796992482e-05, |
| "loss": 0.3438, |
| "step": 1320 |
| }, |
| { |
| "epoch": 1.4878873239436619, |
| "grad_norm": 0.3127876717078221, |
| "learning_rate": 2.7986633249791145e-05, |
| "loss": 0.3343, |
| "step": 1321 |
| }, |
| { |
| "epoch": 1.4890140845070423, |
| "grad_norm": 0.2911551101410927, |
| "learning_rate": 2.7965747702589812e-05, |
| "loss": 0.3386, |
| "step": 1322 |
| }, |
| { |
| "epoch": 1.4901408450704225, |
| "grad_norm": 0.3141573720998868, |
| "learning_rate": 2.7944862155388472e-05, |
| "loss": 0.3448, |
| "step": 1323 |
| }, |
| { |
| "epoch": 1.4912676056338028, |
| "grad_norm": 0.28291618873548724, |
| "learning_rate": 2.7923976608187135e-05, |
| "loss": 0.3426, |
| "step": 1324 |
| }, |
| { |
| "epoch": 1.492394366197183, |
| "grad_norm": 0.2692167845293607, |
| "learning_rate": 2.79030910609858e-05, |
| "loss": 0.3371, |
| "step": 1325 |
| }, |
| { |
| "epoch": 1.4935211267605633, |
| "grad_norm": 0.27872207571615465, |
| "learning_rate": 2.7882205513784466e-05, |
| "loss": 0.3259, |
| "step": 1326 |
| }, |
| { |
| "epoch": 1.4946478873239437, |
| "grad_norm": 0.31338604494476663, |
| "learning_rate": 2.7861319966583126e-05, |
| "loss": 0.3457, |
| "step": 1327 |
| }, |
| { |
| "epoch": 1.495774647887324, |
| "grad_norm": 0.2928463795031251, |
| "learning_rate": 2.784043441938179e-05, |
| "loss": 0.3391, |
| "step": 1328 |
| }, |
| { |
| "epoch": 1.4969014084507042, |
| "grad_norm": 0.34764955906498535, |
| "learning_rate": 2.7819548872180452e-05, |
| "loss": 0.3351, |
| "step": 1329 |
| }, |
| { |
| "epoch": 1.4980281690140846, |
| "grad_norm": 0.3023292227508466, |
| "learning_rate": 2.779866332497912e-05, |
| "loss": 0.3275, |
| "step": 1330 |
| }, |
| { |
| "epoch": 1.4991549295774647, |
| "grad_norm": 0.3091796807737989, |
| "learning_rate": 2.777777777777778e-05, |
| "loss": 0.3579, |
| "step": 1331 |
| }, |
| { |
| "epoch": 1.5002816901408451, |
| "grad_norm": 0.3095335257232928, |
| "learning_rate": 2.7756892230576442e-05, |
| "loss": 0.3384, |
| "step": 1332 |
| }, |
| { |
| "epoch": 1.5014084507042254, |
| "grad_norm": 0.299616166937427, |
| "learning_rate": 2.7736006683375106e-05, |
| "loss": 0.3334, |
| "step": 1333 |
| }, |
| { |
| "epoch": 1.5025352112676056, |
| "grad_norm": 0.2891915110352774, |
| "learning_rate": 2.7715121136173773e-05, |
| "loss": 0.3356, |
| "step": 1334 |
| }, |
| { |
| "epoch": 1.503661971830986, |
| "grad_norm": 0.3323177952735513, |
| "learning_rate": 2.7694235588972433e-05, |
| "loss": 0.3428, |
| "step": 1335 |
| }, |
| { |
| "epoch": 1.504788732394366, |
| "grad_norm": 0.35973144909936317, |
| "learning_rate": 2.7673350041771096e-05, |
| "loss": 0.3653, |
| "step": 1336 |
| }, |
| { |
| "epoch": 1.5059154929577465, |
| "grad_norm": 0.28987961239262905, |
| "learning_rate": 2.7652464494569756e-05, |
| "loss": 0.3412, |
| "step": 1337 |
| }, |
| { |
| "epoch": 1.5070422535211268, |
| "grad_norm": 0.3386510034404898, |
| "learning_rate": 2.7631578947368426e-05, |
| "loss": 0.3136, |
| "step": 1338 |
| }, |
| { |
| "epoch": 1.508169014084507, |
| "grad_norm": 0.31260198405738565, |
| "learning_rate": 2.7610693400167086e-05, |
| "loss": 0.3317, |
| "step": 1339 |
| }, |
| { |
| "epoch": 1.5092957746478874, |
| "grad_norm": 0.27033647337294403, |
| "learning_rate": 2.758980785296575e-05, |
| "loss": 0.3232, |
| "step": 1340 |
| }, |
| { |
| "epoch": 1.5104225352112675, |
| "grad_norm": 0.2918364615372896, |
| "learning_rate": 2.756892230576441e-05, |
| "loss": 0.337, |
| "step": 1341 |
| }, |
| { |
| "epoch": 1.511549295774648, |
| "grad_norm": 0.32001229005415976, |
| "learning_rate": 2.754803675856308e-05, |
| "loss": 0.345, |
| "step": 1342 |
| }, |
| { |
| "epoch": 1.5126760563380282, |
| "grad_norm": 0.27427834567161835, |
| "learning_rate": 2.752715121136174e-05, |
| "loss": 0.3633, |
| "step": 1343 |
| }, |
| { |
| "epoch": 1.5138028169014084, |
| "grad_norm": 0.2489172670773644, |
| "learning_rate": 2.7506265664160403e-05, |
| "loss": 0.3437, |
| "step": 1344 |
| }, |
| { |
| "epoch": 1.5149295774647888, |
| "grad_norm": 0.2909539905928386, |
| "learning_rate": 2.7485380116959063e-05, |
| "loss": 0.3281, |
| "step": 1345 |
| }, |
| { |
| "epoch": 1.5160563380281689, |
| "grad_norm": 0.28314692739296915, |
| "learning_rate": 2.7464494569757733e-05, |
| "loss": 0.3218, |
| "step": 1346 |
| }, |
| { |
| "epoch": 1.5171830985915493, |
| "grad_norm": 0.2874575696178154, |
| "learning_rate": 2.7443609022556393e-05, |
| "loss": 0.362, |
| "step": 1347 |
| }, |
| { |
| "epoch": 1.5183098591549296, |
| "grad_norm": 0.2974835890176962, |
| "learning_rate": 2.7422723475355056e-05, |
| "loss": 0.3545, |
| "step": 1348 |
| }, |
| { |
| "epoch": 1.5194366197183098, |
| "grad_norm": 0.2835406784466393, |
| "learning_rate": 2.7401837928153716e-05, |
| "loss": 0.3653, |
| "step": 1349 |
| }, |
| { |
| "epoch": 1.5205633802816902, |
| "grad_norm": 0.2844679548913541, |
| "learning_rate": 2.7380952380952383e-05, |
| "loss": 0.3365, |
| "step": 1350 |
| }, |
| { |
| "epoch": 1.5216901408450703, |
| "grad_norm": 0.3180498946601223, |
| "learning_rate": 2.7360066833751047e-05, |
| "loss": 0.3312, |
| "step": 1351 |
| }, |
| { |
| "epoch": 1.5228169014084507, |
| "grad_norm": 0.3023117732604468, |
| "learning_rate": 2.733918128654971e-05, |
| "loss": 0.3696, |
| "step": 1352 |
| }, |
| { |
| "epoch": 1.523943661971831, |
| "grad_norm": 0.33024560678427756, |
| "learning_rate": 2.731829573934837e-05, |
| "loss": 0.3535, |
| "step": 1353 |
| }, |
| { |
| "epoch": 1.5250704225352112, |
| "grad_norm": 0.32889562258435656, |
| "learning_rate": 2.7297410192147037e-05, |
| "loss": 0.3464, |
| "step": 1354 |
| }, |
| { |
| "epoch": 1.5261971830985916, |
| "grad_norm": 0.3164497735318053, |
| "learning_rate": 2.72765246449457e-05, |
| "loss": 0.3438, |
| "step": 1355 |
| }, |
| { |
| "epoch": 1.5273239436619719, |
| "grad_norm": 0.31319837575238246, |
| "learning_rate": 2.7255639097744363e-05, |
| "loss": 0.3225, |
| "step": 1356 |
| }, |
| { |
| "epoch": 1.5284507042253521, |
| "grad_norm": 0.32188854803462924, |
| "learning_rate": 2.7234753550543023e-05, |
| "loss": 0.3483, |
| "step": 1357 |
| }, |
| { |
| "epoch": 1.5295774647887324, |
| "grad_norm": 0.3193960591385271, |
| "learning_rate": 2.721386800334169e-05, |
| "loss": 0.3493, |
| "step": 1358 |
| }, |
| { |
| "epoch": 1.5307042253521126, |
| "grad_norm": 0.30668644315358606, |
| "learning_rate": 2.7192982456140354e-05, |
| "loss": 0.3455, |
| "step": 1359 |
| }, |
| { |
| "epoch": 1.531830985915493, |
| "grad_norm": 0.29429059378149947, |
| "learning_rate": 2.7172096908939017e-05, |
| "loss": 0.3502, |
| "step": 1360 |
| }, |
| { |
| "epoch": 1.5329577464788733, |
| "grad_norm": 0.34593884556412324, |
| "learning_rate": 2.7151211361737677e-05, |
| "loss": 0.3419, |
| "step": 1361 |
| }, |
| { |
| "epoch": 1.5340845070422535, |
| "grad_norm": 0.2697471446325152, |
| "learning_rate": 2.7130325814536344e-05, |
| "loss": 0.3289, |
| "step": 1362 |
| }, |
| { |
| "epoch": 1.5352112676056338, |
| "grad_norm": 0.2897128031295572, |
| "learning_rate": 2.7109440267335007e-05, |
| "loss": 0.3413, |
| "step": 1363 |
| }, |
| { |
| "epoch": 1.536338028169014, |
| "grad_norm": 0.34365216670244914, |
| "learning_rate": 2.7088554720133667e-05, |
| "loss": 0.3671, |
| "step": 1364 |
| }, |
| { |
| "epoch": 1.5374647887323944, |
| "grad_norm": 0.26927575252902075, |
| "learning_rate": 2.706766917293233e-05, |
| "loss": 0.3667, |
| "step": 1365 |
| }, |
| { |
| "epoch": 1.5385915492957747, |
| "grad_norm": 0.3063326672240967, |
| "learning_rate": 2.7046783625730997e-05, |
| "loss": 0.3515, |
| "step": 1366 |
| }, |
| { |
| "epoch": 1.539718309859155, |
| "grad_norm": 0.3648933491827071, |
| "learning_rate": 2.702589807852966e-05, |
| "loss": 0.3756, |
| "step": 1367 |
| }, |
| { |
| "epoch": 1.5408450704225352, |
| "grad_norm": 0.300755843638833, |
| "learning_rate": 2.700501253132832e-05, |
| "loss": 0.3547, |
| "step": 1368 |
| }, |
| { |
| "epoch": 1.5419718309859154, |
| "grad_norm": 0.3250180654384015, |
| "learning_rate": 2.6984126984126984e-05, |
| "loss": 0.352, |
| "step": 1369 |
| }, |
| { |
| "epoch": 1.5430985915492959, |
| "grad_norm": 0.30252245989727555, |
| "learning_rate": 2.696324143692565e-05, |
| "loss": 0.3533, |
| "step": 1370 |
| }, |
| { |
| "epoch": 1.544225352112676, |
| "grad_norm": 0.2845024385448824, |
| "learning_rate": 2.6942355889724314e-05, |
| "loss": 0.3388, |
| "step": 1371 |
| }, |
| { |
| "epoch": 1.5453521126760563, |
| "grad_norm": 0.3180568115975252, |
| "learning_rate": 2.6921470342522974e-05, |
| "loss": 0.3546, |
| "step": 1372 |
| }, |
| { |
| "epoch": 1.5464788732394368, |
| "grad_norm": 0.32527654151497487, |
| "learning_rate": 2.6900584795321637e-05, |
| "loss": 0.348, |
| "step": 1373 |
| }, |
| { |
| "epoch": 1.5476056338028168, |
| "grad_norm": 0.3522036911658601, |
| "learning_rate": 2.6879699248120304e-05, |
| "loss": 0.3582, |
| "step": 1374 |
| }, |
| { |
| "epoch": 1.5487323943661973, |
| "grad_norm": 0.35935681942406394, |
| "learning_rate": 2.6858813700918968e-05, |
| "loss": 0.3471, |
| "step": 1375 |
| }, |
| { |
| "epoch": 1.5498591549295775, |
| "grad_norm": 0.26552218639595676, |
| "learning_rate": 2.6837928153717628e-05, |
| "loss": 0.3448, |
| "step": 1376 |
| }, |
| { |
| "epoch": 1.5509859154929577, |
| "grad_norm": 0.32808185473297674, |
| "learning_rate": 2.681704260651629e-05, |
| "loss": 0.3368, |
| "step": 1377 |
| }, |
| { |
| "epoch": 1.5521126760563382, |
| "grad_norm": 0.27214208030441817, |
| "learning_rate": 2.6796157059314958e-05, |
| "loss": 0.3314, |
| "step": 1378 |
| }, |
| { |
| "epoch": 1.5532394366197182, |
| "grad_norm": 0.26073918239703964, |
| "learning_rate": 2.677527151211362e-05, |
| "loss": 0.3408, |
| "step": 1379 |
| }, |
| { |
| "epoch": 1.5543661971830987, |
| "grad_norm": 0.3421765223133862, |
| "learning_rate": 2.675438596491228e-05, |
| "loss": 0.3595, |
| "step": 1380 |
| }, |
| { |
| "epoch": 1.5554929577464789, |
| "grad_norm": 0.2869738964669758, |
| "learning_rate": 2.6733500417710944e-05, |
| "loss": 0.3349, |
| "step": 1381 |
| }, |
| { |
| "epoch": 1.5566197183098591, |
| "grad_norm": 0.3181298736789464, |
| "learning_rate": 2.671261487050961e-05, |
| "loss": 0.351, |
| "step": 1382 |
| }, |
| { |
| "epoch": 1.5577464788732396, |
| "grad_norm": 0.2565318489948106, |
| "learning_rate": 2.6691729323308275e-05, |
| "loss": 0.3349, |
| "step": 1383 |
| }, |
| { |
| "epoch": 1.5588732394366196, |
| "grad_norm": 0.30534670746119047, |
| "learning_rate": 2.6670843776106935e-05, |
| "loss": 0.3517, |
| "step": 1384 |
| }, |
| { |
| "epoch": 1.56, |
| "grad_norm": 0.2588139544758108, |
| "learning_rate": 2.6649958228905598e-05, |
| "loss": 0.347, |
| "step": 1385 |
| }, |
| { |
| "epoch": 1.5611267605633803, |
| "grad_norm": 0.2861119417513974, |
| "learning_rate": 2.6629072681704265e-05, |
| "loss": 0.3302, |
| "step": 1386 |
| }, |
| { |
| "epoch": 1.5622535211267605, |
| "grad_norm": 0.2661849430364748, |
| "learning_rate": 2.6608187134502928e-05, |
| "loss": 0.3384, |
| "step": 1387 |
| }, |
| { |
| "epoch": 1.563380281690141, |
| "grad_norm": 0.28779496971243823, |
| "learning_rate": 2.6587301587301588e-05, |
| "loss": 0.3489, |
| "step": 1388 |
| }, |
| { |
| "epoch": 1.564507042253521, |
| "grad_norm": 0.280641359638324, |
| "learning_rate": 2.656641604010025e-05, |
| "loss": 0.3316, |
| "step": 1389 |
| }, |
| { |
| "epoch": 1.5656338028169015, |
| "grad_norm": 0.254891395855962, |
| "learning_rate": 2.6545530492898918e-05, |
| "loss": 0.341, |
| "step": 1390 |
| }, |
| { |
| "epoch": 1.5667605633802817, |
| "grad_norm": 0.2807897886759735, |
| "learning_rate": 2.6524644945697578e-05, |
| "loss": 0.3415, |
| "step": 1391 |
| }, |
| { |
| "epoch": 1.567887323943662, |
| "grad_norm": 0.2768837322927675, |
| "learning_rate": 2.650375939849624e-05, |
| "loss": 0.3426, |
| "step": 1392 |
| }, |
| { |
| "epoch": 1.5690140845070424, |
| "grad_norm": 0.3101082631830913, |
| "learning_rate": 2.6482873851294905e-05, |
| "loss": 0.3695, |
| "step": 1393 |
| }, |
| { |
| "epoch": 1.5701408450704224, |
| "grad_norm": 0.6820410639958185, |
| "learning_rate": 2.6461988304093572e-05, |
| "loss": 0.3432, |
| "step": 1394 |
| }, |
| { |
| "epoch": 1.5712676056338029, |
| "grad_norm": 0.362099966934023, |
| "learning_rate": 2.6441102756892232e-05, |
| "loss": 0.3352, |
| "step": 1395 |
| }, |
| { |
| "epoch": 1.572394366197183, |
| "grad_norm": 0.30087023193343065, |
| "learning_rate": 2.6420217209690895e-05, |
| "loss": 0.3482, |
| "step": 1396 |
| }, |
| { |
| "epoch": 1.5735211267605633, |
| "grad_norm": 0.38399351692851613, |
| "learning_rate": 2.639933166248956e-05, |
| "loss": 0.3354, |
| "step": 1397 |
| }, |
| { |
| "epoch": 1.5746478873239438, |
| "grad_norm": 0.31915287347120525, |
| "learning_rate": 2.6378446115288225e-05, |
| "loss": 0.3519, |
| "step": 1398 |
| }, |
| { |
| "epoch": 1.5757746478873238, |
| "grad_norm": 0.35631760611896585, |
| "learning_rate": 2.6357560568086885e-05, |
| "loss": 0.3486, |
| "step": 1399 |
| }, |
| { |
| "epoch": 1.5769014084507043, |
| "grad_norm": 0.24857670158956344, |
| "learning_rate": 2.633667502088555e-05, |
| "loss": 0.3465, |
| "step": 1400 |
| }, |
| { |
| "epoch": 1.5780281690140845, |
| "grad_norm": 0.32180056116060374, |
| "learning_rate": 2.6315789473684212e-05, |
| "loss": 0.3211, |
| "step": 1401 |
| }, |
| { |
| "epoch": 1.5791549295774647, |
| "grad_norm": 0.3912795354364383, |
| "learning_rate": 2.629490392648288e-05, |
| "loss": 0.3663, |
| "step": 1402 |
| }, |
| { |
| "epoch": 1.5802816901408452, |
| "grad_norm": 0.26651798682062733, |
| "learning_rate": 2.627401837928154e-05, |
| "loss": 0.3627, |
| "step": 1403 |
| }, |
| { |
| "epoch": 1.5814084507042252, |
| "grad_norm": 0.2936149895825024, |
| "learning_rate": 2.6253132832080202e-05, |
| "loss": 0.3507, |
| "step": 1404 |
| }, |
| { |
| "epoch": 1.5825352112676057, |
| "grad_norm": 0.30696718635511294, |
| "learning_rate": 2.6232247284878862e-05, |
| "loss": 0.38, |
| "step": 1405 |
| }, |
| { |
| "epoch": 1.583661971830986, |
| "grad_norm": 0.33154457165579876, |
| "learning_rate": 2.6211361737677532e-05, |
| "loss": 0.3513, |
| "step": 1406 |
| }, |
| { |
| "epoch": 1.5847887323943661, |
| "grad_norm": 0.31443266029042044, |
| "learning_rate": 2.6190476190476192e-05, |
| "loss": 0.3487, |
| "step": 1407 |
| }, |
| { |
| "epoch": 1.5859154929577466, |
| "grad_norm": 0.33892978419544095, |
| "learning_rate": 2.6169590643274856e-05, |
| "loss": 0.3643, |
| "step": 1408 |
| }, |
| { |
| "epoch": 1.5870422535211266, |
| "grad_norm": 0.3070809683604584, |
| "learning_rate": 2.6148705096073516e-05, |
| "loss": 0.3596, |
| "step": 1409 |
| }, |
| { |
| "epoch": 1.588169014084507, |
| "grad_norm": 0.2827064976381111, |
| "learning_rate": 2.6127819548872186e-05, |
| "loss": 0.3573, |
| "step": 1410 |
| }, |
| { |
| "epoch": 1.5892957746478873, |
| "grad_norm": 0.3229878235602582, |
| "learning_rate": 2.6106934001670846e-05, |
| "loss": 0.3219, |
| "step": 1411 |
| }, |
| { |
| "epoch": 1.5904225352112675, |
| "grad_norm": 0.31093887040644913, |
| "learning_rate": 2.608604845446951e-05, |
| "loss": 0.3593, |
| "step": 1412 |
| }, |
| { |
| "epoch": 1.591549295774648, |
| "grad_norm": 0.2774968218487699, |
| "learning_rate": 2.606516290726817e-05, |
| "loss": 0.3315, |
| "step": 1413 |
| }, |
| { |
| "epoch": 1.5926760563380282, |
| "grad_norm": 0.25498385907155885, |
| "learning_rate": 2.604427736006684e-05, |
| "loss": 0.3223, |
| "step": 1414 |
| }, |
| { |
| "epoch": 1.5938028169014085, |
| "grad_norm": 0.2727488829583004, |
| "learning_rate": 2.60233918128655e-05, |
| "loss": 0.337, |
| "step": 1415 |
| }, |
| { |
| "epoch": 1.5949295774647887, |
| "grad_norm": 0.33554974496041423, |
| "learning_rate": 2.6002506265664163e-05, |
| "loss": 0.3484, |
| "step": 1416 |
| }, |
| { |
| "epoch": 1.596056338028169, |
| "grad_norm": 0.27591446012623233, |
| "learning_rate": 2.5981620718462823e-05, |
| "loss": 0.3501, |
| "step": 1417 |
| }, |
| { |
| "epoch": 1.5971830985915494, |
| "grad_norm": 0.31941752603872553, |
| "learning_rate": 2.596073517126149e-05, |
| "loss": 0.3454, |
| "step": 1418 |
| }, |
| { |
| "epoch": 1.5983098591549296, |
| "grad_norm": 0.30125833320515955, |
| "learning_rate": 2.5939849624060153e-05, |
| "loss": 0.3327, |
| "step": 1419 |
| }, |
| { |
| "epoch": 1.5994366197183099, |
| "grad_norm": 0.2582237111436985, |
| "learning_rate": 2.5918964076858816e-05, |
| "loss": 0.3457, |
| "step": 1420 |
| }, |
| { |
| "epoch": 1.60056338028169, |
| "grad_norm": 0.31373164120604946, |
| "learning_rate": 2.5898078529657476e-05, |
| "loss": 0.3329, |
| "step": 1421 |
| }, |
| { |
| "epoch": 1.6016901408450703, |
| "grad_norm": 0.32942272873563083, |
| "learning_rate": 2.5877192982456143e-05, |
| "loss": 0.3618, |
| "step": 1422 |
| }, |
| { |
| "epoch": 1.6028169014084508, |
| "grad_norm": 0.2646664149122406, |
| "learning_rate": 2.5856307435254806e-05, |
| "loss": 0.3336, |
| "step": 1423 |
| }, |
| { |
| "epoch": 1.603943661971831, |
| "grad_norm": 0.3058840576523787, |
| "learning_rate": 2.583542188805347e-05, |
| "loss": 0.3402, |
| "step": 1424 |
| }, |
| { |
| "epoch": 1.6050704225352113, |
| "grad_norm": 0.32158351015262177, |
| "learning_rate": 2.581453634085213e-05, |
| "loss": 0.3367, |
| "step": 1425 |
| }, |
| { |
| "epoch": 1.6061971830985915, |
| "grad_norm": 0.2755437238431191, |
| "learning_rate": 2.5793650793650796e-05, |
| "loss": 0.3207, |
| "step": 1426 |
| }, |
| { |
| "epoch": 1.6073239436619717, |
| "grad_norm": 0.30625595434371505, |
| "learning_rate": 2.577276524644946e-05, |
| "loss": 0.3697, |
| "step": 1427 |
| }, |
| { |
| "epoch": 1.6084507042253522, |
| "grad_norm": 0.30284963531984754, |
| "learning_rate": 2.5751879699248123e-05, |
| "loss": 0.3295, |
| "step": 1428 |
| }, |
| { |
| "epoch": 1.6095774647887324, |
| "grad_norm": 0.26367411671532615, |
| "learning_rate": 2.5730994152046783e-05, |
| "loss": 0.3218, |
| "step": 1429 |
| }, |
| { |
| "epoch": 1.6107042253521127, |
| "grad_norm": 0.2723447457176679, |
| "learning_rate": 2.571010860484545e-05, |
| "loss": 0.3256, |
| "step": 1430 |
| }, |
| { |
| "epoch": 1.6118309859154931, |
| "grad_norm": 0.32346483316378777, |
| "learning_rate": 2.5689223057644113e-05, |
| "loss": 0.3464, |
| "step": 1431 |
| }, |
| { |
| "epoch": 1.6129577464788731, |
| "grad_norm": 0.2925095477776419, |
| "learning_rate": 2.5668337510442773e-05, |
| "loss": 0.3605, |
| "step": 1432 |
| }, |
| { |
| "epoch": 1.6140845070422536, |
| "grad_norm": 0.30114671963002587, |
| "learning_rate": 2.5647451963241437e-05, |
| "loss": 0.3477, |
| "step": 1433 |
| }, |
| { |
| "epoch": 1.6152112676056338, |
| "grad_norm": 0.2846035505506225, |
| "learning_rate": 2.5626566416040103e-05, |
| "loss": 0.3428, |
| "step": 1434 |
| }, |
| { |
| "epoch": 1.616338028169014, |
| "grad_norm": 0.2588210598131198, |
| "learning_rate": 2.5605680868838767e-05, |
| "loss": 0.3451, |
| "step": 1435 |
| }, |
| { |
| "epoch": 1.6174647887323945, |
| "grad_norm": 0.2625763271243046, |
| "learning_rate": 2.5584795321637427e-05, |
| "loss": 0.3451, |
| "step": 1436 |
| }, |
| { |
| "epoch": 1.6185915492957745, |
| "grad_norm": 0.24859397280546572, |
| "learning_rate": 2.556390977443609e-05, |
| "loss": 0.3631, |
| "step": 1437 |
| }, |
| { |
| "epoch": 1.619718309859155, |
| "grad_norm": 0.28890725030429065, |
| "learning_rate": 2.5543024227234757e-05, |
| "loss": 0.3338, |
| "step": 1438 |
| }, |
| { |
| "epoch": 1.6208450704225352, |
| "grad_norm": 0.2729341226525535, |
| "learning_rate": 2.552213868003342e-05, |
| "loss": 0.3402, |
| "step": 1439 |
| }, |
| { |
| "epoch": 1.6219718309859155, |
| "grad_norm": 0.321511849439831, |
| "learning_rate": 2.550125313283208e-05, |
| "loss": 0.3275, |
| "step": 1440 |
| }, |
| { |
| "epoch": 1.623098591549296, |
| "grad_norm": 0.2596677711472582, |
| "learning_rate": 2.5480367585630744e-05, |
| "loss": 0.3477, |
| "step": 1441 |
| }, |
| { |
| "epoch": 1.624225352112676, |
| "grad_norm": 0.3215232803975101, |
| "learning_rate": 2.545948203842941e-05, |
| "loss": 0.3475, |
| "step": 1442 |
| }, |
| { |
| "epoch": 1.6253521126760564, |
| "grad_norm": 0.3049605657312551, |
| "learning_rate": 2.5438596491228074e-05, |
| "loss": 0.3408, |
| "step": 1443 |
| }, |
| { |
| "epoch": 1.6264788732394366, |
| "grad_norm": 0.33000021262211476, |
| "learning_rate": 2.5417710944026734e-05, |
| "loss": 0.3451, |
| "step": 1444 |
| }, |
| { |
| "epoch": 1.6276056338028169, |
| "grad_norm": 0.28250547117805813, |
| "learning_rate": 2.5396825396825397e-05, |
| "loss": 0.3533, |
| "step": 1445 |
| }, |
| { |
| "epoch": 1.6287323943661973, |
| "grad_norm": 0.27656167843203644, |
| "learning_rate": 2.5375939849624064e-05, |
| "loss": 0.3286, |
| "step": 1446 |
| }, |
| { |
| "epoch": 1.6298591549295773, |
| "grad_norm": 0.37440105959190834, |
| "learning_rate": 2.5355054302422727e-05, |
| "loss": 0.3666, |
| "step": 1447 |
| }, |
| { |
| "epoch": 1.6309859154929578, |
| "grad_norm": 0.2624422620925436, |
| "learning_rate": 2.5334168755221387e-05, |
| "loss": 0.3353, |
| "step": 1448 |
| }, |
| { |
| "epoch": 1.632112676056338, |
| "grad_norm": 0.29009307096198833, |
| "learning_rate": 2.531328320802005e-05, |
| "loss": 0.3394, |
| "step": 1449 |
| }, |
| { |
| "epoch": 1.6332394366197183, |
| "grad_norm": 0.29225161048598075, |
| "learning_rate": 2.5292397660818717e-05, |
| "loss": 0.3654, |
| "step": 1450 |
| }, |
| { |
| "epoch": 1.6343661971830987, |
| "grad_norm": 0.30721504708137215, |
| "learning_rate": 2.527151211361738e-05, |
| "loss": 0.3321, |
| "step": 1451 |
| }, |
| { |
| "epoch": 1.6354929577464787, |
| "grad_norm": 0.2788064690368735, |
| "learning_rate": 2.525062656641604e-05, |
| "loss": 0.3417, |
| "step": 1452 |
| }, |
| { |
| "epoch": 1.6366197183098592, |
| "grad_norm": 0.26995422234528577, |
| "learning_rate": 2.5229741019214704e-05, |
| "loss": 0.3484, |
| "step": 1453 |
| }, |
| { |
| "epoch": 1.6377464788732394, |
| "grad_norm": 0.28226163647947505, |
| "learning_rate": 2.520885547201337e-05, |
| "loss": 0.346, |
| "step": 1454 |
| }, |
| { |
| "epoch": 1.6388732394366197, |
| "grad_norm": 0.2945446918455596, |
| "learning_rate": 2.5187969924812034e-05, |
| "loss": 0.3518, |
| "step": 1455 |
| }, |
| { |
| "epoch": 1.6400000000000001, |
| "grad_norm": 0.259788327412407, |
| "learning_rate": 2.5167084377610694e-05, |
| "loss": 0.3494, |
| "step": 1456 |
| }, |
| { |
| "epoch": 1.6411267605633801, |
| "grad_norm": 0.270194461267937, |
| "learning_rate": 2.5146198830409358e-05, |
| "loss": 0.3504, |
| "step": 1457 |
| }, |
| { |
| "epoch": 1.6422535211267606, |
| "grad_norm": 0.2950037384175335, |
| "learning_rate": 2.5125313283208024e-05, |
| "loss": 0.3393, |
| "step": 1458 |
| }, |
| { |
| "epoch": 1.6433802816901408, |
| "grad_norm": 0.23883286607922005, |
| "learning_rate": 2.5104427736006684e-05, |
| "loss": 0.3405, |
| "step": 1459 |
| }, |
| { |
| "epoch": 1.644507042253521, |
| "grad_norm": 0.2630432210821515, |
| "learning_rate": 2.5083542188805348e-05, |
| "loss": 0.3357, |
| "step": 1460 |
| }, |
| { |
| "epoch": 1.6456338028169015, |
| "grad_norm": 0.25060028578291704, |
| "learning_rate": 2.506265664160401e-05, |
| "loss": 0.3473, |
| "step": 1461 |
| }, |
| { |
| "epoch": 1.6467605633802815, |
| "grad_norm": 0.25358297437939425, |
| "learning_rate": 2.5041771094402678e-05, |
| "loss": 0.3281, |
| "step": 1462 |
| }, |
| { |
| "epoch": 1.647887323943662, |
| "grad_norm": 0.27110843160789827, |
| "learning_rate": 2.5020885547201338e-05, |
| "loss": 0.364, |
| "step": 1463 |
| }, |
| { |
| "epoch": 1.6490140845070422, |
| "grad_norm": 0.27859901156381256, |
| "learning_rate": 2.5e-05, |
| "loss": 0.3275, |
| "step": 1464 |
| }, |
| { |
| "epoch": 1.6501408450704225, |
| "grad_norm": 0.2590260996612864, |
| "learning_rate": 2.4979114452798665e-05, |
| "loss": 0.3339, |
| "step": 1465 |
| }, |
| { |
| "epoch": 1.651267605633803, |
| "grad_norm": 0.28615345729601793, |
| "learning_rate": 2.4958228905597328e-05, |
| "loss": 0.3186, |
| "step": 1466 |
| }, |
| { |
| "epoch": 1.652394366197183, |
| "grad_norm": 0.2561561406004291, |
| "learning_rate": 2.493734335839599e-05, |
| "loss": 0.3441, |
| "step": 1467 |
| }, |
| { |
| "epoch": 1.6535211267605634, |
| "grad_norm": 0.28420263034336, |
| "learning_rate": 2.4916457811194655e-05, |
| "loss": 0.3487, |
| "step": 1468 |
| }, |
| { |
| "epoch": 1.6546478873239436, |
| "grad_norm": 0.26810924062603336, |
| "learning_rate": 2.4895572263993318e-05, |
| "loss": 0.3218, |
| "step": 1469 |
| }, |
| { |
| "epoch": 1.6557746478873239, |
| "grad_norm": 0.2833833485230433, |
| "learning_rate": 2.487468671679198e-05, |
| "loss": 0.3459, |
| "step": 1470 |
| }, |
| { |
| "epoch": 1.6569014084507043, |
| "grad_norm": 0.31905201414375717, |
| "learning_rate": 2.485380116959064e-05, |
| "loss": 0.3619, |
| "step": 1471 |
| }, |
| { |
| "epoch": 1.6580281690140843, |
| "grad_norm": 0.2858551553408948, |
| "learning_rate": 2.4832915622389308e-05, |
| "loss": 0.3495, |
| "step": 1472 |
| }, |
| { |
| "epoch": 1.6591549295774648, |
| "grad_norm": 0.3322138453327257, |
| "learning_rate": 2.4812030075187968e-05, |
| "loss": 0.3401, |
| "step": 1473 |
| }, |
| { |
| "epoch": 1.660281690140845, |
| "grad_norm": 0.32119712572911885, |
| "learning_rate": 2.4791144527986635e-05, |
| "loss": 0.3395, |
| "step": 1474 |
| }, |
| { |
| "epoch": 1.6614084507042253, |
| "grad_norm": 0.2970697742218709, |
| "learning_rate": 2.4770258980785295e-05, |
| "loss": 0.3528, |
| "step": 1475 |
| }, |
| { |
| "epoch": 1.6625352112676057, |
| "grad_norm": 0.2899062355623534, |
| "learning_rate": 2.4749373433583962e-05, |
| "loss": 0.3456, |
| "step": 1476 |
| }, |
| { |
| "epoch": 1.663661971830986, |
| "grad_norm": 0.3241336123208444, |
| "learning_rate": 2.472848788638262e-05, |
| "loss": 0.3517, |
| "step": 1477 |
| }, |
| { |
| "epoch": 1.6647887323943662, |
| "grad_norm": 0.24657467228328783, |
| "learning_rate": 2.470760233918129e-05, |
| "loss": 0.3398, |
| "step": 1478 |
| }, |
| { |
| "epoch": 1.6659154929577464, |
| "grad_norm": 0.2580275287950773, |
| "learning_rate": 2.468671679197995e-05, |
| "loss": 0.3308, |
| "step": 1479 |
| }, |
| { |
| "epoch": 1.6670422535211267, |
| "grad_norm": 0.2634110979008221, |
| "learning_rate": 2.4665831244778615e-05, |
| "loss": 0.3478, |
| "step": 1480 |
| }, |
| { |
| "epoch": 1.6681690140845071, |
| "grad_norm": 0.2788858596184465, |
| "learning_rate": 2.4644945697577275e-05, |
| "loss": 0.3398, |
| "step": 1481 |
| }, |
| { |
| "epoch": 1.6692957746478874, |
| "grad_norm": 0.31372425383389874, |
| "learning_rate": 2.4624060150375942e-05, |
| "loss": 0.337, |
| "step": 1482 |
| }, |
| { |
| "epoch": 1.6704225352112676, |
| "grad_norm": 0.2509975030925098, |
| "learning_rate": 2.4603174603174602e-05, |
| "loss": 0.3441, |
| "step": 1483 |
| }, |
| { |
| "epoch": 1.6715492957746478, |
| "grad_norm": 0.2522862475159247, |
| "learning_rate": 2.458228905597327e-05, |
| "loss": 0.3338, |
| "step": 1484 |
| }, |
| { |
| "epoch": 1.672676056338028, |
| "grad_norm": 0.2813497727876396, |
| "learning_rate": 2.456140350877193e-05, |
| "loss": 0.3525, |
| "step": 1485 |
| }, |
| { |
| "epoch": 1.6738028169014085, |
| "grad_norm": 0.31413834254162076, |
| "learning_rate": 2.4540517961570595e-05, |
| "loss": 0.3394, |
| "step": 1486 |
| }, |
| { |
| "epoch": 1.6749295774647888, |
| "grad_norm": 0.26360158583575644, |
| "learning_rate": 2.4519632414369255e-05, |
| "loss": 0.3374, |
| "step": 1487 |
| }, |
| { |
| "epoch": 1.676056338028169, |
| "grad_norm": 0.28877767158248374, |
| "learning_rate": 2.4498746867167922e-05, |
| "loss": 0.3313, |
| "step": 1488 |
| }, |
| { |
| "epoch": 1.6771830985915495, |
| "grad_norm": 0.24981169529637415, |
| "learning_rate": 2.4477861319966582e-05, |
| "loss": 0.3645, |
| "step": 1489 |
| }, |
| { |
| "epoch": 1.6783098591549295, |
| "grad_norm": 0.32518492340841537, |
| "learning_rate": 2.445697577276525e-05, |
| "loss": 0.3818, |
| "step": 1490 |
| }, |
| { |
| "epoch": 1.67943661971831, |
| "grad_norm": 0.2849687146700346, |
| "learning_rate": 2.443609022556391e-05, |
| "loss": 0.3455, |
| "step": 1491 |
| }, |
| { |
| "epoch": 1.6805633802816902, |
| "grad_norm": 0.2937565422277086, |
| "learning_rate": 2.4415204678362576e-05, |
| "loss": 0.3482, |
| "step": 1492 |
| }, |
| { |
| "epoch": 1.6816901408450704, |
| "grad_norm": 0.27832882114852947, |
| "learning_rate": 2.4394319131161236e-05, |
| "loss": 0.3569, |
| "step": 1493 |
| }, |
| { |
| "epoch": 1.6828169014084509, |
| "grad_norm": 0.2884288361313236, |
| "learning_rate": 2.4373433583959902e-05, |
| "loss": 0.3503, |
| "step": 1494 |
| }, |
| { |
| "epoch": 1.6839436619718309, |
| "grad_norm": 0.30909359315178064, |
| "learning_rate": 2.4352548036758562e-05, |
| "loss": 0.3474, |
| "step": 1495 |
| }, |
| { |
| "epoch": 1.6850704225352113, |
| "grad_norm": 0.28233050232701684, |
| "learning_rate": 2.433166248955723e-05, |
| "loss": 0.3278, |
| "step": 1496 |
| }, |
| { |
| "epoch": 1.6861971830985916, |
| "grad_norm": 0.3222047849649389, |
| "learning_rate": 2.431077694235589e-05, |
| "loss": 0.3568, |
| "step": 1497 |
| }, |
| { |
| "epoch": 1.6873239436619718, |
| "grad_norm": 0.25531140449026785, |
| "learning_rate": 2.4289891395154556e-05, |
| "loss": 0.3414, |
| "step": 1498 |
| }, |
| { |
| "epoch": 1.6884507042253523, |
| "grad_norm": 0.3219325112504361, |
| "learning_rate": 2.4269005847953216e-05, |
| "loss": 0.3764, |
| "step": 1499 |
| }, |
| { |
| "epoch": 1.6895774647887323, |
| "grad_norm": 0.3187727339219832, |
| "learning_rate": 2.424812030075188e-05, |
| "loss": 0.3404, |
| "step": 1500 |
| }, |
| { |
| "epoch": 1.6907042253521127, |
| "grad_norm": 0.2586679108873934, |
| "learning_rate": 2.4227234753550543e-05, |
| "loss": 0.3321, |
| "step": 1501 |
| }, |
| { |
| "epoch": 1.691830985915493, |
| "grad_norm": 0.27144949193599927, |
| "learning_rate": 2.4206349206349206e-05, |
| "loss": 0.366, |
| "step": 1502 |
| }, |
| { |
| "epoch": 1.6929577464788732, |
| "grad_norm": 0.30940358041853494, |
| "learning_rate": 2.418546365914787e-05, |
| "loss": 0.3305, |
| "step": 1503 |
| }, |
| { |
| "epoch": 1.6940845070422537, |
| "grad_norm": 0.29242474439362176, |
| "learning_rate": 2.4164578111946533e-05, |
| "loss": 0.3509, |
| "step": 1504 |
| }, |
| { |
| "epoch": 1.6952112676056337, |
| "grad_norm": 0.2930460128468937, |
| "learning_rate": 2.4143692564745196e-05, |
| "loss": 0.3562, |
| "step": 1505 |
| }, |
| { |
| "epoch": 1.6963380281690141, |
| "grad_norm": 0.2935340603708641, |
| "learning_rate": 2.412280701754386e-05, |
| "loss": 0.3294, |
| "step": 1506 |
| }, |
| { |
| "epoch": 1.6974647887323944, |
| "grad_norm": 0.36957759240564325, |
| "learning_rate": 2.4101921470342523e-05, |
| "loss": 0.3625, |
| "step": 1507 |
| }, |
| { |
| "epoch": 1.6985915492957746, |
| "grad_norm": 0.26404789663599043, |
| "learning_rate": 2.4081035923141186e-05, |
| "loss": 0.3332, |
| "step": 1508 |
| }, |
| { |
| "epoch": 1.699718309859155, |
| "grad_norm": 0.322718769871893, |
| "learning_rate": 2.406015037593985e-05, |
| "loss": 0.358, |
| "step": 1509 |
| }, |
| { |
| "epoch": 1.700845070422535, |
| "grad_norm": 0.3190498928145156, |
| "learning_rate": 2.4039264828738513e-05, |
| "loss": 0.3455, |
| "step": 1510 |
| }, |
| { |
| "epoch": 1.7019718309859155, |
| "grad_norm": 0.2799891294624286, |
| "learning_rate": 2.4018379281537176e-05, |
| "loss": 0.3622, |
| "step": 1511 |
| }, |
| { |
| "epoch": 1.7030985915492958, |
| "grad_norm": 0.3247412168234658, |
| "learning_rate": 2.399749373433584e-05, |
| "loss": 0.3517, |
| "step": 1512 |
| }, |
| { |
| "epoch": 1.704225352112676, |
| "grad_norm": 0.3178652704989509, |
| "learning_rate": 2.3976608187134503e-05, |
| "loss": 0.3587, |
| "step": 1513 |
| }, |
| { |
| "epoch": 1.7053521126760565, |
| "grad_norm": 0.30342422099773886, |
| "learning_rate": 2.3955722639933167e-05, |
| "loss": 0.3433, |
| "step": 1514 |
| }, |
| { |
| "epoch": 1.7064788732394365, |
| "grad_norm": 0.3195855337345034, |
| "learning_rate": 2.393483709273183e-05, |
| "loss": 0.3254, |
| "step": 1515 |
| }, |
| { |
| "epoch": 1.707605633802817, |
| "grad_norm": 0.31217809950297026, |
| "learning_rate": 2.3913951545530493e-05, |
| "loss": 0.3579, |
| "step": 1516 |
| }, |
| { |
| "epoch": 1.7087323943661972, |
| "grad_norm": 0.299422990510086, |
| "learning_rate": 2.3893065998329157e-05, |
| "loss": 0.3357, |
| "step": 1517 |
| }, |
| { |
| "epoch": 1.7098591549295774, |
| "grad_norm": 0.3175609532668606, |
| "learning_rate": 2.387218045112782e-05, |
| "loss": 0.3371, |
| "step": 1518 |
| }, |
| { |
| "epoch": 1.7109859154929579, |
| "grad_norm": 0.37076828925369637, |
| "learning_rate": 2.3851294903926483e-05, |
| "loss": 0.3743, |
| "step": 1519 |
| }, |
| { |
| "epoch": 1.7121126760563379, |
| "grad_norm": 0.3391543904315033, |
| "learning_rate": 2.3830409356725147e-05, |
| "loss": 0.3319, |
| "step": 1520 |
| }, |
| { |
| "epoch": 1.7132394366197183, |
| "grad_norm": 0.33903528196620386, |
| "learning_rate": 2.380952380952381e-05, |
| "loss": 0.3574, |
| "step": 1521 |
| }, |
| { |
| "epoch": 1.7143661971830986, |
| "grad_norm": 0.2764464452991892, |
| "learning_rate": 2.3788638262322474e-05, |
| "loss": 0.3372, |
| "step": 1522 |
| }, |
| { |
| "epoch": 1.7154929577464788, |
| "grad_norm": 0.3398776749901052, |
| "learning_rate": 2.3767752715121137e-05, |
| "loss": 0.3549, |
| "step": 1523 |
| }, |
| { |
| "epoch": 1.7166197183098593, |
| "grad_norm": 0.28957579450024307, |
| "learning_rate": 2.37468671679198e-05, |
| "loss": 0.3444, |
| "step": 1524 |
| }, |
| { |
| "epoch": 1.7177464788732393, |
| "grad_norm": 0.29884466819939715, |
| "learning_rate": 2.3725981620718464e-05, |
| "loss": 0.3347, |
| "step": 1525 |
| }, |
| { |
| "epoch": 1.7188732394366197, |
| "grad_norm": 0.30274130464516896, |
| "learning_rate": 2.3705096073517127e-05, |
| "loss": 0.3585, |
| "step": 1526 |
| }, |
| { |
| "epoch": 1.72, |
| "grad_norm": 0.305132519989941, |
| "learning_rate": 2.368421052631579e-05, |
| "loss": 0.3242, |
| "step": 1527 |
| }, |
| { |
| "epoch": 1.7211267605633802, |
| "grad_norm": 0.34652155759374775, |
| "learning_rate": 2.3663324979114454e-05, |
| "loss": 0.3651, |
| "step": 1528 |
| }, |
| { |
| "epoch": 1.7222535211267607, |
| "grad_norm": 0.27639427261114835, |
| "learning_rate": 2.3642439431913117e-05, |
| "loss": 0.3354, |
| "step": 1529 |
| }, |
| { |
| "epoch": 1.7233802816901407, |
| "grad_norm": 0.3191386977012033, |
| "learning_rate": 2.362155388471178e-05, |
| "loss": 0.3437, |
| "step": 1530 |
| }, |
| { |
| "epoch": 1.7245070422535211, |
| "grad_norm": 0.3363085997287805, |
| "learning_rate": 2.3600668337510444e-05, |
| "loss": 0.3337, |
| "step": 1531 |
| }, |
| { |
| "epoch": 1.7256338028169014, |
| "grad_norm": 0.3038076959659683, |
| "learning_rate": 2.3579782790309107e-05, |
| "loss": 0.3626, |
| "step": 1532 |
| }, |
| { |
| "epoch": 1.7267605633802816, |
| "grad_norm": 0.3295456357864944, |
| "learning_rate": 2.355889724310777e-05, |
| "loss": 0.329, |
| "step": 1533 |
| }, |
| { |
| "epoch": 1.727887323943662, |
| "grad_norm": 0.3608395454167289, |
| "learning_rate": 2.3538011695906434e-05, |
| "loss": 0.3434, |
| "step": 1534 |
| }, |
| { |
| "epoch": 1.7290140845070423, |
| "grad_norm": 0.305318898574501, |
| "learning_rate": 2.3517126148705097e-05, |
| "loss": 0.3349, |
| "step": 1535 |
| }, |
| { |
| "epoch": 1.7301408450704225, |
| "grad_norm": 0.33016464265419676, |
| "learning_rate": 2.349624060150376e-05, |
| "loss": 0.339, |
| "step": 1536 |
| }, |
| { |
| "epoch": 1.7312676056338028, |
| "grad_norm": 0.2908830781714398, |
| "learning_rate": 2.3475355054302424e-05, |
| "loss": 0.3392, |
| "step": 1537 |
| }, |
| { |
| "epoch": 1.732394366197183, |
| "grad_norm": 0.3143787621188546, |
| "learning_rate": 2.3454469507101088e-05, |
| "loss": 0.3618, |
| "step": 1538 |
| }, |
| { |
| "epoch": 1.7335211267605635, |
| "grad_norm": 0.3055235467368555, |
| "learning_rate": 2.3433583959899748e-05, |
| "loss": 0.3291, |
| "step": 1539 |
| }, |
| { |
| "epoch": 1.7346478873239437, |
| "grad_norm": 0.33773067011488017, |
| "learning_rate": 2.3412698412698414e-05, |
| "loss": 0.3402, |
| "step": 1540 |
| }, |
| { |
| "epoch": 1.735774647887324, |
| "grad_norm": 0.3103493669644309, |
| "learning_rate": 2.3391812865497074e-05, |
| "loss": 0.3151, |
| "step": 1541 |
| }, |
| { |
| "epoch": 1.7369014084507042, |
| "grad_norm": 0.2640626886368537, |
| "learning_rate": 2.337092731829574e-05, |
| "loss": 0.3479, |
| "step": 1542 |
| }, |
| { |
| "epoch": 1.7380281690140844, |
| "grad_norm": 0.32926649547925485, |
| "learning_rate": 2.33500417710944e-05, |
| "loss": 0.3434, |
| "step": 1543 |
| }, |
| { |
| "epoch": 1.7391549295774649, |
| "grad_norm": 0.28911071323480914, |
| "learning_rate": 2.3329156223893068e-05, |
| "loss": 0.3644, |
| "step": 1544 |
| }, |
| { |
| "epoch": 1.740281690140845, |
| "grad_norm": 0.3516859251363121, |
| "learning_rate": 2.3308270676691728e-05, |
| "loss": 0.3466, |
| "step": 1545 |
| }, |
| { |
| "epoch": 1.7414084507042253, |
| "grad_norm": 0.2830494642914461, |
| "learning_rate": 2.3287385129490395e-05, |
| "loss": 0.3333, |
| "step": 1546 |
| }, |
| { |
| "epoch": 1.7425352112676058, |
| "grad_norm": 0.35115973878932605, |
| "learning_rate": 2.3266499582289055e-05, |
| "loss": 0.3471, |
| "step": 1547 |
| }, |
| { |
| "epoch": 1.7436619718309858, |
| "grad_norm": 0.29708723639195195, |
| "learning_rate": 2.324561403508772e-05, |
| "loss": 0.3327, |
| "step": 1548 |
| }, |
| { |
| "epoch": 1.7447887323943663, |
| "grad_norm": 0.29149822506565004, |
| "learning_rate": 2.322472848788638e-05, |
| "loss": 0.3564, |
| "step": 1549 |
| }, |
| { |
| "epoch": 1.7459154929577465, |
| "grad_norm": 0.3059464683757231, |
| "learning_rate": 2.3203842940685048e-05, |
| "loss": 0.374, |
| "step": 1550 |
| }, |
| { |
| "epoch": 1.7470422535211267, |
| "grad_norm": 0.24945858165460774, |
| "learning_rate": 2.3182957393483708e-05, |
| "loss": 0.3373, |
| "step": 1551 |
| }, |
| { |
| "epoch": 1.7481690140845072, |
| "grad_norm": 0.2837768348385519, |
| "learning_rate": 2.3162071846282375e-05, |
| "loss": 0.3421, |
| "step": 1552 |
| }, |
| { |
| "epoch": 1.7492957746478872, |
| "grad_norm": 0.28899009777285367, |
| "learning_rate": 2.3141186299081035e-05, |
| "loss": 0.3422, |
| "step": 1553 |
| }, |
| { |
| "epoch": 1.7504225352112677, |
| "grad_norm": 0.3185735213904924, |
| "learning_rate": 2.31203007518797e-05, |
| "loss": 0.3479, |
| "step": 1554 |
| }, |
| { |
| "epoch": 1.751549295774648, |
| "grad_norm": 0.2727834946078125, |
| "learning_rate": 2.309941520467836e-05, |
| "loss": 0.3515, |
| "step": 1555 |
| }, |
| { |
| "epoch": 1.7526760563380281, |
| "grad_norm": 0.2920114525814956, |
| "learning_rate": 2.3078529657477028e-05, |
| "loss": 0.3259, |
| "step": 1556 |
| }, |
| { |
| "epoch": 1.7538028169014086, |
| "grad_norm": 0.2774511777486092, |
| "learning_rate": 2.3057644110275688e-05, |
| "loss": 0.3444, |
| "step": 1557 |
| }, |
| { |
| "epoch": 1.7549295774647886, |
| "grad_norm": 0.33462145841096125, |
| "learning_rate": 2.3036758563074355e-05, |
| "loss": 0.3394, |
| "step": 1558 |
| }, |
| { |
| "epoch": 1.756056338028169, |
| "grad_norm": 0.28517999320080967, |
| "learning_rate": 2.3015873015873015e-05, |
| "loss": 0.347, |
| "step": 1559 |
| }, |
| { |
| "epoch": 1.7571830985915493, |
| "grad_norm": 0.3207924838988884, |
| "learning_rate": 2.2994987468671682e-05, |
| "loss": 0.3458, |
| "step": 1560 |
| }, |
| { |
| "epoch": 1.7583098591549295, |
| "grad_norm": 0.342362737976875, |
| "learning_rate": 2.2974101921470342e-05, |
| "loss": 0.3757, |
| "step": 1561 |
| }, |
| { |
| "epoch": 1.75943661971831, |
| "grad_norm": 0.3248576570360329, |
| "learning_rate": 2.295321637426901e-05, |
| "loss": 0.3516, |
| "step": 1562 |
| }, |
| { |
| "epoch": 1.76056338028169, |
| "grad_norm": 0.287455125644324, |
| "learning_rate": 2.293233082706767e-05, |
| "loss": 0.3393, |
| "step": 1563 |
| }, |
| { |
| "epoch": 1.7616901408450705, |
| "grad_norm": 0.2868575359250464, |
| "learning_rate": 2.2911445279866335e-05, |
| "loss": 0.3365, |
| "step": 1564 |
| }, |
| { |
| "epoch": 1.7628169014084507, |
| "grad_norm": 0.2549480950241948, |
| "learning_rate": 2.2890559732664995e-05, |
| "loss": 0.343, |
| "step": 1565 |
| }, |
| { |
| "epoch": 1.763943661971831, |
| "grad_norm": 0.282209289641925, |
| "learning_rate": 2.2869674185463662e-05, |
| "loss": 0.3366, |
| "step": 1566 |
| }, |
| { |
| "epoch": 1.7650704225352114, |
| "grad_norm": 0.27361435419089486, |
| "learning_rate": 2.2848788638262322e-05, |
| "loss": 0.3505, |
| "step": 1567 |
| }, |
| { |
| "epoch": 1.7661971830985914, |
| "grad_norm": 0.2619276537280242, |
| "learning_rate": 2.2827903091060985e-05, |
| "loss": 0.3459, |
| "step": 1568 |
| }, |
| { |
| "epoch": 1.7673239436619719, |
| "grad_norm": 0.2746835426997974, |
| "learning_rate": 2.280701754385965e-05, |
| "loss": 0.3608, |
| "step": 1569 |
| }, |
| { |
| "epoch": 1.768450704225352, |
| "grad_norm": 0.2883899114472481, |
| "learning_rate": 2.2786131996658312e-05, |
| "loss": 0.3481, |
| "step": 1570 |
| }, |
| { |
| "epoch": 1.7695774647887323, |
| "grad_norm": 0.27976723869096415, |
| "learning_rate": 2.2765246449456976e-05, |
| "loss": 0.3453, |
| "step": 1571 |
| }, |
| { |
| "epoch": 1.7707042253521128, |
| "grad_norm": 0.29356343885407754, |
| "learning_rate": 2.274436090225564e-05, |
| "loss": 0.3225, |
| "step": 1572 |
| }, |
| { |
| "epoch": 1.7718309859154928, |
| "grad_norm": 0.2548029064797793, |
| "learning_rate": 2.2723475355054302e-05, |
| "loss": 0.3452, |
| "step": 1573 |
| }, |
| { |
| "epoch": 1.7729577464788733, |
| "grad_norm": 0.2887543722226567, |
| "learning_rate": 2.2702589807852966e-05, |
| "loss": 0.3425, |
| "step": 1574 |
| }, |
| { |
| "epoch": 1.7740845070422535, |
| "grad_norm": 0.30230412782024907, |
| "learning_rate": 2.268170426065163e-05, |
| "loss": 0.3473, |
| "step": 1575 |
| }, |
| { |
| "epoch": 1.7752112676056337, |
| "grad_norm": 0.31160638538614543, |
| "learning_rate": 2.2660818713450292e-05, |
| "loss": 0.3744, |
| "step": 1576 |
| }, |
| { |
| "epoch": 1.7763380281690142, |
| "grad_norm": 0.29139656731845104, |
| "learning_rate": 2.2639933166248956e-05, |
| "loss": 0.3514, |
| "step": 1577 |
| }, |
| { |
| "epoch": 1.7774647887323942, |
| "grad_norm": 0.33332671251383295, |
| "learning_rate": 2.261904761904762e-05, |
| "loss": 0.3432, |
| "step": 1578 |
| }, |
| { |
| "epoch": 1.7785915492957747, |
| "grad_norm": 0.28747603115955583, |
| "learning_rate": 2.2598162071846283e-05, |
| "loss": 0.3304, |
| "step": 1579 |
| }, |
| { |
| "epoch": 1.779718309859155, |
| "grad_norm": 0.2779297515417481, |
| "learning_rate": 2.2577276524644946e-05, |
| "loss": 0.3297, |
| "step": 1580 |
| }, |
| { |
| "epoch": 1.7808450704225351, |
| "grad_norm": 0.28845443740956794, |
| "learning_rate": 2.255639097744361e-05, |
| "loss": 0.3488, |
| "step": 1581 |
| }, |
| { |
| "epoch": 1.7819718309859156, |
| "grad_norm": 0.28781351634594954, |
| "learning_rate": 2.2535505430242273e-05, |
| "loss": 0.3438, |
| "step": 1582 |
| }, |
| { |
| "epoch": 1.7830985915492956, |
| "grad_norm": 0.26061432523931766, |
| "learning_rate": 2.2514619883040936e-05, |
| "loss": 0.345, |
| "step": 1583 |
| }, |
| { |
| "epoch": 1.784225352112676, |
| "grad_norm": 0.2668349047223478, |
| "learning_rate": 2.24937343358396e-05, |
| "loss": 0.34, |
| "step": 1584 |
| }, |
| { |
| "epoch": 1.7853521126760563, |
| "grad_norm": 0.2639968324519894, |
| "learning_rate": 2.2472848788638263e-05, |
| "loss": 0.3329, |
| "step": 1585 |
| }, |
| { |
| "epoch": 1.7864788732394365, |
| "grad_norm": 0.2723178234361553, |
| "learning_rate": 2.2451963241436926e-05, |
| "loss": 0.3263, |
| "step": 1586 |
| }, |
| { |
| "epoch": 1.787605633802817, |
| "grad_norm": 0.29903286947902974, |
| "learning_rate": 2.243107769423559e-05, |
| "loss": 0.3306, |
| "step": 1587 |
| }, |
| { |
| "epoch": 1.788732394366197, |
| "grad_norm": 0.24254004324999334, |
| "learning_rate": 2.2410192147034253e-05, |
| "loss": 0.3404, |
| "step": 1588 |
| }, |
| { |
| "epoch": 1.7898591549295775, |
| "grad_norm": 0.7213667973586395, |
| "learning_rate": 2.2389306599832916e-05, |
| "loss": 0.3265, |
| "step": 1589 |
| }, |
| { |
| "epoch": 1.7909859154929577, |
| "grad_norm": 0.2833182002249716, |
| "learning_rate": 2.236842105263158e-05, |
| "loss": 0.3431, |
| "step": 1590 |
| }, |
| { |
| "epoch": 1.792112676056338, |
| "grad_norm": 0.2455273694404395, |
| "learning_rate": 2.2347535505430243e-05, |
| "loss": 0.331, |
| "step": 1591 |
| }, |
| { |
| "epoch": 1.7932394366197184, |
| "grad_norm": 0.312552449696081, |
| "learning_rate": 2.2326649958228906e-05, |
| "loss": 0.3393, |
| "step": 1592 |
| }, |
| { |
| "epoch": 1.7943661971830986, |
| "grad_norm": 0.31396411445311406, |
| "learning_rate": 2.230576441102757e-05, |
| "loss": 0.3467, |
| "step": 1593 |
| }, |
| { |
| "epoch": 1.7954929577464789, |
| "grad_norm": 0.3056183583942239, |
| "learning_rate": 2.2284878863826233e-05, |
| "loss": 0.3624, |
| "step": 1594 |
| }, |
| { |
| "epoch": 1.7966197183098591, |
| "grad_norm": 0.2761911063940829, |
| "learning_rate": 2.2263993316624897e-05, |
| "loss": 0.3358, |
| "step": 1595 |
| }, |
| { |
| "epoch": 1.7977464788732394, |
| "grad_norm": 0.25345443456743505, |
| "learning_rate": 2.224310776942356e-05, |
| "loss": 0.3387, |
| "step": 1596 |
| }, |
| { |
| "epoch": 1.7988732394366198, |
| "grad_norm": 0.33926576308711215, |
| "learning_rate": 2.2222222222222223e-05, |
| "loss": 0.3379, |
| "step": 1597 |
| }, |
| { |
| "epoch": 1.8, |
| "grad_norm": 0.2766434018395404, |
| "learning_rate": 2.2201336675020887e-05, |
| "loss": 0.3523, |
| "step": 1598 |
| }, |
| { |
| "epoch": 1.8011267605633803, |
| "grad_norm": 0.27350545871668236, |
| "learning_rate": 2.218045112781955e-05, |
| "loss": 0.3237, |
| "step": 1599 |
| }, |
| { |
| "epoch": 1.8022535211267605, |
| "grad_norm": 0.29343193244243265, |
| "learning_rate": 2.2159565580618213e-05, |
| "loss": 0.3408, |
| "step": 1600 |
| }, |
| { |
| "epoch": 1.8033802816901408, |
| "grad_norm": 0.30098082503394646, |
| "learning_rate": 2.2138680033416877e-05, |
| "loss": 0.3511, |
| "step": 1601 |
| }, |
| { |
| "epoch": 1.8045070422535212, |
| "grad_norm": 0.31054186922907717, |
| "learning_rate": 2.211779448621554e-05, |
| "loss": 0.3353, |
| "step": 1602 |
| }, |
| { |
| "epoch": 1.8056338028169014, |
| "grad_norm": 0.28301958875610483, |
| "learning_rate": 2.2096908939014204e-05, |
| "loss": 0.3485, |
| "step": 1603 |
| }, |
| { |
| "epoch": 1.8067605633802817, |
| "grad_norm": 0.2942985022402299, |
| "learning_rate": 2.2076023391812867e-05, |
| "loss": 0.3377, |
| "step": 1604 |
| }, |
| { |
| "epoch": 1.807887323943662, |
| "grad_norm": 0.35468777953769726, |
| "learning_rate": 2.205513784461153e-05, |
| "loss": 0.3551, |
| "step": 1605 |
| }, |
| { |
| "epoch": 1.8090140845070422, |
| "grad_norm": 0.3242292242406716, |
| "learning_rate": 2.2034252297410194e-05, |
| "loss": 0.3204, |
| "step": 1606 |
| }, |
| { |
| "epoch": 1.8101408450704226, |
| "grad_norm": 0.3143821962891906, |
| "learning_rate": 2.2013366750208854e-05, |
| "loss": 0.3412, |
| "step": 1607 |
| }, |
| { |
| "epoch": 1.8112676056338028, |
| "grad_norm": 0.27534323950223133, |
| "learning_rate": 2.199248120300752e-05, |
| "loss": 0.3577, |
| "step": 1608 |
| }, |
| { |
| "epoch": 1.812394366197183, |
| "grad_norm": 0.25069024865893086, |
| "learning_rate": 2.197159565580618e-05, |
| "loss": 0.3308, |
| "step": 1609 |
| }, |
| { |
| "epoch": 1.8135211267605635, |
| "grad_norm": 0.33679641393934495, |
| "learning_rate": 2.1950710108604847e-05, |
| "loss": 0.3312, |
| "step": 1610 |
| }, |
| { |
| "epoch": 1.8146478873239436, |
| "grad_norm": 0.26647094286661044, |
| "learning_rate": 2.1929824561403507e-05, |
| "loss": 0.3466, |
| "step": 1611 |
| }, |
| { |
| "epoch": 1.815774647887324, |
| "grad_norm": 0.30509100023725005, |
| "learning_rate": 2.1908939014202174e-05, |
| "loss": 0.3547, |
| "step": 1612 |
| }, |
| { |
| "epoch": 1.8169014084507042, |
| "grad_norm": 0.3493657451369854, |
| "learning_rate": 2.1888053467000834e-05, |
| "loss": 0.3624, |
| "step": 1613 |
| }, |
| { |
| "epoch": 1.8180281690140845, |
| "grad_norm": 0.3230058198263615, |
| "learning_rate": 2.18671679197995e-05, |
| "loss": 0.3588, |
| "step": 1614 |
| }, |
| { |
| "epoch": 1.819154929577465, |
| "grad_norm": 0.3677734400847891, |
| "learning_rate": 2.184628237259816e-05, |
| "loss": 0.3307, |
| "step": 1615 |
| }, |
| { |
| "epoch": 1.820281690140845, |
| "grad_norm": 0.26197131961421155, |
| "learning_rate": 2.1825396825396827e-05, |
| "loss": 0.3349, |
| "step": 1616 |
| }, |
| { |
| "epoch": 1.8214084507042254, |
| "grad_norm": 0.30393753088662656, |
| "learning_rate": 2.1804511278195487e-05, |
| "loss": 0.3315, |
| "step": 1617 |
| }, |
| { |
| "epoch": 1.8225352112676056, |
| "grad_norm": 0.33606822603694075, |
| "learning_rate": 2.1783625730994154e-05, |
| "loss": 0.328, |
| "step": 1618 |
| }, |
| { |
| "epoch": 1.8236619718309859, |
| "grad_norm": 0.27706561665932133, |
| "learning_rate": 2.1762740183792814e-05, |
| "loss": 0.3339, |
| "step": 1619 |
| }, |
| { |
| "epoch": 1.8247887323943663, |
| "grad_norm": 0.3554195135230648, |
| "learning_rate": 2.174185463659148e-05, |
| "loss": 0.358, |
| "step": 1620 |
| }, |
| { |
| "epoch": 1.8259154929577464, |
| "grad_norm": 0.29712856717297026, |
| "learning_rate": 2.172096908939014e-05, |
| "loss": 0.3421, |
| "step": 1621 |
| }, |
| { |
| "epoch": 1.8270422535211268, |
| "grad_norm": 0.2927915595603246, |
| "learning_rate": 2.1700083542188808e-05, |
| "loss": 0.3357, |
| "step": 1622 |
| }, |
| { |
| "epoch": 1.828169014084507, |
| "grad_norm": 0.35141700138951115, |
| "learning_rate": 2.1679197994987468e-05, |
| "loss": 0.3363, |
| "step": 1623 |
| }, |
| { |
| "epoch": 1.8292957746478873, |
| "grad_norm": 0.279248687120629, |
| "learning_rate": 2.1658312447786134e-05, |
| "loss": 0.3311, |
| "step": 1624 |
| }, |
| { |
| "epoch": 1.8304225352112677, |
| "grad_norm": 0.32879501257039334, |
| "learning_rate": 2.1637426900584794e-05, |
| "loss": 0.3285, |
| "step": 1625 |
| }, |
| { |
| "epoch": 1.8315492957746478, |
| "grad_norm": 0.3127639986073736, |
| "learning_rate": 2.161654135338346e-05, |
| "loss": 0.3662, |
| "step": 1626 |
| }, |
| { |
| "epoch": 1.8326760563380282, |
| "grad_norm": 0.2642635116143911, |
| "learning_rate": 2.159565580618212e-05, |
| "loss": 0.3502, |
| "step": 1627 |
| }, |
| { |
| "epoch": 1.8338028169014085, |
| "grad_norm": 0.3184196432962934, |
| "learning_rate": 2.1574770258980788e-05, |
| "loss": 0.3413, |
| "step": 1628 |
| }, |
| { |
| "epoch": 1.8349295774647887, |
| "grad_norm": 0.2762768573978393, |
| "learning_rate": 2.1553884711779448e-05, |
| "loss": 0.335, |
| "step": 1629 |
| }, |
| { |
| "epoch": 1.8360563380281691, |
| "grad_norm": 0.2639598293830892, |
| "learning_rate": 2.1532999164578115e-05, |
| "loss": 0.3487, |
| "step": 1630 |
| }, |
| { |
| "epoch": 1.8371830985915492, |
| "grad_norm": 0.2910237030621018, |
| "learning_rate": 2.1512113617376775e-05, |
| "loss": 0.3339, |
| "step": 1631 |
| }, |
| { |
| "epoch": 1.8383098591549296, |
| "grad_norm": 0.3118135930021659, |
| "learning_rate": 2.149122807017544e-05, |
| "loss": 0.349, |
| "step": 1632 |
| }, |
| { |
| "epoch": 1.8394366197183099, |
| "grad_norm": 0.2705892616745369, |
| "learning_rate": 2.14703425229741e-05, |
| "loss": 0.3525, |
| "step": 1633 |
| }, |
| { |
| "epoch": 1.84056338028169, |
| "grad_norm": 0.340698961529552, |
| "learning_rate": 2.1449456975772768e-05, |
| "loss": 0.3299, |
| "step": 1634 |
| }, |
| { |
| "epoch": 1.8416901408450705, |
| "grad_norm": 0.3091514758989356, |
| "learning_rate": 2.1428571428571428e-05, |
| "loss": 0.3379, |
| "step": 1635 |
| }, |
| { |
| "epoch": 1.8428169014084506, |
| "grad_norm": 0.2643485419141822, |
| "learning_rate": 2.140768588137009e-05, |
| "loss": 0.3302, |
| "step": 1636 |
| }, |
| { |
| "epoch": 1.843943661971831, |
| "grad_norm": 0.31087625176013, |
| "learning_rate": 2.1386800334168755e-05, |
| "loss": 0.3325, |
| "step": 1637 |
| }, |
| { |
| "epoch": 1.8450704225352113, |
| "grad_norm": 0.27948087905956615, |
| "learning_rate": 2.1365914786967418e-05, |
| "loss": 0.3604, |
| "step": 1638 |
| }, |
| { |
| "epoch": 1.8461971830985915, |
| "grad_norm": 0.35227442456776953, |
| "learning_rate": 2.134502923976608e-05, |
| "loss": 0.3449, |
| "step": 1639 |
| }, |
| { |
| "epoch": 1.847323943661972, |
| "grad_norm": 0.3142701794790016, |
| "learning_rate": 2.1324143692564745e-05, |
| "loss": 0.3465, |
| "step": 1640 |
| }, |
| { |
| "epoch": 1.848450704225352, |
| "grad_norm": 0.27148992742513084, |
| "learning_rate": 2.130325814536341e-05, |
| "loss": 0.343, |
| "step": 1641 |
| }, |
| { |
| "epoch": 1.8495774647887324, |
| "grad_norm": 0.3131614508725724, |
| "learning_rate": 2.1282372598162072e-05, |
| "loss": 0.3483, |
| "step": 1642 |
| }, |
| { |
| "epoch": 1.8507042253521127, |
| "grad_norm": 0.2696953224596118, |
| "learning_rate": 2.1261487050960735e-05, |
| "loss": 0.3222, |
| "step": 1643 |
| }, |
| { |
| "epoch": 1.8518309859154929, |
| "grad_norm": 0.3297224034001454, |
| "learning_rate": 2.12406015037594e-05, |
| "loss": 0.338, |
| "step": 1644 |
| }, |
| { |
| "epoch": 1.8529577464788733, |
| "grad_norm": 0.2877479636683936, |
| "learning_rate": 2.1219715956558062e-05, |
| "loss": 0.3287, |
| "step": 1645 |
| }, |
| { |
| "epoch": 1.8540845070422534, |
| "grad_norm": 0.287649253375458, |
| "learning_rate": 2.1198830409356725e-05, |
| "loss": 0.3228, |
| "step": 1646 |
| }, |
| { |
| "epoch": 1.8552112676056338, |
| "grad_norm": 0.3053211833522019, |
| "learning_rate": 2.117794486215539e-05, |
| "loss": 0.3364, |
| "step": 1647 |
| }, |
| { |
| "epoch": 1.856338028169014, |
| "grad_norm": 0.26500299421684886, |
| "learning_rate": 2.1157059314954052e-05, |
| "loss": 0.3367, |
| "step": 1648 |
| }, |
| { |
| "epoch": 1.8574647887323943, |
| "grad_norm": 0.301128723829993, |
| "learning_rate": 2.1136173767752715e-05, |
| "loss": 0.3668, |
| "step": 1649 |
| }, |
| { |
| "epoch": 1.8585915492957747, |
| "grad_norm": 0.26457007250253695, |
| "learning_rate": 2.111528822055138e-05, |
| "loss": 0.3312, |
| "step": 1650 |
| }, |
| { |
| "epoch": 1.859718309859155, |
| "grad_norm": 0.28765531421267604, |
| "learning_rate": 2.1094402673350042e-05, |
| "loss": 0.3429, |
| "step": 1651 |
| }, |
| { |
| "epoch": 1.8608450704225352, |
| "grad_norm": 0.304870943493138, |
| "learning_rate": 2.1073517126148706e-05, |
| "loss": 0.3426, |
| "step": 1652 |
| }, |
| { |
| "epoch": 1.8619718309859155, |
| "grad_norm": 0.2795950818927515, |
| "learning_rate": 2.105263157894737e-05, |
| "loss": 0.3267, |
| "step": 1653 |
| }, |
| { |
| "epoch": 1.8630985915492957, |
| "grad_norm": 0.2754880594324992, |
| "learning_rate": 2.1031746031746032e-05, |
| "loss": 0.3242, |
| "step": 1654 |
| }, |
| { |
| "epoch": 1.8642253521126761, |
| "grad_norm": 0.3027378251702463, |
| "learning_rate": 2.1010860484544696e-05, |
| "loss": 0.3485, |
| "step": 1655 |
| }, |
| { |
| "epoch": 1.8653521126760564, |
| "grad_norm": 0.3177812168120693, |
| "learning_rate": 2.098997493734336e-05, |
| "loss": 0.3513, |
| "step": 1656 |
| }, |
| { |
| "epoch": 1.8664788732394366, |
| "grad_norm": 0.32938906771356163, |
| "learning_rate": 2.0969089390142022e-05, |
| "loss": 0.347, |
| "step": 1657 |
| }, |
| { |
| "epoch": 1.8676056338028169, |
| "grad_norm": 0.2617718930009443, |
| "learning_rate": 2.0948203842940686e-05, |
| "loss": 0.3493, |
| "step": 1658 |
| }, |
| { |
| "epoch": 1.868732394366197, |
| "grad_norm": 0.33340860389167637, |
| "learning_rate": 2.092731829573935e-05, |
| "loss": 0.3351, |
| "step": 1659 |
| }, |
| { |
| "epoch": 1.8698591549295775, |
| "grad_norm": 0.3161357707059111, |
| "learning_rate": 2.0906432748538013e-05, |
| "loss": 0.3255, |
| "step": 1660 |
| }, |
| { |
| "epoch": 1.8709859154929578, |
| "grad_norm": 0.26257517402572905, |
| "learning_rate": 2.0885547201336676e-05, |
| "loss": 0.3625, |
| "step": 1661 |
| }, |
| { |
| "epoch": 1.872112676056338, |
| "grad_norm": 0.28303279003083226, |
| "learning_rate": 2.086466165413534e-05, |
| "loss": 0.3163, |
| "step": 1662 |
| }, |
| { |
| "epoch": 1.8732394366197183, |
| "grad_norm": 0.29747456344610324, |
| "learning_rate": 2.0843776106934003e-05, |
| "loss": 0.3327, |
| "step": 1663 |
| }, |
| { |
| "epoch": 1.8743661971830985, |
| "grad_norm": 0.25373424352702173, |
| "learning_rate": 2.0822890559732666e-05, |
| "loss": 0.3308, |
| "step": 1664 |
| }, |
| { |
| "epoch": 1.875492957746479, |
| "grad_norm": 0.2311080800384494, |
| "learning_rate": 2.080200501253133e-05, |
| "loss": 0.3387, |
| "step": 1665 |
| }, |
| { |
| "epoch": 1.8766197183098592, |
| "grad_norm": 0.2837018423422604, |
| "learning_rate": 2.0781119465329993e-05, |
| "loss": 0.32, |
| "step": 1666 |
| }, |
| { |
| "epoch": 1.8777464788732394, |
| "grad_norm": 0.26993270331175634, |
| "learning_rate": 2.0760233918128656e-05, |
| "loss": 0.3459, |
| "step": 1667 |
| }, |
| { |
| "epoch": 1.8788732394366199, |
| "grad_norm": 0.23928714499375003, |
| "learning_rate": 2.073934837092732e-05, |
| "loss": 0.3301, |
| "step": 1668 |
| }, |
| { |
| "epoch": 1.88, |
| "grad_norm": 0.3453714187721245, |
| "learning_rate": 2.0718462823725983e-05, |
| "loss": 0.3338, |
| "step": 1669 |
| }, |
| { |
| "epoch": 1.8811267605633804, |
| "grad_norm": 0.3423733909657252, |
| "learning_rate": 2.0697577276524646e-05, |
| "loss": 0.3345, |
| "step": 1670 |
| }, |
| { |
| "epoch": 1.8822535211267606, |
| "grad_norm": 0.2788575696634892, |
| "learning_rate": 2.067669172932331e-05, |
| "loss": 0.3378, |
| "step": 1671 |
| }, |
| { |
| "epoch": 1.8833802816901408, |
| "grad_norm": 0.3538012744028978, |
| "learning_rate": 2.0655806182121973e-05, |
| "loss": 0.3564, |
| "step": 1672 |
| }, |
| { |
| "epoch": 1.8845070422535213, |
| "grad_norm": 0.25591831678615323, |
| "learning_rate": 2.0634920634920636e-05, |
| "loss": 0.3135, |
| "step": 1673 |
| }, |
| { |
| "epoch": 1.8856338028169013, |
| "grad_norm": 0.26212868447836296, |
| "learning_rate": 2.06140350877193e-05, |
| "loss": 0.338, |
| "step": 1674 |
| }, |
| { |
| "epoch": 1.8867605633802818, |
| "grad_norm": 0.29447227411910276, |
| "learning_rate": 2.059314954051796e-05, |
| "loss": 0.3283, |
| "step": 1675 |
| }, |
| { |
| "epoch": 1.887887323943662, |
| "grad_norm": 0.3015256669930087, |
| "learning_rate": 2.0572263993316627e-05, |
| "loss": 0.3528, |
| "step": 1676 |
| }, |
| { |
| "epoch": 1.8890140845070422, |
| "grad_norm": 0.3176306050699436, |
| "learning_rate": 2.0551378446115287e-05, |
| "loss": 0.3181, |
| "step": 1677 |
| }, |
| { |
| "epoch": 1.8901408450704227, |
| "grad_norm": 0.2976333332404098, |
| "learning_rate": 2.0530492898913953e-05, |
| "loss": 0.3484, |
| "step": 1678 |
| }, |
| { |
| "epoch": 1.8912676056338027, |
| "grad_norm": 0.3110422983664593, |
| "learning_rate": 2.0509607351712613e-05, |
| "loss": 0.3355, |
| "step": 1679 |
| }, |
| { |
| "epoch": 1.8923943661971832, |
| "grad_norm": 0.29233125015587214, |
| "learning_rate": 2.048872180451128e-05, |
| "loss": 0.3411, |
| "step": 1680 |
| }, |
| { |
| "epoch": 1.8935211267605634, |
| "grad_norm": 0.3111388968581442, |
| "learning_rate": 2.046783625730994e-05, |
| "loss": 0.3507, |
| "step": 1681 |
| }, |
| { |
| "epoch": 1.8946478873239436, |
| "grad_norm": 0.3081020386333236, |
| "learning_rate": 2.0446950710108607e-05, |
| "loss": 0.3373, |
| "step": 1682 |
| }, |
| { |
| "epoch": 1.895774647887324, |
| "grad_norm": 0.2988072234644409, |
| "learning_rate": 2.0426065162907267e-05, |
| "loss": 0.3457, |
| "step": 1683 |
| }, |
| { |
| "epoch": 1.896901408450704, |
| "grad_norm": 0.2840073115940892, |
| "learning_rate": 2.0405179615705934e-05, |
| "loss": 0.3183, |
| "step": 1684 |
| }, |
| { |
| "epoch": 1.8980281690140846, |
| "grad_norm": 0.2600353216994005, |
| "learning_rate": 2.0384294068504594e-05, |
| "loss": 0.3548, |
| "step": 1685 |
| }, |
| { |
| "epoch": 1.8991549295774648, |
| "grad_norm": 0.26737930725428694, |
| "learning_rate": 2.036340852130326e-05, |
| "loss": 0.3314, |
| "step": 1686 |
| }, |
| { |
| "epoch": 1.900281690140845, |
| "grad_norm": 0.30275166487425725, |
| "learning_rate": 2.034252297410192e-05, |
| "loss": 0.3233, |
| "step": 1687 |
| }, |
| { |
| "epoch": 1.9014084507042255, |
| "grad_norm": 0.30729359639027726, |
| "learning_rate": 2.0321637426900587e-05, |
| "loss": 0.3356, |
| "step": 1688 |
| }, |
| { |
| "epoch": 1.9025352112676055, |
| "grad_norm": 0.31944513824067683, |
| "learning_rate": 2.0300751879699247e-05, |
| "loss": 0.3383, |
| "step": 1689 |
| }, |
| { |
| "epoch": 1.903661971830986, |
| "grad_norm": 0.26274255011466285, |
| "learning_rate": 2.0279866332497914e-05, |
| "loss": 0.34, |
| "step": 1690 |
| }, |
| { |
| "epoch": 1.9047887323943662, |
| "grad_norm": 0.29026862877536747, |
| "learning_rate": 2.0258980785296574e-05, |
| "loss": 0.3479, |
| "step": 1691 |
| }, |
| { |
| "epoch": 1.9059154929577464, |
| "grad_norm": 0.24240529796930577, |
| "learning_rate": 2.023809523809524e-05, |
| "loss": 0.3475, |
| "step": 1692 |
| }, |
| { |
| "epoch": 1.9070422535211269, |
| "grad_norm": 0.2721316935600126, |
| "learning_rate": 2.02172096908939e-05, |
| "loss": 0.3283, |
| "step": 1693 |
| }, |
| { |
| "epoch": 1.908169014084507, |
| "grad_norm": 0.286075938733232, |
| "learning_rate": 2.0196324143692567e-05, |
| "loss": 0.3491, |
| "step": 1694 |
| }, |
| { |
| "epoch": 1.9092957746478874, |
| "grad_norm": 0.25044492660792417, |
| "learning_rate": 2.0175438596491227e-05, |
| "loss": 0.3366, |
| "step": 1695 |
| }, |
| { |
| "epoch": 1.9104225352112676, |
| "grad_norm": 0.2617118447514493, |
| "learning_rate": 2.0154553049289894e-05, |
| "loss": 0.3409, |
| "step": 1696 |
| }, |
| { |
| "epoch": 1.9115492957746478, |
| "grad_norm": 0.2896232484551307, |
| "learning_rate": 2.0133667502088554e-05, |
| "loss": 0.362, |
| "step": 1697 |
| }, |
| { |
| "epoch": 1.9126760563380283, |
| "grad_norm": 0.2820678153733654, |
| "learning_rate": 2.011278195488722e-05, |
| "loss": 0.3211, |
| "step": 1698 |
| }, |
| { |
| "epoch": 1.9138028169014083, |
| "grad_norm": 0.25316052627447677, |
| "learning_rate": 2.009189640768588e-05, |
| "loss": 0.3418, |
| "step": 1699 |
| }, |
| { |
| "epoch": 1.9149295774647888, |
| "grad_norm": 0.2969628596361171, |
| "learning_rate": 2.0071010860484548e-05, |
| "loss": 0.3403, |
| "step": 1700 |
| }, |
| { |
| "epoch": 1.916056338028169, |
| "grad_norm": 0.3103149867868271, |
| "learning_rate": 2.0050125313283208e-05, |
| "loss": 0.3286, |
| "step": 1701 |
| }, |
| { |
| "epoch": 1.9171830985915492, |
| "grad_norm": 0.25684431514460954, |
| "learning_rate": 2.0029239766081874e-05, |
| "loss": 0.3312, |
| "step": 1702 |
| }, |
| { |
| "epoch": 1.9183098591549297, |
| "grad_norm": 0.23745838856526574, |
| "learning_rate": 2.0008354218880534e-05, |
| "loss": 0.3097, |
| "step": 1703 |
| }, |
| { |
| "epoch": 1.9194366197183097, |
| "grad_norm": 0.2873795363633347, |
| "learning_rate": 1.9987468671679198e-05, |
| "loss": 0.344, |
| "step": 1704 |
| }, |
| { |
| "epoch": 1.9205633802816902, |
| "grad_norm": 0.29633915312398484, |
| "learning_rate": 1.996658312447786e-05, |
| "loss": 0.3307, |
| "step": 1705 |
| }, |
| { |
| "epoch": 1.9216901408450704, |
| "grad_norm": 0.2810829629998503, |
| "learning_rate": 1.9945697577276524e-05, |
| "loss": 0.3419, |
| "step": 1706 |
| }, |
| { |
| "epoch": 1.9228169014084506, |
| "grad_norm": 0.3281832059884901, |
| "learning_rate": 1.9924812030075188e-05, |
| "loss": 0.3453, |
| "step": 1707 |
| }, |
| { |
| "epoch": 1.923943661971831, |
| "grad_norm": 0.3350869651799835, |
| "learning_rate": 1.990392648287385e-05, |
| "loss": 0.3616, |
| "step": 1708 |
| }, |
| { |
| "epoch": 1.9250704225352113, |
| "grad_norm": 0.27519285232473656, |
| "learning_rate": 1.9883040935672515e-05, |
| "loss": 0.3432, |
| "step": 1709 |
| }, |
| { |
| "epoch": 1.9261971830985916, |
| "grad_norm": 0.36486612755178166, |
| "learning_rate": 1.9862155388471178e-05, |
| "loss": 0.3439, |
| "step": 1710 |
| }, |
| { |
| "epoch": 1.9273239436619718, |
| "grad_norm": 0.2972926771765651, |
| "learning_rate": 1.984126984126984e-05, |
| "loss": 0.3291, |
| "step": 1711 |
| }, |
| { |
| "epoch": 1.928450704225352, |
| "grad_norm": 0.29236209232234134, |
| "learning_rate": 1.9820384294068505e-05, |
| "loss": 0.3264, |
| "step": 1712 |
| }, |
| { |
| "epoch": 1.9295774647887325, |
| "grad_norm": 0.25086892831193985, |
| "learning_rate": 1.9799498746867168e-05, |
| "loss": 0.3605, |
| "step": 1713 |
| }, |
| { |
| "epoch": 1.9307042253521127, |
| "grad_norm": 0.29506199630237756, |
| "learning_rate": 1.977861319966583e-05, |
| "loss": 0.3263, |
| "step": 1714 |
| }, |
| { |
| "epoch": 1.931830985915493, |
| "grad_norm": 0.27982886372693655, |
| "learning_rate": 1.9757727652464495e-05, |
| "loss": 0.3374, |
| "step": 1715 |
| }, |
| { |
| "epoch": 1.9329577464788732, |
| "grad_norm": 0.29117276364325867, |
| "learning_rate": 1.9736842105263158e-05, |
| "loss": 0.3723, |
| "step": 1716 |
| }, |
| { |
| "epoch": 1.9340845070422534, |
| "grad_norm": 0.280093507811842, |
| "learning_rate": 1.971595655806182e-05, |
| "loss": 0.3454, |
| "step": 1717 |
| }, |
| { |
| "epoch": 1.935211267605634, |
| "grad_norm": 0.2793099946402611, |
| "learning_rate": 1.9695071010860485e-05, |
| "loss": 0.3298, |
| "step": 1718 |
| }, |
| { |
| "epoch": 1.9363380281690141, |
| "grad_norm": 0.31700496486348656, |
| "learning_rate": 1.9674185463659148e-05, |
| "loss": 0.3861, |
| "step": 1719 |
| }, |
| { |
| "epoch": 1.9374647887323944, |
| "grad_norm": 0.28444988922202363, |
| "learning_rate": 1.965329991645781e-05, |
| "loss": 0.3747, |
| "step": 1720 |
| }, |
| { |
| "epoch": 1.9385915492957746, |
| "grad_norm": 0.3156855684456946, |
| "learning_rate": 1.9632414369256475e-05, |
| "loss": 0.349, |
| "step": 1721 |
| }, |
| { |
| "epoch": 1.9397183098591548, |
| "grad_norm": 0.2684019030063972, |
| "learning_rate": 1.961152882205514e-05, |
| "loss": 0.3358, |
| "step": 1722 |
| }, |
| { |
| "epoch": 1.9408450704225353, |
| "grad_norm": 0.24664955105702524, |
| "learning_rate": 1.9590643274853802e-05, |
| "loss": 0.3468, |
| "step": 1723 |
| }, |
| { |
| "epoch": 1.9419718309859155, |
| "grad_norm": 0.30750144819353686, |
| "learning_rate": 1.9569757727652465e-05, |
| "loss": 0.3296, |
| "step": 1724 |
| }, |
| { |
| "epoch": 1.9430985915492958, |
| "grad_norm": 0.28855748148807314, |
| "learning_rate": 1.954887218045113e-05, |
| "loss": 0.3298, |
| "step": 1725 |
| }, |
| { |
| "epoch": 1.9442253521126762, |
| "grad_norm": 0.27663366620065105, |
| "learning_rate": 1.9527986633249792e-05, |
| "loss": 0.3484, |
| "step": 1726 |
| }, |
| { |
| "epoch": 1.9453521126760562, |
| "grad_norm": 0.26053405371799565, |
| "learning_rate": 1.9507101086048455e-05, |
| "loss": 0.3435, |
| "step": 1727 |
| }, |
| { |
| "epoch": 1.9464788732394367, |
| "grad_norm": 0.2789834306854869, |
| "learning_rate": 1.948621553884712e-05, |
| "loss": 0.3388, |
| "step": 1728 |
| }, |
| { |
| "epoch": 1.947605633802817, |
| "grad_norm": 0.33677228028455536, |
| "learning_rate": 1.9465329991645782e-05, |
| "loss": 0.3454, |
| "step": 1729 |
| }, |
| { |
| "epoch": 1.9487323943661972, |
| "grad_norm": 0.3078151462694492, |
| "learning_rate": 1.9444444444444445e-05, |
| "loss": 0.3434, |
| "step": 1730 |
| }, |
| { |
| "epoch": 1.9498591549295776, |
| "grad_norm": 0.3089644977305877, |
| "learning_rate": 1.942355889724311e-05, |
| "loss": 0.3413, |
| "step": 1731 |
| }, |
| { |
| "epoch": 1.9509859154929576, |
| "grad_norm": 0.2817600040007839, |
| "learning_rate": 1.9402673350041772e-05, |
| "loss": 0.3435, |
| "step": 1732 |
| }, |
| { |
| "epoch": 1.952112676056338, |
| "grad_norm": 0.360677722904184, |
| "learning_rate": 1.9381787802840436e-05, |
| "loss": 0.3497, |
| "step": 1733 |
| }, |
| { |
| "epoch": 1.9532394366197183, |
| "grad_norm": 0.3332403942471745, |
| "learning_rate": 1.93609022556391e-05, |
| "loss": 0.3432, |
| "step": 1734 |
| }, |
| { |
| "epoch": 1.9543661971830986, |
| "grad_norm": 0.3451996520792954, |
| "learning_rate": 1.9340016708437762e-05, |
| "loss": 0.3457, |
| "step": 1735 |
| }, |
| { |
| "epoch": 1.955492957746479, |
| "grad_norm": 0.37021919612796633, |
| "learning_rate": 1.9319131161236426e-05, |
| "loss": 0.364, |
| "step": 1736 |
| }, |
| { |
| "epoch": 1.956619718309859, |
| "grad_norm": 0.3141412057799376, |
| "learning_rate": 1.929824561403509e-05, |
| "loss": 0.3562, |
| "step": 1737 |
| }, |
| { |
| "epoch": 1.9577464788732395, |
| "grad_norm": 0.2774486726808161, |
| "learning_rate": 1.9277360066833752e-05, |
| "loss": 0.3393, |
| "step": 1738 |
| }, |
| { |
| "epoch": 1.9588732394366197, |
| "grad_norm": 0.3184098259456306, |
| "learning_rate": 1.9256474519632416e-05, |
| "loss": 0.3347, |
| "step": 1739 |
| }, |
| { |
| "epoch": 1.96, |
| "grad_norm": 0.25779629730566117, |
| "learning_rate": 1.923558897243108e-05, |
| "loss": 0.34, |
| "step": 1740 |
| }, |
| { |
| "epoch": 1.9611267605633804, |
| "grad_norm": 0.31507109794266863, |
| "learning_rate": 1.9214703425229743e-05, |
| "loss": 0.3306, |
| "step": 1741 |
| }, |
| { |
| "epoch": 1.9622535211267604, |
| "grad_norm": 0.2427966113722695, |
| "learning_rate": 1.9193817878028406e-05, |
| "loss": 0.3141, |
| "step": 1742 |
| }, |
| { |
| "epoch": 1.963380281690141, |
| "grad_norm": 0.2542233643337515, |
| "learning_rate": 1.9172932330827066e-05, |
| "loss": 0.3344, |
| "step": 1743 |
| }, |
| { |
| "epoch": 1.9645070422535211, |
| "grad_norm": 0.32081915488615875, |
| "learning_rate": 1.9152046783625733e-05, |
| "loss": 0.339, |
| "step": 1744 |
| }, |
| { |
| "epoch": 1.9656338028169014, |
| "grad_norm": 0.27906537367605055, |
| "learning_rate": 1.9131161236424393e-05, |
| "loss": 0.3348, |
| "step": 1745 |
| }, |
| { |
| "epoch": 1.9667605633802818, |
| "grad_norm": 0.274000810063737, |
| "learning_rate": 1.911027568922306e-05, |
| "loss": 0.3487, |
| "step": 1746 |
| }, |
| { |
| "epoch": 1.9678873239436618, |
| "grad_norm": 0.2959088718509456, |
| "learning_rate": 1.908939014202172e-05, |
| "loss": 0.3488, |
| "step": 1747 |
| }, |
| { |
| "epoch": 1.9690140845070423, |
| "grad_norm": 0.4473115218653689, |
| "learning_rate": 1.9068504594820386e-05, |
| "loss": 0.3512, |
| "step": 1748 |
| }, |
| { |
| "epoch": 1.9701408450704225, |
| "grad_norm": 0.2669740194740602, |
| "learning_rate": 1.9047619047619046e-05, |
| "loss": 0.3401, |
| "step": 1749 |
| }, |
| { |
| "epoch": 1.9712676056338028, |
| "grad_norm": 0.2788668712741395, |
| "learning_rate": 1.9026733500417713e-05, |
| "loss": 0.3347, |
| "step": 1750 |
| }, |
| { |
| "epoch": 1.9723943661971832, |
| "grad_norm": 0.2872455377014697, |
| "learning_rate": 1.9005847953216373e-05, |
| "loss": 0.3379, |
| "step": 1751 |
| }, |
| { |
| "epoch": 1.9735211267605632, |
| "grad_norm": 0.27694769236197103, |
| "learning_rate": 1.898496240601504e-05, |
| "loss": 0.3526, |
| "step": 1752 |
| }, |
| { |
| "epoch": 1.9746478873239437, |
| "grad_norm": 0.28203236948807303, |
| "learning_rate": 1.89640768588137e-05, |
| "loss": 0.3412, |
| "step": 1753 |
| }, |
| { |
| "epoch": 1.975774647887324, |
| "grad_norm": 0.24888801814995407, |
| "learning_rate": 1.8943191311612366e-05, |
| "loss": 0.3427, |
| "step": 1754 |
| }, |
| { |
| "epoch": 1.9769014084507042, |
| "grad_norm": 0.287119114978281, |
| "learning_rate": 1.8922305764411026e-05, |
| "loss": 0.3446, |
| "step": 1755 |
| }, |
| { |
| "epoch": 1.9780281690140846, |
| "grad_norm": 0.2766952677979089, |
| "learning_rate": 1.8901420217209693e-05, |
| "loss": 0.3505, |
| "step": 1756 |
| }, |
| { |
| "epoch": 1.9791549295774646, |
| "grad_norm": 0.3045381617070715, |
| "learning_rate": 1.8880534670008353e-05, |
| "loss": 0.3469, |
| "step": 1757 |
| }, |
| { |
| "epoch": 1.980281690140845, |
| "grad_norm": 0.25307779996131646, |
| "learning_rate": 1.885964912280702e-05, |
| "loss": 0.3457, |
| "step": 1758 |
| }, |
| { |
| "epoch": 1.9814084507042253, |
| "grad_norm": 0.2621907977207169, |
| "learning_rate": 1.883876357560568e-05, |
| "loss": 0.3414, |
| "step": 1759 |
| }, |
| { |
| "epoch": 1.9825352112676056, |
| "grad_norm": 0.28413673846352067, |
| "learning_rate": 1.8817878028404347e-05, |
| "loss": 0.3219, |
| "step": 1760 |
| }, |
| { |
| "epoch": 1.983661971830986, |
| "grad_norm": 0.2477558078368733, |
| "learning_rate": 1.8796992481203007e-05, |
| "loss": 0.3418, |
| "step": 1761 |
| }, |
| { |
| "epoch": 1.984788732394366, |
| "grad_norm": 0.2623624152689003, |
| "learning_rate": 1.8776106934001673e-05, |
| "loss": 0.3472, |
| "step": 1762 |
| }, |
| { |
| "epoch": 1.9859154929577465, |
| "grad_norm": 0.32582596048800616, |
| "learning_rate": 1.8755221386800333e-05, |
| "loss": 0.3547, |
| "step": 1763 |
| }, |
| { |
| "epoch": 1.9870422535211267, |
| "grad_norm": 0.2560026865469511, |
| "learning_rate": 1.8734335839599e-05, |
| "loss": 0.3452, |
| "step": 1764 |
| }, |
| { |
| "epoch": 1.988169014084507, |
| "grad_norm": 0.3194965526847108, |
| "learning_rate": 1.871345029239766e-05, |
| "loss": 0.3401, |
| "step": 1765 |
| }, |
| { |
| "epoch": 1.9892957746478874, |
| "grad_norm": 0.2552661250188611, |
| "learning_rate": 1.8692564745196327e-05, |
| "loss": 0.345, |
| "step": 1766 |
| }, |
| { |
| "epoch": 1.9904225352112674, |
| "grad_norm": 0.30136152145840883, |
| "learning_rate": 1.8671679197994987e-05, |
| "loss": 0.3431, |
| "step": 1767 |
| }, |
| { |
| "epoch": 1.991549295774648, |
| "grad_norm": 0.386170303761133, |
| "learning_rate": 1.8650793650793654e-05, |
| "loss": 0.3329, |
| "step": 1768 |
| }, |
| { |
| "epoch": 1.9926760563380281, |
| "grad_norm": 0.25735909944386404, |
| "learning_rate": 1.8629908103592314e-05, |
| "loss": 0.3656, |
| "step": 1769 |
| }, |
| { |
| "epoch": 1.9938028169014084, |
| "grad_norm": 0.35370824580018567, |
| "learning_rate": 1.860902255639098e-05, |
| "loss": 0.3343, |
| "step": 1770 |
| }, |
| { |
| "epoch": 1.9949295774647888, |
| "grad_norm": 0.36421894137805505, |
| "learning_rate": 1.858813700918964e-05, |
| "loss": 0.3448, |
| "step": 1771 |
| }, |
| { |
| "epoch": 1.996056338028169, |
| "grad_norm": 0.30126873361621703, |
| "learning_rate": 1.8567251461988304e-05, |
| "loss": 0.3739, |
| "step": 1772 |
| }, |
| { |
| "epoch": 1.9971830985915493, |
| "grad_norm": 0.3259500784443935, |
| "learning_rate": 1.8546365914786967e-05, |
| "loss": 0.3325, |
| "step": 1773 |
| }, |
| { |
| "epoch": 1.9983098591549295, |
| "grad_norm": 0.32438298898201956, |
| "learning_rate": 1.852548036758563e-05, |
| "loss": 0.3416, |
| "step": 1774 |
| }, |
| { |
| "epoch": 1.9994366197183098, |
| "grad_norm": 0.28319736419228, |
| "learning_rate": 1.8504594820384294e-05, |
| "loss": 0.316, |
| "step": 1775 |
| }, |
| { |
| "epoch": 2.0, |
| "grad_norm": 0.4400283988411228, |
| "learning_rate": 1.8483709273182957e-05, |
| "loss": 0.304, |
| "step": 1776 |
| }, |
| { |
| "epoch": 2.0011267605633805, |
| "grad_norm": 0.36193074426019617, |
| "learning_rate": 1.846282372598162e-05, |
| "loss": 0.2886, |
| "step": 1777 |
| }, |
| { |
| "epoch": 2.0022535211267605, |
| "grad_norm": 0.2898828417148018, |
| "learning_rate": 1.8441938178780284e-05, |
| "loss": 0.2844, |
| "step": 1778 |
| }, |
| { |
| "epoch": 2.003380281690141, |
| "grad_norm": 0.31537351348150766, |
| "learning_rate": 1.8421052631578947e-05, |
| "loss": 0.2768, |
| "step": 1779 |
| }, |
| { |
| "epoch": 2.004507042253521, |
| "grad_norm": 0.30556540318006736, |
| "learning_rate": 1.840016708437761e-05, |
| "loss": 0.2828, |
| "step": 1780 |
| }, |
| { |
| "epoch": 2.0056338028169014, |
| "grad_norm": 0.2875419069796752, |
| "learning_rate": 1.8379281537176274e-05, |
| "loss": 0.2813, |
| "step": 1781 |
| }, |
| { |
| "epoch": 2.006760563380282, |
| "grad_norm": 0.2629141920619817, |
| "learning_rate": 1.8358395989974937e-05, |
| "loss": 0.2709, |
| "step": 1782 |
| }, |
| { |
| "epoch": 2.007887323943662, |
| "grad_norm": 0.3049128895618629, |
| "learning_rate": 1.83375104427736e-05, |
| "loss": 0.2932, |
| "step": 1783 |
| }, |
| { |
| "epoch": 2.0090140845070423, |
| "grad_norm": 0.3183992325698135, |
| "learning_rate": 1.8316624895572264e-05, |
| "loss": 0.2785, |
| "step": 1784 |
| }, |
| { |
| "epoch": 2.0101408450704223, |
| "grad_norm": 0.2710738584069236, |
| "learning_rate": 1.8295739348370928e-05, |
| "loss": 0.2846, |
| "step": 1785 |
| }, |
| { |
| "epoch": 2.011267605633803, |
| "grad_norm": 0.26450209061387736, |
| "learning_rate": 1.827485380116959e-05, |
| "loss": 0.2752, |
| "step": 1786 |
| }, |
| { |
| "epoch": 2.0123943661971833, |
| "grad_norm": 0.3420014204418473, |
| "learning_rate": 1.8253968253968254e-05, |
| "loss": 0.2749, |
| "step": 1787 |
| }, |
| { |
| "epoch": 2.0135211267605633, |
| "grad_norm": 0.2804770684477757, |
| "learning_rate": 1.8233082706766918e-05, |
| "loss": 0.2659, |
| "step": 1788 |
| }, |
| { |
| "epoch": 2.0146478873239437, |
| "grad_norm": 0.25143467930958646, |
| "learning_rate": 1.821219715956558e-05, |
| "loss": 0.2622, |
| "step": 1789 |
| }, |
| { |
| "epoch": 2.0157746478873237, |
| "grad_norm": 0.3047744277993807, |
| "learning_rate": 1.8191311612364244e-05, |
| "loss": 0.2755, |
| "step": 1790 |
| }, |
| { |
| "epoch": 2.016901408450704, |
| "grad_norm": 0.2885467754272612, |
| "learning_rate": 1.8170426065162908e-05, |
| "loss": 0.2674, |
| "step": 1791 |
| }, |
| { |
| "epoch": 2.0180281690140847, |
| "grad_norm": 0.25497456315156125, |
| "learning_rate": 1.814954051796157e-05, |
| "loss": 0.2674, |
| "step": 1792 |
| }, |
| { |
| "epoch": 2.0191549295774647, |
| "grad_norm": 0.26183676682755286, |
| "learning_rate": 1.8128654970760235e-05, |
| "loss": 0.2755, |
| "step": 1793 |
| }, |
| { |
| "epoch": 2.020281690140845, |
| "grad_norm": 0.2647018970469358, |
| "learning_rate": 1.8107769423558898e-05, |
| "loss": 0.2668, |
| "step": 1794 |
| }, |
| { |
| "epoch": 2.021408450704225, |
| "grad_norm": 0.2579179710408074, |
| "learning_rate": 1.808688387635756e-05, |
| "loss": 0.2892, |
| "step": 1795 |
| }, |
| { |
| "epoch": 2.0225352112676056, |
| "grad_norm": 0.26710273941351514, |
| "learning_rate": 1.8065998329156225e-05, |
| "loss": 0.2754, |
| "step": 1796 |
| }, |
| { |
| "epoch": 2.023661971830986, |
| "grad_norm": 0.25497985314228244, |
| "learning_rate": 1.8045112781954888e-05, |
| "loss": 0.2734, |
| "step": 1797 |
| }, |
| { |
| "epoch": 2.024788732394366, |
| "grad_norm": 0.23892301351613357, |
| "learning_rate": 1.802422723475355e-05, |
| "loss": 0.276, |
| "step": 1798 |
| }, |
| { |
| "epoch": 2.0259154929577465, |
| "grad_norm": 0.26278511985156594, |
| "learning_rate": 1.8003341687552215e-05, |
| "loss": 0.2723, |
| "step": 1799 |
| }, |
| { |
| "epoch": 2.0270422535211265, |
| "grad_norm": 0.24659026721963412, |
| "learning_rate": 1.7982456140350878e-05, |
| "loss": 0.2689, |
| "step": 1800 |
| }, |
| { |
| "epoch": 2.028169014084507, |
| "grad_norm": 0.24799078147607242, |
| "learning_rate": 1.796157059314954e-05, |
| "loss": 0.2776, |
| "step": 1801 |
| }, |
| { |
| "epoch": 2.0292957746478875, |
| "grad_norm": 0.22333418600097785, |
| "learning_rate": 1.7940685045948205e-05, |
| "loss": 0.2816, |
| "step": 1802 |
| }, |
| { |
| "epoch": 2.0304225352112675, |
| "grad_norm": 0.23522702564048342, |
| "learning_rate": 1.791979949874687e-05, |
| "loss": 0.2762, |
| "step": 1803 |
| }, |
| { |
| "epoch": 2.031549295774648, |
| "grad_norm": 0.25393937508910336, |
| "learning_rate": 1.7898913951545532e-05, |
| "loss": 0.2831, |
| "step": 1804 |
| }, |
| { |
| "epoch": 2.032676056338028, |
| "grad_norm": 0.23768502018015894, |
| "learning_rate": 1.7878028404344195e-05, |
| "loss": 0.2585, |
| "step": 1805 |
| }, |
| { |
| "epoch": 2.0338028169014084, |
| "grad_norm": 0.2503858979894685, |
| "learning_rate": 1.785714285714286e-05, |
| "loss": 0.2644, |
| "step": 1806 |
| }, |
| { |
| "epoch": 2.034929577464789, |
| "grad_norm": 0.26131973579511447, |
| "learning_rate": 1.7836257309941522e-05, |
| "loss": 0.2693, |
| "step": 1807 |
| }, |
| { |
| "epoch": 2.036056338028169, |
| "grad_norm": 0.22749664547284826, |
| "learning_rate": 1.7815371762740185e-05, |
| "loss": 0.2715, |
| "step": 1808 |
| }, |
| { |
| "epoch": 2.0371830985915493, |
| "grad_norm": 0.27026999257320183, |
| "learning_rate": 1.779448621553885e-05, |
| "loss": 0.2692, |
| "step": 1809 |
| }, |
| { |
| "epoch": 2.03830985915493, |
| "grad_norm": 0.22997409267610297, |
| "learning_rate": 1.7773600668337512e-05, |
| "loss": 0.2637, |
| "step": 1810 |
| }, |
| { |
| "epoch": 2.03943661971831, |
| "grad_norm": 0.23763229339076203, |
| "learning_rate": 1.7752715121136172e-05, |
| "loss": 0.2758, |
| "step": 1811 |
| }, |
| { |
| "epoch": 2.0405633802816903, |
| "grad_norm": 0.2569594050394131, |
| "learning_rate": 1.773182957393484e-05, |
| "loss": 0.2884, |
| "step": 1812 |
| }, |
| { |
| "epoch": 2.0416901408450703, |
| "grad_norm": 0.2225541191637147, |
| "learning_rate": 1.77109440267335e-05, |
| "loss": 0.2688, |
| "step": 1813 |
| }, |
| { |
| "epoch": 2.0428169014084507, |
| "grad_norm": 0.26774599371649005, |
| "learning_rate": 1.7690058479532165e-05, |
| "loss": 0.2585, |
| "step": 1814 |
| }, |
| { |
| "epoch": 2.043943661971831, |
| "grad_norm": 0.24749302500377496, |
| "learning_rate": 1.7669172932330825e-05, |
| "loss": 0.2678, |
| "step": 1815 |
| }, |
| { |
| "epoch": 2.045070422535211, |
| "grad_norm": 0.2101377864414631, |
| "learning_rate": 1.7648287385129492e-05, |
| "loss": 0.263, |
| "step": 1816 |
| }, |
| { |
| "epoch": 2.0461971830985917, |
| "grad_norm": 0.25971366791251355, |
| "learning_rate": 1.7627401837928152e-05, |
| "loss": 0.2687, |
| "step": 1817 |
| }, |
| { |
| "epoch": 2.0473239436619717, |
| "grad_norm": 0.2639815940476354, |
| "learning_rate": 1.760651629072682e-05, |
| "loss": 0.2863, |
| "step": 1818 |
| }, |
| { |
| "epoch": 2.048450704225352, |
| "grad_norm": 0.24759316454790214, |
| "learning_rate": 1.758563074352548e-05, |
| "loss": 0.2618, |
| "step": 1819 |
| }, |
| { |
| "epoch": 2.0495774647887326, |
| "grad_norm": 0.21908544281153025, |
| "learning_rate": 1.7564745196324146e-05, |
| "loss": 0.262, |
| "step": 1820 |
| }, |
| { |
| "epoch": 2.0507042253521126, |
| "grad_norm": 0.24986814532092871, |
| "learning_rate": 1.7543859649122806e-05, |
| "loss": 0.2752, |
| "step": 1821 |
| }, |
| { |
| "epoch": 2.051830985915493, |
| "grad_norm": 0.22903689190851212, |
| "learning_rate": 1.7522974101921472e-05, |
| "loss": 0.2748, |
| "step": 1822 |
| }, |
| { |
| "epoch": 2.052957746478873, |
| "grad_norm": 0.24241577149726312, |
| "learning_rate": 1.7502088554720132e-05, |
| "loss": 0.2747, |
| "step": 1823 |
| }, |
| { |
| "epoch": 2.0540845070422535, |
| "grad_norm": 0.23394955802543638, |
| "learning_rate": 1.74812030075188e-05, |
| "loss": 0.2641, |
| "step": 1824 |
| }, |
| { |
| "epoch": 2.055211267605634, |
| "grad_norm": 0.2560912344902298, |
| "learning_rate": 1.746031746031746e-05, |
| "loss": 0.2754, |
| "step": 1825 |
| }, |
| { |
| "epoch": 2.056338028169014, |
| "grad_norm": 0.2598392341032889, |
| "learning_rate": 1.7439431913116126e-05, |
| "loss": 0.2872, |
| "step": 1826 |
| }, |
| { |
| "epoch": 2.0574647887323945, |
| "grad_norm": 0.22978850883466367, |
| "learning_rate": 1.7418546365914786e-05, |
| "loss": 0.2598, |
| "step": 1827 |
| }, |
| { |
| "epoch": 2.0585915492957745, |
| "grad_norm": 0.22254614744090132, |
| "learning_rate": 1.7397660818713453e-05, |
| "loss": 0.2517, |
| "step": 1828 |
| }, |
| { |
| "epoch": 2.059718309859155, |
| "grad_norm": 0.24504177887758496, |
| "learning_rate": 1.7376775271512113e-05, |
| "loss": 0.2756, |
| "step": 1829 |
| }, |
| { |
| "epoch": 2.0608450704225354, |
| "grad_norm": 0.23609279494765378, |
| "learning_rate": 1.735588972431078e-05, |
| "loss": 0.2529, |
| "step": 1830 |
| }, |
| { |
| "epoch": 2.0619718309859154, |
| "grad_norm": 0.2572397612590121, |
| "learning_rate": 1.733500417710944e-05, |
| "loss": 0.2736, |
| "step": 1831 |
| }, |
| { |
| "epoch": 2.063098591549296, |
| "grad_norm": 0.24132343329906028, |
| "learning_rate": 1.7314118629908106e-05, |
| "loss": 0.268, |
| "step": 1832 |
| }, |
| { |
| "epoch": 2.064225352112676, |
| "grad_norm": 0.2571060147938181, |
| "learning_rate": 1.7293233082706766e-05, |
| "loss": 0.2635, |
| "step": 1833 |
| }, |
| { |
| "epoch": 2.0653521126760563, |
| "grad_norm": 0.2207625937897721, |
| "learning_rate": 1.7272347535505433e-05, |
| "loss": 0.2652, |
| "step": 1834 |
| }, |
| { |
| "epoch": 2.066478873239437, |
| "grad_norm": 0.22787090805449173, |
| "learning_rate": 1.7251461988304093e-05, |
| "loss": 0.2598, |
| "step": 1835 |
| }, |
| { |
| "epoch": 2.067605633802817, |
| "grad_norm": 0.2357951714204803, |
| "learning_rate": 1.723057644110276e-05, |
| "loss": 0.2702, |
| "step": 1836 |
| }, |
| { |
| "epoch": 2.0687323943661973, |
| "grad_norm": 0.26844254869193424, |
| "learning_rate": 1.720969089390142e-05, |
| "loss": 0.2688, |
| "step": 1837 |
| }, |
| { |
| "epoch": 2.0698591549295773, |
| "grad_norm": 0.24650719910111354, |
| "learning_rate": 1.7188805346700086e-05, |
| "loss": 0.2845, |
| "step": 1838 |
| }, |
| { |
| "epoch": 2.0709859154929577, |
| "grad_norm": 0.21170408427745419, |
| "learning_rate": 1.7167919799498746e-05, |
| "loss": 0.2722, |
| "step": 1839 |
| }, |
| { |
| "epoch": 2.072112676056338, |
| "grad_norm": 0.23270936286588711, |
| "learning_rate": 1.714703425229741e-05, |
| "loss": 0.2732, |
| "step": 1840 |
| }, |
| { |
| "epoch": 2.073239436619718, |
| "grad_norm": 0.2776846836673971, |
| "learning_rate": 1.7126148705096073e-05, |
| "loss": 0.275, |
| "step": 1841 |
| }, |
| { |
| "epoch": 2.0743661971830987, |
| "grad_norm": 0.2425811214938422, |
| "learning_rate": 1.7105263157894737e-05, |
| "loss": 0.2696, |
| "step": 1842 |
| }, |
| { |
| "epoch": 2.0754929577464787, |
| "grad_norm": 0.22164171885336945, |
| "learning_rate": 1.70843776106934e-05, |
| "loss": 0.2592, |
| "step": 1843 |
| }, |
| { |
| "epoch": 2.076619718309859, |
| "grad_norm": 0.23609097727556208, |
| "learning_rate": 1.7063492063492063e-05, |
| "loss": 0.2785, |
| "step": 1844 |
| }, |
| { |
| "epoch": 2.0777464788732396, |
| "grad_norm": 0.20312418904361224, |
| "learning_rate": 1.7042606516290727e-05, |
| "loss": 0.2538, |
| "step": 1845 |
| }, |
| { |
| "epoch": 2.0788732394366196, |
| "grad_norm": 0.21196290946837307, |
| "learning_rate": 1.702172096908939e-05, |
| "loss": 0.2567, |
| "step": 1846 |
| }, |
| { |
| "epoch": 2.08, |
| "grad_norm": 0.229275235692593, |
| "learning_rate": 1.7000835421888053e-05, |
| "loss": 0.2825, |
| "step": 1847 |
| }, |
| { |
| "epoch": 2.08112676056338, |
| "grad_norm": 0.23106628116686928, |
| "learning_rate": 1.6979949874686717e-05, |
| "loss": 0.2672, |
| "step": 1848 |
| }, |
| { |
| "epoch": 2.0822535211267605, |
| "grad_norm": 0.22809768966738436, |
| "learning_rate": 1.695906432748538e-05, |
| "loss": 0.2758, |
| "step": 1849 |
| }, |
| { |
| "epoch": 2.083380281690141, |
| "grad_norm": 0.23270495974715888, |
| "learning_rate": 1.6938178780284044e-05, |
| "loss": 0.2824, |
| "step": 1850 |
| }, |
| { |
| "epoch": 2.084507042253521, |
| "grad_norm": 0.22996623133844957, |
| "learning_rate": 1.6917293233082707e-05, |
| "loss": 0.2601, |
| "step": 1851 |
| }, |
| { |
| "epoch": 2.0856338028169015, |
| "grad_norm": 0.22968083512308185, |
| "learning_rate": 1.689640768588137e-05, |
| "loss": 0.2729, |
| "step": 1852 |
| }, |
| { |
| "epoch": 2.0867605633802815, |
| "grad_norm": 0.24673638985141516, |
| "learning_rate": 1.6875522138680034e-05, |
| "loss": 0.2567, |
| "step": 1853 |
| }, |
| { |
| "epoch": 2.087887323943662, |
| "grad_norm": 0.23766764086953465, |
| "learning_rate": 1.6854636591478697e-05, |
| "loss": 0.2701, |
| "step": 1854 |
| }, |
| { |
| "epoch": 2.0890140845070424, |
| "grad_norm": 0.2399021649038314, |
| "learning_rate": 1.683375104427736e-05, |
| "loss": 0.2696, |
| "step": 1855 |
| }, |
| { |
| "epoch": 2.0901408450704224, |
| "grad_norm": 0.23486201387497946, |
| "learning_rate": 1.6812865497076024e-05, |
| "loss": 0.2721, |
| "step": 1856 |
| }, |
| { |
| "epoch": 2.091267605633803, |
| "grad_norm": 0.22721396393200446, |
| "learning_rate": 1.6791979949874687e-05, |
| "loss": 0.2733, |
| "step": 1857 |
| }, |
| { |
| "epoch": 2.092394366197183, |
| "grad_norm": 0.2355585301253839, |
| "learning_rate": 1.677109440267335e-05, |
| "loss": 0.278, |
| "step": 1858 |
| }, |
| { |
| "epoch": 2.0935211267605633, |
| "grad_norm": 0.2178378033963335, |
| "learning_rate": 1.6750208855472014e-05, |
| "loss": 0.2692, |
| "step": 1859 |
| }, |
| { |
| "epoch": 2.094647887323944, |
| "grad_norm": 0.24264463238025005, |
| "learning_rate": 1.6729323308270677e-05, |
| "loss": 0.2727, |
| "step": 1860 |
| }, |
| { |
| "epoch": 2.095774647887324, |
| "grad_norm": 0.21911277932782425, |
| "learning_rate": 1.670843776106934e-05, |
| "loss": 0.2505, |
| "step": 1861 |
| }, |
| { |
| "epoch": 2.0969014084507043, |
| "grad_norm": 0.23178142619843944, |
| "learning_rate": 1.6687552213868004e-05, |
| "loss": 0.2619, |
| "step": 1862 |
| }, |
| { |
| "epoch": 2.0980281690140847, |
| "grad_norm": 0.252731984105388, |
| "learning_rate": 1.6666666666666667e-05, |
| "loss": 0.2784, |
| "step": 1863 |
| }, |
| { |
| "epoch": 2.0991549295774647, |
| "grad_norm": 0.21672351302271223, |
| "learning_rate": 1.664578111946533e-05, |
| "loss": 0.2636, |
| "step": 1864 |
| }, |
| { |
| "epoch": 2.100281690140845, |
| "grad_norm": 0.25773586636908935, |
| "learning_rate": 1.6624895572263994e-05, |
| "loss": 0.2798, |
| "step": 1865 |
| }, |
| { |
| "epoch": 2.101408450704225, |
| "grad_norm": 0.25307351443490506, |
| "learning_rate": 1.6604010025062658e-05, |
| "loss": 0.2706, |
| "step": 1866 |
| }, |
| { |
| "epoch": 2.1025352112676057, |
| "grad_norm": 0.27472243989047346, |
| "learning_rate": 1.658312447786132e-05, |
| "loss": 0.2946, |
| "step": 1867 |
| }, |
| { |
| "epoch": 2.103661971830986, |
| "grad_norm": 0.24031345131054146, |
| "learning_rate": 1.6562238930659984e-05, |
| "loss": 0.2682, |
| "step": 1868 |
| }, |
| { |
| "epoch": 2.104788732394366, |
| "grad_norm": 0.25162922606579063, |
| "learning_rate": 1.6541353383458648e-05, |
| "loss": 0.283, |
| "step": 1869 |
| }, |
| { |
| "epoch": 2.1059154929577466, |
| "grad_norm": 0.24837686555353697, |
| "learning_rate": 1.652046783625731e-05, |
| "loss": 0.2799, |
| "step": 1870 |
| }, |
| { |
| "epoch": 2.1070422535211266, |
| "grad_norm": 0.23073933513436476, |
| "learning_rate": 1.6499582289055974e-05, |
| "loss": 0.2618, |
| "step": 1871 |
| }, |
| { |
| "epoch": 2.108169014084507, |
| "grad_norm": 0.2153699627040841, |
| "learning_rate": 1.6478696741854638e-05, |
| "loss": 0.2615, |
| "step": 1872 |
| }, |
| { |
| "epoch": 2.1092957746478875, |
| "grad_norm": 0.23948382707329402, |
| "learning_rate": 1.64578111946533e-05, |
| "loss": 0.2728, |
| "step": 1873 |
| }, |
| { |
| "epoch": 2.1104225352112675, |
| "grad_norm": 0.2520250671386909, |
| "learning_rate": 1.6436925647451965e-05, |
| "loss": 0.2685, |
| "step": 1874 |
| }, |
| { |
| "epoch": 2.111549295774648, |
| "grad_norm": 0.2553208905599901, |
| "learning_rate": 1.6416040100250628e-05, |
| "loss": 0.2694, |
| "step": 1875 |
| }, |
| { |
| "epoch": 2.112676056338028, |
| "grad_norm": 0.23661034067017267, |
| "learning_rate": 1.639515455304929e-05, |
| "loss": 0.2612, |
| "step": 1876 |
| }, |
| { |
| "epoch": 2.1138028169014085, |
| "grad_norm": 0.2751315951325099, |
| "learning_rate": 1.6374269005847955e-05, |
| "loss": 0.2683, |
| "step": 1877 |
| }, |
| { |
| "epoch": 2.114929577464789, |
| "grad_norm": 0.2518068149348879, |
| "learning_rate": 1.6353383458646618e-05, |
| "loss": 0.2836, |
| "step": 1878 |
| }, |
| { |
| "epoch": 2.116056338028169, |
| "grad_norm": 0.2611569444554147, |
| "learning_rate": 1.6332497911445278e-05, |
| "loss": 0.291, |
| "step": 1879 |
| }, |
| { |
| "epoch": 2.1171830985915494, |
| "grad_norm": 0.28586130206224214, |
| "learning_rate": 1.6311612364243945e-05, |
| "loss": 0.2582, |
| "step": 1880 |
| }, |
| { |
| "epoch": 2.1183098591549294, |
| "grad_norm": 0.2683318383289673, |
| "learning_rate": 1.6290726817042605e-05, |
| "loss": 0.2725, |
| "step": 1881 |
| }, |
| { |
| "epoch": 2.11943661971831, |
| "grad_norm": 0.29793498860011597, |
| "learning_rate": 1.626984126984127e-05, |
| "loss": 0.2909, |
| "step": 1882 |
| }, |
| { |
| "epoch": 2.1205633802816903, |
| "grad_norm": 0.21648859977673668, |
| "learning_rate": 1.624895572263993e-05, |
| "loss": 0.2911, |
| "step": 1883 |
| }, |
| { |
| "epoch": 2.1216901408450703, |
| "grad_norm": 0.2519696022244229, |
| "learning_rate": 1.62280701754386e-05, |
| "loss": 0.2632, |
| "step": 1884 |
| }, |
| { |
| "epoch": 2.122816901408451, |
| "grad_norm": 0.2409211931064604, |
| "learning_rate": 1.620718462823726e-05, |
| "loss": 0.2569, |
| "step": 1885 |
| }, |
| { |
| "epoch": 2.123943661971831, |
| "grad_norm": 0.2405120416752815, |
| "learning_rate": 1.6186299081035925e-05, |
| "loss": 0.2773, |
| "step": 1886 |
| }, |
| { |
| "epoch": 2.1250704225352113, |
| "grad_norm": 0.23129455393921802, |
| "learning_rate": 1.6165413533834585e-05, |
| "loss": 0.2609, |
| "step": 1887 |
| }, |
| { |
| "epoch": 2.1261971830985917, |
| "grad_norm": 0.30413505581317013, |
| "learning_rate": 1.6144527986633252e-05, |
| "loss": 0.2961, |
| "step": 1888 |
| }, |
| { |
| "epoch": 2.1273239436619718, |
| "grad_norm": 0.22533717087570718, |
| "learning_rate": 1.6123642439431912e-05, |
| "loss": 0.273, |
| "step": 1889 |
| }, |
| { |
| "epoch": 2.128450704225352, |
| "grad_norm": 0.21954251927197982, |
| "learning_rate": 1.610275689223058e-05, |
| "loss": 0.2724, |
| "step": 1890 |
| }, |
| { |
| "epoch": 2.129577464788732, |
| "grad_norm": 0.22588652986281654, |
| "learning_rate": 1.608187134502924e-05, |
| "loss": 0.2752, |
| "step": 1891 |
| }, |
| { |
| "epoch": 2.1307042253521127, |
| "grad_norm": 0.2342667299365894, |
| "learning_rate": 1.6060985797827905e-05, |
| "loss": 0.2956, |
| "step": 1892 |
| }, |
| { |
| "epoch": 2.131830985915493, |
| "grad_norm": 0.22987006498758314, |
| "learning_rate": 1.6040100250626565e-05, |
| "loss": 0.2735, |
| "step": 1893 |
| }, |
| { |
| "epoch": 2.132957746478873, |
| "grad_norm": 0.2165995732033388, |
| "learning_rate": 1.6019214703425232e-05, |
| "loss": 0.2658, |
| "step": 1894 |
| }, |
| { |
| "epoch": 2.1340845070422536, |
| "grad_norm": 0.23369412691232638, |
| "learning_rate": 1.5998329156223892e-05, |
| "loss": 0.2768, |
| "step": 1895 |
| }, |
| { |
| "epoch": 2.1352112676056336, |
| "grad_norm": 0.22527571990992717, |
| "learning_rate": 1.597744360902256e-05, |
| "loss": 0.2803, |
| "step": 1896 |
| }, |
| { |
| "epoch": 2.136338028169014, |
| "grad_norm": 0.22307214273493317, |
| "learning_rate": 1.595655806182122e-05, |
| "loss": 0.2587, |
| "step": 1897 |
| }, |
| { |
| "epoch": 2.1374647887323945, |
| "grad_norm": 0.2308702963968873, |
| "learning_rate": 1.5935672514619886e-05, |
| "loss": 0.2667, |
| "step": 1898 |
| }, |
| { |
| "epoch": 2.1385915492957746, |
| "grad_norm": 0.21884535384212295, |
| "learning_rate": 1.5914786967418546e-05, |
| "loss": 0.2673, |
| "step": 1899 |
| }, |
| { |
| "epoch": 2.139718309859155, |
| "grad_norm": 0.23083539873570982, |
| "learning_rate": 1.5893901420217212e-05, |
| "loss": 0.2664, |
| "step": 1900 |
| }, |
| { |
| "epoch": 2.140845070422535, |
| "grad_norm": 0.24268574749356314, |
| "learning_rate": 1.5873015873015872e-05, |
| "loss": 0.2776, |
| "step": 1901 |
| }, |
| { |
| "epoch": 2.1419718309859155, |
| "grad_norm": 0.22809985206158837, |
| "learning_rate": 1.585213032581454e-05, |
| "loss": 0.2832, |
| "step": 1902 |
| }, |
| { |
| "epoch": 2.143098591549296, |
| "grad_norm": 0.21387933791759006, |
| "learning_rate": 1.58312447786132e-05, |
| "loss": 0.251, |
| "step": 1903 |
| }, |
| { |
| "epoch": 2.144225352112676, |
| "grad_norm": 0.2353271860833369, |
| "learning_rate": 1.5810359231411866e-05, |
| "loss": 0.2675, |
| "step": 1904 |
| }, |
| { |
| "epoch": 2.1453521126760564, |
| "grad_norm": 0.23509764350054024, |
| "learning_rate": 1.5789473684210526e-05, |
| "loss": 0.2615, |
| "step": 1905 |
| }, |
| { |
| "epoch": 2.1464788732394364, |
| "grad_norm": 0.22530232891108226, |
| "learning_rate": 1.5768588137009193e-05, |
| "loss": 0.2751, |
| "step": 1906 |
| }, |
| { |
| "epoch": 2.147605633802817, |
| "grad_norm": 0.23442604251542756, |
| "learning_rate": 1.5747702589807853e-05, |
| "loss": 0.2665, |
| "step": 1907 |
| }, |
| { |
| "epoch": 2.1487323943661973, |
| "grad_norm": 1.2319080043249988, |
| "learning_rate": 1.5726817042606516e-05, |
| "loss": 0.2872, |
| "step": 1908 |
| }, |
| { |
| "epoch": 2.1498591549295774, |
| "grad_norm": 0.2619177006852762, |
| "learning_rate": 1.570593149540518e-05, |
| "loss": 0.2799, |
| "step": 1909 |
| }, |
| { |
| "epoch": 2.150985915492958, |
| "grad_norm": 0.21937142768827655, |
| "learning_rate": 1.5685045948203843e-05, |
| "loss": 0.2815, |
| "step": 1910 |
| }, |
| { |
| "epoch": 2.152112676056338, |
| "grad_norm": 0.20711092031146155, |
| "learning_rate": 1.5664160401002506e-05, |
| "loss": 0.2655, |
| "step": 1911 |
| }, |
| { |
| "epoch": 2.1532394366197183, |
| "grad_norm": 0.24552607736755352, |
| "learning_rate": 1.564327485380117e-05, |
| "loss": 0.2786, |
| "step": 1912 |
| }, |
| { |
| "epoch": 2.1543661971830987, |
| "grad_norm": 0.28078844381407797, |
| "learning_rate": 1.5622389306599833e-05, |
| "loss": 0.2875, |
| "step": 1913 |
| }, |
| { |
| "epoch": 2.1554929577464788, |
| "grad_norm": 0.21221403574579847, |
| "learning_rate": 1.5601503759398496e-05, |
| "loss": 0.2664, |
| "step": 1914 |
| }, |
| { |
| "epoch": 2.156619718309859, |
| "grad_norm": 0.2602669647184901, |
| "learning_rate": 1.558061821219716e-05, |
| "loss": 0.2756, |
| "step": 1915 |
| }, |
| { |
| "epoch": 2.1577464788732392, |
| "grad_norm": 0.25509195485127495, |
| "learning_rate": 1.5559732664995823e-05, |
| "loss": 0.266, |
| "step": 1916 |
| }, |
| { |
| "epoch": 2.1588732394366197, |
| "grad_norm": 0.2328826021778946, |
| "learning_rate": 1.5538847117794486e-05, |
| "loss": 0.2735, |
| "step": 1917 |
| }, |
| { |
| "epoch": 2.16, |
| "grad_norm": 0.242712790494576, |
| "learning_rate": 1.551796157059315e-05, |
| "loss": 0.2759, |
| "step": 1918 |
| }, |
| { |
| "epoch": 2.16112676056338, |
| "grad_norm": 0.23179279279221807, |
| "learning_rate": 1.5497076023391813e-05, |
| "loss": 0.2617, |
| "step": 1919 |
| }, |
| { |
| "epoch": 2.1622535211267606, |
| "grad_norm": 0.26249600722408734, |
| "learning_rate": 1.5476190476190476e-05, |
| "loss": 0.2723, |
| "step": 1920 |
| }, |
| { |
| "epoch": 2.1633802816901406, |
| "grad_norm": 0.2256678161675639, |
| "learning_rate": 1.545530492898914e-05, |
| "loss": 0.2697, |
| "step": 1921 |
| }, |
| { |
| "epoch": 2.164507042253521, |
| "grad_norm": 0.22999297934710036, |
| "learning_rate": 1.5434419381787803e-05, |
| "loss": 0.2619, |
| "step": 1922 |
| }, |
| { |
| "epoch": 2.1656338028169015, |
| "grad_norm": 0.24393784025390963, |
| "learning_rate": 1.5413533834586467e-05, |
| "loss": 0.2768, |
| "step": 1923 |
| }, |
| { |
| "epoch": 2.1667605633802816, |
| "grad_norm": 0.23896347948557023, |
| "learning_rate": 1.539264828738513e-05, |
| "loss": 0.2673, |
| "step": 1924 |
| }, |
| { |
| "epoch": 2.167887323943662, |
| "grad_norm": 0.21106922466282554, |
| "learning_rate": 1.5371762740183793e-05, |
| "loss": 0.269, |
| "step": 1925 |
| }, |
| { |
| "epoch": 2.169014084507042, |
| "grad_norm": 0.23530802533165923, |
| "learning_rate": 1.5350877192982457e-05, |
| "loss": 0.2835, |
| "step": 1926 |
| }, |
| { |
| "epoch": 2.1701408450704225, |
| "grad_norm": 0.23668876249523446, |
| "learning_rate": 1.532999164578112e-05, |
| "loss": 0.2899, |
| "step": 1927 |
| }, |
| { |
| "epoch": 2.171267605633803, |
| "grad_norm": 0.23153335234833616, |
| "learning_rate": 1.5309106098579783e-05, |
| "loss": 0.2885, |
| "step": 1928 |
| }, |
| { |
| "epoch": 2.172394366197183, |
| "grad_norm": 0.23935356077783637, |
| "learning_rate": 1.5288220551378447e-05, |
| "loss": 0.262, |
| "step": 1929 |
| }, |
| { |
| "epoch": 2.1735211267605634, |
| "grad_norm": 0.24217670995829305, |
| "learning_rate": 1.526733500417711e-05, |
| "loss": 0.2661, |
| "step": 1930 |
| }, |
| { |
| "epoch": 2.1746478873239434, |
| "grad_norm": 0.23240003368990678, |
| "learning_rate": 1.5246449456975772e-05, |
| "loss": 0.2725, |
| "step": 1931 |
| }, |
| { |
| "epoch": 2.175774647887324, |
| "grad_norm": 0.23175726446485997, |
| "learning_rate": 1.5225563909774437e-05, |
| "loss": 0.2638, |
| "step": 1932 |
| }, |
| { |
| "epoch": 2.1769014084507043, |
| "grad_norm": 0.2111102205308594, |
| "learning_rate": 1.5204678362573099e-05, |
| "loss": 0.2673, |
| "step": 1933 |
| }, |
| { |
| "epoch": 2.1780281690140844, |
| "grad_norm": 0.23634239259785408, |
| "learning_rate": 1.5183792815371764e-05, |
| "loss": 0.2817, |
| "step": 1934 |
| }, |
| { |
| "epoch": 2.179154929577465, |
| "grad_norm": 0.225470067506582, |
| "learning_rate": 1.5162907268170425e-05, |
| "loss": 0.2791, |
| "step": 1935 |
| }, |
| { |
| "epoch": 2.1802816901408453, |
| "grad_norm": 0.23640365976070718, |
| "learning_rate": 1.514202172096909e-05, |
| "loss": 0.2739, |
| "step": 1936 |
| }, |
| { |
| "epoch": 2.1814084507042253, |
| "grad_norm": 0.22139305552383293, |
| "learning_rate": 1.5121136173767752e-05, |
| "loss": 0.2649, |
| "step": 1937 |
| }, |
| { |
| "epoch": 2.1825352112676057, |
| "grad_norm": 0.23104278414531945, |
| "learning_rate": 1.5100250626566417e-05, |
| "loss": 0.2585, |
| "step": 1938 |
| }, |
| { |
| "epoch": 2.1836619718309858, |
| "grad_norm": 0.20957672014576867, |
| "learning_rate": 1.5079365079365079e-05, |
| "loss": 0.2797, |
| "step": 1939 |
| }, |
| { |
| "epoch": 2.184788732394366, |
| "grad_norm": 0.34330613589580256, |
| "learning_rate": 1.5058479532163744e-05, |
| "loss": 0.2806, |
| "step": 1940 |
| }, |
| { |
| "epoch": 2.1859154929577467, |
| "grad_norm": 0.20503771036948973, |
| "learning_rate": 1.5037593984962406e-05, |
| "loss": 0.269, |
| "step": 1941 |
| }, |
| { |
| "epoch": 2.1870422535211267, |
| "grad_norm": 0.24007031747179594, |
| "learning_rate": 1.501670843776107e-05, |
| "loss": 0.2728, |
| "step": 1942 |
| }, |
| { |
| "epoch": 2.188169014084507, |
| "grad_norm": 0.25402861782615715, |
| "learning_rate": 1.4995822890559732e-05, |
| "loss": 0.2851, |
| "step": 1943 |
| }, |
| { |
| "epoch": 2.189295774647887, |
| "grad_norm": 0.23051212385499653, |
| "learning_rate": 1.4974937343358397e-05, |
| "loss": 0.2699, |
| "step": 1944 |
| }, |
| { |
| "epoch": 2.1904225352112676, |
| "grad_norm": 0.23558216483964192, |
| "learning_rate": 1.4954051796157059e-05, |
| "loss": 0.2665, |
| "step": 1945 |
| }, |
| { |
| "epoch": 2.191549295774648, |
| "grad_norm": 0.2515237013022344, |
| "learning_rate": 1.4933166248955724e-05, |
| "loss": 0.278, |
| "step": 1946 |
| }, |
| { |
| "epoch": 2.192676056338028, |
| "grad_norm": 0.2375005933780065, |
| "learning_rate": 1.4912280701754386e-05, |
| "loss": 0.2669, |
| "step": 1947 |
| }, |
| { |
| "epoch": 2.1938028169014085, |
| "grad_norm": 0.23052546797566495, |
| "learning_rate": 1.4891395154553051e-05, |
| "loss": 0.2736, |
| "step": 1948 |
| }, |
| { |
| "epoch": 2.1949295774647886, |
| "grad_norm": 0.24459193123583572, |
| "learning_rate": 1.4870509607351713e-05, |
| "loss": 0.2789, |
| "step": 1949 |
| }, |
| { |
| "epoch": 2.196056338028169, |
| "grad_norm": 0.22373640891967844, |
| "learning_rate": 1.4849624060150378e-05, |
| "loss": 0.2708, |
| "step": 1950 |
| }, |
| { |
| "epoch": 2.1971830985915495, |
| "grad_norm": 0.23185887909498595, |
| "learning_rate": 1.482873851294904e-05, |
| "loss": 0.2872, |
| "step": 1951 |
| }, |
| { |
| "epoch": 2.1983098591549295, |
| "grad_norm": 0.23657985465235495, |
| "learning_rate": 1.4807852965747704e-05, |
| "loss": 0.2675, |
| "step": 1952 |
| }, |
| { |
| "epoch": 2.19943661971831, |
| "grad_norm": 0.22371982233885945, |
| "learning_rate": 1.4786967418546366e-05, |
| "loss": 0.2743, |
| "step": 1953 |
| }, |
| { |
| "epoch": 2.20056338028169, |
| "grad_norm": 0.21136209655985594, |
| "learning_rate": 1.4766081871345031e-05, |
| "loss": 0.2542, |
| "step": 1954 |
| }, |
| { |
| "epoch": 2.2016901408450704, |
| "grad_norm": 0.21509580106115853, |
| "learning_rate": 1.4745196324143693e-05, |
| "loss": 0.2659, |
| "step": 1955 |
| }, |
| { |
| "epoch": 2.202816901408451, |
| "grad_norm": 0.23707195283541888, |
| "learning_rate": 1.4724310776942358e-05, |
| "loss": 0.2646, |
| "step": 1956 |
| }, |
| { |
| "epoch": 2.203943661971831, |
| "grad_norm": 0.2061422131991942, |
| "learning_rate": 1.470342522974102e-05, |
| "loss": 0.2616, |
| "step": 1957 |
| }, |
| { |
| "epoch": 2.2050704225352113, |
| "grad_norm": 0.23775130186960383, |
| "learning_rate": 1.4682539682539683e-05, |
| "loss": 0.2899, |
| "step": 1958 |
| }, |
| { |
| "epoch": 2.2061971830985914, |
| "grad_norm": 0.24788983547937893, |
| "learning_rate": 1.4661654135338346e-05, |
| "loss": 0.276, |
| "step": 1959 |
| }, |
| { |
| "epoch": 2.207323943661972, |
| "grad_norm": 0.27365040649107353, |
| "learning_rate": 1.464076858813701e-05, |
| "loss": 0.2742, |
| "step": 1960 |
| }, |
| { |
| "epoch": 2.2084507042253523, |
| "grad_norm": 0.21230372072638573, |
| "learning_rate": 1.4619883040935673e-05, |
| "loss": 0.2651, |
| "step": 1961 |
| }, |
| { |
| "epoch": 2.2095774647887323, |
| "grad_norm": 0.21542674747871746, |
| "learning_rate": 1.4598997493734337e-05, |
| "loss": 0.2746, |
| "step": 1962 |
| }, |
| { |
| "epoch": 2.2107042253521128, |
| "grad_norm": 0.23177548101881454, |
| "learning_rate": 1.4578111946533e-05, |
| "loss": 0.2641, |
| "step": 1963 |
| }, |
| { |
| "epoch": 2.2118309859154928, |
| "grad_norm": 0.22616947980308638, |
| "learning_rate": 1.4557226399331663e-05, |
| "loss": 0.2725, |
| "step": 1964 |
| }, |
| { |
| "epoch": 2.2129577464788732, |
| "grad_norm": 0.22554747009445886, |
| "learning_rate": 1.4536340852130325e-05, |
| "loss": 0.2939, |
| "step": 1965 |
| }, |
| { |
| "epoch": 2.2140845070422537, |
| "grad_norm": 0.22472896879143436, |
| "learning_rate": 1.451545530492899e-05, |
| "loss": 0.2681, |
| "step": 1966 |
| }, |
| { |
| "epoch": 2.2152112676056337, |
| "grad_norm": 0.20992257256681324, |
| "learning_rate": 1.4494569757727652e-05, |
| "loss": 0.2596, |
| "step": 1967 |
| }, |
| { |
| "epoch": 2.216338028169014, |
| "grad_norm": 0.22871327607085135, |
| "learning_rate": 1.4473684210526317e-05, |
| "loss": 0.262, |
| "step": 1968 |
| }, |
| { |
| "epoch": 2.217464788732394, |
| "grad_norm": 0.23046973280343408, |
| "learning_rate": 1.4452798663324978e-05, |
| "loss": 0.2824, |
| "step": 1969 |
| }, |
| { |
| "epoch": 2.2185915492957746, |
| "grad_norm": 0.2096113182042097, |
| "learning_rate": 1.4431913116123644e-05, |
| "loss": 0.272, |
| "step": 1970 |
| }, |
| { |
| "epoch": 2.219718309859155, |
| "grad_norm": 0.20335565864164426, |
| "learning_rate": 1.4411027568922305e-05, |
| "loss": 0.2658, |
| "step": 1971 |
| }, |
| { |
| "epoch": 2.220845070422535, |
| "grad_norm": 0.21750740359226126, |
| "learning_rate": 1.439014202172097e-05, |
| "loss": 0.2744, |
| "step": 1972 |
| }, |
| { |
| "epoch": 2.2219718309859156, |
| "grad_norm": 0.22524066181597557, |
| "learning_rate": 1.4369256474519632e-05, |
| "loss": 0.2753, |
| "step": 1973 |
| }, |
| { |
| "epoch": 2.223098591549296, |
| "grad_norm": 0.23929892166160505, |
| "learning_rate": 1.4348370927318297e-05, |
| "loss": 0.28, |
| "step": 1974 |
| }, |
| { |
| "epoch": 2.224225352112676, |
| "grad_norm": 0.2248746927793624, |
| "learning_rate": 1.4327485380116959e-05, |
| "loss": 0.2734, |
| "step": 1975 |
| }, |
| { |
| "epoch": 2.2253521126760565, |
| "grad_norm": 0.20356256188239322, |
| "learning_rate": 1.4306599832915624e-05, |
| "loss": 0.2675, |
| "step": 1976 |
| }, |
| { |
| "epoch": 2.2264788732394365, |
| "grad_norm": 0.21154119418913328, |
| "learning_rate": 1.4285714285714285e-05, |
| "loss": 0.2658, |
| "step": 1977 |
| }, |
| { |
| "epoch": 2.227605633802817, |
| "grad_norm": 0.21918481100532844, |
| "learning_rate": 1.426482873851295e-05, |
| "loss": 0.2819, |
| "step": 1978 |
| }, |
| { |
| "epoch": 2.2287323943661974, |
| "grad_norm": 0.21407891485923058, |
| "learning_rate": 1.4243943191311612e-05, |
| "loss": 0.2668, |
| "step": 1979 |
| }, |
| { |
| "epoch": 2.2298591549295774, |
| "grad_norm": 0.21161000632068389, |
| "learning_rate": 1.4223057644110277e-05, |
| "loss": 0.2736, |
| "step": 1980 |
| }, |
| { |
| "epoch": 2.230985915492958, |
| "grad_norm": 0.22280466855017053, |
| "learning_rate": 1.4202172096908939e-05, |
| "loss": 0.2734, |
| "step": 1981 |
| }, |
| { |
| "epoch": 2.232112676056338, |
| "grad_norm": 0.23417364326010645, |
| "learning_rate": 1.4181286549707604e-05, |
| "loss": 0.2717, |
| "step": 1982 |
| }, |
| { |
| "epoch": 2.2332394366197184, |
| "grad_norm": 0.21160121555283712, |
| "learning_rate": 1.4160401002506266e-05, |
| "loss": 0.2765, |
| "step": 1983 |
| }, |
| { |
| "epoch": 2.234366197183099, |
| "grad_norm": 0.23573792906579782, |
| "learning_rate": 1.413951545530493e-05, |
| "loss": 0.2843, |
| "step": 1984 |
| }, |
| { |
| "epoch": 2.235492957746479, |
| "grad_norm": 0.23981660549956635, |
| "learning_rate": 1.4118629908103592e-05, |
| "loss": 0.2916, |
| "step": 1985 |
| }, |
| { |
| "epoch": 2.2366197183098593, |
| "grad_norm": 0.2106101201254402, |
| "learning_rate": 1.4097744360902257e-05, |
| "loss": 0.2841, |
| "step": 1986 |
| }, |
| { |
| "epoch": 2.2377464788732393, |
| "grad_norm": 0.23639851155741806, |
| "learning_rate": 1.407685881370092e-05, |
| "loss": 0.277, |
| "step": 1987 |
| }, |
| { |
| "epoch": 2.2388732394366198, |
| "grad_norm": 0.22583083758980332, |
| "learning_rate": 1.4055973266499584e-05, |
| "loss": 0.2823, |
| "step": 1988 |
| }, |
| { |
| "epoch": 2.24, |
| "grad_norm": 0.2223738892554726, |
| "learning_rate": 1.4035087719298246e-05, |
| "loss": 0.271, |
| "step": 1989 |
| }, |
| { |
| "epoch": 2.2411267605633802, |
| "grad_norm": 0.22063232817325545, |
| "learning_rate": 1.4014202172096911e-05, |
| "loss": 0.2734, |
| "step": 1990 |
| }, |
| { |
| "epoch": 2.2422535211267607, |
| "grad_norm": 0.2312948424666875, |
| "learning_rate": 1.3993316624895573e-05, |
| "loss": 0.2699, |
| "step": 1991 |
| }, |
| { |
| "epoch": 2.2433802816901407, |
| "grad_norm": 0.2338652743050092, |
| "learning_rate": 1.3972431077694236e-05, |
| "loss": 0.2778, |
| "step": 1992 |
| }, |
| { |
| "epoch": 2.244507042253521, |
| "grad_norm": 0.24131557161139464, |
| "learning_rate": 1.39515455304929e-05, |
| "loss": 0.2664, |
| "step": 1993 |
| }, |
| { |
| "epoch": 2.2456338028169016, |
| "grad_norm": 0.2489872832324652, |
| "learning_rate": 1.3930659983291563e-05, |
| "loss": 0.2807, |
| "step": 1994 |
| }, |
| { |
| "epoch": 2.2467605633802816, |
| "grad_norm": 0.22613350399229276, |
| "learning_rate": 1.3909774436090226e-05, |
| "loss": 0.2798, |
| "step": 1995 |
| }, |
| { |
| "epoch": 2.247887323943662, |
| "grad_norm": 0.22409833184125685, |
| "learning_rate": 1.388888888888889e-05, |
| "loss": 0.2815, |
| "step": 1996 |
| }, |
| { |
| "epoch": 2.249014084507042, |
| "grad_norm": 0.21011752202705167, |
| "learning_rate": 1.3868003341687553e-05, |
| "loss": 0.269, |
| "step": 1997 |
| }, |
| { |
| "epoch": 2.2501408450704226, |
| "grad_norm": 0.21734688044205402, |
| "learning_rate": 1.3847117794486216e-05, |
| "loss": 0.274, |
| "step": 1998 |
| }, |
| { |
| "epoch": 2.251267605633803, |
| "grad_norm": 0.2450339290069644, |
| "learning_rate": 1.3826232247284878e-05, |
| "loss": 0.2767, |
| "step": 1999 |
| }, |
| { |
| "epoch": 2.252394366197183, |
| "grad_norm": 0.21843275358835337, |
| "learning_rate": 1.3805346700083543e-05, |
| "loss": 0.2776, |
| "step": 2000 |
| }, |
| { |
| "epoch": 2.2535211267605635, |
| "grad_norm": 0.22482809048283803, |
| "learning_rate": 1.3784461152882205e-05, |
| "loss": 0.2724, |
| "step": 2001 |
| }, |
| { |
| "epoch": 2.2546478873239435, |
| "grad_norm": 0.21721684317508147, |
| "learning_rate": 1.376357560568087e-05, |
| "loss": 0.279, |
| "step": 2002 |
| }, |
| { |
| "epoch": 2.255774647887324, |
| "grad_norm": 0.24301326583221047, |
| "learning_rate": 1.3742690058479531e-05, |
| "loss": 0.2588, |
| "step": 2003 |
| }, |
| { |
| "epoch": 2.2569014084507044, |
| "grad_norm": 0.19746584879494172, |
| "learning_rate": 1.3721804511278197e-05, |
| "loss": 0.253, |
| "step": 2004 |
| }, |
| { |
| "epoch": 2.2580281690140844, |
| "grad_norm": 0.2390184562770489, |
| "learning_rate": 1.3700918964076858e-05, |
| "loss": 0.2822, |
| "step": 2005 |
| }, |
| { |
| "epoch": 2.259154929577465, |
| "grad_norm": 0.23217244184241698, |
| "learning_rate": 1.3680033416875523e-05, |
| "loss": 0.29, |
| "step": 2006 |
| }, |
| { |
| "epoch": 2.260281690140845, |
| "grad_norm": 0.22506809172530984, |
| "learning_rate": 1.3659147869674185e-05, |
| "loss": 0.2676, |
| "step": 2007 |
| }, |
| { |
| "epoch": 2.2614084507042254, |
| "grad_norm": 0.21517913805104338, |
| "learning_rate": 1.363826232247285e-05, |
| "loss": 0.2792, |
| "step": 2008 |
| }, |
| { |
| "epoch": 2.262535211267606, |
| "grad_norm": 0.24456781917421866, |
| "learning_rate": 1.3617376775271512e-05, |
| "loss": 0.2944, |
| "step": 2009 |
| }, |
| { |
| "epoch": 2.263661971830986, |
| "grad_norm": 0.23981555634152454, |
| "learning_rate": 1.3596491228070177e-05, |
| "loss": 0.2708, |
| "step": 2010 |
| }, |
| { |
| "epoch": 2.2647887323943663, |
| "grad_norm": 0.22061927832632905, |
| "learning_rate": 1.3575605680868838e-05, |
| "loss": 0.2522, |
| "step": 2011 |
| }, |
| { |
| "epoch": 2.2659154929577463, |
| "grad_norm": 0.2300492242646862, |
| "learning_rate": 1.3554720133667504e-05, |
| "loss": 0.29, |
| "step": 2012 |
| }, |
| { |
| "epoch": 2.2670422535211268, |
| "grad_norm": 0.22722745124981358, |
| "learning_rate": 1.3533834586466165e-05, |
| "loss": 0.2646, |
| "step": 2013 |
| }, |
| { |
| "epoch": 2.268169014084507, |
| "grad_norm": 0.22878725894793864, |
| "learning_rate": 1.351294903926483e-05, |
| "loss": 0.2623, |
| "step": 2014 |
| }, |
| { |
| "epoch": 2.2692957746478872, |
| "grad_norm": 0.2124223044313935, |
| "learning_rate": 1.3492063492063492e-05, |
| "loss": 0.2906, |
| "step": 2015 |
| }, |
| { |
| "epoch": 2.2704225352112677, |
| "grad_norm": 0.2036252987427459, |
| "learning_rate": 1.3471177944862157e-05, |
| "loss": 0.2547, |
| "step": 2016 |
| }, |
| { |
| "epoch": 2.2715492957746477, |
| "grad_norm": 0.2399812429436266, |
| "learning_rate": 1.3450292397660819e-05, |
| "loss": 0.2819, |
| "step": 2017 |
| }, |
| { |
| "epoch": 2.272676056338028, |
| "grad_norm": 0.25316518682365097, |
| "learning_rate": 1.3429406850459484e-05, |
| "loss": 0.2706, |
| "step": 2018 |
| }, |
| { |
| "epoch": 2.2738028169014086, |
| "grad_norm": 0.2252764130337124, |
| "learning_rate": 1.3408521303258145e-05, |
| "loss": 0.273, |
| "step": 2019 |
| }, |
| { |
| "epoch": 2.2749295774647886, |
| "grad_norm": 0.20614835043806035, |
| "learning_rate": 1.338763575605681e-05, |
| "loss": 0.2687, |
| "step": 2020 |
| }, |
| { |
| "epoch": 2.276056338028169, |
| "grad_norm": 0.23543495966359457, |
| "learning_rate": 1.3366750208855472e-05, |
| "loss": 0.2784, |
| "step": 2021 |
| }, |
| { |
| "epoch": 2.277183098591549, |
| "grad_norm": 0.23084199615754333, |
| "learning_rate": 1.3345864661654137e-05, |
| "loss": 0.2521, |
| "step": 2022 |
| }, |
| { |
| "epoch": 2.2783098591549296, |
| "grad_norm": 0.23191754255578015, |
| "learning_rate": 1.3324979114452799e-05, |
| "loss": 0.2834, |
| "step": 2023 |
| }, |
| { |
| "epoch": 2.27943661971831, |
| "grad_norm": 0.21490002373296088, |
| "learning_rate": 1.3304093567251464e-05, |
| "loss": 0.264, |
| "step": 2024 |
| }, |
| { |
| "epoch": 2.28056338028169, |
| "grad_norm": 0.2285277364686723, |
| "learning_rate": 1.3283208020050126e-05, |
| "loss": 0.2961, |
| "step": 2025 |
| }, |
| { |
| "epoch": 2.2816901408450705, |
| "grad_norm": 0.22139617147513796, |
| "learning_rate": 1.3262322472848789e-05, |
| "loss": 0.2493, |
| "step": 2026 |
| }, |
| { |
| "epoch": 2.2828169014084505, |
| "grad_norm": 0.2266693203995399, |
| "learning_rate": 1.3241436925647452e-05, |
| "loss": 0.2513, |
| "step": 2027 |
| }, |
| { |
| "epoch": 2.283943661971831, |
| "grad_norm": 0.21507464866756967, |
| "learning_rate": 1.3220551378446116e-05, |
| "loss": 0.2727, |
| "step": 2028 |
| }, |
| { |
| "epoch": 2.2850704225352114, |
| "grad_norm": 0.21760306384693703, |
| "learning_rate": 1.319966583124478e-05, |
| "loss": 0.2837, |
| "step": 2029 |
| }, |
| { |
| "epoch": 2.2861971830985914, |
| "grad_norm": 0.22955926412886085, |
| "learning_rate": 1.3178780284043443e-05, |
| "loss": 0.2792, |
| "step": 2030 |
| }, |
| { |
| "epoch": 2.287323943661972, |
| "grad_norm": 0.22954629278050026, |
| "learning_rate": 1.3157894736842106e-05, |
| "loss": 0.2781, |
| "step": 2031 |
| }, |
| { |
| "epoch": 2.288450704225352, |
| "grad_norm": 0.21520773497636855, |
| "learning_rate": 1.313700918964077e-05, |
| "loss": 0.2681, |
| "step": 2032 |
| }, |
| { |
| "epoch": 2.2895774647887324, |
| "grad_norm": 0.21696543795200143, |
| "learning_rate": 1.3116123642439431e-05, |
| "loss": 0.2595, |
| "step": 2033 |
| }, |
| { |
| "epoch": 2.290704225352113, |
| "grad_norm": 0.22952681284858015, |
| "learning_rate": 1.3095238095238096e-05, |
| "loss": 0.282, |
| "step": 2034 |
| }, |
| { |
| "epoch": 2.291830985915493, |
| "grad_norm": 0.33170692132136104, |
| "learning_rate": 1.3074352548036758e-05, |
| "loss": 0.2919, |
| "step": 2035 |
| }, |
| { |
| "epoch": 2.2929577464788733, |
| "grad_norm": 0.2533477575243943, |
| "learning_rate": 1.3053467000835423e-05, |
| "loss": 0.2962, |
| "step": 2036 |
| }, |
| { |
| "epoch": 2.2940845070422533, |
| "grad_norm": 0.2168408112742443, |
| "learning_rate": 1.3032581453634085e-05, |
| "loss": 0.275, |
| "step": 2037 |
| }, |
| { |
| "epoch": 2.2952112676056338, |
| "grad_norm": 0.21500736585640562, |
| "learning_rate": 1.301169590643275e-05, |
| "loss": 0.2686, |
| "step": 2038 |
| }, |
| { |
| "epoch": 2.2963380281690142, |
| "grad_norm": 0.2676815326816881, |
| "learning_rate": 1.2990810359231411e-05, |
| "loss": 0.2796, |
| "step": 2039 |
| }, |
| { |
| "epoch": 2.2974647887323942, |
| "grad_norm": 0.2263627947111304, |
| "learning_rate": 1.2969924812030076e-05, |
| "loss": 0.2656, |
| "step": 2040 |
| }, |
| { |
| "epoch": 2.2985915492957747, |
| "grad_norm": 0.22688933416191495, |
| "learning_rate": 1.2949039264828738e-05, |
| "loss": 0.277, |
| "step": 2041 |
| }, |
| { |
| "epoch": 2.2997183098591547, |
| "grad_norm": 0.2257086440618408, |
| "learning_rate": 1.2928153717627403e-05, |
| "loss": 0.2674, |
| "step": 2042 |
| }, |
| { |
| "epoch": 2.300845070422535, |
| "grad_norm": 0.2416933950813198, |
| "learning_rate": 1.2907268170426065e-05, |
| "loss": 0.2687, |
| "step": 2043 |
| }, |
| { |
| "epoch": 2.3019718309859156, |
| "grad_norm": 0.2323204387295286, |
| "learning_rate": 1.288638262322473e-05, |
| "loss": 0.2774, |
| "step": 2044 |
| }, |
| { |
| "epoch": 2.3030985915492956, |
| "grad_norm": 0.21946992538397853, |
| "learning_rate": 1.2865497076023392e-05, |
| "loss": 0.264, |
| "step": 2045 |
| }, |
| { |
| "epoch": 2.304225352112676, |
| "grad_norm": 0.24515276245328027, |
| "learning_rate": 1.2844611528822057e-05, |
| "loss": 0.2667, |
| "step": 2046 |
| }, |
| { |
| "epoch": 2.305352112676056, |
| "grad_norm": 0.2464566315493969, |
| "learning_rate": 1.2823725981620718e-05, |
| "loss": 0.2721, |
| "step": 2047 |
| }, |
| { |
| "epoch": 2.3064788732394366, |
| "grad_norm": 0.22334469262497983, |
| "learning_rate": 1.2802840434419383e-05, |
| "loss": 0.2812, |
| "step": 2048 |
| }, |
| { |
| "epoch": 2.307605633802817, |
| "grad_norm": 0.25873138298084397, |
| "learning_rate": 1.2781954887218045e-05, |
| "loss": 0.2917, |
| "step": 2049 |
| }, |
| { |
| "epoch": 2.308732394366197, |
| "grad_norm": 0.2778202804249837, |
| "learning_rate": 1.276106934001671e-05, |
| "loss": 0.273, |
| "step": 2050 |
| }, |
| { |
| "epoch": 2.3098591549295775, |
| "grad_norm": 0.21671557928806906, |
| "learning_rate": 1.2740183792815372e-05, |
| "loss": 0.2621, |
| "step": 2051 |
| }, |
| { |
| "epoch": 2.3109859154929575, |
| "grad_norm": 0.22789737210449681, |
| "learning_rate": 1.2719298245614037e-05, |
| "loss": 0.2576, |
| "step": 2052 |
| }, |
| { |
| "epoch": 2.312112676056338, |
| "grad_norm": 0.26173819893725, |
| "learning_rate": 1.2698412698412699e-05, |
| "loss": 0.2638, |
| "step": 2053 |
| }, |
| { |
| "epoch": 2.3132394366197184, |
| "grad_norm": 0.24649801061804577, |
| "learning_rate": 1.2677527151211364e-05, |
| "loss": 0.2784, |
| "step": 2054 |
| }, |
| { |
| "epoch": 2.3143661971830984, |
| "grad_norm": 0.23514114032370073, |
| "learning_rate": 1.2656641604010025e-05, |
| "loss": 0.2651, |
| "step": 2055 |
| }, |
| { |
| "epoch": 2.315492957746479, |
| "grad_norm": 0.2466224146716087, |
| "learning_rate": 1.263575605680869e-05, |
| "loss": 0.2917, |
| "step": 2056 |
| }, |
| { |
| "epoch": 2.316619718309859, |
| "grad_norm": 0.2872020468405315, |
| "learning_rate": 1.2614870509607352e-05, |
| "loss": 0.2536, |
| "step": 2057 |
| }, |
| { |
| "epoch": 2.3177464788732394, |
| "grad_norm": 0.21720874528836792, |
| "learning_rate": 1.2593984962406017e-05, |
| "loss": 0.28, |
| "step": 2058 |
| }, |
| { |
| "epoch": 2.31887323943662, |
| "grad_norm": 0.20741119190594928, |
| "learning_rate": 1.2573099415204679e-05, |
| "loss": 0.2718, |
| "step": 2059 |
| }, |
| { |
| "epoch": 2.32, |
| "grad_norm": 0.2560372734108066, |
| "learning_rate": 1.2552213868003342e-05, |
| "loss": 0.2789, |
| "step": 2060 |
| }, |
| { |
| "epoch": 2.3211267605633803, |
| "grad_norm": 0.24952747418775886, |
| "learning_rate": 1.2531328320802006e-05, |
| "loss": 0.2825, |
| "step": 2061 |
| }, |
| { |
| "epoch": 2.3222535211267608, |
| "grad_norm": 0.22738294194395234, |
| "learning_rate": 1.2510442773600669e-05, |
| "loss": 0.2937, |
| "step": 2062 |
| }, |
| { |
| "epoch": 2.3233802816901408, |
| "grad_norm": 0.22446187495202183, |
| "learning_rate": 1.2489557226399332e-05, |
| "loss": 0.2713, |
| "step": 2063 |
| }, |
| { |
| "epoch": 2.3245070422535212, |
| "grad_norm": 0.24144984465835664, |
| "learning_rate": 1.2468671679197996e-05, |
| "loss": 0.2756, |
| "step": 2064 |
| }, |
| { |
| "epoch": 2.3256338028169012, |
| "grad_norm": 0.23689819143768417, |
| "learning_rate": 1.2447786131996659e-05, |
| "loss": 0.2546, |
| "step": 2065 |
| }, |
| { |
| "epoch": 2.3267605633802817, |
| "grad_norm": 0.2611020677993963, |
| "learning_rate": 1.242690058479532e-05, |
| "loss": 0.2809, |
| "step": 2066 |
| }, |
| { |
| "epoch": 2.327887323943662, |
| "grad_norm": 0.2339885651831768, |
| "learning_rate": 1.2406015037593984e-05, |
| "loss": 0.2801, |
| "step": 2067 |
| }, |
| { |
| "epoch": 2.329014084507042, |
| "grad_norm": 0.2334254544471349, |
| "learning_rate": 1.2385129490392647e-05, |
| "loss": 0.2725, |
| "step": 2068 |
| }, |
| { |
| "epoch": 2.3301408450704226, |
| "grad_norm": 0.2246880889088873, |
| "learning_rate": 1.236424394319131e-05, |
| "loss": 0.2664, |
| "step": 2069 |
| }, |
| { |
| "epoch": 2.3312676056338026, |
| "grad_norm": 0.2507553837232819, |
| "learning_rate": 1.2343358395989974e-05, |
| "loss": 0.2788, |
| "step": 2070 |
| }, |
| { |
| "epoch": 2.332394366197183, |
| "grad_norm": 0.21634555471769967, |
| "learning_rate": 1.2322472848788638e-05, |
| "loss": 0.2635, |
| "step": 2071 |
| }, |
| { |
| "epoch": 2.3335211267605636, |
| "grad_norm": 0.2331492863630478, |
| "learning_rate": 1.2301587301587301e-05, |
| "loss": 0.2717, |
| "step": 2072 |
| }, |
| { |
| "epoch": 2.3346478873239436, |
| "grad_norm": 0.23764686043090247, |
| "learning_rate": 1.2280701754385964e-05, |
| "loss": 0.2719, |
| "step": 2073 |
| }, |
| { |
| "epoch": 2.335774647887324, |
| "grad_norm": 0.20696704110699093, |
| "learning_rate": 1.2259816207184628e-05, |
| "loss": 0.2623, |
| "step": 2074 |
| }, |
| { |
| "epoch": 2.336901408450704, |
| "grad_norm": 0.23715745041246034, |
| "learning_rate": 1.2238930659983291e-05, |
| "loss": 0.2804, |
| "step": 2075 |
| }, |
| { |
| "epoch": 2.3380281690140845, |
| "grad_norm": 0.23109068722265308, |
| "learning_rate": 1.2218045112781954e-05, |
| "loss": 0.2709, |
| "step": 2076 |
| }, |
| { |
| "epoch": 2.339154929577465, |
| "grad_norm": 0.1991020046023143, |
| "learning_rate": 1.2197159565580618e-05, |
| "loss": 0.2616, |
| "step": 2077 |
| }, |
| { |
| "epoch": 2.340281690140845, |
| "grad_norm": 0.20836991373278604, |
| "learning_rate": 1.2176274018379281e-05, |
| "loss": 0.2803, |
| "step": 2078 |
| }, |
| { |
| "epoch": 2.3414084507042254, |
| "grad_norm": 0.23228249419993582, |
| "learning_rate": 1.2155388471177945e-05, |
| "loss": 0.2775, |
| "step": 2079 |
| }, |
| { |
| "epoch": 2.3425352112676054, |
| "grad_norm": 0.23298791905416905, |
| "learning_rate": 1.2134502923976608e-05, |
| "loss": 0.2735, |
| "step": 2080 |
| }, |
| { |
| "epoch": 2.343661971830986, |
| "grad_norm": 0.20806123139516708, |
| "learning_rate": 1.2113617376775271e-05, |
| "loss": 0.2781, |
| "step": 2081 |
| }, |
| { |
| "epoch": 2.3447887323943664, |
| "grad_norm": 0.22064451961073028, |
| "learning_rate": 1.2092731829573935e-05, |
| "loss": 0.2734, |
| "step": 2082 |
| }, |
| { |
| "epoch": 2.3459154929577464, |
| "grad_norm": 0.21649854203207658, |
| "learning_rate": 1.2071846282372598e-05, |
| "loss": 0.2583, |
| "step": 2083 |
| }, |
| { |
| "epoch": 2.347042253521127, |
| "grad_norm": 0.21476663436618942, |
| "learning_rate": 1.2050960735171261e-05, |
| "loss": 0.2712, |
| "step": 2084 |
| }, |
| { |
| "epoch": 2.3481690140845073, |
| "grad_norm": 0.1982230142966004, |
| "learning_rate": 1.2030075187969925e-05, |
| "loss": 0.256, |
| "step": 2085 |
| }, |
| { |
| "epoch": 2.3492957746478873, |
| "grad_norm": 0.21709399899243265, |
| "learning_rate": 1.2009189640768588e-05, |
| "loss": 0.2653, |
| "step": 2086 |
| }, |
| { |
| "epoch": 2.3504225352112678, |
| "grad_norm": 0.23444500781279742, |
| "learning_rate": 1.1988304093567252e-05, |
| "loss": 0.283, |
| "step": 2087 |
| }, |
| { |
| "epoch": 2.3515492957746478, |
| "grad_norm": 0.22818369005780376, |
| "learning_rate": 1.1967418546365915e-05, |
| "loss": 0.2646, |
| "step": 2088 |
| }, |
| { |
| "epoch": 2.3526760563380282, |
| "grad_norm": 0.2028931812809309, |
| "learning_rate": 1.1946532999164578e-05, |
| "loss": 0.2748, |
| "step": 2089 |
| }, |
| { |
| "epoch": 2.3538028169014087, |
| "grad_norm": 0.2345211570550957, |
| "learning_rate": 1.1925647451963242e-05, |
| "loss": 0.2745, |
| "step": 2090 |
| }, |
| { |
| "epoch": 2.3549295774647887, |
| "grad_norm": 0.2753030384992383, |
| "learning_rate": 1.1904761904761905e-05, |
| "loss": 0.2729, |
| "step": 2091 |
| }, |
| { |
| "epoch": 2.356056338028169, |
| "grad_norm": 0.21352984178296622, |
| "learning_rate": 1.1883876357560568e-05, |
| "loss": 0.2608, |
| "step": 2092 |
| }, |
| { |
| "epoch": 2.357183098591549, |
| "grad_norm": 0.2728726782141452, |
| "learning_rate": 1.1862990810359232e-05, |
| "loss": 0.2793, |
| "step": 2093 |
| }, |
| { |
| "epoch": 2.3583098591549296, |
| "grad_norm": 0.2410185342470983, |
| "learning_rate": 1.1842105263157895e-05, |
| "loss": 0.2861, |
| "step": 2094 |
| }, |
| { |
| "epoch": 2.35943661971831, |
| "grad_norm": 0.24989805449908808, |
| "learning_rate": 1.1821219715956559e-05, |
| "loss": 0.2906, |
| "step": 2095 |
| }, |
| { |
| "epoch": 2.36056338028169, |
| "grad_norm": 0.23661941159683683, |
| "learning_rate": 1.1800334168755222e-05, |
| "loss": 0.2846, |
| "step": 2096 |
| }, |
| { |
| "epoch": 2.3616901408450706, |
| "grad_norm": 0.24911928401703032, |
| "learning_rate": 1.1779448621553885e-05, |
| "loss": 0.2842, |
| "step": 2097 |
| }, |
| { |
| "epoch": 2.3628169014084506, |
| "grad_norm": 0.2188034204109028, |
| "learning_rate": 1.1758563074352549e-05, |
| "loss": 0.2619, |
| "step": 2098 |
| }, |
| { |
| "epoch": 2.363943661971831, |
| "grad_norm": 0.23519778699583993, |
| "learning_rate": 1.1737677527151212e-05, |
| "loss": 0.2528, |
| "step": 2099 |
| }, |
| { |
| "epoch": 2.3650704225352115, |
| "grad_norm": 0.24858670761142815, |
| "learning_rate": 1.1716791979949874e-05, |
| "loss": 0.2764, |
| "step": 2100 |
| }, |
| { |
| "epoch": 2.3661971830985915, |
| "grad_norm": 0.2157432927871551, |
| "learning_rate": 1.1695906432748537e-05, |
| "loss": 0.2703, |
| "step": 2101 |
| }, |
| { |
| "epoch": 2.367323943661972, |
| "grad_norm": 0.20035197021874354, |
| "learning_rate": 1.16750208855472e-05, |
| "loss": 0.2635, |
| "step": 2102 |
| }, |
| { |
| "epoch": 2.368450704225352, |
| "grad_norm": 0.25971878714562346, |
| "learning_rate": 1.1654135338345864e-05, |
| "loss": 0.2766, |
| "step": 2103 |
| }, |
| { |
| "epoch": 2.3695774647887324, |
| "grad_norm": 0.22235337146940867, |
| "learning_rate": 1.1633249791144527e-05, |
| "loss": 0.2723, |
| "step": 2104 |
| }, |
| { |
| "epoch": 2.370704225352113, |
| "grad_norm": 0.19367345176317313, |
| "learning_rate": 1.161236424394319e-05, |
| "loss": 0.2672, |
| "step": 2105 |
| }, |
| { |
| "epoch": 2.371830985915493, |
| "grad_norm": 0.22489503194261767, |
| "learning_rate": 1.1591478696741854e-05, |
| "loss": 0.2705, |
| "step": 2106 |
| }, |
| { |
| "epoch": 2.3729577464788734, |
| "grad_norm": 0.22838289804635875, |
| "learning_rate": 1.1570593149540517e-05, |
| "loss": 0.2694, |
| "step": 2107 |
| }, |
| { |
| "epoch": 2.3740845070422534, |
| "grad_norm": 0.20986582906030665, |
| "learning_rate": 1.154970760233918e-05, |
| "loss": 0.2613, |
| "step": 2108 |
| }, |
| { |
| "epoch": 2.375211267605634, |
| "grad_norm": 0.2223157151237609, |
| "learning_rate": 1.1528822055137844e-05, |
| "loss": 0.2826, |
| "step": 2109 |
| }, |
| { |
| "epoch": 2.3763380281690143, |
| "grad_norm": 0.22977691867501657, |
| "learning_rate": 1.1507936507936508e-05, |
| "loss": 0.2789, |
| "step": 2110 |
| }, |
| { |
| "epoch": 2.3774647887323943, |
| "grad_norm": 0.21110855292816205, |
| "learning_rate": 1.1487050960735171e-05, |
| "loss": 0.2741, |
| "step": 2111 |
| }, |
| { |
| "epoch": 2.3785915492957748, |
| "grad_norm": 0.21244436892484309, |
| "learning_rate": 1.1466165413533834e-05, |
| "loss": 0.2709, |
| "step": 2112 |
| }, |
| { |
| "epoch": 2.379718309859155, |
| "grad_norm": 0.2175330654582257, |
| "learning_rate": 1.1445279866332498e-05, |
| "loss": 0.2622, |
| "step": 2113 |
| }, |
| { |
| "epoch": 2.3808450704225352, |
| "grad_norm": 0.23308321264773776, |
| "learning_rate": 1.1424394319131161e-05, |
| "loss": 0.2808, |
| "step": 2114 |
| }, |
| { |
| "epoch": 2.3819718309859157, |
| "grad_norm": 0.21031989758189668, |
| "learning_rate": 1.1403508771929824e-05, |
| "loss": 0.2802, |
| "step": 2115 |
| }, |
| { |
| "epoch": 2.3830985915492957, |
| "grad_norm": 0.20745254829798823, |
| "learning_rate": 1.1382623224728488e-05, |
| "loss": 0.2582, |
| "step": 2116 |
| }, |
| { |
| "epoch": 2.384225352112676, |
| "grad_norm": 0.2332964916151851, |
| "learning_rate": 1.1361737677527151e-05, |
| "loss": 0.2886, |
| "step": 2117 |
| }, |
| { |
| "epoch": 2.385352112676056, |
| "grad_norm": 0.2279416792446994, |
| "learning_rate": 1.1340852130325815e-05, |
| "loss": 0.2866, |
| "step": 2118 |
| }, |
| { |
| "epoch": 2.3864788732394366, |
| "grad_norm": 0.21711758088930125, |
| "learning_rate": 1.1319966583124478e-05, |
| "loss": 0.2805, |
| "step": 2119 |
| }, |
| { |
| "epoch": 2.387605633802817, |
| "grad_norm": 0.22213392090537115, |
| "learning_rate": 1.1299081035923141e-05, |
| "loss": 0.2561, |
| "step": 2120 |
| }, |
| { |
| "epoch": 2.388732394366197, |
| "grad_norm": 0.22722511191755496, |
| "learning_rate": 1.1278195488721805e-05, |
| "loss": 0.2651, |
| "step": 2121 |
| }, |
| { |
| "epoch": 2.3898591549295776, |
| "grad_norm": 0.24840655590846175, |
| "learning_rate": 1.1257309941520468e-05, |
| "loss": 0.2853, |
| "step": 2122 |
| }, |
| { |
| "epoch": 2.3909859154929576, |
| "grad_norm": 0.20463193123425508, |
| "learning_rate": 1.1236424394319131e-05, |
| "loss": 0.2638, |
| "step": 2123 |
| }, |
| { |
| "epoch": 2.392112676056338, |
| "grad_norm": 0.22768377246676877, |
| "learning_rate": 1.1215538847117795e-05, |
| "loss": 0.2814, |
| "step": 2124 |
| }, |
| { |
| "epoch": 2.3932394366197185, |
| "grad_norm": 0.24848832166520118, |
| "learning_rate": 1.1194653299916458e-05, |
| "loss": 0.2725, |
| "step": 2125 |
| }, |
| { |
| "epoch": 2.3943661971830985, |
| "grad_norm": 0.21778817501473433, |
| "learning_rate": 1.1173767752715122e-05, |
| "loss": 0.2763, |
| "step": 2126 |
| }, |
| { |
| "epoch": 2.395492957746479, |
| "grad_norm": 0.20652062321780992, |
| "learning_rate": 1.1152882205513785e-05, |
| "loss": 0.2626, |
| "step": 2127 |
| }, |
| { |
| "epoch": 2.396619718309859, |
| "grad_norm": 0.24174624714657003, |
| "learning_rate": 1.1131996658312448e-05, |
| "loss": 0.269, |
| "step": 2128 |
| }, |
| { |
| "epoch": 2.3977464788732394, |
| "grad_norm": 0.236893442627161, |
| "learning_rate": 1.1111111111111112e-05, |
| "loss": 0.2774, |
| "step": 2129 |
| }, |
| { |
| "epoch": 2.39887323943662, |
| "grad_norm": 0.21898107027200428, |
| "learning_rate": 1.1090225563909775e-05, |
| "loss": 0.2803, |
| "step": 2130 |
| }, |
| { |
| "epoch": 2.4, |
| "grad_norm": 0.22808988943103967, |
| "learning_rate": 1.1069340016708438e-05, |
| "loss": 0.2626, |
| "step": 2131 |
| }, |
| { |
| "epoch": 2.4011267605633804, |
| "grad_norm": 0.24093717514555563, |
| "learning_rate": 1.1048454469507102e-05, |
| "loss": 0.2762, |
| "step": 2132 |
| }, |
| { |
| "epoch": 2.4022535211267604, |
| "grad_norm": 0.240001552288654, |
| "learning_rate": 1.1027568922305765e-05, |
| "loss": 0.2688, |
| "step": 2133 |
| }, |
| { |
| "epoch": 2.403380281690141, |
| "grad_norm": 0.20401632119073637, |
| "learning_rate": 1.1006683375104427e-05, |
| "loss": 0.2685, |
| "step": 2134 |
| }, |
| { |
| "epoch": 2.4045070422535213, |
| "grad_norm": 0.23215631653494903, |
| "learning_rate": 1.098579782790309e-05, |
| "loss": 0.2739, |
| "step": 2135 |
| }, |
| { |
| "epoch": 2.4056338028169013, |
| "grad_norm": 0.2438746235505259, |
| "learning_rate": 1.0964912280701754e-05, |
| "loss": 0.2798, |
| "step": 2136 |
| }, |
| { |
| "epoch": 2.4067605633802818, |
| "grad_norm": 0.2185948791096995, |
| "learning_rate": 1.0944026733500417e-05, |
| "loss": 0.2763, |
| "step": 2137 |
| }, |
| { |
| "epoch": 2.407887323943662, |
| "grad_norm": 0.22927415544945678, |
| "learning_rate": 1.092314118629908e-05, |
| "loss": 0.2741, |
| "step": 2138 |
| }, |
| { |
| "epoch": 2.4090140845070422, |
| "grad_norm": 0.2352880631874138, |
| "learning_rate": 1.0902255639097744e-05, |
| "loss": 0.2836, |
| "step": 2139 |
| }, |
| { |
| "epoch": 2.4101408450704227, |
| "grad_norm": 0.22718997230465515, |
| "learning_rate": 1.0881370091896407e-05, |
| "loss": 0.2773, |
| "step": 2140 |
| }, |
| { |
| "epoch": 2.4112676056338027, |
| "grad_norm": 0.22040466124629957, |
| "learning_rate": 1.086048454469507e-05, |
| "loss": 0.2678, |
| "step": 2141 |
| }, |
| { |
| "epoch": 2.412394366197183, |
| "grad_norm": 0.23551257603799414, |
| "learning_rate": 1.0839598997493734e-05, |
| "loss": 0.269, |
| "step": 2142 |
| }, |
| { |
| "epoch": 2.413521126760563, |
| "grad_norm": 0.21954462480504305, |
| "learning_rate": 1.0818713450292397e-05, |
| "loss": 0.2731, |
| "step": 2143 |
| }, |
| { |
| "epoch": 2.4146478873239436, |
| "grad_norm": 0.22592886171330676, |
| "learning_rate": 1.079782790309106e-05, |
| "loss": 0.296, |
| "step": 2144 |
| }, |
| { |
| "epoch": 2.415774647887324, |
| "grad_norm": 0.2240484943300781, |
| "learning_rate": 1.0776942355889724e-05, |
| "loss": 0.2719, |
| "step": 2145 |
| }, |
| { |
| "epoch": 2.416901408450704, |
| "grad_norm": 0.22363256604144707, |
| "learning_rate": 1.0756056808688387e-05, |
| "loss": 0.2679, |
| "step": 2146 |
| }, |
| { |
| "epoch": 2.4180281690140846, |
| "grad_norm": 0.2154008784772404, |
| "learning_rate": 1.073517126148705e-05, |
| "loss": 0.2844, |
| "step": 2147 |
| }, |
| { |
| "epoch": 2.4191549295774646, |
| "grad_norm": 0.21320566200159038, |
| "learning_rate": 1.0714285714285714e-05, |
| "loss": 0.2586, |
| "step": 2148 |
| }, |
| { |
| "epoch": 2.420281690140845, |
| "grad_norm": 0.20909928611938552, |
| "learning_rate": 1.0693400167084377e-05, |
| "loss": 0.2606, |
| "step": 2149 |
| }, |
| { |
| "epoch": 2.4214084507042255, |
| "grad_norm": 0.20981361725139566, |
| "learning_rate": 1.067251461988304e-05, |
| "loss": 0.2705, |
| "step": 2150 |
| }, |
| { |
| "epoch": 2.4225352112676055, |
| "grad_norm": 0.21510118347835527, |
| "learning_rate": 1.0651629072681704e-05, |
| "loss": 0.2696, |
| "step": 2151 |
| }, |
| { |
| "epoch": 2.423661971830986, |
| "grad_norm": 0.21114872083025085, |
| "learning_rate": 1.0630743525480368e-05, |
| "loss": 0.2695, |
| "step": 2152 |
| }, |
| { |
| "epoch": 2.424788732394366, |
| "grad_norm": 0.20265014743832913, |
| "learning_rate": 1.0609857978279031e-05, |
| "loss": 0.2654, |
| "step": 2153 |
| }, |
| { |
| "epoch": 2.4259154929577464, |
| "grad_norm": 0.21350093970564465, |
| "learning_rate": 1.0588972431077694e-05, |
| "loss": 0.2795, |
| "step": 2154 |
| }, |
| { |
| "epoch": 2.427042253521127, |
| "grad_norm": 0.2118394159905768, |
| "learning_rate": 1.0568086883876358e-05, |
| "loss": 0.2642, |
| "step": 2155 |
| }, |
| { |
| "epoch": 2.428169014084507, |
| "grad_norm": 0.23040409136612594, |
| "learning_rate": 1.0547201336675021e-05, |
| "loss": 0.2929, |
| "step": 2156 |
| }, |
| { |
| "epoch": 2.4292957746478874, |
| "grad_norm": 0.2276700680735358, |
| "learning_rate": 1.0526315789473684e-05, |
| "loss": 0.258, |
| "step": 2157 |
| }, |
| { |
| "epoch": 2.4304225352112674, |
| "grad_norm": 0.23948540092079731, |
| "learning_rate": 1.0505430242272348e-05, |
| "loss": 0.2705, |
| "step": 2158 |
| }, |
| { |
| "epoch": 2.431549295774648, |
| "grad_norm": 0.22322257893047265, |
| "learning_rate": 1.0484544695071011e-05, |
| "loss": 0.273, |
| "step": 2159 |
| }, |
| { |
| "epoch": 2.4326760563380283, |
| "grad_norm": 0.2698826968184876, |
| "learning_rate": 1.0463659147869675e-05, |
| "loss": 0.2743, |
| "step": 2160 |
| }, |
| { |
| "epoch": 2.4338028169014083, |
| "grad_norm": 0.21669959538878936, |
| "learning_rate": 1.0442773600668338e-05, |
| "loss": 0.2655, |
| "step": 2161 |
| }, |
| { |
| "epoch": 2.4349295774647888, |
| "grad_norm": 0.2409956078704885, |
| "learning_rate": 1.0421888053467001e-05, |
| "loss": 0.2832, |
| "step": 2162 |
| }, |
| { |
| "epoch": 2.436056338028169, |
| "grad_norm": 0.20476452459540187, |
| "learning_rate": 1.0401002506265665e-05, |
| "loss": 0.2809, |
| "step": 2163 |
| }, |
| { |
| "epoch": 2.4371830985915492, |
| "grad_norm": 0.20986579303419386, |
| "learning_rate": 1.0380116959064328e-05, |
| "loss": 0.2789, |
| "step": 2164 |
| }, |
| { |
| "epoch": 2.4383098591549297, |
| "grad_norm": 0.2352840245085601, |
| "learning_rate": 1.0359231411862991e-05, |
| "loss": 0.2816, |
| "step": 2165 |
| }, |
| { |
| "epoch": 2.4394366197183097, |
| "grad_norm": 0.22355582613204722, |
| "learning_rate": 1.0338345864661655e-05, |
| "loss": 0.2738, |
| "step": 2166 |
| }, |
| { |
| "epoch": 2.44056338028169, |
| "grad_norm": 0.21389291437331132, |
| "learning_rate": 1.0317460317460318e-05, |
| "loss": 0.2727, |
| "step": 2167 |
| }, |
| { |
| "epoch": 2.44169014084507, |
| "grad_norm": 0.23646272987408282, |
| "learning_rate": 1.029657477025898e-05, |
| "loss": 0.271, |
| "step": 2168 |
| }, |
| { |
| "epoch": 2.4428169014084506, |
| "grad_norm": 0.23027802072594214, |
| "learning_rate": 1.0275689223057643e-05, |
| "loss": 0.2896, |
| "step": 2169 |
| }, |
| { |
| "epoch": 2.443943661971831, |
| "grad_norm": 0.2274161999681408, |
| "learning_rate": 1.0254803675856307e-05, |
| "loss": 0.2811, |
| "step": 2170 |
| }, |
| { |
| "epoch": 2.445070422535211, |
| "grad_norm": 0.22538041115209936, |
| "learning_rate": 1.023391812865497e-05, |
| "loss": 0.277, |
| "step": 2171 |
| }, |
| { |
| "epoch": 2.4461971830985916, |
| "grad_norm": 0.24038478413990452, |
| "learning_rate": 1.0213032581453633e-05, |
| "loss": 0.2848, |
| "step": 2172 |
| }, |
| { |
| "epoch": 2.4473239436619716, |
| "grad_norm": 0.244583573845353, |
| "learning_rate": 1.0192147034252297e-05, |
| "loss": 0.2825, |
| "step": 2173 |
| }, |
| { |
| "epoch": 2.448450704225352, |
| "grad_norm": 0.2093797766595799, |
| "learning_rate": 1.017126148705096e-05, |
| "loss": 0.248, |
| "step": 2174 |
| }, |
| { |
| "epoch": 2.4495774647887325, |
| "grad_norm": 0.23293668786136626, |
| "learning_rate": 1.0150375939849624e-05, |
| "loss": 0.282, |
| "step": 2175 |
| }, |
| { |
| "epoch": 2.4507042253521125, |
| "grad_norm": 0.22928470807328025, |
| "learning_rate": 1.0129490392648287e-05, |
| "loss": 0.2703, |
| "step": 2176 |
| }, |
| { |
| "epoch": 2.451830985915493, |
| "grad_norm": 0.23828534953004338, |
| "learning_rate": 1.010860484544695e-05, |
| "loss": 0.261, |
| "step": 2177 |
| }, |
| { |
| "epoch": 2.452957746478873, |
| "grad_norm": 0.22773354216922517, |
| "learning_rate": 1.0087719298245614e-05, |
| "loss": 0.2791, |
| "step": 2178 |
| }, |
| { |
| "epoch": 2.4540845070422534, |
| "grad_norm": 0.22213705135610468, |
| "learning_rate": 1.0066833751044277e-05, |
| "loss": 0.2748, |
| "step": 2179 |
| }, |
| { |
| "epoch": 2.455211267605634, |
| "grad_norm": 0.23231356179846382, |
| "learning_rate": 1.004594820384294e-05, |
| "loss": 0.2705, |
| "step": 2180 |
| }, |
| { |
| "epoch": 2.456338028169014, |
| "grad_norm": 0.22561566979144523, |
| "learning_rate": 1.0025062656641604e-05, |
| "loss": 0.2774, |
| "step": 2181 |
| }, |
| { |
| "epoch": 2.4574647887323944, |
| "grad_norm": 0.22022567511058663, |
| "learning_rate": 1.0004177109440267e-05, |
| "loss": 0.2406, |
| "step": 2182 |
| }, |
| { |
| "epoch": 2.458591549295775, |
| "grad_norm": 0.20961456153746924, |
| "learning_rate": 9.98329156223893e-06, |
| "loss": 0.2658, |
| "step": 2183 |
| }, |
| { |
| "epoch": 2.459718309859155, |
| "grad_norm": 0.2059883661597407, |
| "learning_rate": 9.962406015037594e-06, |
| "loss": 0.2775, |
| "step": 2184 |
| }, |
| { |
| "epoch": 2.4608450704225353, |
| "grad_norm": 0.256771919518465, |
| "learning_rate": 9.941520467836257e-06, |
| "loss": 0.2743, |
| "step": 2185 |
| }, |
| { |
| "epoch": 2.4619718309859153, |
| "grad_norm": 0.23952040840749259, |
| "learning_rate": 9.92063492063492e-06, |
| "loss": 0.2741, |
| "step": 2186 |
| }, |
| { |
| "epoch": 2.463098591549296, |
| "grad_norm": 0.21297152510598305, |
| "learning_rate": 9.899749373433584e-06, |
| "loss": 0.2775, |
| "step": 2187 |
| }, |
| { |
| "epoch": 2.4642253521126762, |
| "grad_norm": 0.2066578541585253, |
| "learning_rate": 9.878863826232247e-06, |
| "loss": 0.2639, |
| "step": 2188 |
| }, |
| { |
| "epoch": 2.4653521126760563, |
| "grad_norm": 0.2137411082320371, |
| "learning_rate": 9.85797827903091e-06, |
| "loss": 0.2779, |
| "step": 2189 |
| }, |
| { |
| "epoch": 2.4664788732394367, |
| "grad_norm": 0.23503564988189526, |
| "learning_rate": 9.837092731829574e-06, |
| "loss": 0.2734, |
| "step": 2190 |
| }, |
| { |
| "epoch": 2.4676056338028167, |
| "grad_norm": 0.22828241783204792, |
| "learning_rate": 9.816207184628238e-06, |
| "loss": 0.2559, |
| "step": 2191 |
| }, |
| { |
| "epoch": 2.468732394366197, |
| "grad_norm": 0.23122651593415716, |
| "learning_rate": 9.795321637426901e-06, |
| "loss": 0.2862, |
| "step": 2192 |
| }, |
| { |
| "epoch": 2.4698591549295776, |
| "grad_norm": 0.2323362117389597, |
| "learning_rate": 9.774436090225564e-06, |
| "loss": 0.291, |
| "step": 2193 |
| }, |
| { |
| "epoch": 2.4709859154929577, |
| "grad_norm": 0.20386352123726093, |
| "learning_rate": 9.753550543024228e-06, |
| "loss": 0.266, |
| "step": 2194 |
| }, |
| { |
| "epoch": 2.472112676056338, |
| "grad_norm": 0.23765114384248068, |
| "learning_rate": 9.732664995822891e-06, |
| "loss": 0.265, |
| "step": 2195 |
| }, |
| { |
| "epoch": 2.473239436619718, |
| "grad_norm": 0.22209086256665098, |
| "learning_rate": 9.711779448621554e-06, |
| "loss": 0.2649, |
| "step": 2196 |
| }, |
| { |
| "epoch": 2.4743661971830986, |
| "grad_norm": 0.22644727329815742, |
| "learning_rate": 9.690893901420218e-06, |
| "loss": 0.2635, |
| "step": 2197 |
| }, |
| { |
| "epoch": 2.475492957746479, |
| "grad_norm": 0.22367553789132952, |
| "learning_rate": 9.670008354218881e-06, |
| "loss": 0.2699, |
| "step": 2198 |
| }, |
| { |
| "epoch": 2.476619718309859, |
| "grad_norm": 0.22888798104554653, |
| "learning_rate": 9.649122807017545e-06, |
| "loss": 0.2705, |
| "step": 2199 |
| }, |
| { |
| "epoch": 2.4777464788732395, |
| "grad_norm": 0.22848461387767924, |
| "learning_rate": 9.628237259816208e-06, |
| "loss": 0.2809, |
| "step": 2200 |
| }, |
| { |
| "epoch": 2.4788732394366195, |
| "grad_norm": 0.248504426413411, |
| "learning_rate": 9.607351712614871e-06, |
| "loss": 0.29, |
| "step": 2201 |
| }, |
| { |
| "epoch": 2.48, |
| "grad_norm": 0.23464770896979678, |
| "learning_rate": 9.586466165413533e-06, |
| "loss": 0.2803, |
| "step": 2202 |
| }, |
| { |
| "epoch": 2.4811267605633804, |
| "grad_norm": 0.216404731594425, |
| "learning_rate": 9.565580618212196e-06, |
| "loss": 0.288, |
| "step": 2203 |
| }, |
| { |
| "epoch": 2.4822535211267605, |
| "grad_norm": 0.21606120576921523, |
| "learning_rate": 9.54469507101086e-06, |
| "loss": 0.276, |
| "step": 2204 |
| }, |
| { |
| "epoch": 2.483380281690141, |
| "grad_norm": 0.23724905187991394, |
| "learning_rate": 9.523809523809523e-06, |
| "loss": 0.2821, |
| "step": 2205 |
| }, |
| { |
| "epoch": 2.4845070422535214, |
| "grad_norm": 0.2213014785466372, |
| "learning_rate": 9.502923976608186e-06, |
| "loss": 0.2703, |
| "step": 2206 |
| }, |
| { |
| "epoch": 2.4856338028169014, |
| "grad_norm": 0.20649440183174905, |
| "learning_rate": 9.48203842940685e-06, |
| "loss": 0.2566, |
| "step": 2207 |
| }, |
| { |
| "epoch": 2.486760563380282, |
| "grad_norm": 0.2188221728681939, |
| "learning_rate": 9.461152882205513e-06, |
| "loss": 0.2829, |
| "step": 2208 |
| }, |
| { |
| "epoch": 2.487887323943662, |
| "grad_norm": 0.20971132597538833, |
| "learning_rate": 9.440267335004177e-06, |
| "loss": 0.2636, |
| "step": 2209 |
| }, |
| { |
| "epoch": 2.4890140845070423, |
| "grad_norm": 0.22017712989793567, |
| "learning_rate": 9.41938178780284e-06, |
| "loss": 0.2819, |
| "step": 2210 |
| }, |
| { |
| "epoch": 2.4901408450704228, |
| "grad_norm": 0.21163799913141193, |
| "learning_rate": 9.398496240601503e-06, |
| "loss": 0.2693, |
| "step": 2211 |
| }, |
| { |
| "epoch": 2.491267605633803, |
| "grad_norm": 0.22189885631357084, |
| "learning_rate": 9.377610693400167e-06, |
| "loss": 0.2704, |
| "step": 2212 |
| }, |
| { |
| "epoch": 2.4923943661971832, |
| "grad_norm": 0.218520608239522, |
| "learning_rate": 9.35672514619883e-06, |
| "loss": 0.2767, |
| "step": 2213 |
| }, |
| { |
| "epoch": 2.4935211267605633, |
| "grad_norm": 0.215359628487211, |
| "learning_rate": 9.335839598997493e-06, |
| "loss": 0.2783, |
| "step": 2214 |
| }, |
| { |
| "epoch": 2.4946478873239437, |
| "grad_norm": 0.22803232795887976, |
| "learning_rate": 9.314954051796157e-06, |
| "loss": 0.2525, |
| "step": 2215 |
| }, |
| { |
| "epoch": 2.495774647887324, |
| "grad_norm": 0.23560760814110923, |
| "learning_rate": 9.29406850459482e-06, |
| "loss": 0.2831, |
| "step": 2216 |
| }, |
| { |
| "epoch": 2.496901408450704, |
| "grad_norm": 0.22343667946076662, |
| "learning_rate": 9.273182957393484e-06, |
| "loss": 0.278, |
| "step": 2217 |
| }, |
| { |
| "epoch": 2.4980281690140846, |
| "grad_norm": 0.23027385335442674, |
| "learning_rate": 9.252297410192147e-06, |
| "loss": 0.2761, |
| "step": 2218 |
| }, |
| { |
| "epoch": 2.4991549295774647, |
| "grad_norm": 0.20879502549623066, |
| "learning_rate": 9.23141186299081e-06, |
| "loss": 0.284, |
| "step": 2219 |
| }, |
| { |
| "epoch": 2.500281690140845, |
| "grad_norm": 0.20835602045508547, |
| "learning_rate": 9.210526315789474e-06, |
| "loss": 0.2676, |
| "step": 2220 |
| }, |
| { |
| "epoch": 2.5014084507042256, |
| "grad_norm": 0.21675622542416495, |
| "learning_rate": 9.189640768588137e-06, |
| "loss": 0.2851, |
| "step": 2221 |
| }, |
| { |
| "epoch": 2.5025352112676056, |
| "grad_norm": 0.21434267814700203, |
| "learning_rate": 9.1687552213868e-06, |
| "loss": 0.2714, |
| "step": 2222 |
| }, |
| { |
| "epoch": 2.503661971830986, |
| "grad_norm": 0.21218470836676095, |
| "learning_rate": 9.147869674185464e-06, |
| "loss": 0.2752, |
| "step": 2223 |
| }, |
| { |
| "epoch": 2.504788732394366, |
| "grad_norm": 0.223752505258997, |
| "learning_rate": 9.126984126984127e-06, |
| "loss": 0.2776, |
| "step": 2224 |
| }, |
| { |
| "epoch": 2.5059154929577465, |
| "grad_norm": 0.22561799691711334, |
| "learning_rate": 9.10609857978279e-06, |
| "loss": 0.2733, |
| "step": 2225 |
| }, |
| { |
| "epoch": 2.507042253521127, |
| "grad_norm": 0.22188044096598836, |
| "learning_rate": 9.085213032581454e-06, |
| "loss": 0.2934, |
| "step": 2226 |
| }, |
| { |
| "epoch": 2.508169014084507, |
| "grad_norm": 0.2400515222028944, |
| "learning_rate": 9.064327485380117e-06, |
| "loss": 0.2751, |
| "step": 2227 |
| }, |
| { |
| "epoch": 2.5092957746478874, |
| "grad_norm": 1.1185047955601348, |
| "learning_rate": 9.04344193817878e-06, |
| "loss": 0.314, |
| "step": 2228 |
| }, |
| { |
| "epoch": 2.5104225352112675, |
| "grad_norm": 0.212220685527622, |
| "learning_rate": 9.022556390977444e-06, |
| "loss": 0.2547, |
| "step": 2229 |
| }, |
| { |
| "epoch": 2.511549295774648, |
| "grad_norm": 0.19524098475255758, |
| "learning_rate": 9.001670843776107e-06, |
| "loss": 0.2591, |
| "step": 2230 |
| }, |
| { |
| "epoch": 2.5126760563380284, |
| "grad_norm": 0.21921017676076446, |
| "learning_rate": 8.98078529657477e-06, |
| "loss": 0.2566, |
| "step": 2231 |
| }, |
| { |
| "epoch": 2.5138028169014084, |
| "grad_norm": 0.24290751995043525, |
| "learning_rate": 8.959899749373434e-06, |
| "loss": 0.2835, |
| "step": 2232 |
| }, |
| { |
| "epoch": 2.514929577464789, |
| "grad_norm": 0.2103964108730701, |
| "learning_rate": 8.939014202172098e-06, |
| "loss": 0.2669, |
| "step": 2233 |
| }, |
| { |
| "epoch": 2.516056338028169, |
| "grad_norm": 0.24321395392230583, |
| "learning_rate": 8.918128654970761e-06, |
| "loss": 0.2777, |
| "step": 2234 |
| }, |
| { |
| "epoch": 2.5171830985915493, |
| "grad_norm": 0.2193688002097274, |
| "learning_rate": 8.897243107769424e-06, |
| "loss": 0.2853, |
| "step": 2235 |
| }, |
| { |
| "epoch": 2.5183098591549298, |
| "grad_norm": 0.20127959943533763, |
| "learning_rate": 8.876357560568086e-06, |
| "loss": 0.2641, |
| "step": 2236 |
| }, |
| { |
| "epoch": 2.51943661971831, |
| "grad_norm": 0.23285952320693487, |
| "learning_rate": 8.85547201336675e-06, |
| "loss": 0.2828, |
| "step": 2237 |
| }, |
| { |
| "epoch": 2.5205633802816902, |
| "grad_norm": 0.24374568774168048, |
| "learning_rate": 8.834586466165413e-06, |
| "loss": 0.288, |
| "step": 2238 |
| }, |
| { |
| "epoch": 2.5216901408450703, |
| "grad_norm": 0.2076800484914963, |
| "learning_rate": 8.813700918964076e-06, |
| "loss": 0.2701, |
| "step": 2239 |
| }, |
| { |
| "epoch": 2.5228169014084507, |
| "grad_norm": 0.22275413320481321, |
| "learning_rate": 8.79281537176274e-06, |
| "loss": 0.2812, |
| "step": 2240 |
| }, |
| { |
| "epoch": 2.523943661971831, |
| "grad_norm": 0.2321323473115153, |
| "learning_rate": 8.771929824561403e-06, |
| "loss": 0.2847, |
| "step": 2241 |
| }, |
| { |
| "epoch": 2.525070422535211, |
| "grad_norm": 0.2261219354028944, |
| "learning_rate": 8.751044277360066e-06, |
| "loss": 0.272, |
| "step": 2242 |
| }, |
| { |
| "epoch": 2.5261971830985916, |
| "grad_norm": 0.2160864640984234, |
| "learning_rate": 8.73015873015873e-06, |
| "loss": 0.2738, |
| "step": 2243 |
| }, |
| { |
| "epoch": 2.5273239436619717, |
| "grad_norm": 0.20292547199970007, |
| "learning_rate": 8.709273182957393e-06, |
| "loss": 0.2775, |
| "step": 2244 |
| }, |
| { |
| "epoch": 2.528450704225352, |
| "grad_norm": 0.22177204904918363, |
| "learning_rate": 8.688387635756056e-06, |
| "loss": 0.2775, |
| "step": 2245 |
| }, |
| { |
| "epoch": 2.5295774647887326, |
| "grad_norm": 0.21710872413385202, |
| "learning_rate": 8.66750208855472e-06, |
| "loss": 0.2674, |
| "step": 2246 |
| }, |
| { |
| "epoch": 2.5307042253521126, |
| "grad_norm": 0.22683414573268926, |
| "learning_rate": 8.646616541353383e-06, |
| "loss": 0.282, |
| "step": 2247 |
| }, |
| { |
| "epoch": 2.531830985915493, |
| "grad_norm": 0.21072187393255432, |
| "learning_rate": 8.625730994152046e-06, |
| "loss": 0.2798, |
| "step": 2248 |
| }, |
| { |
| "epoch": 2.532957746478873, |
| "grad_norm": 0.2120038777625571, |
| "learning_rate": 8.60484544695071e-06, |
| "loss": 0.2687, |
| "step": 2249 |
| }, |
| { |
| "epoch": 2.5340845070422535, |
| "grad_norm": 0.21026382448537614, |
| "learning_rate": 8.583959899749373e-06, |
| "loss": 0.2743, |
| "step": 2250 |
| }, |
| { |
| "epoch": 2.535211267605634, |
| "grad_norm": 0.2106368497175309, |
| "learning_rate": 8.563074352548037e-06, |
| "loss": 0.2773, |
| "step": 2251 |
| }, |
| { |
| "epoch": 2.536338028169014, |
| "grad_norm": 0.20603896149924772, |
| "learning_rate": 8.5421888053467e-06, |
| "loss": 0.259, |
| "step": 2252 |
| }, |
| { |
| "epoch": 2.5374647887323944, |
| "grad_norm": 0.2118135718024562, |
| "learning_rate": 8.521303258145363e-06, |
| "loss": 0.2649, |
| "step": 2253 |
| }, |
| { |
| "epoch": 2.5385915492957745, |
| "grad_norm": 0.20585213804063746, |
| "learning_rate": 8.500417710944027e-06, |
| "loss": 0.2732, |
| "step": 2254 |
| }, |
| { |
| "epoch": 2.539718309859155, |
| "grad_norm": 0.20091087601640018, |
| "learning_rate": 8.47953216374269e-06, |
| "loss": 0.2717, |
| "step": 2255 |
| }, |
| { |
| "epoch": 2.5408450704225354, |
| "grad_norm": 0.21649323641482876, |
| "learning_rate": 8.458646616541353e-06, |
| "loss": 0.2666, |
| "step": 2256 |
| }, |
| { |
| "epoch": 2.5419718309859154, |
| "grad_norm": 0.20791601769314685, |
| "learning_rate": 8.437761069340017e-06, |
| "loss": 0.257, |
| "step": 2257 |
| }, |
| { |
| "epoch": 2.543098591549296, |
| "grad_norm": 0.20369955712165008, |
| "learning_rate": 8.41687552213868e-06, |
| "loss": 0.2602, |
| "step": 2258 |
| }, |
| { |
| "epoch": 2.544225352112676, |
| "grad_norm": 0.20343660503076466, |
| "learning_rate": 8.395989974937344e-06, |
| "loss": 0.2667, |
| "step": 2259 |
| }, |
| { |
| "epoch": 2.5453521126760563, |
| "grad_norm": 0.22171507016332392, |
| "learning_rate": 8.375104427736007e-06, |
| "loss": 0.2855, |
| "step": 2260 |
| }, |
| { |
| "epoch": 2.546478873239437, |
| "grad_norm": 0.21406067163893322, |
| "learning_rate": 8.35421888053467e-06, |
| "loss": 0.2695, |
| "step": 2261 |
| }, |
| { |
| "epoch": 2.547605633802817, |
| "grad_norm": 0.20333696608016158, |
| "learning_rate": 8.333333333333334e-06, |
| "loss": 0.2582, |
| "step": 2262 |
| }, |
| { |
| "epoch": 2.5487323943661973, |
| "grad_norm": 0.21157404363540527, |
| "learning_rate": 8.312447786131997e-06, |
| "loss": 0.2817, |
| "step": 2263 |
| }, |
| { |
| "epoch": 2.5498591549295773, |
| "grad_norm": 0.19044739916367748, |
| "learning_rate": 8.29156223893066e-06, |
| "loss": 0.2703, |
| "step": 2264 |
| }, |
| { |
| "epoch": 2.5509859154929577, |
| "grad_norm": 0.22007179198673688, |
| "learning_rate": 8.270676691729324e-06, |
| "loss": 0.278, |
| "step": 2265 |
| }, |
| { |
| "epoch": 2.552112676056338, |
| "grad_norm": 0.22748155158872424, |
| "learning_rate": 8.249791144527987e-06, |
| "loss": 0.2771, |
| "step": 2266 |
| }, |
| { |
| "epoch": 2.553239436619718, |
| "grad_norm": 0.22399759601611058, |
| "learning_rate": 8.22890559732665e-06, |
| "loss": 0.29, |
| "step": 2267 |
| }, |
| { |
| "epoch": 2.5543661971830987, |
| "grad_norm": 0.21124861709091, |
| "learning_rate": 8.208020050125314e-06, |
| "loss": 0.2808, |
| "step": 2268 |
| }, |
| { |
| "epoch": 2.5554929577464787, |
| "grad_norm": 0.20934774955328989, |
| "learning_rate": 8.187134502923977e-06, |
| "loss": 0.2775, |
| "step": 2269 |
| }, |
| { |
| "epoch": 2.556619718309859, |
| "grad_norm": 0.20286481973644913, |
| "learning_rate": 8.166248955722639e-06, |
| "loss": 0.2573, |
| "step": 2270 |
| }, |
| { |
| "epoch": 2.5577464788732396, |
| "grad_norm": 0.21768694947303274, |
| "learning_rate": 8.145363408521302e-06, |
| "loss": 0.2828, |
| "step": 2271 |
| }, |
| { |
| "epoch": 2.5588732394366196, |
| "grad_norm": 0.21611062790309854, |
| "learning_rate": 8.124477861319966e-06, |
| "loss": 0.2682, |
| "step": 2272 |
| }, |
| { |
| "epoch": 2.56, |
| "grad_norm": 0.20753855733262955, |
| "learning_rate": 8.10359231411863e-06, |
| "loss": 0.2581, |
| "step": 2273 |
| }, |
| { |
| "epoch": 2.56112676056338, |
| "grad_norm": 0.2263658555970021, |
| "learning_rate": 8.082706766917293e-06, |
| "loss": 0.2787, |
| "step": 2274 |
| }, |
| { |
| "epoch": 2.5622535211267605, |
| "grad_norm": 0.21119974183836335, |
| "learning_rate": 8.061821219715956e-06, |
| "loss": 0.2727, |
| "step": 2275 |
| }, |
| { |
| "epoch": 2.563380281690141, |
| "grad_norm": 0.24289625766805403, |
| "learning_rate": 8.04093567251462e-06, |
| "loss": 0.292, |
| "step": 2276 |
| }, |
| { |
| "epoch": 2.564507042253521, |
| "grad_norm": 0.20730419219969304, |
| "learning_rate": 8.020050125313283e-06, |
| "loss": 0.2759, |
| "step": 2277 |
| }, |
| { |
| "epoch": 2.5656338028169015, |
| "grad_norm": 0.20268603012888006, |
| "learning_rate": 7.999164578111946e-06, |
| "loss": 0.2846, |
| "step": 2278 |
| }, |
| { |
| "epoch": 2.5667605633802815, |
| "grad_norm": 0.24266035890675589, |
| "learning_rate": 7.97827903091061e-06, |
| "loss": 0.2744, |
| "step": 2279 |
| }, |
| { |
| "epoch": 2.567887323943662, |
| "grad_norm": 0.2255216230584472, |
| "learning_rate": 7.957393483709273e-06, |
| "loss": 0.2722, |
| "step": 2280 |
| }, |
| { |
| "epoch": 2.5690140845070424, |
| "grad_norm": 0.26969813040912644, |
| "learning_rate": 7.936507936507936e-06, |
| "loss": 0.2839, |
| "step": 2281 |
| }, |
| { |
| "epoch": 2.5701408450704224, |
| "grad_norm": 0.212315699062885, |
| "learning_rate": 7.9156223893066e-06, |
| "loss": 0.2742, |
| "step": 2282 |
| }, |
| { |
| "epoch": 2.571267605633803, |
| "grad_norm": 0.22140384088683024, |
| "learning_rate": 7.894736842105263e-06, |
| "loss": 0.2754, |
| "step": 2283 |
| }, |
| { |
| "epoch": 2.572394366197183, |
| "grad_norm": 0.22774101820964093, |
| "learning_rate": 7.873851294903926e-06, |
| "loss": 0.2759, |
| "step": 2284 |
| }, |
| { |
| "epoch": 2.5735211267605633, |
| "grad_norm": 0.21452111492793702, |
| "learning_rate": 7.85296574770259e-06, |
| "loss": 0.2711, |
| "step": 2285 |
| }, |
| { |
| "epoch": 2.574647887323944, |
| "grad_norm": 0.24422191612219893, |
| "learning_rate": 7.832080200501253e-06, |
| "loss": 0.2578, |
| "step": 2286 |
| }, |
| { |
| "epoch": 2.575774647887324, |
| "grad_norm": 0.21323333386316548, |
| "learning_rate": 7.811194653299916e-06, |
| "loss": 0.2771, |
| "step": 2287 |
| }, |
| { |
| "epoch": 2.5769014084507043, |
| "grad_norm": 0.23961948141565678, |
| "learning_rate": 7.79030910609858e-06, |
| "loss": 0.2861, |
| "step": 2288 |
| }, |
| { |
| "epoch": 2.5780281690140843, |
| "grad_norm": 0.23425870356594633, |
| "learning_rate": 7.769423558897243e-06, |
| "loss": 0.2421, |
| "step": 2289 |
| }, |
| { |
| "epoch": 2.5791549295774647, |
| "grad_norm": 0.1996419422515128, |
| "learning_rate": 7.748538011695907e-06, |
| "loss": 0.273, |
| "step": 2290 |
| }, |
| { |
| "epoch": 2.580281690140845, |
| "grad_norm": 0.20233534762369687, |
| "learning_rate": 7.72765246449457e-06, |
| "loss": 0.2703, |
| "step": 2291 |
| }, |
| { |
| "epoch": 2.581408450704225, |
| "grad_norm": 0.21712525623306356, |
| "learning_rate": 7.706766917293233e-06, |
| "loss": 0.2759, |
| "step": 2292 |
| }, |
| { |
| "epoch": 2.5825352112676057, |
| "grad_norm": 0.22883204279526698, |
| "learning_rate": 7.685881370091897e-06, |
| "loss": 0.2678, |
| "step": 2293 |
| }, |
| { |
| "epoch": 2.5836619718309857, |
| "grad_norm": 0.2204878352077323, |
| "learning_rate": 7.66499582289056e-06, |
| "loss": 0.275, |
| "step": 2294 |
| }, |
| { |
| "epoch": 2.584788732394366, |
| "grad_norm": 0.22141537977546438, |
| "learning_rate": 7.644110275689223e-06, |
| "loss": 0.2698, |
| "step": 2295 |
| }, |
| { |
| "epoch": 2.5859154929577466, |
| "grad_norm": 0.19423299809216446, |
| "learning_rate": 7.623224728487886e-06, |
| "loss": 0.2689, |
| "step": 2296 |
| }, |
| { |
| "epoch": 2.5870422535211266, |
| "grad_norm": 0.21596012544263102, |
| "learning_rate": 7.602339181286549e-06, |
| "loss": 0.2718, |
| "step": 2297 |
| }, |
| { |
| "epoch": 2.588169014084507, |
| "grad_norm": 0.21361017588707046, |
| "learning_rate": 7.581453634085213e-06, |
| "loss": 0.257, |
| "step": 2298 |
| }, |
| { |
| "epoch": 2.589295774647887, |
| "grad_norm": 0.2100094729735693, |
| "learning_rate": 7.560568086883876e-06, |
| "loss": 0.2728, |
| "step": 2299 |
| }, |
| { |
| "epoch": 2.5904225352112675, |
| "grad_norm": 0.23462890669836461, |
| "learning_rate": 7.5396825396825394e-06, |
| "loss": 0.2754, |
| "step": 2300 |
| }, |
| { |
| "epoch": 2.591549295774648, |
| "grad_norm": 0.22123571932368516, |
| "learning_rate": 7.518796992481203e-06, |
| "loss": 0.2865, |
| "step": 2301 |
| }, |
| { |
| "epoch": 2.5926760563380284, |
| "grad_norm": 0.20183983085558269, |
| "learning_rate": 7.497911445279866e-06, |
| "loss": 0.2539, |
| "step": 2302 |
| }, |
| { |
| "epoch": 2.5938028169014085, |
| "grad_norm": 0.21065368643205074, |
| "learning_rate": 7.4770258980785296e-06, |
| "loss": 0.2837, |
| "step": 2303 |
| }, |
| { |
| "epoch": 2.5949295774647885, |
| "grad_norm": 0.21056747302003648, |
| "learning_rate": 7.456140350877193e-06, |
| "loss": 0.2625, |
| "step": 2304 |
| }, |
| { |
| "epoch": 2.596056338028169, |
| "grad_norm": 0.2030094546961027, |
| "learning_rate": 7.435254803675856e-06, |
| "loss": 0.2737, |
| "step": 2305 |
| }, |
| { |
| "epoch": 2.5971830985915494, |
| "grad_norm": 0.21033048117190295, |
| "learning_rate": 7.41436925647452e-06, |
| "loss": 0.2701, |
| "step": 2306 |
| }, |
| { |
| "epoch": 2.59830985915493, |
| "grad_norm": 0.208205304479834, |
| "learning_rate": 7.393483709273183e-06, |
| "loss": 0.2885, |
| "step": 2307 |
| }, |
| { |
| "epoch": 2.59943661971831, |
| "grad_norm": 0.19802754212323007, |
| "learning_rate": 7.3725981620718464e-06, |
| "loss": 0.2615, |
| "step": 2308 |
| }, |
| { |
| "epoch": 2.60056338028169, |
| "grad_norm": 0.19863847632704087, |
| "learning_rate": 7.35171261487051e-06, |
| "loss": 0.2595, |
| "step": 2309 |
| }, |
| { |
| "epoch": 2.6016901408450703, |
| "grad_norm": 0.2060235191172473, |
| "learning_rate": 7.330827067669173e-06, |
| "loss": 0.2708, |
| "step": 2310 |
| }, |
| { |
| "epoch": 2.602816901408451, |
| "grad_norm": 0.19033175410373906, |
| "learning_rate": 7.3099415204678366e-06, |
| "loss": 0.2511, |
| "step": 2311 |
| }, |
| { |
| "epoch": 2.6039436619718312, |
| "grad_norm": 0.2273535376707046, |
| "learning_rate": 7.2890559732665e-06, |
| "loss": 0.305, |
| "step": 2312 |
| }, |
| { |
| "epoch": 2.6050704225352113, |
| "grad_norm": 0.21774376489548256, |
| "learning_rate": 7.2681704260651625e-06, |
| "loss": 0.2717, |
| "step": 2313 |
| }, |
| { |
| "epoch": 2.6061971830985913, |
| "grad_norm": 0.2123574175700146, |
| "learning_rate": 7.247284878863826e-06, |
| "loss": 0.2663, |
| "step": 2314 |
| }, |
| { |
| "epoch": 2.6073239436619717, |
| "grad_norm": 0.20251054768809612, |
| "learning_rate": 7.226399331662489e-06, |
| "loss": 0.2722, |
| "step": 2315 |
| }, |
| { |
| "epoch": 2.608450704225352, |
| "grad_norm": 0.20004193052090175, |
| "learning_rate": 7.205513784461153e-06, |
| "loss": 0.2744, |
| "step": 2316 |
| }, |
| { |
| "epoch": 2.6095774647887326, |
| "grad_norm": 0.2008765929340839, |
| "learning_rate": 7.184628237259816e-06, |
| "loss": 0.2707, |
| "step": 2317 |
| }, |
| { |
| "epoch": 2.6107042253521127, |
| "grad_norm": 0.20943784373994273, |
| "learning_rate": 7.163742690058479e-06, |
| "loss": 0.2806, |
| "step": 2318 |
| }, |
| { |
| "epoch": 2.611830985915493, |
| "grad_norm": 0.19336187833106966, |
| "learning_rate": 7.142857142857143e-06, |
| "loss": 0.2567, |
| "step": 2319 |
| }, |
| { |
| "epoch": 2.612957746478873, |
| "grad_norm": 0.20249874567077011, |
| "learning_rate": 7.121971595655806e-06, |
| "loss": 0.2821, |
| "step": 2320 |
| }, |
| { |
| "epoch": 2.6140845070422536, |
| "grad_norm": 0.2038952398146593, |
| "learning_rate": 7.1010860484544695e-06, |
| "loss": 0.2647, |
| "step": 2321 |
| }, |
| { |
| "epoch": 2.615211267605634, |
| "grad_norm": 0.199090713946215, |
| "learning_rate": 7.080200501253133e-06, |
| "loss": 0.2597, |
| "step": 2322 |
| }, |
| { |
| "epoch": 2.616338028169014, |
| "grad_norm": 0.20062706496466548, |
| "learning_rate": 7.059314954051796e-06, |
| "loss": 0.2536, |
| "step": 2323 |
| }, |
| { |
| "epoch": 2.6174647887323945, |
| "grad_norm": 0.21265638038127999, |
| "learning_rate": 7.03842940685046e-06, |
| "loss": 0.2735, |
| "step": 2324 |
| }, |
| { |
| "epoch": 2.6185915492957745, |
| "grad_norm": 0.2269643737061253, |
| "learning_rate": 7.017543859649123e-06, |
| "loss": 0.2748, |
| "step": 2325 |
| }, |
| { |
| "epoch": 2.619718309859155, |
| "grad_norm": 0.21587988309043532, |
| "learning_rate": 6.996658312447786e-06, |
| "loss": 0.2694, |
| "step": 2326 |
| }, |
| { |
| "epoch": 2.6208450704225354, |
| "grad_norm": 0.21524910286307244, |
| "learning_rate": 6.97577276524645e-06, |
| "loss": 0.289, |
| "step": 2327 |
| }, |
| { |
| "epoch": 2.6219718309859155, |
| "grad_norm": 0.2131855827627971, |
| "learning_rate": 6.954887218045113e-06, |
| "loss": 0.2662, |
| "step": 2328 |
| }, |
| { |
| "epoch": 2.623098591549296, |
| "grad_norm": 0.22605902643475048, |
| "learning_rate": 6.9340016708437765e-06, |
| "loss": 0.2739, |
| "step": 2329 |
| }, |
| { |
| "epoch": 2.624225352112676, |
| "grad_norm": 0.21724567124408525, |
| "learning_rate": 6.913116123642439e-06, |
| "loss": 0.2742, |
| "step": 2330 |
| }, |
| { |
| "epoch": 2.6253521126760564, |
| "grad_norm": 0.2462430805017593, |
| "learning_rate": 6.892230576441102e-06, |
| "loss": 0.2803, |
| "step": 2331 |
| }, |
| { |
| "epoch": 2.626478873239437, |
| "grad_norm": 0.2518622667237682, |
| "learning_rate": 6.871345029239766e-06, |
| "loss": 0.2846, |
| "step": 2332 |
| }, |
| { |
| "epoch": 2.627605633802817, |
| "grad_norm": 0.21629583479800257, |
| "learning_rate": 6.850459482038429e-06, |
| "loss": 0.2822, |
| "step": 2333 |
| }, |
| { |
| "epoch": 2.6287323943661973, |
| "grad_norm": 0.1996507832762662, |
| "learning_rate": 6.8295739348370925e-06, |
| "loss": 0.2508, |
| "step": 2334 |
| }, |
| { |
| "epoch": 2.6298591549295773, |
| "grad_norm": 0.20795118877473084, |
| "learning_rate": 6.808688387635756e-06, |
| "loss": 0.2697, |
| "step": 2335 |
| }, |
| { |
| "epoch": 2.630985915492958, |
| "grad_norm": 0.22770627889631342, |
| "learning_rate": 6.787802840434419e-06, |
| "loss": 0.2612, |
| "step": 2336 |
| }, |
| { |
| "epoch": 2.6321126760563383, |
| "grad_norm": 0.20644557543972056, |
| "learning_rate": 6.766917293233083e-06, |
| "loss": 0.2565, |
| "step": 2337 |
| }, |
| { |
| "epoch": 2.6332394366197183, |
| "grad_norm": 0.21461015321494206, |
| "learning_rate": 6.746031746031746e-06, |
| "loss": 0.27, |
| "step": 2338 |
| }, |
| { |
| "epoch": 2.6343661971830987, |
| "grad_norm": 0.19732503779094568, |
| "learning_rate": 6.725146198830409e-06, |
| "loss": 0.2634, |
| "step": 2339 |
| }, |
| { |
| "epoch": 2.6354929577464787, |
| "grad_norm": 0.26017499658400894, |
| "learning_rate": 6.704260651629073e-06, |
| "loss": 0.2706, |
| "step": 2340 |
| }, |
| { |
| "epoch": 2.636619718309859, |
| "grad_norm": 0.21222354482286887, |
| "learning_rate": 6.683375104427736e-06, |
| "loss": 0.2671, |
| "step": 2341 |
| }, |
| { |
| "epoch": 2.6377464788732397, |
| "grad_norm": 0.20910643969965562, |
| "learning_rate": 6.6624895572263995e-06, |
| "loss": 0.2754, |
| "step": 2342 |
| }, |
| { |
| "epoch": 2.6388732394366197, |
| "grad_norm": 0.2191016125969615, |
| "learning_rate": 6.641604010025063e-06, |
| "loss": 0.2666, |
| "step": 2343 |
| }, |
| { |
| "epoch": 2.64, |
| "grad_norm": 0.2335065855062414, |
| "learning_rate": 6.620718462823726e-06, |
| "loss": 0.2685, |
| "step": 2344 |
| }, |
| { |
| "epoch": 2.64112676056338, |
| "grad_norm": 0.2510005721444893, |
| "learning_rate": 6.59983291562239e-06, |
| "loss": 0.2665, |
| "step": 2345 |
| }, |
| { |
| "epoch": 2.6422535211267606, |
| "grad_norm": 0.20280531897037118, |
| "learning_rate": 6.578947368421053e-06, |
| "loss": 0.2803, |
| "step": 2346 |
| }, |
| { |
| "epoch": 2.643380281690141, |
| "grad_norm": 0.20905561403592426, |
| "learning_rate": 6.5580618212197155e-06, |
| "loss": 0.2668, |
| "step": 2347 |
| }, |
| { |
| "epoch": 2.644507042253521, |
| "grad_norm": 0.2263015920784223, |
| "learning_rate": 6.537176274018379e-06, |
| "loss": 0.2829, |
| "step": 2348 |
| }, |
| { |
| "epoch": 2.6456338028169015, |
| "grad_norm": 0.20955235757852655, |
| "learning_rate": 6.516290726817042e-06, |
| "loss": 0.2805, |
| "step": 2349 |
| }, |
| { |
| "epoch": 2.6467605633802815, |
| "grad_norm": 0.224891839804344, |
| "learning_rate": 6.495405179615706e-06, |
| "loss": 0.2785, |
| "step": 2350 |
| }, |
| { |
| "epoch": 2.647887323943662, |
| "grad_norm": 0.2514145879480467, |
| "learning_rate": 6.474519632414369e-06, |
| "loss": 0.2702, |
| "step": 2351 |
| }, |
| { |
| "epoch": 2.6490140845070425, |
| "grad_norm": 0.20702753379991554, |
| "learning_rate": 6.453634085213032e-06, |
| "loss": 0.2782, |
| "step": 2352 |
| }, |
| { |
| "epoch": 2.6501408450704225, |
| "grad_norm": 0.2076677000213851, |
| "learning_rate": 6.432748538011696e-06, |
| "loss": 0.2789, |
| "step": 2353 |
| }, |
| { |
| "epoch": 2.651267605633803, |
| "grad_norm": 0.20277332239505097, |
| "learning_rate": 6.411862990810359e-06, |
| "loss": 0.2735, |
| "step": 2354 |
| }, |
| { |
| "epoch": 2.652394366197183, |
| "grad_norm": 0.20395286758663664, |
| "learning_rate": 6.3909774436090225e-06, |
| "loss": 0.2563, |
| "step": 2355 |
| }, |
| { |
| "epoch": 2.6535211267605634, |
| "grad_norm": 0.20906280386651896, |
| "learning_rate": 6.370091896407686e-06, |
| "loss": 0.2599, |
| "step": 2356 |
| }, |
| { |
| "epoch": 2.654647887323944, |
| "grad_norm": 0.19817976429154502, |
| "learning_rate": 6.349206349206349e-06, |
| "loss": 0.266, |
| "step": 2357 |
| }, |
| { |
| "epoch": 2.655774647887324, |
| "grad_norm": 0.20810058909422713, |
| "learning_rate": 6.328320802005013e-06, |
| "loss": 0.2783, |
| "step": 2358 |
| }, |
| { |
| "epoch": 2.6569014084507043, |
| "grad_norm": 0.20241203375584763, |
| "learning_rate": 6.307435254803676e-06, |
| "loss": 0.261, |
| "step": 2359 |
| }, |
| { |
| "epoch": 2.6580281690140843, |
| "grad_norm": 0.2118363720707042, |
| "learning_rate": 6.286549707602339e-06, |
| "loss": 0.2678, |
| "step": 2360 |
| }, |
| { |
| "epoch": 2.659154929577465, |
| "grad_norm": 0.19848641252564173, |
| "learning_rate": 6.265664160401003e-06, |
| "loss": 0.271, |
| "step": 2361 |
| }, |
| { |
| "epoch": 2.6602816901408453, |
| "grad_norm": 0.20026019201154394, |
| "learning_rate": 6.244778613199666e-06, |
| "loss": 0.2791, |
| "step": 2362 |
| }, |
| { |
| "epoch": 2.6614084507042253, |
| "grad_norm": 0.20953873693762937, |
| "learning_rate": 6.2238930659983295e-06, |
| "loss": 0.2867, |
| "step": 2363 |
| }, |
| { |
| "epoch": 2.6625352112676057, |
| "grad_norm": 0.20698398472666224, |
| "learning_rate": 6.203007518796992e-06, |
| "loss": 0.278, |
| "step": 2364 |
| }, |
| { |
| "epoch": 2.6636619718309857, |
| "grad_norm": 0.19878641417622853, |
| "learning_rate": 6.182121971595655e-06, |
| "loss": 0.2652, |
| "step": 2365 |
| }, |
| { |
| "epoch": 2.664788732394366, |
| "grad_norm": 0.201533174808496, |
| "learning_rate": 6.161236424394319e-06, |
| "loss": 0.2692, |
| "step": 2366 |
| }, |
| { |
| "epoch": 2.6659154929577467, |
| "grad_norm": 0.1972095561695125, |
| "learning_rate": 6.140350877192982e-06, |
| "loss": 0.2772, |
| "step": 2367 |
| }, |
| { |
| "epoch": 2.6670422535211267, |
| "grad_norm": 0.20294816458612344, |
| "learning_rate": 6.1194653299916455e-06, |
| "loss": 0.2634, |
| "step": 2368 |
| }, |
| { |
| "epoch": 2.668169014084507, |
| "grad_norm": 0.2039384476289958, |
| "learning_rate": 6.098579782790309e-06, |
| "loss": 0.2702, |
| "step": 2369 |
| }, |
| { |
| "epoch": 2.669295774647887, |
| "grad_norm": 0.20279326725097785, |
| "learning_rate": 6.077694235588972e-06, |
| "loss": 0.2618, |
| "step": 2370 |
| }, |
| { |
| "epoch": 2.6704225352112676, |
| "grad_norm": 0.18879396983382535, |
| "learning_rate": 6.056808688387636e-06, |
| "loss": 0.2528, |
| "step": 2371 |
| }, |
| { |
| "epoch": 2.671549295774648, |
| "grad_norm": 0.1959472061397774, |
| "learning_rate": 6.035923141186299e-06, |
| "loss": 0.2681, |
| "step": 2372 |
| }, |
| { |
| "epoch": 2.672676056338028, |
| "grad_norm": 0.2021374855693646, |
| "learning_rate": 6.015037593984962e-06, |
| "loss": 0.2621, |
| "step": 2373 |
| }, |
| { |
| "epoch": 2.6738028169014085, |
| "grad_norm": 0.20173353152168114, |
| "learning_rate": 5.994152046783626e-06, |
| "loss": 0.2814, |
| "step": 2374 |
| }, |
| { |
| "epoch": 2.6749295774647885, |
| "grad_norm": 0.189284667490437, |
| "learning_rate": 5.973266499582289e-06, |
| "loss": 0.2591, |
| "step": 2375 |
| }, |
| { |
| "epoch": 2.676056338028169, |
| "grad_norm": 0.19970317017407754, |
| "learning_rate": 5.9523809523809525e-06, |
| "loss": 0.2679, |
| "step": 2376 |
| }, |
| { |
| "epoch": 2.6771830985915495, |
| "grad_norm": 0.20682008244281408, |
| "learning_rate": 5.931495405179616e-06, |
| "loss": 0.2551, |
| "step": 2377 |
| }, |
| { |
| "epoch": 2.6783098591549295, |
| "grad_norm": 0.19441488497018497, |
| "learning_rate": 5.910609857978279e-06, |
| "loss": 0.2694, |
| "step": 2378 |
| }, |
| { |
| "epoch": 2.67943661971831, |
| "grad_norm": 0.2339475027598514, |
| "learning_rate": 5.889724310776943e-06, |
| "loss": 0.2738, |
| "step": 2379 |
| }, |
| { |
| "epoch": 2.68056338028169, |
| "grad_norm": 0.18963715125286926, |
| "learning_rate": 5.868838763575606e-06, |
| "loss": 0.269, |
| "step": 2380 |
| }, |
| { |
| "epoch": 2.6816901408450704, |
| "grad_norm": 0.19746564345134807, |
| "learning_rate": 5.8479532163742686e-06, |
| "loss": 0.2539, |
| "step": 2381 |
| }, |
| { |
| "epoch": 2.682816901408451, |
| "grad_norm": 0.21340096415199547, |
| "learning_rate": 5.827067669172932e-06, |
| "loss": 0.2496, |
| "step": 2382 |
| }, |
| { |
| "epoch": 2.683943661971831, |
| "grad_norm": 0.2076517092942985, |
| "learning_rate": 5.806182121971595e-06, |
| "loss": 0.2711, |
| "step": 2383 |
| }, |
| { |
| "epoch": 2.6850704225352113, |
| "grad_norm": 0.21085166652762866, |
| "learning_rate": 5.785296574770259e-06, |
| "loss": 0.2678, |
| "step": 2384 |
| }, |
| { |
| "epoch": 2.6861971830985913, |
| "grad_norm": 0.20858055414942492, |
| "learning_rate": 5.764411027568922e-06, |
| "loss": 0.2768, |
| "step": 2385 |
| }, |
| { |
| "epoch": 2.687323943661972, |
| "grad_norm": 0.2020164011881153, |
| "learning_rate": 5.7435254803675854e-06, |
| "loss": 0.2806, |
| "step": 2386 |
| }, |
| { |
| "epoch": 2.6884507042253523, |
| "grad_norm": 0.199726718314877, |
| "learning_rate": 5.722639933166249e-06, |
| "loss": 0.2715, |
| "step": 2387 |
| }, |
| { |
| "epoch": 2.6895774647887323, |
| "grad_norm": 0.2080107369414713, |
| "learning_rate": 5.701754385964912e-06, |
| "loss": 0.276, |
| "step": 2388 |
| }, |
| { |
| "epoch": 2.6907042253521127, |
| "grad_norm": 0.1979896245434823, |
| "learning_rate": 5.6808688387635756e-06, |
| "loss": 0.2817, |
| "step": 2389 |
| }, |
| { |
| "epoch": 2.6918309859154927, |
| "grad_norm": 0.20562546122155004, |
| "learning_rate": 5.659983291562239e-06, |
| "loss": 0.2704, |
| "step": 2390 |
| }, |
| { |
| "epoch": 2.692957746478873, |
| "grad_norm": 0.20470550805401863, |
| "learning_rate": 5.639097744360902e-06, |
| "loss": 0.2771, |
| "step": 2391 |
| }, |
| { |
| "epoch": 2.6940845070422537, |
| "grad_norm": 0.19781324737141767, |
| "learning_rate": 5.618212197159566e-06, |
| "loss": 0.2759, |
| "step": 2392 |
| }, |
| { |
| "epoch": 2.6952112676056337, |
| "grad_norm": 0.20254476618474254, |
| "learning_rate": 5.597326649958229e-06, |
| "loss": 0.2704, |
| "step": 2393 |
| }, |
| { |
| "epoch": 2.696338028169014, |
| "grad_norm": 0.20992790173543557, |
| "learning_rate": 5.5764411027568924e-06, |
| "loss": 0.2839, |
| "step": 2394 |
| }, |
| { |
| "epoch": 2.697464788732394, |
| "grad_norm": 0.1954454347239477, |
| "learning_rate": 5.555555555555556e-06, |
| "loss": 0.2721, |
| "step": 2395 |
| }, |
| { |
| "epoch": 2.6985915492957746, |
| "grad_norm": 0.2071654200170682, |
| "learning_rate": 5.534670008354219e-06, |
| "loss": 0.2752, |
| "step": 2396 |
| }, |
| { |
| "epoch": 2.699718309859155, |
| "grad_norm": 0.2242536610374017, |
| "learning_rate": 5.5137844611528826e-06, |
| "loss": 0.2762, |
| "step": 2397 |
| }, |
| { |
| "epoch": 2.700845070422535, |
| "grad_norm": 0.1907324431506242, |
| "learning_rate": 5.492898913951545e-06, |
| "loss": 0.2771, |
| "step": 2398 |
| }, |
| { |
| "epoch": 2.7019718309859155, |
| "grad_norm": 0.19484330126498606, |
| "learning_rate": 5.4720133667502085e-06, |
| "loss": 0.2706, |
| "step": 2399 |
| }, |
| { |
| "epoch": 2.7030985915492955, |
| "grad_norm": 0.20695536111287274, |
| "learning_rate": 5.451127819548872e-06, |
| "loss": 0.281, |
| "step": 2400 |
| }, |
| { |
| "epoch": 2.704225352112676, |
| "grad_norm": 0.20319374269659216, |
| "learning_rate": 5.430242272347535e-06, |
| "loss": 0.2685, |
| "step": 2401 |
| }, |
| { |
| "epoch": 2.7053521126760565, |
| "grad_norm": 0.21148004686086, |
| "learning_rate": 5.409356725146199e-06, |
| "loss": 0.2799, |
| "step": 2402 |
| }, |
| { |
| "epoch": 2.7064788732394365, |
| "grad_norm": 0.21311727421898696, |
| "learning_rate": 5.388471177944862e-06, |
| "loss": 0.2714, |
| "step": 2403 |
| }, |
| { |
| "epoch": 2.707605633802817, |
| "grad_norm": 0.22574161475977417, |
| "learning_rate": 5.367585630743525e-06, |
| "loss": 0.2767, |
| "step": 2404 |
| }, |
| { |
| "epoch": 2.708732394366197, |
| "grad_norm": 0.20029944977930578, |
| "learning_rate": 5.346700083542189e-06, |
| "loss": 0.2728, |
| "step": 2405 |
| }, |
| { |
| "epoch": 2.7098591549295774, |
| "grad_norm": 1.1118092180835053, |
| "learning_rate": 5.325814536340852e-06, |
| "loss": 0.3041, |
| "step": 2406 |
| }, |
| { |
| "epoch": 2.710985915492958, |
| "grad_norm": 0.19540775371133393, |
| "learning_rate": 5.3049289891395155e-06, |
| "loss": 0.2557, |
| "step": 2407 |
| }, |
| { |
| "epoch": 2.712112676056338, |
| "grad_norm": 0.23050010758305498, |
| "learning_rate": 5.284043441938179e-06, |
| "loss": 0.2738, |
| "step": 2408 |
| }, |
| { |
| "epoch": 2.7132394366197183, |
| "grad_norm": 0.21861879662954065, |
| "learning_rate": 5.263157894736842e-06, |
| "loss": 0.2627, |
| "step": 2409 |
| }, |
| { |
| "epoch": 2.7143661971830984, |
| "grad_norm": 0.2412625976478493, |
| "learning_rate": 5.242272347535506e-06, |
| "loss": 0.2972, |
| "step": 2410 |
| }, |
| { |
| "epoch": 2.715492957746479, |
| "grad_norm": 0.21315491181399193, |
| "learning_rate": 5.221386800334169e-06, |
| "loss": 0.2598, |
| "step": 2411 |
| }, |
| { |
| "epoch": 2.7166197183098593, |
| "grad_norm": 0.21801887717412052, |
| "learning_rate": 5.200501253132832e-06, |
| "loss": 0.277, |
| "step": 2412 |
| }, |
| { |
| "epoch": 2.7177464788732393, |
| "grad_norm": 0.2288078040701529, |
| "learning_rate": 5.179615705931496e-06, |
| "loss": 0.2709, |
| "step": 2413 |
| }, |
| { |
| "epoch": 2.7188732394366197, |
| "grad_norm": 0.24229940991558882, |
| "learning_rate": 5.158730158730159e-06, |
| "loss": 0.2763, |
| "step": 2414 |
| }, |
| { |
| "epoch": 2.7199999999999998, |
| "grad_norm": 0.20780929808380763, |
| "learning_rate": 5.137844611528822e-06, |
| "loss": 0.2587, |
| "step": 2415 |
| }, |
| { |
| "epoch": 2.72112676056338, |
| "grad_norm": 0.1965927790337949, |
| "learning_rate": 5.116959064327485e-06, |
| "loss": 0.2559, |
| "step": 2416 |
| }, |
| { |
| "epoch": 2.7222535211267607, |
| "grad_norm": 0.21812181825805288, |
| "learning_rate": 5.096073517126148e-06, |
| "loss": 0.2717, |
| "step": 2417 |
| }, |
| { |
| "epoch": 2.7233802816901407, |
| "grad_norm": 0.20356344576897678, |
| "learning_rate": 5.075187969924812e-06, |
| "loss": 0.2644, |
| "step": 2418 |
| }, |
| { |
| "epoch": 2.724507042253521, |
| "grad_norm": 0.21794435243405022, |
| "learning_rate": 5.054302422723475e-06, |
| "loss": 0.2972, |
| "step": 2419 |
| }, |
| { |
| "epoch": 2.725633802816901, |
| "grad_norm": 0.20482210124602934, |
| "learning_rate": 5.0334168755221385e-06, |
| "loss": 0.26, |
| "step": 2420 |
| }, |
| { |
| "epoch": 2.7267605633802816, |
| "grad_norm": 0.22141978206467378, |
| "learning_rate": 5.012531328320802e-06, |
| "loss": 0.2792, |
| "step": 2421 |
| }, |
| { |
| "epoch": 2.727887323943662, |
| "grad_norm": 0.20919522927581857, |
| "learning_rate": 4.991645781119465e-06, |
| "loss": 0.2684, |
| "step": 2422 |
| }, |
| { |
| "epoch": 2.7290140845070425, |
| "grad_norm": 0.2216358246660207, |
| "learning_rate": 4.970760233918129e-06, |
| "loss": 0.2699, |
| "step": 2423 |
| }, |
| { |
| "epoch": 2.7301408450704225, |
| "grad_norm": 0.1969750748283934, |
| "learning_rate": 4.949874686716792e-06, |
| "loss": 0.2809, |
| "step": 2424 |
| }, |
| { |
| "epoch": 2.7312676056338026, |
| "grad_norm": 0.2079510314461725, |
| "learning_rate": 4.928989139515455e-06, |
| "loss": 0.2696, |
| "step": 2425 |
| }, |
| { |
| "epoch": 2.732394366197183, |
| "grad_norm": 0.194241562661248, |
| "learning_rate": 4.908103592314119e-06, |
| "loss": 0.2774, |
| "step": 2426 |
| }, |
| { |
| "epoch": 2.7335211267605635, |
| "grad_norm": 0.2026621133239319, |
| "learning_rate": 4.887218045112782e-06, |
| "loss": 0.2725, |
| "step": 2427 |
| }, |
| { |
| "epoch": 2.734647887323944, |
| "grad_norm": 0.20101449945353245, |
| "learning_rate": 4.8663324979114455e-06, |
| "loss": 0.2653, |
| "step": 2428 |
| }, |
| { |
| "epoch": 2.735774647887324, |
| "grad_norm": 0.2006548082586194, |
| "learning_rate": 4.845446950710109e-06, |
| "loss": 0.2726, |
| "step": 2429 |
| }, |
| { |
| "epoch": 2.736901408450704, |
| "grad_norm": 0.20515002494448348, |
| "learning_rate": 4.824561403508772e-06, |
| "loss": 0.2752, |
| "step": 2430 |
| }, |
| { |
| "epoch": 2.7380281690140844, |
| "grad_norm": 0.19443305928747673, |
| "learning_rate": 4.803675856307436e-06, |
| "loss": 0.26, |
| "step": 2431 |
| }, |
| { |
| "epoch": 2.739154929577465, |
| "grad_norm": 0.24936926145200294, |
| "learning_rate": 4.782790309106098e-06, |
| "loss": 0.2781, |
| "step": 2432 |
| }, |
| { |
| "epoch": 2.7402816901408453, |
| "grad_norm": 0.19672033980425266, |
| "learning_rate": 4.7619047619047615e-06, |
| "loss": 0.259, |
| "step": 2433 |
| }, |
| { |
| "epoch": 2.7414084507042253, |
| "grad_norm": 0.1925496189912426, |
| "learning_rate": 4.741019214703425e-06, |
| "loss": 0.2696, |
| "step": 2434 |
| }, |
| { |
| "epoch": 2.742535211267606, |
| "grad_norm": 0.2025168089756278, |
| "learning_rate": 4.720133667502088e-06, |
| "loss": 0.2702, |
| "step": 2435 |
| }, |
| { |
| "epoch": 2.743661971830986, |
| "grad_norm": 0.19985176043299094, |
| "learning_rate": 4.699248120300752e-06, |
| "loss": 0.2745, |
| "step": 2436 |
| }, |
| { |
| "epoch": 2.7447887323943663, |
| "grad_norm": 0.19981795334353009, |
| "learning_rate": 4.678362573099415e-06, |
| "loss": 0.2629, |
| "step": 2437 |
| }, |
| { |
| "epoch": 2.7459154929577467, |
| "grad_norm": 0.19927027839419753, |
| "learning_rate": 4.657477025898078e-06, |
| "loss": 0.2831, |
| "step": 2438 |
| }, |
| { |
| "epoch": 2.7470422535211267, |
| "grad_norm": 0.19225763956546674, |
| "learning_rate": 4.636591478696742e-06, |
| "loss": 0.262, |
| "step": 2439 |
| }, |
| { |
| "epoch": 2.748169014084507, |
| "grad_norm": 0.2040307365886815, |
| "learning_rate": 4.615705931495405e-06, |
| "loss": 0.2765, |
| "step": 2440 |
| }, |
| { |
| "epoch": 2.749295774647887, |
| "grad_norm": 0.19287934723693517, |
| "learning_rate": 4.5948203842940685e-06, |
| "loss": 0.2668, |
| "step": 2441 |
| }, |
| { |
| "epoch": 2.7504225352112677, |
| "grad_norm": 0.19461542697134238, |
| "learning_rate": 4.573934837092732e-06, |
| "loss": 0.2711, |
| "step": 2442 |
| }, |
| { |
| "epoch": 2.751549295774648, |
| "grad_norm": 0.1941443062206399, |
| "learning_rate": 4.553049289891395e-06, |
| "loss": 0.2757, |
| "step": 2443 |
| }, |
| { |
| "epoch": 2.752676056338028, |
| "grad_norm": 0.20276456368730925, |
| "learning_rate": 4.532163742690059e-06, |
| "loss": 0.2645, |
| "step": 2444 |
| }, |
| { |
| "epoch": 2.7538028169014086, |
| "grad_norm": 0.20240283977029078, |
| "learning_rate": 4.511278195488722e-06, |
| "loss": 0.2737, |
| "step": 2445 |
| }, |
| { |
| "epoch": 2.7549295774647886, |
| "grad_norm": 0.19047012180382308, |
| "learning_rate": 4.490392648287385e-06, |
| "loss": 0.2797, |
| "step": 2446 |
| }, |
| { |
| "epoch": 2.756056338028169, |
| "grad_norm": 0.19677082956627245, |
| "learning_rate": 4.469507101086049e-06, |
| "loss": 0.2697, |
| "step": 2447 |
| }, |
| { |
| "epoch": 2.7571830985915495, |
| "grad_norm": 0.18796618933611656, |
| "learning_rate": 4.448621553884712e-06, |
| "loss": 0.2751, |
| "step": 2448 |
| }, |
| { |
| "epoch": 2.7583098591549295, |
| "grad_norm": 0.18958326345231097, |
| "learning_rate": 4.427736006683375e-06, |
| "loss": 0.2764, |
| "step": 2449 |
| }, |
| { |
| "epoch": 2.75943661971831, |
| "grad_norm": 0.20261814792556176, |
| "learning_rate": 4.406850459482038e-06, |
| "loss": 0.271, |
| "step": 2450 |
| }, |
| { |
| "epoch": 2.76056338028169, |
| "grad_norm": 0.1807739197575019, |
| "learning_rate": 4.3859649122807014e-06, |
| "loss": 0.2473, |
| "step": 2451 |
| }, |
| { |
| "epoch": 2.7616901408450705, |
| "grad_norm": 0.20748841468743903, |
| "learning_rate": 4.365079365079365e-06, |
| "loss": 0.2913, |
| "step": 2452 |
| }, |
| { |
| "epoch": 2.762816901408451, |
| "grad_norm": 0.20357770127659672, |
| "learning_rate": 4.344193817878028e-06, |
| "loss": 0.2691, |
| "step": 2453 |
| }, |
| { |
| "epoch": 2.763943661971831, |
| "grad_norm": 0.1954035900950154, |
| "learning_rate": 4.3233082706766916e-06, |
| "loss": 0.2687, |
| "step": 2454 |
| }, |
| { |
| "epoch": 2.7650704225352114, |
| "grad_norm": 0.20788295930137504, |
| "learning_rate": 4.302422723475355e-06, |
| "loss": 0.2782, |
| "step": 2455 |
| }, |
| { |
| "epoch": 2.7661971830985914, |
| "grad_norm": 0.20075359582721036, |
| "learning_rate": 4.281537176274018e-06, |
| "loss": 0.2785, |
| "step": 2456 |
| }, |
| { |
| "epoch": 2.767323943661972, |
| "grad_norm": 0.1948372109824818, |
| "learning_rate": 4.260651629072682e-06, |
| "loss": 0.2773, |
| "step": 2457 |
| }, |
| { |
| "epoch": 2.7684507042253523, |
| "grad_norm": 0.18984572990853224, |
| "learning_rate": 4.239766081871345e-06, |
| "loss": 0.2715, |
| "step": 2458 |
| }, |
| { |
| "epoch": 2.7695774647887323, |
| "grad_norm": 0.2867708967764196, |
| "learning_rate": 4.2188805346700084e-06, |
| "loss": 0.2768, |
| "step": 2459 |
| }, |
| { |
| "epoch": 2.770704225352113, |
| "grad_norm": 0.18287769839140158, |
| "learning_rate": 4.197994987468672e-06, |
| "loss": 0.2643, |
| "step": 2460 |
| }, |
| { |
| "epoch": 2.771830985915493, |
| "grad_norm": 0.20179759422784257, |
| "learning_rate": 4.177109440267335e-06, |
| "loss": 0.2711, |
| "step": 2461 |
| }, |
| { |
| "epoch": 2.7729577464788733, |
| "grad_norm": 0.18468214491025223, |
| "learning_rate": 4.1562238930659986e-06, |
| "loss": 0.2591, |
| "step": 2462 |
| }, |
| { |
| "epoch": 2.7740845070422537, |
| "grad_norm": 0.19385557248568827, |
| "learning_rate": 4.135338345864662e-06, |
| "loss": 0.27, |
| "step": 2463 |
| }, |
| { |
| "epoch": 2.7752112676056337, |
| "grad_norm": 0.19088025686792837, |
| "learning_rate": 4.114452798663325e-06, |
| "loss": 0.276, |
| "step": 2464 |
| }, |
| { |
| "epoch": 2.776338028169014, |
| "grad_norm": 0.19939344539211365, |
| "learning_rate": 4.093567251461989e-06, |
| "loss": 0.2698, |
| "step": 2465 |
| }, |
| { |
| "epoch": 2.777464788732394, |
| "grad_norm": 0.20468133899712737, |
| "learning_rate": 4.072681704260651e-06, |
| "loss": 0.2623, |
| "step": 2466 |
| }, |
| { |
| "epoch": 2.7785915492957747, |
| "grad_norm": 0.19499115099357528, |
| "learning_rate": 4.051796157059315e-06, |
| "loss": 0.2758, |
| "step": 2467 |
| }, |
| { |
| "epoch": 2.779718309859155, |
| "grad_norm": 0.19242817971755558, |
| "learning_rate": 4.030910609857978e-06, |
| "loss": 0.2738, |
| "step": 2468 |
| }, |
| { |
| "epoch": 2.780845070422535, |
| "grad_norm": 0.2064820572185044, |
| "learning_rate": 4.010025062656641e-06, |
| "loss": 0.2658, |
| "step": 2469 |
| }, |
| { |
| "epoch": 2.7819718309859156, |
| "grad_norm": 0.18478105216990628, |
| "learning_rate": 3.989139515455305e-06, |
| "loss": 0.2525, |
| "step": 2470 |
| }, |
| { |
| "epoch": 2.7830985915492956, |
| "grad_norm": 0.1923498212779712, |
| "learning_rate": 3.968253968253968e-06, |
| "loss": 0.2715, |
| "step": 2471 |
| }, |
| { |
| "epoch": 2.784225352112676, |
| "grad_norm": 0.18828664531802047, |
| "learning_rate": 3.9473684210526315e-06, |
| "loss": 0.2728, |
| "step": 2472 |
| }, |
| { |
| "epoch": 2.7853521126760565, |
| "grad_norm": 0.20210623933644906, |
| "learning_rate": 3.926482873851295e-06, |
| "loss": 0.2861, |
| "step": 2473 |
| }, |
| { |
| "epoch": 2.7864788732394365, |
| "grad_norm": 0.19619598558040613, |
| "learning_rate": 3.905597326649958e-06, |
| "loss": 0.2789, |
| "step": 2474 |
| }, |
| { |
| "epoch": 2.787605633802817, |
| "grad_norm": 0.18677663597929384, |
| "learning_rate": 3.884711779448622e-06, |
| "loss": 0.2557, |
| "step": 2475 |
| }, |
| { |
| "epoch": 2.788732394366197, |
| "grad_norm": 0.19370640944340184, |
| "learning_rate": 3.863826232247285e-06, |
| "loss": 0.261, |
| "step": 2476 |
| }, |
| { |
| "epoch": 2.7898591549295775, |
| "grad_norm": 0.18917738667503978, |
| "learning_rate": 3.842940685045948e-06, |
| "loss": 0.2846, |
| "step": 2477 |
| }, |
| { |
| "epoch": 2.790985915492958, |
| "grad_norm": 0.19624087872984702, |
| "learning_rate": 3.822055137844612e-06, |
| "loss": 0.2705, |
| "step": 2478 |
| }, |
| { |
| "epoch": 2.792112676056338, |
| "grad_norm": 0.2113346802276601, |
| "learning_rate": 3.8011695906432747e-06, |
| "loss": 0.2821, |
| "step": 2479 |
| }, |
| { |
| "epoch": 2.7932394366197184, |
| "grad_norm": 0.22120899061938676, |
| "learning_rate": 3.780284043441938e-06, |
| "loss": 0.2784, |
| "step": 2480 |
| }, |
| { |
| "epoch": 2.7943661971830984, |
| "grad_norm": 0.18299334408238654, |
| "learning_rate": 3.7593984962406014e-06, |
| "loss": 0.2691, |
| "step": 2481 |
| }, |
| { |
| "epoch": 2.795492957746479, |
| "grad_norm": 0.1990833006682935, |
| "learning_rate": 3.7385129490392648e-06, |
| "loss": 0.2652, |
| "step": 2482 |
| }, |
| { |
| "epoch": 2.7966197183098593, |
| "grad_norm": 0.1987106354910443, |
| "learning_rate": 3.717627401837928e-06, |
| "loss": 0.2632, |
| "step": 2483 |
| }, |
| { |
| "epoch": 2.7977464788732394, |
| "grad_norm": 0.18900103030662868, |
| "learning_rate": 3.6967418546365915e-06, |
| "loss": 0.2697, |
| "step": 2484 |
| }, |
| { |
| "epoch": 2.79887323943662, |
| "grad_norm": 0.2043997405036503, |
| "learning_rate": 3.675856307435255e-06, |
| "loss": 0.2877, |
| "step": 2485 |
| }, |
| { |
| "epoch": 2.8, |
| "grad_norm": 0.1967067147976069, |
| "learning_rate": 3.6549707602339183e-06, |
| "loss": 0.2565, |
| "step": 2486 |
| }, |
| { |
| "epoch": 2.8011267605633803, |
| "grad_norm": 0.19951594066115694, |
| "learning_rate": 3.6340852130325812e-06, |
| "loss": 0.2621, |
| "step": 2487 |
| }, |
| { |
| "epoch": 2.8022535211267607, |
| "grad_norm": 0.19440731106003983, |
| "learning_rate": 3.6131996658312446e-06, |
| "loss": 0.2583, |
| "step": 2488 |
| }, |
| { |
| "epoch": 2.8033802816901408, |
| "grad_norm": 0.20889642401441064, |
| "learning_rate": 3.592314118629908e-06, |
| "loss": 0.2769, |
| "step": 2489 |
| }, |
| { |
| "epoch": 2.804507042253521, |
| "grad_norm": 0.21369620152884275, |
| "learning_rate": 3.5714285714285714e-06, |
| "loss": 0.2608, |
| "step": 2490 |
| }, |
| { |
| "epoch": 2.8056338028169012, |
| "grad_norm": 0.20041112286087975, |
| "learning_rate": 3.5505430242272347e-06, |
| "loss": 0.2658, |
| "step": 2491 |
| }, |
| { |
| "epoch": 2.8067605633802817, |
| "grad_norm": 0.18226035415201983, |
| "learning_rate": 3.529657477025898e-06, |
| "loss": 0.2674, |
| "step": 2492 |
| }, |
| { |
| "epoch": 2.807887323943662, |
| "grad_norm": 0.2148547507215812, |
| "learning_rate": 3.5087719298245615e-06, |
| "loss": 0.2715, |
| "step": 2493 |
| }, |
| { |
| "epoch": 2.809014084507042, |
| "grad_norm": 0.19484195408298655, |
| "learning_rate": 3.487886382623225e-06, |
| "loss": 0.2699, |
| "step": 2494 |
| }, |
| { |
| "epoch": 2.8101408450704226, |
| "grad_norm": 0.20732677521305964, |
| "learning_rate": 3.4670008354218882e-06, |
| "loss": 0.2722, |
| "step": 2495 |
| }, |
| { |
| "epoch": 2.8112676056338026, |
| "grad_norm": 0.1955496382063183, |
| "learning_rate": 3.446115288220551e-06, |
| "loss": 0.2822, |
| "step": 2496 |
| }, |
| { |
| "epoch": 2.812394366197183, |
| "grad_norm": 0.19650717309897933, |
| "learning_rate": 3.4252297410192146e-06, |
| "loss": 0.2735, |
| "step": 2497 |
| }, |
| { |
| "epoch": 2.8135211267605635, |
| "grad_norm": 0.19374665236211555, |
| "learning_rate": 3.404344193817878e-06, |
| "loss": 0.2651, |
| "step": 2498 |
| }, |
| { |
| "epoch": 2.8146478873239436, |
| "grad_norm": 0.19408012707468997, |
| "learning_rate": 3.3834586466165413e-06, |
| "loss": 0.2736, |
| "step": 2499 |
| }, |
| { |
| "epoch": 2.815774647887324, |
| "grad_norm": 0.22293055676113496, |
| "learning_rate": 3.3625730994152047e-06, |
| "loss": 0.2942, |
| "step": 2500 |
| }, |
| { |
| "epoch": 2.816901408450704, |
| "grad_norm": 0.18823841692528656, |
| "learning_rate": 3.341687552213868e-06, |
| "loss": 0.2711, |
| "step": 2501 |
| }, |
| { |
| "epoch": 2.8180281690140845, |
| "grad_norm": 0.20046700932004663, |
| "learning_rate": 3.3208020050125314e-06, |
| "loss": 0.2839, |
| "step": 2502 |
| }, |
| { |
| "epoch": 2.819154929577465, |
| "grad_norm": 0.1993489442831212, |
| "learning_rate": 3.299916457811195e-06, |
| "loss": 0.2591, |
| "step": 2503 |
| }, |
| { |
| "epoch": 2.820281690140845, |
| "grad_norm": 0.21648223238685965, |
| "learning_rate": 3.2790309106098578e-06, |
| "loss": 0.285, |
| "step": 2504 |
| }, |
| { |
| "epoch": 2.8214084507042254, |
| "grad_norm": 0.1973020058583428, |
| "learning_rate": 3.258145363408521e-06, |
| "loss": 0.2843, |
| "step": 2505 |
| }, |
| { |
| "epoch": 2.8225352112676054, |
| "grad_norm": 0.19872411461682024, |
| "learning_rate": 3.2372598162071845e-06, |
| "loss": 0.2641, |
| "step": 2506 |
| }, |
| { |
| "epoch": 2.823661971830986, |
| "grad_norm": 0.20169863379712635, |
| "learning_rate": 3.216374269005848e-06, |
| "loss": 0.2656, |
| "step": 2507 |
| }, |
| { |
| "epoch": 2.8247887323943663, |
| "grad_norm": 0.1888517270151852, |
| "learning_rate": 3.1954887218045113e-06, |
| "loss": 0.2653, |
| "step": 2508 |
| }, |
| { |
| "epoch": 2.8259154929577464, |
| "grad_norm": 0.20216080686759164, |
| "learning_rate": 3.1746031746031746e-06, |
| "loss": 0.2744, |
| "step": 2509 |
| }, |
| { |
| "epoch": 2.827042253521127, |
| "grad_norm": 0.20181970891877923, |
| "learning_rate": 3.153717627401838e-06, |
| "loss": 0.2829, |
| "step": 2510 |
| }, |
| { |
| "epoch": 2.828169014084507, |
| "grad_norm": 0.20300929741881887, |
| "learning_rate": 3.1328320802005014e-06, |
| "loss": 0.2563, |
| "step": 2511 |
| }, |
| { |
| "epoch": 2.8292957746478873, |
| "grad_norm": 0.19968083349162075, |
| "learning_rate": 3.1119465329991648e-06, |
| "loss": 0.2627, |
| "step": 2512 |
| }, |
| { |
| "epoch": 2.8304225352112677, |
| "grad_norm": 0.20002016294034097, |
| "learning_rate": 3.0910609857978277e-06, |
| "loss": 0.2857, |
| "step": 2513 |
| }, |
| { |
| "epoch": 2.8315492957746478, |
| "grad_norm": 0.19989915565084954, |
| "learning_rate": 3.070175438596491e-06, |
| "loss": 0.2635, |
| "step": 2514 |
| }, |
| { |
| "epoch": 2.832676056338028, |
| "grad_norm": 0.1931276854494213, |
| "learning_rate": 3.0492898913951545e-06, |
| "loss": 0.2692, |
| "step": 2515 |
| }, |
| { |
| "epoch": 2.8338028169014082, |
| "grad_norm": 0.20152537458785733, |
| "learning_rate": 3.028404344193818e-06, |
| "loss": 0.2718, |
| "step": 2516 |
| }, |
| { |
| "epoch": 2.8349295774647887, |
| "grad_norm": 0.2167040764450326, |
| "learning_rate": 3.007518796992481e-06, |
| "loss": 0.2638, |
| "step": 2517 |
| }, |
| { |
| "epoch": 2.836056338028169, |
| "grad_norm": 0.19834634026979267, |
| "learning_rate": 2.9866332497911446e-06, |
| "loss": 0.2605, |
| "step": 2518 |
| }, |
| { |
| "epoch": 2.837183098591549, |
| "grad_norm": 0.20216873108165062, |
| "learning_rate": 2.965747702589808e-06, |
| "loss": 0.2831, |
| "step": 2519 |
| }, |
| { |
| "epoch": 2.8383098591549296, |
| "grad_norm": 0.19829831038853793, |
| "learning_rate": 2.9448621553884713e-06, |
| "loss": 0.2704, |
| "step": 2520 |
| }, |
| { |
| "epoch": 2.8394366197183096, |
| "grad_norm": 0.18583659228225755, |
| "learning_rate": 2.9239766081871343e-06, |
| "loss": 0.2686, |
| "step": 2521 |
| }, |
| { |
| "epoch": 2.84056338028169, |
| "grad_norm": 0.18548834091769986, |
| "learning_rate": 2.9030910609857977e-06, |
| "loss": 0.2611, |
| "step": 2522 |
| }, |
| { |
| "epoch": 2.8416901408450705, |
| "grad_norm": 0.18533496072735434, |
| "learning_rate": 2.882205513784461e-06, |
| "loss": 0.2518, |
| "step": 2523 |
| }, |
| { |
| "epoch": 2.8428169014084506, |
| "grad_norm": 0.23150060299736044, |
| "learning_rate": 2.8613199665831244e-06, |
| "loss": 0.2827, |
| "step": 2524 |
| }, |
| { |
| "epoch": 2.843943661971831, |
| "grad_norm": 0.1886977964132381, |
| "learning_rate": 2.8404344193817878e-06, |
| "loss": 0.265, |
| "step": 2525 |
| }, |
| { |
| "epoch": 2.845070422535211, |
| "grad_norm": 0.2013505228422611, |
| "learning_rate": 2.819548872180451e-06, |
| "loss": 0.2674, |
| "step": 2526 |
| }, |
| { |
| "epoch": 2.8461971830985915, |
| "grad_norm": 0.2007294096521873, |
| "learning_rate": 2.7986633249791145e-06, |
| "loss": 0.291, |
| "step": 2527 |
| }, |
| { |
| "epoch": 2.847323943661972, |
| "grad_norm": 0.183745634563008, |
| "learning_rate": 2.777777777777778e-06, |
| "loss": 0.2589, |
| "step": 2528 |
| }, |
| { |
| "epoch": 2.848450704225352, |
| "grad_norm": 0.18212939429070288, |
| "learning_rate": 2.7568922305764413e-06, |
| "loss": 0.2697, |
| "step": 2529 |
| }, |
| { |
| "epoch": 2.8495774647887324, |
| "grad_norm": 0.18979119231193187, |
| "learning_rate": 2.7360066833751042e-06, |
| "loss": 0.2701, |
| "step": 2530 |
| }, |
| { |
| "epoch": 2.8507042253521124, |
| "grad_norm": 0.1972009385195689, |
| "learning_rate": 2.7151211361737676e-06, |
| "loss": 0.2739, |
| "step": 2531 |
| }, |
| { |
| "epoch": 2.851830985915493, |
| "grad_norm": 0.19723175664487233, |
| "learning_rate": 2.694235588972431e-06, |
| "loss": 0.2812, |
| "step": 2532 |
| }, |
| { |
| "epoch": 2.8529577464788733, |
| "grad_norm": 0.1937792010166364, |
| "learning_rate": 2.6733500417710944e-06, |
| "loss": 0.2745, |
| "step": 2533 |
| }, |
| { |
| "epoch": 2.8540845070422534, |
| "grad_norm": 0.3750659868633327, |
| "learning_rate": 2.6524644945697577e-06, |
| "loss": 0.2743, |
| "step": 2534 |
| }, |
| { |
| "epoch": 2.855211267605634, |
| "grad_norm": 0.1945898375011954, |
| "learning_rate": 2.631578947368421e-06, |
| "loss": 0.2746, |
| "step": 2535 |
| }, |
| { |
| "epoch": 2.856338028169014, |
| "grad_norm": 0.2150933913305423, |
| "learning_rate": 2.6106934001670845e-06, |
| "loss": 0.2916, |
| "step": 2536 |
| }, |
| { |
| "epoch": 2.8574647887323943, |
| "grad_norm": 0.21264387967988305, |
| "learning_rate": 2.589807852965748e-06, |
| "loss": 0.2811, |
| "step": 2537 |
| }, |
| { |
| "epoch": 2.8585915492957747, |
| "grad_norm": 0.23079403788796102, |
| "learning_rate": 2.568922305764411e-06, |
| "loss": 0.285, |
| "step": 2538 |
| }, |
| { |
| "epoch": 2.859718309859155, |
| "grad_norm": 0.20558029784877604, |
| "learning_rate": 2.548036758563074e-06, |
| "loss": 0.2759, |
| "step": 2539 |
| }, |
| { |
| "epoch": 2.860845070422535, |
| "grad_norm": 0.20785868781053038, |
| "learning_rate": 2.5271512113617376e-06, |
| "loss": 0.2679, |
| "step": 2540 |
| }, |
| { |
| "epoch": 2.8619718309859152, |
| "grad_norm": 0.19430868517354405, |
| "learning_rate": 2.506265664160401e-06, |
| "loss": 0.2885, |
| "step": 2541 |
| }, |
| { |
| "epoch": 2.8630985915492957, |
| "grad_norm": 0.1828472425239832, |
| "learning_rate": 2.4853801169590643e-06, |
| "loss": 0.2638, |
| "step": 2542 |
| }, |
| { |
| "epoch": 2.864225352112676, |
| "grad_norm": 0.5729244338095114, |
| "learning_rate": 2.4644945697577277e-06, |
| "loss": 0.2849, |
| "step": 2543 |
| }, |
| { |
| "epoch": 2.8653521126760566, |
| "grad_norm": 0.22029494980868425, |
| "learning_rate": 2.443609022556391e-06, |
| "loss": 0.272, |
| "step": 2544 |
| }, |
| { |
| "epoch": 2.8664788732394366, |
| "grad_norm": 0.22565936277758128, |
| "learning_rate": 2.4227234753550544e-06, |
| "loss": 0.2675, |
| "step": 2545 |
| }, |
| { |
| "epoch": 2.8676056338028166, |
| "grad_norm": 0.1924515218434605, |
| "learning_rate": 2.401837928153718e-06, |
| "loss": 0.2669, |
| "step": 2546 |
| }, |
| { |
| "epoch": 2.868732394366197, |
| "grad_norm": 0.1970704537926288, |
| "learning_rate": 2.3809523809523808e-06, |
| "loss": 0.2772, |
| "step": 2547 |
| }, |
| { |
| "epoch": 2.8698591549295775, |
| "grad_norm": 0.1968336302115501, |
| "learning_rate": 2.360066833751044e-06, |
| "loss": 0.2733, |
| "step": 2548 |
| }, |
| { |
| "epoch": 2.870985915492958, |
| "grad_norm": 0.19951417016284853, |
| "learning_rate": 2.3391812865497075e-06, |
| "loss": 0.2748, |
| "step": 2549 |
| }, |
| { |
| "epoch": 2.872112676056338, |
| "grad_norm": 0.19731362576994593, |
| "learning_rate": 2.318295739348371e-06, |
| "loss": 0.2701, |
| "step": 2550 |
| }, |
| { |
| "epoch": 2.873239436619718, |
| "grad_norm": 0.1992218992308437, |
| "learning_rate": 2.2974101921470343e-06, |
| "loss": 0.2683, |
| "step": 2551 |
| }, |
| { |
| "epoch": 2.8743661971830985, |
| "grad_norm": 0.19687956744752266, |
| "learning_rate": 2.2765246449456976e-06, |
| "loss": 0.269, |
| "step": 2552 |
| }, |
| { |
| "epoch": 2.875492957746479, |
| "grad_norm": 0.20090642987027768, |
| "learning_rate": 2.255639097744361e-06, |
| "loss": 0.281, |
| "step": 2553 |
| }, |
| { |
| "epoch": 2.8766197183098594, |
| "grad_norm": 0.18297310576256712, |
| "learning_rate": 2.2347535505430244e-06, |
| "loss": 0.2654, |
| "step": 2554 |
| }, |
| { |
| "epoch": 2.8777464788732394, |
| "grad_norm": 0.19179779807002117, |
| "learning_rate": 2.2138680033416873e-06, |
| "loss": 0.2741, |
| "step": 2555 |
| }, |
| { |
| "epoch": 2.87887323943662, |
| "grad_norm": 0.1875378969243087, |
| "learning_rate": 2.1929824561403507e-06, |
| "loss": 0.2732, |
| "step": 2556 |
| }, |
| { |
| "epoch": 2.88, |
| "grad_norm": 0.20155911135753982, |
| "learning_rate": 2.172096908939014e-06, |
| "loss": 0.2643, |
| "step": 2557 |
| }, |
| { |
| "epoch": 2.8811267605633804, |
| "grad_norm": 0.17436401072988408, |
| "learning_rate": 2.1512113617376775e-06, |
| "loss": 0.2576, |
| "step": 2558 |
| }, |
| { |
| "epoch": 2.882253521126761, |
| "grad_norm": 0.1888243340130565, |
| "learning_rate": 2.130325814536341e-06, |
| "loss": 0.2687, |
| "step": 2559 |
| }, |
| { |
| "epoch": 2.883380281690141, |
| "grad_norm": 0.19001310090354823, |
| "learning_rate": 2.1094402673350042e-06, |
| "loss": 0.2845, |
| "step": 2560 |
| }, |
| { |
| "epoch": 2.8845070422535213, |
| "grad_norm": 0.20043987055068513, |
| "learning_rate": 2.0885547201336676e-06, |
| "loss": 0.2591, |
| "step": 2561 |
| }, |
| { |
| "epoch": 2.8856338028169013, |
| "grad_norm": 0.18832534506583973, |
| "learning_rate": 2.067669172932331e-06, |
| "loss": 0.2703, |
| "step": 2562 |
| }, |
| { |
| "epoch": 2.8867605633802818, |
| "grad_norm": 0.18059472089368747, |
| "learning_rate": 2.0467836257309943e-06, |
| "loss": 0.2619, |
| "step": 2563 |
| }, |
| { |
| "epoch": 2.887887323943662, |
| "grad_norm": 0.18024598808699047, |
| "learning_rate": 2.0258980785296573e-06, |
| "loss": 0.2738, |
| "step": 2564 |
| }, |
| { |
| "epoch": 2.8890140845070422, |
| "grad_norm": 0.19845150497229927, |
| "learning_rate": 2.0050125313283207e-06, |
| "loss": 0.2684, |
| "step": 2565 |
| }, |
| { |
| "epoch": 2.8901408450704227, |
| "grad_norm": 0.19833786406257187, |
| "learning_rate": 1.984126984126984e-06, |
| "loss": 0.2783, |
| "step": 2566 |
| }, |
| { |
| "epoch": 2.8912676056338027, |
| "grad_norm": 0.19626086542361154, |
| "learning_rate": 1.9632414369256474e-06, |
| "loss": 0.2596, |
| "step": 2567 |
| }, |
| { |
| "epoch": 2.892394366197183, |
| "grad_norm": 0.20338902305801612, |
| "learning_rate": 1.942355889724311e-06, |
| "loss": 0.2764, |
| "step": 2568 |
| }, |
| { |
| "epoch": 2.8935211267605636, |
| "grad_norm": 0.19781084939411023, |
| "learning_rate": 1.921470342522974e-06, |
| "loss": 0.2778, |
| "step": 2569 |
| }, |
| { |
| "epoch": 2.8946478873239436, |
| "grad_norm": 0.19561908254215962, |
| "learning_rate": 1.9005847953216373e-06, |
| "loss": 0.2699, |
| "step": 2570 |
| }, |
| { |
| "epoch": 2.895774647887324, |
| "grad_norm": 0.1935427762584011, |
| "learning_rate": 1.8796992481203007e-06, |
| "loss": 0.2838, |
| "step": 2571 |
| }, |
| { |
| "epoch": 2.896901408450704, |
| "grad_norm": 0.19901870718500267, |
| "learning_rate": 1.858813700918964e-06, |
| "loss": 0.2607, |
| "step": 2572 |
| }, |
| { |
| "epoch": 2.8980281690140846, |
| "grad_norm": 0.2041526853343621, |
| "learning_rate": 1.8379281537176275e-06, |
| "loss": 0.2632, |
| "step": 2573 |
| }, |
| { |
| "epoch": 2.899154929577465, |
| "grad_norm": 0.20369090183683206, |
| "learning_rate": 1.8170426065162906e-06, |
| "loss": 0.2775, |
| "step": 2574 |
| }, |
| { |
| "epoch": 2.900281690140845, |
| "grad_norm": 0.19548877752031224, |
| "learning_rate": 1.796157059314954e-06, |
| "loss": 0.2744, |
| "step": 2575 |
| }, |
| { |
| "epoch": 2.9014084507042255, |
| "grad_norm": 0.1966975673022479, |
| "learning_rate": 1.7752715121136174e-06, |
| "loss": 0.2833, |
| "step": 2576 |
| }, |
| { |
| "epoch": 2.9025352112676055, |
| "grad_norm": 0.17786942131721542, |
| "learning_rate": 1.7543859649122807e-06, |
| "loss": 0.2723, |
| "step": 2577 |
| }, |
| { |
| "epoch": 2.903661971830986, |
| "grad_norm": 0.19925521250604009, |
| "learning_rate": 1.7335004177109441e-06, |
| "loss": 0.2636, |
| "step": 2578 |
| }, |
| { |
| "epoch": 2.9047887323943664, |
| "grad_norm": 0.23409309646200915, |
| "learning_rate": 1.7126148705096073e-06, |
| "loss": 0.2921, |
| "step": 2579 |
| }, |
| { |
| "epoch": 2.9059154929577464, |
| "grad_norm": 0.18937871949826304, |
| "learning_rate": 1.6917293233082707e-06, |
| "loss": 0.2813, |
| "step": 2580 |
| }, |
| { |
| "epoch": 2.907042253521127, |
| "grad_norm": 0.17928190877065076, |
| "learning_rate": 1.670843776106934e-06, |
| "loss": 0.2581, |
| "step": 2581 |
| }, |
| { |
| "epoch": 2.908169014084507, |
| "grad_norm": 0.19881594048337264, |
| "learning_rate": 1.6499582289055974e-06, |
| "loss": 0.2791, |
| "step": 2582 |
| }, |
| { |
| "epoch": 2.9092957746478874, |
| "grad_norm": 0.18798769970014184, |
| "learning_rate": 1.6290726817042606e-06, |
| "loss": 0.2692, |
| "step": 2583 |
| }, |
| { |
| "epoch": 2.910422535211268, |
| "grad_norm": 0.18484773070092542, |
| "learning_rate": 1.608187134502924e-06, |
| "loss": 0.2718, |
| "step": 2584 |
| }, |
| { |
| "epoch": 2.911549295774648, |
| "grad_norm": 0.1968185538288094, |
| "learning_rate": 1.5873015873015873e-06, |
| "loss": 0.2581, |
| "step": 2585 |
| }, |
| { |
| "epoch": 2.9126760563380283, |
| "grad_norm": 0.1799512428536084, |
| "learning_rate": 1.5664160401002507e-06, |
| "loss": 0.2494, |
| "step": 2586 |
| }, |
| { |
| "epoch": 2.9138028169014083, |
| "grad_norm": 0.19311808887546641, |
| "learning_rate": 1.5455304928989139e-06, |
| "loss": 0.2612, |
| "step": 2587 |
| }, |
| { |
| "epoch": 2.9149295774647888, |
| "grad_norm": 0.1977538053337172, |
| "learning_rate": 1.5246449456975772e-06, |
| "loss": 0.2791, |
| "step": 2588 |
| }, |
| { |
| "epoch": 2.916056338028169, |
| "grad_norm": 0.19783783260320706, |
| "learning_rate": 1.5037593984962406e-06, |
| "loss": 0.286, |
| "step": 2589 |
| }, |
| { |
| "epoch": 2.9171830985915492, |
| "grad_norm": 0.18923595980392527, |
| "learning_rate": 1.482873851294904e-06, |
| "loss": 0.2756, |
| "step": 2590 |
| }, |
| { |
| "epoch": 2.9183098591549297, |
| "grad_norm": 0.1903715800057653, |
| "learning_rate": 1.4619883040935671e-06, |
| "loss": 0.2527, |
| "step": 2591 |
| }, |
| { |
| "epoch": 2.9194366197183097, |
| "grad_norm": 0.17790209769789897, |
| "learning_rate": 1.4411027568922305e-06, |
| "loss": 0.2518, |
| "step": 2592 |
| }, |
| { |
| "epoch": 2.92056338028169, |
| "grad_norm": 0.1935001923293554, |
| "learning_rate": 1.4202172096908939e-06, |
| "loss": 0.2765, |
| "step": 2593 |
| }, |
| { |
| "epoch": 2.9216901408450706, |
| "grad_norm": 0.19754748528918087, |
| "learning_rate": 1.3993316624895573e-06, |
| "loss": 0.2857, |
| "step": 2594 |
| }, |
| { |
| "epoch": 2.9228169014084506, |
| "grad_norm": 0.20086815277045125, |
| "learning_rate": 1.3784461152882206e-06, |
| "loss": 0.2751, |
| "step": 2595 |
| }, |
| { |
| "epoch": 2.923943661971831, |
| "grad_norm": 0.18549765450299266, |
| "learning_rate": 1.3575605680868838e-06, |
| "loss": 0.2519, |
| "step": 2596 |
| }, |
| { |
| "epoch": 2.925070422535211, |
| "grad_norm": 0.20347648261235304, |
| "learning_rate": 1.3366750208855472e-06, |
| "loss": 0.2703, |
| "step": 2597 |
| }, |
| { |
| "epoch": 2.9261971830985916, |
| "grad_norm": 0.18863661663852818, |
| "learning_rate": 1.3157894736842106e-06, |
| "loss": 0.2815, |
| "step": 2598 |
| }, |
| { |
| "epoch": 2.927323943661972, |
| "grad_norm": 0.1909766983577419, |
| "learning_rate": 1.294903926482874e-06, |
| "loss": 0.2898, |
| "step": 2599 |
| }, |
| { |
| "epoch": 2.928450704225352, |
| "grad_norm": 0.1999440510850279, |
| "learning_rate": 1.274018379281537e-06, |
| "loss": 0.2755, |
| "step": 2600 |
| }, |
| { |
| "epoch": 2.9295774647887325, |
| "grad_norm": 0.20261542499258492, |
| "learning_rate": 1.2531328320802005e-06, |
| "loss": 0.2816, |
| "step": 2601 |
| }, |
| { |
| "epoch": 2.9307042253521125, |
| "grad_norm": 0.18547039231567655, |
| "learning_rate": 1.2322472848788638e-06, |
| "loss": 0.2706, |
| "step": 2602 |
| }, |
| { |
| "epoch": 2.931830985915493, |
| "grad_norm": 0.1769366856054714, |
| "learning_rate": 1.2113617376775272e-06, |
| "loss": 0.2477, |
| "step": 2603 |
| }, |
| { |
| "epoch": 2.9329577464788734, |
| "grad_norm": 0.18674158365631324, |
| "learning_rate": 1.1904761904761904e-06, |
| "loss": 0.2719, |
| "step": 2604 |
| }, |
| { |
| "epoch": 2.9340845070422534, |
| "grad_norm": 0.18119752904576938, |
| "learning_rate": 1.1695906432748538e-06, |
| "loss": 0.2601, |
| "step": 2605 |
| }, |
| { |
| "epoch": 2.935211267605634, |
| "grad_norm": 0.17563623868661624, |
| "learning_rate": 1.1487050960735171e-06, |
| "loss": 0.2541, |
| "step": 2606 |
| }, |
| { |
| "epoch": 2.936338028169014, |
| "grad_norm": 0.18921047383106981, |
| "learning_rate": 1.1278195488721805e-06, |
| "loss": 0.2641, |
| "step": 2607 |
| }, |
| { |
| "epoch": 2.9374647887323944, |
| "grad_norm": 0.18681988729634014, |
| "learning_rate": 1.1069340016708437e-06, |
| "loss": 0.2758, |
| "step": 2608 |
| }, |
| { |
| "epoch": 2.938591549295775, |
| "grad_norm": 0.19596833579258421, |
| "learning_rate": 1.086048454469507e-06, |
| "loss": 0.2726, |
| "step": 2609 |
| }, |
| { |
| "epoch": 2.939718309859155, |
| "grad_norm": 0.1825674306794061, |
| "learning_rate": 1.0651629072681704e-06, |
| "loss": 0.2698, |
| "step": 2610 |
| }, |
| { |
| "epoch": 2.9408450704225353, |
| "grad_norm": 0.18430912912728567, |
| "learning_rate": 1.0442773600668338e-06, |
| "loss": 0.2685, |
| "step": 2611 |
| }, |
| { |
| "epoch": 2.9419718309859153, |
| "grad_norm": 0.19244209622286726, |
| "learning_rate": 1.0233918128654972e-06, |
| "loss": 0.2806, |
| "step": 2612 |
| }, |
| { |
| "epoch": 2.9430985915492958, |
| "grad_norm": 0.20302337096941844, |
| "learning_rate": 1.0025062656641603e-06, |
| "loss": 0.2716, |
| "step": 2613 |
| }, |
| { |
| "epoch": 2.944225352112676, |
| "grad_norm": 0.18903859461596323, |
| "learning_rate": 9.816207184628237e-07, |
| "loss": 0.2642, |
| "step": 2614 |
| }, |
| { |
| "epoch": 2.9453521126760562, |
| "grad_norm": 0.18638171625330813, |
| "learning_rate": 9.60735171261487e-07, |
| "loss": 0.2591, |
| "step": 2615 |
| }, |
| { |
| "epoch": 2.9464788732394367, |
| "grad_norm": 0.1833750682955007, |
| "learning_rate": 9.398496240601504e-07, |
| "loss": 0.2716, |
| "step": 2616 |
| }, |
| { |
| "epoch": 2.9476056338028167, |
| "grad_norm": 0.18228414426130218, |
| "learning_rate": 9.189640768588137e-07, |
| "loss": 0.2515, |
| "step": 2617 |
| }, |
| { |
| "epoch": 2.948732394366197, |
| "grad_norm": 0.1798516100935427, |
| "learning_rate": 8.98078529657477e-07, |
| "loss": 0.259, |
| "step": 2618 |
| }, |
| { |
| "epoch": 2.9498591549295776, |
| "grad_norm": 0.18413985904175006, |
| "learning_rate": 8.771929824561404e-07, |
| "loss": 0.2607, |
| "step": 2619 |
| }, |
| { |
| "epoch": 2.9509859154929576, |
| "grad_norm": 0.1840718951735798, |
| "learning_rate": 8.563074352548036e-07, |
| "loss": 0.2589, |
| "step": 2620 |
| }, |
| { |
| "epoch": 2.952112676056338, |
| "grad_norm": 0.18727995547384207, |
| "learning_rate": 8.35421888053467e-07, |
| "loss": 0.2723, |
| "step": 2621 |
| }, |
| { |
| "epoch": 2.953239436619718, |
| "grad_norm": 0.18116030630941662, |
| "learning_rate": 8.145363408521303e-07, |
| "loss": 0.2612, |
| "step": 2622 |
| }, |
| { |
| "epoch": 2.9543661971830986, |
| "grad_norm": 0.18810161068825337, |
| "learning_rate": 7.936507936507937e-07, |
| "loss": 0.2861, |
| "step": 2623 |
| }, |
| { |
| "epoch": 2.955492957746479, |
| "grad_norm": 0.18331900835667217, |
| "learning_rate": 7.727652464494569e-07, |
| "loss": 0.2792, |
| "step": 2624 |
| }, |
| { |
| "epoch": 2.956619718309859, |
| "grad_norm": 0.182289911621126, |
| "learning_rate": 7.518796992481203e-07, |
| "loss": 0.2651, |
| "step": 2625 |
| }, |
| { |
| "epoch": 2.9577464788732395, |
| "grad_norm": 0.18706591671355421, |
| "learning_rate": 7.309941520467836e-07, |
| "loss": 0.2797, |
| "step": 2626 |
| }, |
| { |
| "epoch": 2.9588732394366195, |
| "grad_norm": 0.18099206297431417, |
| "learning_rate": 7.101086048454469e-07, |
| "loss": 0.2731, |
| "step": 2627 |
| }, |
| { |
| "epoch": 2.96, |
| "grad_norm": 0.1953971293038329, |
| "learning_rate": 6.892230576441103e-07, |
| "loss": 0.2729, |
| "step": 2628 |
| }, |
| { |
| "epoch": 2.9611267605633804, |
| "grad_norm": 0.20373835878570956, |
| "learning_rate": 6.683375104427736e-07, |
| "loss": 0.2632, |
| "step": 2629 |
| }, |
| { |
| "epoch": 2.9622535211267604, |
| "grad_norm": 0.17540883485928005, |
| "learning_rate": 6.47451963241437e-07, |
| "loss": 0.2615, |
| "step": 2630 |
| }, |
| { |
| "epoch": 2.963380281690141, |
| "grad_norm": 0.18358270628844636, |
| "learning_rate": 6.265664160401002e-07, |
| "loss": 0.2597, |
| "step": 2631 |
| }, |
| { |
| "epoch": 2.964507042253521, |
| "grad_norm": 0.1839418900023207, |
| "learning_rate": 6.056808688387636e-07, |
| "loss": 0.2629, |
| "step": 2632 |
| }, |
| { |
| "epoch": 2.9656338028169014, |
| "grad_norm": 0.1901007031151732, |
| "learning_rate": 5.847953216374269e-07, |
| "loss": 0.283, |
| "step": 2633 |
| }, |
| { |
| "epoch": 2.966760563380282, |
| "grad_norm": 0.191252903805876, |
| "learning_rate": 5.639097744360903e-07, |
| "loss": 0.2696, |
| "step": 2634 |
| }, |
| { |
| "epoch": 2.967887323943662, |
| "grad_norm": 0.1873473949837531, |
| "learning_rate": 5.430242272347535e-07, |
| "loss": 0.2744, |
| "step": 2635 |
| }, |
| { |
| "epoch": 2.9690140845070423, |
| "grad_norm": 0.20423475149382705, |
| "learning_rate": 5.221386800334169e-07, |
| "loss": 0.2816, |
| "step": 2636 |
| }, |
| { |
| "epoch": 2.9701408450704223, |
| "grad_norm": 0.18777274154193785, |
| "learning_rate": 5.012531328320802e-07, |
| "loss": 0.2845, |
| "step": 2637 |
| }, |
| { |
| "epoch": 2.9712676056338028, |
| "grad_norm": 0.18260602994902567, |
| "learning_rate": 4.803675856307435e-07, |
| "loss": 0.2581, |
| "step": 2638 |
| }, |
| { |
| "epoch": 2.9723943661971832, |
| "grad_norm": 0.1802755775949997, |
| "learning_rate": 4.5948203842940686e-07, |
| "loss": 0.2682, |
| "step": 2639 |
| }, |
| { |
| "epoch": 2.9735211267605632, |
| "grad_norm": 0.18791079252994222, |
| "learning_rate": 4.385964912280702e-07, |
| "loss": 0.2744, |
| "step": 2640 |
| }, |
| { |
| "epoch": 2.9746478873239437, |
| "grad_norm": 0.18215279311987378, |
| "learning_rate": 4.177109440267335e-07, |
| "loss": 0.2727, |
| "step": 2641 |
| }, |
| { |
| "epoch": 2.9757746478873237, |
| "grad_norm": 0.1927350306855454, |
| "learning_rate": 3.9682539682539683e-07, |
| "loss": 0.2625, |
| "step": 2642 |
| }, |
| { |
| "epoch": 2.976901408450704, |
| "grad_norm": 0.1924027562763381, |
| "learning_rate": 3.7593984962406015e-07, |
| "loss": 0.2782, |
| "step": 2643 |
| }, |
| { |
| "epoch": 2.9780281690140846, |
| "grad_norm": 0.179203470085156, |
| "learning_rate": 3.5505430242272347e-07, |
| "loss": 0.2719, |
| "step": 2644 |
| }, |
| { |
| "epoch": 2.9791549295774646, |
| "grad_norm": 0.18815877380613574, |
| "learning_rate": 3.341687552213868e-07, |
| "loss": 0.2835, |
| "step": 2645 |
| }, |
| { |
| "epoch": 2.980281690140845, |
| "grad_norm": 0.1863822803452641, |
| "learning_rate": 3.132832080200501e-07, |
| "loss": 0.2762, |
| "step": 2646 |
| }, |
| { |
| "epoch": 2.981408450704225, |
| "grad_norm": 0.18297056791628377, |
| "learning_rate": 2.9239766081871344e-07, |
| "loss": 0.2758, |
| "step": 2647 |
| }, |
| { |
| "epoch": 2.9825352112676056, |
| "grad_norm": 0.18737249129907566, |
| "learning_rate": 2.7151211361737676e-07, |
| "loss": 0.2562, |
| "step": 2648 |
| }, |
| { |
| "epoch": 2.983661971830986, |
| "grad_norm": 0.19612228107081542, |
| "learning_rate": 2.506265664160401e-07, |
| "loss": 0.2766, |
| "step": 2649 |
| }, |
| { |
| "epoch": 2.984788732394366, |
| "grad_norm": 0.1854161232189919, |
| "learning_rate": 2.2974101921470343e-07, |
| "loss": 0.2752, |
| "step": 2650 |
| }, |
| { |
| "epoch": 2.9859154929577465, |
| "grad_norm": 0.20183176340985043, |
| "learning_rate": 2.0885547201336675e-07, |
| "loss": 0.2534, |
| "step": 2651 |
| }, |
| { |
| "epoch": 2.9870422535211265, |
| "grad_norm": 0.18318455589567129, |
| "learning_rate": 1.8796992481203008e-07, |
| "loss": 0.2667, |
| "step": 2652 |
| }, |
| { |
| "epoch": 2.988169014084507, |
| "grad_norm": 0.18114138777343547, |
| "learning_rate": 1.670843776106934e-07, |
| "loss": 0.2649, |
| "step": 2653 |
| }, |
| { |
| "epoch": 2.9892957746478874, |
| "grad_norm": 0.18142311856831886, |
| "learning_rate": 1.4619883040935672e-07, |
| "loss": 0.2541, |
| "step": 2654 |
| }, |
| { |
| "epoch": 2.9904225352112674, |
| "grad_norm": 0.19061057401571532, |
| "learning_rate": 1.2531328320802004e-07, |
| "loss": 0.2723, |
| "step": 2655 |
| }, |
| { |
| "epoch": 2.991549295774648, |
| "grad_norm": 0.17998880127291522, |
| "learning_rate": 1.0442773600668338e-07, |
| "loss": 0.2723, |
| "step": 2656 |
| }, |
| { |
| "epoch": 2.992676056338028, |
| "grad_norm": 0.19166047768195757, |
| "learning_rate": 8.35421888053467e-08, |
| "loss": 0.2698, |
| "step": 2657 |
| }, |
| { |
| "epoch": 2.9938028169014084, |
| "grad_norm": 0.20913016553214908, |
| "learning_rate": 6.265664160401002e-08, |
| "loss": 0.2941, |
| "step": 2658 |
| }, |
| { |
| "epoch": 2.994929577464789, |
| "grad_norm": 0.187476388632694, |
| "learning_rate": 4.177109440267335e-08, |
| "loss": 0.2747, |
| "step": 2659 |
| }, |
| { |
| "epoch": 2.9960563380281693, |
| "grad_norm": 0.18706857618185885, |
| "learning_rate": 2.0885547201336675e-08, |
| "loss": 0.2755, |
| "step": 2660 |
| }, |
| { |
| "epoch": 2.9971830985915493, |
| "grad_norm": 0.1863776553285958, |
| "learning_rate": 0.0, |
| "loss": 0.2707, |
| "step": 2661 |
| }, |
| { |
| "epoch": 2.9971830985915493, |
| "step": 2661, |
| "total_flos": 2.2756805496474173e+18, |
| "train_loss": 0.4145932649813483, |
| "train_runtime": 154577.0289, |
| "train_samples_per_second": 0.275, |
| "train_steps_per_second": 0.017 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 2661, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 3, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 2.2756805496474173e+18, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |
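The file above is a Hugging Face Trainer state log. As a minimal sketch of how such a file can be consumed (assuming it is saved as "trainer_state.json", the Trainer's usual filename for this state; that path is an assumption, not something the log itself states), the per-step records in "log_history" can be separated from the final run summary and reduced to a few headline numbers:

    import json
    from statistics import mean

    # Assumed path; the Trainer writes this file into each checkpoint directory.
    with open("trainer_state.json") as f:
        state = json.load(f)

    # Per-step records carry a per-step "loss" key; the trailing entry is the
    # run summary (train_loss, train_runtime, total_flos) and has no such key,
    # so this filter keeps only the step-level records.
    steps = [e for e in state["log_history"] if "loss" in e]
    summary = state["log_history"][-1]

    print("steps logged:", len(steps))
    print("final step loss:", steps[-1]["loss"], "at step", steps[-1]["step"])
    print("mean loss, last 50 steps:", round(mean(e["loss"] for e in steps[-50:]), 4))
    print("reported train_loss:", summary.get("train_loss"))

Against the numbers above, the last fifty step losses average roughly 0.27, while the reported train_loss is about 0.41; the gap reflects the much higher losses of the first few warm-up steps, which the whole-run average includes.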