| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 2.996102883865939, |
| "eval_steps": 500, |
| "global_step": 1923, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.001558846453624318, |
| "grad_norm": 52.22184914143673, |
| "learning_rate": 2.590673575129534e-07, |
| "loss": 11.3056, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.003117692907248636, |
| "grad_norm": 55.3801908988395, |
| "learning_rate": 5.181347150259068e-07, |
| "loss": 11.1148, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.004676539360872954, |
| "grad_norm": 53.76354651632375, |
| "learning_rate": 7.772020725388602e-07, |
| "loss": 11.215, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.006235385814497272, |
| "grad_norm": 52.06232793344078, |
| "learning_rate": 1.0362694300518136e-06, |
| "loss": 11.2145, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.00779423226812159, |
| "grad_norm": 55.06385321064369, |
| "learning_rate": 1.2953367875647669e-06, |
| "loss": 11.1779, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.009353078721745909, |
| "grad_norm": 53.1236773756813, |
| "learning_rate": 1.5544041450777204e-06, |
| "loss": 11.1812, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.010911925175370226, |
| "grad_norm": 58.28685190281901, |
| "learning_rate": 1.8134715025906736e-06, |
| "loss": 10.864, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.012470771628994544, |
| "grad_norm": 57.49860454762129, |
| "learning_rate": 2.0725388601036273e-06, |
| "loss": 10.8997, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.014029618082618862, |
| "grad_norm": 61.33406437121332, |
| "learning_rate": 2.3316062176165805e-06, |
| "loss": 10.7659, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.01558846453624318, |
| "grad_norm": 83.51848150551032, |
| "learning_rate": 2.5906735751295338e-06, |
| "loss": 9.7158, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.0171473109898675, |
| "grad_norm": 87.40268379033651, |
| "learning_rate": 2.8497409326424875e-06, |
| "loss": 9.3576, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.018706157443491817, |
| "grad_norm": 94.86364555239567, |
| "learning_rate": 3.1088082901554407e-06, |
| "loss": 9.0476, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.020265003897116135, |
| "grad_norm": 66.58727850770262, |
| "learning_rate": 3.367875647668394e-06, |
| "loss": 3.7998, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.021823850350740453, |
| "grad_norm": 60.23863297900514, |
| "learning_rate": 3.626943005181347e-06, |
| "loss": 3.5631, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.02338269680436477, |
| "grad_norm": 42.715651899746156, |
| "learning_rate": 3.886010362694301e-06, |
| "loss": 2.8496, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.02494154325798909, |
| "grad_norm": 35.86036496654402, |
| "learning_rate": 4.1450777202072546e-06, |
| "loss": 2.5679, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.026500389711613406, |
| "grad_norm": 8.204333077091338, |
| "learning_rate": 4.404145077720207e-06, |
| "loss": 1.4646, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.028059236165237724, |
| "grad_norm": 5.413485907710649, |
| "learning_rate": 4.663212435233161e-06, |
| "loss": 1.3395, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.029618082618862042, |
| "grad_norm": 4.181557631031653, |
| "learning_rate": 4.922279792746115e-06, |
| "loss": 1.282, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.03117692907248636, |
| "grad_norm": 3.3106118750979614, |
| "learning_rate": 5.1813471502590676e-06, |
| "loss": 1.192, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.03273577552611068, |
| "grad_norm": 2.5377335055220023, |
| "learning_rate": 5.440414507772021e-06, |
| "loss": 1.1684, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.034294621979735, |
| "grad_norm": 2.1309505059833436, |
| "learning_rate": 5.699481865284975e-06, |
| "loss": 1.0969, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.03585346843335931, |
| "grad_norm": 3.8622301532964998, |
| "learning_rate": 5.958549222797928e-06, |
| "loss": 1.0732, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.037412314886983634, |
| "grad_norm": 47.58759142573856, |
| "learning_rate": 6.217616580310881e-06, |
| "loss": 1.0184, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.03897116134060795, |
| "grad_norm": 16.958376825756968, |
| "learning_rate": 6.476683937823834e-06, |
| "loss": 0.9784, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.04053000779423227, |
| "grad_norm": 1.7724891024655978, |
| "learning_rate": 6.735751295336788e-06, |
| "loss": 0.9303, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.042088854247856584, |
| "grad_norm": 1.2517023402408312, |
| "learning_rate": 6.994818652849741e-06, |
| "loss": 0.8952, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.043647700701480906, |
| "grad_norm": 1.0169924063509375, |
| "learning_rate": 7.253886010362694e-06, |
| "loss": 0.8697, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.04520654715510522, |
| "grad_norm": 0.8637449789891917, |
| "learning_rate": 7.512953367875648e-06, |
| "loss": 0.8588, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.04676539360872954, |
| "grad_norm": 0.8722633450269738, |
| "learning_rate": 7.772020725388602e-06, |
| "loss": 0.8327, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.048324240062353856, |
| "grad_norm": 0.7979319477336262, |
| "learning_rate": 8.031088082901555e-06, |
| "loss": 0.815, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.04988308651597818, |
| "grad_norm": 0.7636414965508945, |
| "learning_rate": 8.290155440414509e-06, |
| "loss": 0.7699, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.05144193296960249, |
| "grad_norm": 0.6723426478607053, |
| "learning_rate": 8.549222797927462e-06, |
| "loss": 0.7777, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.05300077942322681, |
| "grad_norm": 0.671680033034605, |
| "learning_rate": 8.808290155440415e-06, |
| "loss": 0.7429, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.05455962587685113, |
| "grad_norm": 0.6367482766420466, |
| "learning_rate": 9.06735751295337e-06, |
| "loss": 0.7442, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.05611847233047545, |
| "grad_norm": 0.5985756418410207, |
| "learning_rate": 9.326424870466322e-06, |
| "loss": 0.7354, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.05767731878409977, |
| "grad_norm": 0.5965999524800757, |
| "learning_rate": 9.585492227979275e-06, |
| "loss": 0.7226, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.059236165237724084, |
| "grad_norm": 0.5390447035986822, |
| "learning_rate": 9.84455958549223e-06, |
| "loss": 0.6732, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.060795011691348405, |
| "grad_norm": 0.4740676330023613, |
| "learning_rate": 1.0103626943005182e-05, |
| "loss": 0.6923, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.06235385814497272, |
| "grad_norm": 0.4870908612880822, |
| "learning_rate": 1.0362694300518135e-05, |
| "loss": 0.6898, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.06391270459859703, |
| "grad_norm": 0.49048808676105743, |
| "learning_rate": 1.062176165803109e-05, |
| "loss": 0.6817, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.06547155105222136, |
| "grad_norm": 0.4775360211701637, |
| "learning_rate": 1.0880829015544042e-05, |
| "loss": 0.6806, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.06703039750584568, |
| "grad_norm": 0.540847742943483, |
| "learning_rate": 1.1139896373056995e-05, |
| "loss": 0.6548, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.06858924395947, |
| "grad_norm": 0.4168099062285217, |
| "learning_rate": 1.139896373056995e-05, |
| "loss": 0.6584, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.0701480904130943, |
| "grad_norm": 0.4163477313182357, |
| "learning_rate": 1.1658031088082903e-05, |
| "loss": 0.6511, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.07170693686671863, |
| "grad_norm": 0.4425510534250163, |
| "learning_rate": 1.1917098445595855e-05, |
| "loss": 0.6432, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.07326578332034295, |
| "grad_norm": 0.3769848201392934, |
| "learning_rate": 1.2176165803108808e-05, |
| "loss": 0.6004, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.07482462977396727, |
| "grad_norm": 0.36307003967305024, |
| "learning_rate": 1.2435233160621763e-05, |
| "loss": 0.6339, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.07638347622759158, |
| "grad_norm": 0.36365771900218874, |
| "learning_rate": 1.2694300518134716e-05, |
| "loss": 0.6573, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.0779423226812159, |
| "grad_norm": 0.3554573096821652, |
| "learning_rate": 1.2953367875647668e-05, |
| "loss": 0.6606, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.07950116913484022, |
| "grad_norm": 0.4045859236452954, |
| "learning_rate": 1.3212435233160623e-05, |
| "loss": 0.6475, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.08106001558846454, |
| "grad_norm": 0.32890043969215965, |
| "learning_rate": 1.3471502590673576e-05, |
| "loss": 0.6148, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.08261886204208885, |
| "grad_norm": 0.3083895203558219, |
| "learning_rate": 1.3730569948186529e-05, |
| "loss": 0.6347, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.08417770849571317, |
| "grad_norm": 0.33340100324523925, |
| "learning_rate": 1.3989637305699481e-05, |
| "loss": 0.6165, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.08573655494933749, |
| "grad_norm": 0.29517015584889045, |
| "learning_rate": 1.4248704663212436e-05, |
| "loss": 0.5802, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.08729540140296181, |
| "grad_norm": 0.28430427065168046, |
| "learning_rate": 1.4507772020725389e-05, |
| "loss": 0.5895, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.08885424785658613, |
| "grad_norm": 0.3338831939267004, |
| "learning_rate": 1.4766839378238342e-05, |
| "loss": 0.5715, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.09041309431021044, |
| "grad_norm": 0.3093980059650245, |
| "learning_rate": 1.5025906735751296e-05, |
| "loss": 0.5847, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.09197194076383476, |
| "grad_norm": 0.2948901838276863, |
| "learning_rate": 1.528497409326425e-05, |
| "loss": 0.5995, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.09353078721745908, |
| "grad_norm": 0.31762615759493373, |
| "learning_rate": 1.5544041450777204e-05, |
| "loss": 0.6043, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.0950896336710834, |
| "grad_norm": 0.29491512166284634, |
| "learning_rate": 1.5803108808290158e-05, |
| "loss": 0.5992, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.09664848012470771, |
| "grad_norm": 0.29897052685770187, |
| "learning_rate": 1.606217616580311e-05, |
| "loss": 0.5931, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.09820732657833203, |
| "grad_norm": 0.30055023786228835, |
| "learning_rate": 1.6321243523316064e-05, |
| "loss": 0.5849, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.09976617303195635, |
| "grad_norm": 0.26046107559568865, |
| "learning_rate": 1.6580310880829018e-05, |
| "loss": 0.5512, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.10132501948558068, |
| "grad_norm": 0.30065721102538445, |
| "learning_rate": 1.683937823834197e-05, |
| "loss": 0.5814, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.10288386593920498, |
| "grad_norm": 0.29570037471107435, |
| "learning_rate": 1.7098445595854924e-05, |
| "loss": 0.5889, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.1044427123928293, |
| "grad_norm": 0.2675735266669228, |
| "learning_rate": 1.735751295336788e-05, |
| "loss": 0.5442, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.10600155884645363, |
| "grad_norm": 0.3100468880818413, |
| "learning_rate": 1.761658031088083e-05, |
| "loss": 0.5776, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.10756040530007795, |
| "grad_norm": 0.27731104939451146, |
| "learning_rate": 1.7875647668393784e-05, |
| "loss": 0.5644, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.10911925175370225, |
| "grad_norm": 0.3214790758034934, |
| "learning_rate": 1.813471502590674e-05, |
| "loss": 0.5361, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.11067809820732658, |
| "grad_norm": 0.29991490625376055, |
| "learning_rate": 1.839378238341969e-05, |
| "loss": 0.5567, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.1122369446609509, |
| "grad_norm": 0.2826573502857747, |
| "learning_rate": 1.8652849740932644e-05, |
| "loss": 0.5901, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.11379579111457522, |
| "grad_norm": 0.30249730284978976, |
| "learning_rate": 1.89119170984456e-05, |
| "loss": 0.5699, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.11535463756819954, |
| "grad_norm": 0.32336196262393, |
| "learning_rate": 1.917098445595855e-05, |
| "loss": 0.5842, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.11691348402182385, |
| "grad_norm": 0.2940197733300188, |
| "learning_rate": 1.9430051813471504e-05, |
| "loss": 0.56, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.11847233047544817, |
| "grad_norm": 0.27234922818526347, |
| "learning_rate": 1.968911917098446e-05, |
| "loss": 0.5596, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.12003117692907249, |
| "grad_norm": 0.31604251280714674, |
| "learning_rate": 1.994818652849741e-05, |
| "loss": 0.5638, |
| "step": 77 |
| }, |
| { |
| "epoch": 0.12159002338269681, |
| "grad_norm": 0.2640909764215779, |
| "learning_rate": 2.0207253886010365e-05, |
| "loss": 0.5357, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.12314886983632112, |
| "grad_norm": 0.3325125655849214, |
| "learning_rate": 2.046632124352332e-05, |
| "loss": 0.5634, |
| "step": 79 |
| }, |
| { |
| "epoch": 0.12470771628994544, |
| "grad_norm": 0.2728623091830791, |
| "learning_rate": 2.072538860103627e-05, |
| "loss": 0.567, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.12626656274356976, |
| "grad_norm": 0.3120421349580941, |
| "learning_rate": 2.0984455958549225e-05, |
| "loss": 0.5428, |
| "step": 81 |
| }, |
| { |
| "epoch": 0.12782540919719407, |
| "grad_norm": 0.2771731681198735, |
| "learning_rate": 2.124352331606218e-05, |
| "loss": 0.5526, |
| "step": 82 |
| }, |
| { |
| "epoch": 0.1293842556508184, |
| "grad_norm": 0.3027775941751013, |
| "learning_rate": 2.150259067357513e-05, |
| "loss": 0.5432, |
| "step": 83 |
| }, |
| { |
| "epoch": 0.1309431021044427, |
| "grad_norm": 0.3063900417637035, |
| "learning_rate": 2.1761658031088085e-05, |
| "loss": 0.5633, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.13250194855806702, |
| "grad_norm": 0.29978974426938987, |
| "learning_rate": 2.202072538860104e-05, |
| "loss": 0.5521, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.13406079501169135, |
| "grad_norm": 0.3066667325804301, |
| "learning_rate": 2.227979274611399e-05, |
| "loss": 0.5576, |
| "step": 86 |
| }, |
| { |
| "epoch": 0.13561964146531566, |
| "grad_norm": 0.3257258293999494, |
| "learning_rate": 2.2538860103626945e-05, |
| "loss": 0.5409, |
| "step": 87 |
| }, |
| { |
| "epoch": 0.13717848791894, |
| "grad_norm": 0.32964630331063166, |
| "learning_rate": 2.27979274611399e-05, |
| "loss": 0.5524, |
| "step": 88 |
| }, |
| { |
| "epoch": 0.1387373343725643, |
| "grad_norm": 0.3390745114943544, |
| "learning_rate": 2.305699481865285e-05, |
| "loss": 0.5539, |
| "step": 89 |
| }, |
| { |
| "epoch": 0.1402961808261886, |
| "grad_norm": 0.296430130882943, |
| "learning_rate": 2.3316062176165805e-05, |
| "loss": 0.5519, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.14185502727981295, |
| "grad_norm": 0.3159884102421548, |
| "learning_rate": 2.3575129533678756e-05, |
| "loss": 0.5265, |
| "step": 91 |
| }, |
| { |
| "epoch": 0.14341387373343725, |
| "grad_norm": 0.3136672635617097, |
| "learning_rate": 2.383419689119171e-05, |
| "loss": 0.5331, |
| "step": 92 |
| }, |
| { |
| "epoch": 0.1449727201870616, |
| "grad_norm": 0.3203194252949439, |
| "learning_rate": 2.4093264248704665e-05, |
| "loss": 0.5472, |
| "step": 93 |
| }, |
| { |
| "epoch": 0.1465315666406859, |
| "grad_norm": 0.34456319750216174, |
| "learning_rate": 2.4352331606217617e-05, |
| "loss": 0.5575, |
| "step": 94 |
| }, |
| { |
| "epoch": 0.1480904130943102, |
| "grad_norm": 0.3228736119075391, |
| "learning_rate": 2.461139896373057e-05, |
| "loss": 0.5293, |
| "step": 95 |
| }, |
| { |
| "epoch": 0.14964925954793454, |
| "grad_norm": 0.34523710171851074, |
| "learning_rate": 2.4870466321243526e-05, |
| "loss": 0.5287, |
| "step": 96 |
| }, |
| { |
| "epoch": 0.15120810600155885, |
| "grad_norm": 0.3667004301126633, |
| "learning_rate": 2.5129533678756477e-05, |
| "loss": 0.5385, |
| "step": 97 |
| }, |
| { |
| "epoch": 0.15276695245518315, |
| "grad_norm": 0.36137195700706454, |
| "learning_rate": 2.538860103626943e-05, |
| "loss": 0.5182, |
| "step": 98 |
| }, |
| { |
| "epoch": 0.1543257989088075, |
| "grad_norm": 0.27897710920322044, |
| "learning_rate": 2.5647668393782386e-05, |
| "loss": 0.5301, |
| "step": 99 |
| }, |
| { |
| "epoch": 0.1558846453624318, |
| "grad_norm": 0.28832759123856166, |
| "learning_rate": 2.5906735751295337e-05, |
| "loss": 0.5118, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.15744349181605613, |
| "grad_norm": 0.34377322242370634, |
| "learning_rate": 2.616580310880829e-05, |
| "loss": 0.532, |
| "step": 101 |
| }, |
| { |
| "epoch": 0.15900233826968044, |
| "grad_norm": 0.3036711410586048, |
| "learning_rate": 2.6424870466321246e-05, |
| "loss": 0.5165, |
| "step": 102 |
| }, |
| { |
| "epoch": 0.16056118472330475, |
| "grad_norm": 0.36103093391647273, |
| "learning_rate": 2.6683937823834197e-05, |
| "loss": 0.5287, |
| "step": 103 |
| }, |
| { |
| "epoch": 0.16212003117692908, |
| "grad_norm": 0.29337030826817656, |
| "learning_rate": 2.694300518134715e-05, |
| "loss": 0.528, |
| "step": 104 |
| }, |
| { |
| "epoch": 0.1636788776305534, |
| "grad_norm": 0.33805107509606247, |
| "learning_rate": 2.7202072538860106e-05, |
| "loss": 0.5354, |
| "step": 105 |
| }, |
| { |
| "epoch": 0.1652377240841777, |
| "grad_norm": 0.31771328671144855, |
| "learning_rate": 2.7461139896373057e-05, |
| "loss": 0.5282, |
| "step": 106 |
| }, |
| { |
| "epoch": 0.16679657053780203, |
| "grad_norm": 0.28313518361082185, |
| "learning_rate": 2.7720207253886012e-05, |
| "loss": 0.5086, |
| "step": 107 |
| }, |
| { |
| "epoch": 0.16835541699142634, |
| "grad_norm": 0.34173304219782913, |
| "learning_rate": 2.7979274611398963e-05, |
| "loss": 0.5266, |
| "step": 108 |
| }, |
| { |
| "epoch": 0.16991426344505067, |
| "grad_norm": 0.2895019297309633, |
| "learning_rate": 2.8238341968911917e-05, |
| "loss": 0.5305, |
| "step": 109 |
| }, |
| { |
| "epoch": 0.17147310989867498, |
| "grad_norm": 0.3271405004182382, |
| "learning_rate": 2.8497409326424872e-05, |
| "loss": 0.5412, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.1730319563522993, |
| "grad_norm": 0.3254108424111249, |
| "learning_rate": 2.8756476683937827e-05, |
| "loss": 0.5267, |
| "step": 111 |
| }, |
| { |
| "epoch": 0.17459080280592362, |
| "grad_norm": 0.43636064754159903, |
| "learning_rate": 2.9015544041450778e-05, |
| "loss": 0.546, |
| "step": 112 |
| }, |
| { |
| "epoch": 0.17614964925954793, |
| "grad_norm": 0.35895062463773536, |
| "learning_rate": 2.9274611398963732e-05, |
| "loss": 0.5379, |
| "step": 113 |
| }, |
| { |
| "epoch": 0.17770849571317227, |
| "grad_norm": 0.48008348852035654, |
| "learning_rate": 2.9533678756476683e-05, |
| "loss": 0.5251, |
| "step": 114 |
| }, |
| { |
| "epoch": 0.17926734216679657, |
| "grad_norm": 0.40042199319895483, |
| "learning_rate": 2.9792746113989638e-05, |
| "loss": 0.5043, |
| "step": 115 |
| }, |
| { |
| "epoch": 0.18082618862042088, |
| "grad_norm": 0.44332954681106973, |
| "learning_rate": 3.0051813471502592e-05, |
| "loss": 0.5135, |
| "step": 116 |
| }, |
| { |
| "epoch": 0.18238503507404522, |
| "grad_norm": 0.43275195875835987, |
| "learning_rate": 3.0310880829015547e-05, |
| "loss": 0.5196, |
| "step": 117 |
| }, |
| { |
| "epoch": 0.18394388152766952, |
| "grad_norm": 0.3939079245195568, |
| "learning_rate": 3.05699481865285e-05, |
| "loss": 0.5261, |
| "step": 118 |
| }, |
| { |
| "epoch": 0.18550272798129383, |
| "grad_norm": 0.45003761498432127, |
| "learning_rate": 3.082901554404145e-05, |
| "loss": 0.5285, |
| "step": 119 |
| }, |
| { |
| "epoch": 0.18706157443491817, |
| "grad_norm": 0.38744988267232733, |
| "learning_rate": 3.108808290155441e-05, |
| "loss": 0.5183, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.18862042088854247, |
| "grad_norm": 0.42417740462219883, |
| "learning_rate": 3.134715025906736e-05, |
| "loss": 0.5366, |
| "step": 121 |
| }, |
| { |
| "epoch": 0.1901792673421668, |
| "grad_norm": 0.38878746316666607, |
| "learning_rate": 3.1606217616580316e-05, |
| "loss": 0.5405, |
| "step": 122 |
| }, |
| { |
| "epoch": 0.19173811379579112, |
| "grad_norm": 0.39470480511600553, |
| "learning_rate": 3.186528497409327e-05, |
| "loss": 0.5177, |
| "step": 123 |
| }, |
| { |
| "epoch": 0.19329696024941542, |
| "grad_norm": 0.3425139995303132, |
| "learning_rate": 3.212435233160622e-05, |
| "loss": 0.5278, |
| "step": 124 |
| }, |
| { |
| "epoch": 0.19485580670303976, |
| "grad_norm": 0.3855003673988402, |
| "learning_rate": 3.238341968911917e-05, |
| "loss": 0.5303, |
| "step": 125 |
| }, |
| { |
| "epoch": 0.19641465315666407, |
| "grad_norm": 0.3708329304695599, |
| "learning_rate": 3.264248704663213e-05, |
| "loss": 0.5308, |
| "step": 126 |
| }, |
| { |
| "epoch": 0.1979734996102884, |
| "grad_norm": 0.42197888435438696, |
| "learning_rate": 3.290155440414508e-05, |
| "loss": 0.5199, |
| "step": 127 |
| }, |
| { |
| "epoch": 0.1995323460639127, |
| "grad_norm": 0.39698217513178863, |
| "learning_rate": 3.3160621761658036e-05, |
| "loss": 0.506, |
| "step": 128 |
| }, |
| { |
| "epoch": 0.20109119251753702, |
| "grad_norm": 0.4043130382762473, |
| "learning_rate": 3.341968911917099e-05, |
| "loss": 0.5065, |
| "step": 129 |
| }, |
| { |
| "epoch": 0.20265003897116135, |
| "grad_norm": 0.34367190967592626, |
| "learning_rate": 3.367875647668394e-05, |
| "loss": 0.509, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.20420888542478566, |
| "grad_norm": 0.49458742478228435, |
| "learning_rate": 3.393782383419689e-05, |
| "loss": 0.5057, |
| "step": 131 |
| }, |
| { |
| "epoch": 0.20576773187840997, |
| "grad_norm": 0.3946670126655184, |
| "learning_rate": 3.419689119170985e-05, |
| "loss": 0.5074, |
| "step": 132 |
| }, |
| { |
| "epoch": 0.2073265783320343, |
| "grad_norm": 0.4590062123156051, |
| "learning_rate": 3.44559585492228e-05, |
| "loss": 0.5196, |
| "step": 133 |
| }, |
| { |
| "epoch": 0.2088854247856586, |
| "grad_norm": 0.41794015093559495, |
| "learning_rate": 3.471502590673576e-05, |
| "loss": 0.5293, |
| "step": 134 |
| }, |
| { |
| "epoch": 0.21044427123928294, |
| "grad_norm": 0.3976262150889235, |
| "learning_rate": 3.497409326424871e-05, |
| "loss": 0.4958, |
| "step": 135 |
| }, |
| { |
| "epoch": 0.21200311769290725, |
| "grad_norm": 0.3801765387110536, |
| "learning_rate": 3.523316062176166e-05, |
| "loss": 0.5283, |
| "step": 136 |
| }, |
| { |
| "epoch": 0.21356196414653156, |
| "grad_norm": 0.4393098932539108, |
| "learning_rate": 3.549222797927461e-05, |
| "loss": 0.5106, |
| "step": 137 |
| }, |
| { |
| "epoch": 0.2151208106001559, |
| "grad_norm": 0.37730905366487805, |
| "learning_rate": 3.575129533678757e-05, |
| "loss": 0.5004, |
| "step": 138 |
| }, |
| { |
| "epoch": 0.2166796570537802, |
| "grad_norm": 0.47264900412277805, |
| "learning_rate": 3.601036269430052e-05, |
| "loss": 0.5107, |
| "step": 139 |
| }, |
| { |
| "epoch": 0.2182385035074045, |
| "grad_norm": 0.4572211602618532, |
| "learning_rate": 3.626943005181348e-05, |
| "loss": 0.5258, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.21979734996102884, |
| "grad_norm": 0.566903195196336, |
| "learning_rate": 3.652849740932643e-05, |
| "loss": 0.4926, |
| "step": 141 |
| }, |
| { |
| "epoch": 0.22135619641465315, |
| "grad_norm": 0.40493119123467375, |
| "learning_rate": 3.678756476683938e-05, |
| "loss": 0.5167, |
| "step": 142 |
| }, |
| { |
| "epoch": 0.22291504286827749, |
| "grad_norm": 0.5800300099928885, |
| "learning_rate": 3.704663212435233e-05, |
| "loss": 0.4888, |
| "step": 143 |
| }, |
| { |
| "epoch": 0.2244738893219018, |
| "grad_norm": 0.49267006079774145, |
| "learning_rate": 3.730569948186529e-05, |
| "loss": 0.4931, |
| "step": 144 |
| }, |
| { |
| "epoch": 0.2260327357755261, |
| "grad_norm": 0.5509681396457901, |
| "learning_rate": 3.756476683937824e-05, |
| "loss": 0.515, |
| "step": 145 |
| }, |
| { |
| "epoch": 0.22759158222915044, |
| "grad_norm": 0.7109027605533598, |
| "learning_rate": 3.78238341968912e-05, |
| "loss": 0.4932, |
| "step": 146 |
| }, |
| { |
| "epoch": 0.22915042868277474, |
| "grad_norm": 0.35899713776420916, |
| "learning_rate": 3.808290155440415e-05, |
| "loss": 0.4893, |
| "step": 147 |
| }, |
| { |
| "epoch": 0.23070927513639908, |
| "grad_norm": 0.5392148285010113, |
| "learning_rate": 3.83419689119171e-05, |
| "loss": 0.4865, |
| "step": 148 |
| }, |
| { |
| "epoch": 0.23226812159002339, |
| "grad_norm": 0.4200166849207286, |
| "learning_rate": 3.860103626943005e-05, |
| "loss": 0.516, |
| "step": 149 |
| }, |
| { |
| "epoch": 0.2338269680436477, |
| "grad_norm": 0.4668106441613405, |
| "learning_rate": 3.886010362694301e-05, |
| "loss": 0.5104, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.23538581449727203, |
| "grad_norm": 0.388638095128956, |
| "learning_rate": 3.911917098445596e-05, |
| "loss": 0.5251, |
| "step": 151 |
| }, |
| { |
| "epoch": 0.23694466095089634, |
| "grad_norm": 0.3927756772978556, |
| "learning_rate": 3.937823834196892e-05, |
| "loss": 0.4898, |
| "step": 152 |
| }, |
| { |
| "epoch": 0.23850350740452064, |
| "grad_norm": 0.4474650309815615, |
| "learning_rate": 3.963730569948187e-05, |
| "loss": 0.5002, |
| "step": 153 |
| }, |
| { |
| "epoch": 0.24006235385814498, |
| "grad_norm": 0.40465455399123634, |
| "learning_rate": 3.989637305699482e-05, |
| "loss": 0.4805, |
| "step": 154 |
| }, |
| { |
| "epoch": 0.24162120031176929, |
| "grad_norm": 0.46329230044256475, |
| "learning_rate": 4.015544041450777e-05, |
| "loss": 0.5148, |
| "step": 155 |
| }, |
| { |
| "epoch": 0.24318004676539362, |
| "grad_norm": 0.4221898620021292, |
| "learning_rate": 4.041450777202073e-05, |
| "loss": 0.5087, |
| "step": 156 |
| }, |
| { |
| "epoch": 0.24473889321901793, |
| "grad_norm": 0.41405351180679445, |
| "learning_rate": 4.067357512953368e-05, |
| "loss": 0.5206, |
| "step": 157 |
| }, |
| { |
| "epoch": 0.24629773967264224, |
| "grad_norm": 0.3718428876018395, |
| "learning_rate": 4.093264248704664e-05, |
| "loss": 0.5018, |
| "step": 158 |
| }, |
| { |
| "epoch": 0.24785658612626657, |
| "grad_norm": 0.4108732557264086, |
| "learning_rate": 4.119170984455959e-05, |
| "loss": 0.4721, |
| "step": 159 |
| }, |
| { |
| "epoch": 0.24941543257989088, |
| "grad_norm": 0.5196664214895078, |
| "learning_rate": 4.145077720207254e-05, |
| "loss": 0.501, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.2509742790335152, |
| "grad_norm": 0.34375475907972514, |
| "learning_rate": 4.170984455958549e-05, |
| "loss": 0.4944, |
| "step": 161 |
| }, |
| { |
| "epoch": 0.2525331254871395, |
| "grad_norm": 0.5064449794040312, |
| "learning_rate": 4.196891191709845e-05, |
| "loss": 0.5029, |
| "step": 162 |
| }, |
| { |
| "epoch": 0.2540919719407638, |
| "grad_norm": 0.4647723748878803, |
| "learning_rate": 4.22279792746114e-05, |
| "loss": 0.5024, |
| "step": 163 |
| }, |
| { |
| "epoch": 0.25565081839438814, |
| "grad_norm": 0.4006925999521958, |
| "learning_rate": 4.248704663212436e-05, |
| "loss": 0.4655, |
| "step": 164 |
| }, |
| { |
| "epoch": 0.2572096648480125, |
| "grad_norm": 0.45646986120048183, |
| "learning_rate": 4.274611398963731e-05, |
| "loss": 0.5059, |
| "step": 165 |
| }, |
| { |
| "epoch": 0.2587685113016368, |
| "grad_norm": 0.4337454689280252, |
| "learning_rate": 4.300518134715026e-05, |
| "loss": 0.4847, |
| "step": 166 |
| }, |
| { |
| "epoch": 0.2603273577552611, |
| "grad_norm": 0.4818011779118697, |
| "learning_rate": 4.326424870466321e-05, |
| "loss": 0.5039, |
| "step": 167 |
| }, |
| { |
| "epoch": 0.2618862042088854, |
| "grad_norm": 0.4004895532370599, |
| "learning_rate": 4.352331606217617e-05, |
| "loss": 0.4997, |
| "step": 168 |
| }, |
| { |
| "epoch": 0.2634450506625097, |
| "grad_norm": 0.4776133221437833, |
| "learning_rate": 4.378238341968912e-05, |
| "loss": 0.4747, |
| "step": 169 |
| }, |
| { |
| "epoch": 0.26500389711613404, |
| "grad_norm": 0.3542482573234714, |
| "learning_rate": 4.404145077720208e-05, |
| "loss": 0.4995, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.2665627435697584, |
| "grad_norm": 0.47546134775525584, |
| "learning_rate": 4.430051813471503e-05, |
| "loss": 0.5021, |
| "step": 171 |
| }, |
| { |
| "epoch": 0.2681215900233827, |
| "grad_norm": 0.37668384089912865, |
| "learning_rate": 4.455958549222798e-05, |
| "loss": 0.4847, |
| "step": 172 |
| }, |
| { |
| "epoch": 0.269680436477007, |
| "grad_norm": 0.5250094003553915, |
| "learning_rate": 4.481865284974093e-05, |
| "loss": 0.4965, |
| "step": 173 |
| }, |
| { |
| "epoch": 0.2712392829306313, |
| "grad_norm": 0.5118060286220544, |
| "learning_rate": 4.507772020725389e-05, |
| "loss": 0.5002, |
| "step": 174 |
| }, |
| { |
| "epoch": 0.2727981293842556, |
| "grad_norm": 0.5439924404821223, |
| "learning_rate": 4.533678756476684e-05, |
| "loss": 0.4935, |
| "step": 175 |
| }, |
| { |
| "epoch": 0.27435697583788, |
| "grad_norm": 0.46651416281963, |
| "learning_rate": 4.55958549222798e-05, |
| "loss": 0.4936, |
| "step": 176 |
| }, |
| { |
| "epoch": 0.2759158222915043, |
| "grad_norm": 0.6025736893384493, |
| "learning_rate": 4.585492227979275e-05, |
| "loss": 0.4936, |
| "step": 177 |
| }, |
| { |
| "epoch": 0.2774746687451286, |
| "grad_norm": 0.49477813313240576, |
| "learning_rate": 4.61139896373057e-05, |
| "loss": 0.5014, |
| "step": 178 |
| }, |
| { |
| "epoch": 0.2790335151987529, |
| "grad_norm": 0.38453152894256626, |
| "learning_rate": 4.637305699481865e-05, |
| "loss": 0.5026, |
| "step": 179 |
| }, |
| { |
| "epoch": 0.2805923616523772, |
| "grad_norm": 0.6693985713554009, |
| "learning_rate": 4.663212435233161e-05, |
| "loss": 0.4966, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.2821512081060016, |
| "grad_norm": 0.49890072866209745, |
| "learning_rate": 4.689119170984456e-05, |
| "loss": 0.4951, |
| "step": 181 |
| }, |
| { |
| "epoch": 0.2837100545596259, |
| "grad_norm": 0.4986049703889912, |
| "learning_rate": 4.715025906735751e-05, |
| "loss": 0.4848, |
| "step": 182 |
| }, |
| { |
| "epoch": 0.2852689010132502, |
| "grad_norm": 0.7123183153704133, |
| "learning_rate": 4.740932642487047e-05, |
| "loss": 0.5087, |
| "step": 183 |
| }, |
| { |
| "epoch": 0.2868277474668745, |
| "grad_norm": 0.5101437582246441, |
| "learning_rate": 4.766839378238342e-05, |
| "loss": 0.496, |
| "step": 184 |
| }, |
| { |
| "epoch": 0.2883865939204988, |
| "grad_norm": 0.5462740650609419, |
| "learning_rate": 4.792746113989637e-05, |
| "loss": 0.4891, |
| "step": 185 |
| }, |
| { |
| "epoch": 0.2899454403741232, |
| "grad_norm": 0.6739608210631324, |
| "learning_rate": 4.818652849740933e-05, |
| "loss": 0.4809, |
| "step": 186 |
| }, |
| { |
| "epoch": 0.2915042868277475, |
| "grad_norm": 0.36454838249845395, |
| "learning_rate": 4.844559585492228e-05, |
| "loss": 0.4826, |
| "step": 187 |
| }, |
| { |
| "epoch": 0.2930631332813718, |
| "grad_norm": 0.8180118832431814, |
| "learning_rate": 4.870466321243523e-05, |
| "loss": 0.5007, |
| "step": 188 |
| }, |
| { |
| "epoch": 0.2946219797349961, |
| "grad_norm": 0.8582599255383014, |
| "learning_rate": 4.896373056994819e-05, |
| "loss": 0.5101, |
| "step": 189 |
| }, |
| { |
| "epoch": 0.2961808261886204, |
| "grad_norm": 0.5339198441824459, |
| "learning_rate": 4.922279792746114e-05, |
| "loss": 0.4755, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.2977396726422447, |
| "grad_norm": 0.5038691245377587, |
| "learning_rate": 4.948186528497409e-05, |
| "loss": 0.4987, |
| "step": 191 |
| }, |
| { |
| "epoch": 0.2992985190958691, |
| "grad_norm": 0.7512130026001776, |
| "learning_rate": 4.974093264248705e-05, |
| "loss": 0.5168, |
| "step": 192 |
| }, |
| { |
| "epoch": 0.3008573655494934, |
| "grad_norm": 0.5965449485798306, |
| "learning_rate": 5e-05, |
| "loss": 0.4825, |
| "step": 193 |
| }, |
| { |
| "epoch": 0.3024162120031177, |
| "grad_norm": 0.5874775355232069, |
| "learning_rate": 4.9971098265895956e-05, |
| "loss": 0.484, |
| "step": 194 |
| }, |
| { |
| "epoch": 0.303975058456742, |
| "grad_norm": 0.6794164991276913, |
| "learning_rate": 4.994219653179191e-05, |
| "loss": 0.4956, |
| "step": 195 |
| }, |
| { |
| "epoch": 0.3055339049103663, |
| "grad_norm": 0.5324211441774743, |
| "learning_rate": 4.991329479768786e-05, |
| "loss": 0.469, |
| "step": 196 |
| }, |
| { |
| "epoch": 0.30709275136399067, |
| "grad_norm": 0.7381982819903451, |
| "learning_rate": 4.9884393063583816e-05, |
| "loss": 0.4859, |
| "step": 197 |
| }, |
| { |
| "epoch": 0.308651597817615, |
| "grad_norm": 0.7316455279398204, |
| "learning_rate": 4.985549132947977e-05, |
| "loss": 0.4962, |
| "step": 198 |
| }, |
| { |
| "epoch": 0.3102104442712393, |
| "grad_norm": 0.5513173476365167, |
| "learning_rate": 4.982658959537572e-05, |
| "loss": 0.4973, |
| "step": 199 |
| }, |
| { |
| "epoch": 0.3117692907248636, |
| "grad_norm": 0.6961187825800337, |
| "learning_rate": 4.979768786127168e-05, |
| "loss": 0.504, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.3133281371784879, |
| "grad_norm": 0.6675059607265288, |
| "learning_rate": 4.976878612716763e-05, |
| "loss": 0.4818, |
| "step": 201 |
| }, |
| { |
| "epoch": 0.31488698363211226, |
| "grad_norm": 0.5775315130950791, |
| "learning_rate": 4.9739884393063583e-05, |
| "loss": 0.5108, |
| "step": 202 |
| }, |
| { |
| "epoch": 0.31644583008573657, |
| "grad_norm": 0.5493567678427346, |
| "learning_rate": 4.971098265895954e-05, |
| "loss": 0.4933, |
| "step": 203 |
| }, |
| { |
| "epoch": 0.3180046765393609, |
| "grad_norm": 0.5788493631561638, |
| "learning_rate": 4.96820809248555e-05, |
| "loss": 0.4921, |
| "step": 204 |
| }, |
| { |
| "epoch": 0.3195635229929852, |
| "grad_norm": 0.5111462707368106, |
| "learning_rate": 4.965317919075145e-05, |
| "loss": 0.4617, |
| "step": 205 |
| }, |
| { |
| "epoch": 0.3211223694466095, |
| "grad_norm": 0.5647593330505578, |
| "learning_rate": 4.96242774566474e-05, |
| "loss": 0.4953, |
| "step": 206 |
| }, |
| { |
| "epoch": 0.32268121590023385, |
| "grad_norm": 0.4207725153414849, |
| "learning_rate": 4.959537572254335e-05, |
| "loss": 0.5115, |
| "step": 207 |
| }, |
| { |
| "epoch": 0.32424006235385816, |
| "grad_norm": 0.4624044434848398, |
| "learning_rate": 4.956647398843931e-05, |
| "loss": 0.4722, |
| "step": 208 |
| }, |
| { |
| "epoch": 0.32579890880748247, |
| "grad_norm": 0.4376386673543866, |
| "learning_rate": 4.9537572254335264e-05, |
| "loss": 0.4832, |
| "step": 209 |
| }, |
| { |
| "epoch": 0.3273577552611068, |
| "grad_norm": 0.531865499974187, |
| "learning_rate": 4.950867052023122e-05, |
| "loss": 0.5055, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.3289166017147311, |
| "grad_norm": 0.5050602424671378, |
| "learning_rate": 4.9479768786127164e-05, |
| "loss": 0.4697, |
| "step": 211 |
| }, |
| { |
| "epoch": 0.3304754481683554, |
| "grad_norm": 0.38168626861209426, |
| "learning_rate": 4.9450867052023125e-05, |
| "loss": 0.4907, |
| "step": 212 |
| }, |
| { |
| "epoch": 0.33203429462197975, |
| "grad_norm": 0.5281790174146758, |
| "learning_rate": 4.942196531791908e-05, |
| "loss": 0.4876, |
| "step": 213 |
| }, |
| { |
| "epoch": 0.33359314107560406, |
| "grad_norm": 0.5297198653114203, |
| "learning_rate": 4.939306358381503e-05, |
| "loss": 0.4913, |
| "step": 214 |
| }, |
| { |
| "epoch": 0.33515198752922837, |
| "grad_norm": 0.45152753639301624, |
| "learning_rate": 4.9364161849710985e-05, |
| "loss": 0.4731, |
| "step": 215 |
| }, |
| { |
| "epoch": 0.3367108339828527, |
| "grad_norm": 0.3683086256111994, |
| "learning_rate": 4.933526011560694e-05, |
| "loss": 0.4779, |
| "step": 216 |
| }, |
| { |
| "epoch": 0.338269680436477, |
| "grad_norm": 0.4739643840797579, |
| "learning_rate": 4.930635838150289e-05, |
| "loss": 0.4992, |
| "step": 217 |
| }, |
| { |
| "epoch": 0.33982852689010135, |
| "grad_norm": 0.4068837265657986, |
| "learning_rate": 4.9277456647398845e-05, |
| "loss": 0.4996, |
| "step": 218 |
| }, |
| { |
| "epoch": 0.34138737334372565, |
| "grad_norm": 0.4885011148922096, |
| "learning_rate": 4.92485549132948e-05, |
| "loss": 0.4865, |
| "step": 219 |
| }, |
| { |
| "epoch": 0.34294621979734996, |
| "grad_norm": 0.493814836908405, |
| "learning_rate": 4.921965317919075e-05, |
| "loss": 0.4731, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.34450506625097427, |
| "grad_norm": 0.46892553233657863, |
| "learning_rate": 4.9190751445086706e-05, |
| "loss": 0.4699, |
| "step": 221 |
| }, |
| { |
| "epoch": 0.3460639127045986, |
| "grad_norm": 0.464609243189492, |
| "learning_rate": 4.916184971098266e-05, |
| "loss": 0.478, |
| "step": 222 |
| }, |
| { |
| "epoch": 0.34762275915822294, |
| "grad_norm": 0.37152543938557336, |
| "learning_rate": 4.913294797687861e-05, |
| "loss": 0.4694, |
| "step": 223 |
| }, |
| { |
| "epoch": 0.34918160561184725, |
| "grad_norm": 0.5025252675976578, |
| "learning_rate": 4.910404624277457e-05, |
| "loss": 0.4715, |
| "step": 224 |
| }, |
| { |
| "epoch": 0.35074045206547155, |
| "grad_norm": 0.3402466722282753, |
| "learning_rate": 4.9075144508670526e-05, |
| "loss": 0.463, |
| "step": 225 |
| }, |
| { |
| "epoch": 0.35229929851909586, |
| "grad_norm": 0.3965061942369021, |
| "learning_rate": 4.904624277456647e-05, |
| "loss": 0.4792, |
| "step": 226 |
| }, |
| { |
| "epoch": 0.35385814497272017, |
| "grad_norm": 0.4206144149046459, |
| "learning_rate": 4.9017341040462426e-05, |
| "loss": 0.4901, |
| "step": 227 |
| }, |
| { |
| "epoch": 0.35541699142634453, |
| "grad_norm": 0.4286051420798536, |
| "learning_rate": 4.8988439306358387e-05, |
| "loss": 0.4742, |
| "step": 228 |
| }, |
| { |
| "epoch": 0.35697583787996884, |
| "grad_norm": 0.47141158644523007, |
| "learning_rate": 4.895953757225434e-05, |
| "loss": 0.4968, |
| "step": 229 |
| }, |
| { |
| "epoch": 0.35853468433359315, |
| "grad_norm": 0.3415005284976459, |
| "learning_rate": 4.8930635838150293e-05, |
| "loss": 0.4568, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.36009353078721745, |
| "grad_norm": 0.48287718381612604, |
| "learning_rate": 4.890173410404624e-05, |
| "loss": 0.4993, |
| "step": 231 |
| }, |
| { |
| "epoch": 0.36165237724084176, |
| "grad_norm": 0.5803189730929726, |
| "learning_rate": 4.88728323699422e-05, |
| "loss": 0.4653, |
| "step": 232 |
| }, |
| { |
| "epoch": 0.36321122369446607, |
| "grad_norm": 0.3907951855741137, |
| "learning_rate": 4.8843930635838154e-05, |
| "loss": 0.4794, |
| "step": 233 |
| }, |
| { |
| "epoch": 0.36477007014809043, |
| "grad_norm": 0.5028194362808669, |
| "learning_rate": 4.881502890173411e-05, |
| "loss": 0.4942, |
| "step": 234 |
| }, |
| { |
| "epoch": 0.36632891660171474, |
| "grad_norm": 0.4945111081672912, |
| "learning_rate": 4.878612716763006e-05, |
| "loss": 0.4771, |
| "step": 235 |
| }, |
| { |
| "epoch": 0.36788776305533905, |
| "grad_norm": 0.43827490144476094, |
| "learning_rate": 4.8757225433526014e-05, |
| "loss": 0.459, |
| "step": 236 |
| }, |
| { |
| "epoch": 0.36944660950896335, |
| "grad_norm": 0.5256559700101235, |
| "learning_rate": 4.872832369942197e-05, |
| "loss": 0.4696, |
| "step": 237 |
| }, |
| { |
| "epoch": 0.37100545596258766, |
| "grad_norm": 0.5899114320019979, |
| "learning_rate": 4.869942196531792e-05, |
| "loss": 0.4769, |
| "step": 238 |
| }, |
| { |
| "epoch": 0.372564302416212, |
| "grad_norm": 0.4326187078710821, |
| "learning_rate": 4.8670520231213874e-05, |
| "loss": 0.4761, |
| "step": 239 |
| }, |
| { |
| "epoch": 0.37412314886983633, |
| "grad_norm": 0.4922313590375816, |
| "learning_rate": 4.864161849710983e-05, |
| "loss": 0.4871, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.37568199532346064, |
| "grad_norm": 0.4403169878610757, |
| "learning_rate": 4.861271676300578e-05, |
| "loss": 0.4918, |
| "step": 241 |
| }, |
| { |
| "epoch": 0.37724084177708495, |
| "grad_norm": 0.39128925993823416, |
| "learning_rate": 4.8583815028901735e-05, |
| "loss": 0.4695, |
| "step": 242 |
| }, |
| { |
| "epoch": 0.37879968823070925, |
| "grad_norm": 0.4947169815070576, |
| "learning_rate": 4.855491329479769e-05, |
| "loss": 0.4785, |
| "step": 243 |
| }, |
| { |
| "epoch": 0.3803585346843336, |
| "grad_norm": 0.36561636850124596, |
| "learning_rate": 4.852601156069365e-05, |
| "loss": 0.4687, |
| "step": 244 |
| }, |
| { |
| "epoch": 0.3819173811379579, |
| "grad_norm": 0.5740065574334354, |
| "learning_rate": 4.8497109826589595e-05, |
| "loss": 0.4777, |
| "step": 245 |
| }, |
| { |
| "epoch": 0.38347622759158223, |
| "grad_norm": 0.37164208100522045, |
| "learning_rate": 4.846820809248555e-05, |
| "loss": 0.4769, |
| "step": 246 |
| }, |
| { |
| "epoch": 0.38503507404520654, |
| "grad_norm": 0.45893063215148555, |
| "learning_rate": 4.84393063583815e-05, |
| "loss": 0.4754, |
| "step": 247 |
| }, |
| { |
| "epoch": 0.38659392049883085, |
| "grad_norm": 0.4592126499581869, |
| "learning_rate": 4.841040462427746e-05, |
| "loss": 0.4667, |
| "step": 248 |
| }, |
| { |
| "epoch": 0.3881527669524552, |
| "grad_norm": 0.42130274935477224, |
| "learning_rate": 4.8381502890173416e-05, |
| "loss": 0.4727, |
| "step": 249 |
| }, |
| { |
| "epoch": 0.3897116134060795, |
| "grad_norm": 0.5562234140465554, |
| "learning_rate": 4.835260115606937e-05, |
| "loss": 0.5142, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.3912704598597038, |
| "grad_norm": 0.35335610698932396, |
| "learning_rate": 4.8323699421965316e-05, |
| "loss": 0.4726, |
| "step": 251 |
| }, |
| { |
| "epoch": 0.39282930631332813, |
| "grad_norm": 0.5355853337204556, |
| "learning_rate": 4.8294797687861276e-05, |
| "loss": 0.468, |
| "step": 252 |
| }, |
| { |
| "epoch": 0.39438815276695244, |
| "grad_norm": 0.3940457770663424, |
| "learning_rate": 4.826589595375723e-05, |
| "loss": 0.4795, |
| "step": 253 |
| }, |
| { |
| "epoch": 0.3959469992205768, |
| "grad_norm": 0.48308987476155735, |
| "learning_rate": 4.823699421965318e-05, |
| "loss": 0.5007, |
| "step": 254 |
| }, |
| { |
| "epoch": 0.3975058456742011, |
| "grad_norm": 0.529695325269085, |
| "learning_rate": 4.8208092485549136e-05, |
| "loss": 0.4928, |
| "step": 255 |
| }, |
| { |
| "epoch": 0.3990646921278254, |
| "grad_norm": 0.46950723041884723, |
| "learning_rate": 4.817919075144509e-05, |
| "loss": 0.4637, |
| "step": 256 |
| }, |
| { |
| "epoch": 0.4006235385814497, |
| "grad_norm": 0.5659006167244565, |
| "learning_rate": 4.815028901734104e-05, |
| "loss": 0.4719, |
| "step": 257 |
| }, |
| { |
| "epoch": 0.40218238503507403, |
| "grad_norm": 0.39896571715761564, |
| "learning_rate": 4.8121387283237e-05, |
| "loss": 0.4847, |
| "step": 258 |
| }, |
| { |
| "epoch": 0.40374123148869834, |
| "grad_norm": 0.5485795723559732, |
| "learning_rate": 4.809248554913295e-05, |
| "loss": 0.4782, |
| "step": 259 |
| }, |
| { |
| "epoch": 0.4053000779423227, |
| "grad_norm": 0.3776722335922268, |
| "learning_rate": 4.8063583815028904e-05, |
| "loss": 0.4728, |
| "step": 260 |
| }, |
| { |
| "epoch": 0.406858924395947, |
| "grad_norm": 0.42769770440042826, |
| "learning_rate": 4.803468208092486e-05, |
| "loss": 0.4718, |
| "step": 261 |
| }, |
| { |
| "epoch": 0.4084177708495713, |
| "grad_norm": 0.38140213686869234, |
| "learning_rate": 4.800578034682081e-05, |
| "loss": 0.4798, |
| "step": 262 |
| }, |
| { |
| "epoch": 0.4099766173031956, |
| "grad_norm": 0.3693912240886004, |
| "learning_rate": 4.7976878612716764e-05, |
| "loss": 0.4677, |
| "step": 263 |
| }, |
| { |
| "epoch": 0.41153546375681993, |
| "grad_norm": 0.540507172297762, |
| "learning_rate": 4.7947976878612724e-05, |
| "loss": 0.4783, |
| "step": 264 |
| }, |
| { |
| "epoch": 0.4130943102104443, |
| "grad_norm": 0.38027657459985387, |
| "learning_rate": 4.791907514450867e-05, |
| "loss": 0.4977, |
| "step": 265 |
| }, |
| { |
| "epoch": 0.4146531566640686, |
| "grad_norm": 0.5498501393924234, |
| "learning_rate": 4.7890173410404624e-05, |
| "loss": 0.5207, |
| "step": 266 |
| }, |
| { |
| "epoch": 0.4162120031176929, |
| "grad_norm": 0.44454634948662736, |
| "learning_rate": 4.786127167630058e-05, |
| "loss": 0.449, |
| "step": 267 |
| }, |
| { |
| "epoch": 0.4177708495713172, |
| "grad_norm": 0.5112086728008995, |
| "learning_rate": 4.783236994219654e-05, |
| "loss": 0.4803, |
| "step": 268 |
| }, |
| { |
| "epoch": 0.4193296960249415, |
| "grad_norm": 0.6274330925404005, |
| "learning_rate": 4.780346820809249e-05, |
| "loss": 0.477, |
| "step": 269 |
| }, |
| { |
| "epoch": 0.4208885424785659, |
| "grad_norm": 0.45606522557073254, |
| "learning_rate": 4.777456647398844e-05, |
| "loss": 0.4794, |
| "step": 270 |
| }, |
| { |
| "epoch": 0.4224473889321902, |
| "grad_norm": 0.5630490443389089, |
| "learning_rate": 4.774566473988439e-05, |
| "loss": 0.48, |
| "step": 271 |
| }, |
| { |
| "epoch": 0.4240062353858145, |
| "grad_norm": 0.5538678824248618, |
| "learning_rate": 4.771676300578035e-05, |
| "loss": 0.4972, |
| "step": 272 |
| }, |
| { |
| "epoch": 0.4255650818394388, |
| "grad_norm": 0.5075041955013312, |
| "learning_rate": 4.7687861271676305e-05, |
| "loss": 0.4935, |
| "step": 273 |
| }, |
| { |
| "epoch": 0.4271239282930631, |
| "grad_norm": 0.5416003321516639, |
| "learning_rate": 4.765895953757226e-05, |
| "loss": 0.4732, |
| "step": 274 |
| }, |
| { |
| "epoch": 0.4286827747466875, |
| "grad_norm": 0.4993962101559131, |
| "learning_rate": 4.763005780346821e-05, |
| "loss": 0.477, |
| "step": 275 |
| }, |
| { |
| "epoch": 0.4302416212003118, |
| "grad_norm": 0.5254171958882733, |
| "learning_rate": 4.7601156069364166e-05, |
| "loss": 0.4899, |
| "step": 276 |
| }, |
| { |
| "epoch": 0.4318004676539361, |
| "grad_norm": 0.5484868681986969, |
| "learning_rate": 4.757225433526012e-05, |
| "loss": 0.4608, |
| "step": 277 |
| }, |
| { |
| "epoch": 0.4333593141075604, |
| "grad_norm": 0.6405428710095884, |
| "learning_rate": 4.754335260115607e-05, |
| "loss": 0.4816, |
| "step": 278 |
| }, |
| { |
| "epoch": 0.4349181605611847, |
| "grad_norm": 0.48377922320779415, |
| "learning_rate": 4.7514450867052026e-05, |
| "loss": 0.4838, |
| "step": 279 |
| }, |
| { |
| "epoch": 0.436477007014809, |
| "grad_norm": 0.7085436078292358, |
| "learning_rate": 4.748554913294798e-05, |
| "loss": 0.4718, |
| "step": 280 |
| }, |
| { |
| "epoch": 0.4380358534684334, |
| "grad_norm": 0.4043009368444505, |
| "learning_rate": 4.745664739884393e-05, |
| "loss": 0.4442, |
| "step": 281 |
| }, |
| { |
| "epoch": 0.4395946999220577, |
| "grad_norm": 0.5866137208959042, |
| "learning_rate": 4.7427745664739886e-05, |
| "loss": 0.461, |
| "step": 282 |
| }, |
| { |
| "epoch": 0.441153546375682, |
| "grad_norm": 0.5810371524784574, |
| "learning_rate": 4.739884393063584e-05, |
| "loss": 0.4839, |
| "step": 283 |
| }, |
| { |
| "epoch": 0.4427123928293063, |
| "grad_norm": 0.438670614683413, |
| "learning_rate": 4.73699421965318e-05, |
| "loss": 0.4615, |
| "step": 284 |
| }, |
| { |
| "epoch": 0.4442712392829306, |
| "grad_norm": 0.6406824783627434, |
| "learning_rate": 4.7341040462427747e-05, |
| "loss": 0.4611, |
| "step": 285 |
| }, |
| { |
| "epoch": 0.44583008573655497, |
| "grad_norm": 0.529477326835477, |
| "learning_rate": 4.73121387283237e-05, |
| "loss": 0.4788, |
| "step": 286 |
| }, |
| { |
| "epoch": 0.4473889321901793, |
| "grad_norm": 0.4838077152970437, |
| "learning_rate": 4.7283236994219653e-05, |
| "loss": 0.4727, |
| "step": 287 |
| }, |
| { |
| "epoch": 0.4489477786438036, |
| "grad_norm": 0.47119490593405255, |
| "learning_rate": 4.7254335260115614e-05, |
| "loss": 0.473, |
| "step": 288 |
| }, |
| { |
| "epoch": 0.4505066250974279, |
| "grad_norm": 0.4366369786625669, |
| "learning_rate": 4.722543352601157e-05, |
| "loss": 0.4526, |
| "step": 289 |
| }, |
| { |
| "epoch": 0.4520654715510522, |
| "grad_norm": 0.4281616340000569, |
| "learning_rate": 4.7196531791907514e-05, |
| "loss": 0.482, |
| "step": 290 |
| }, |
| { |
| "epoch": 0.45362431800467656, |
| "grad_norm": 0.5820798503771535, |
| "learning_rate": 4.716763005780347e-05, |
| "loss": 0.4716, |
| "step": 291 |
| }, |
| { |
| "epoch": 0.45518316445830087, |
| "grad_norm": 0.45736556696474184, |
| "learning_rate": 4.713872832369942e-05, |
| "loss": 0.4569, |
| "step": 292 |
| }, |
| { |
| "epoch": 0.4567420109119252, |
| "grad_norm": 0.5568676859423672, |
| "learning_rate": 4.710982658959538e-05, |
| "loss": 0.4738, |
| "step": 293 |
| }, |
| { |
| "epoch": 0.4583008573655495, |
| "grad_norm": 0.6028680330451817, |
| "learning_rate": 4.7080924855491334e-05, |
| "loss": 0.476, |
| "step": 294 |
| }, |
| { |
| "epoch": 0.4598597038191738, |
| "grad_norm": 0.47393834722502826, |
| "learning_rate": 4.705202312138728e-05, |
| "loss": 0.4896, |
| "step": 295 |
| }, |
| { |
| "epoch": 0.46141855027279816, |
| "grad_norm": 0.5313704646163404, |
| "learning_rate": 4.7023121387283234e-05, |
| "loss": 0.4805, |
| "step": 296 |
| }, |
| { |
| "epoch": 0.46297739672642246, |
| "grad_norm": 0.5051367616146644, |
| "learning_rate": 4.6994219653179195e-05, |
| "loss": 0.4569, |
| "step": 297 |
| }, |
| { |
| "epoch": 0.46453624318004677, |
| "grad_norm": 0.3831029585767748, |
| "learning_rate": 4.696531791907515e-05, |
| "loss": 0.4744, |
| "step": 298 |
| }, |
| { |
| "epoch": 0.4660950896336711, |
| "grad_norm": 0.521461479028099, |
| "learning_rate": 4.69364161849711e-05, |
| "loss": 0.4483, |
| "step": 299 |
| }, |
| { |
| "epoch": 0.4676539360872954, |
| "grad_norm": 0.38896958455232095, |
| "learning_rate": 4.690751445086705e-05, |
| "loss": 0.4498, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.4692127825409197, |
| "grad_norm": 0.39085029504274005, |
| "learning_rate": 4.687861271676301e-05, |
| "loss": 0.4671, |
| "step": 301 |
| }, |
| { |
| "epoch": 0.47077162899454406, |
| "grad_norm": 0.4788937605695569, |
| "learning_rate": 4.684971098265896e-05, |
| "loss": 0.476, |
| "step": 302 |
| }, |
| { |
| "epoch": 0.47233047544816836, |
| "grad_norm": 0.34601184680325847, |
| "learning_rate": 4.6820809248554915e-05, |
| "loss": 0.4638, |
| "step": 303 |
| }, |
| { |
| "epoch": 0.47388932190179267, |
| "grad_norm": 0.43700546489204795, |
| "learning_rate": 4.679190751445087e-05, |
| "loss": 0.4814, |
| "step": 304 |
| }, |
| { |
| "epoch": 0.475448168355417, |
| "grad_norm": 0.3838353985965739, |
| "learning_rate": 4.676300578034682e-05, |
| "loss": 0.451, |
| "step": 305 |
| }, |
| { |
| "epoch": 0.4770070148090413, |
| "grad_norm": 0.4177639814145164, |
| "learning_rate": 4.6734104046242776e-05, |
| "loss": 0.4758, |
| "step": 306 |
| }, |
| { |
| "epoch": 0.47856586126266565, |
| "grad_norm": 0.3922398233693093, |
| "learning_rate": 4.670520231213873e-05, |
| "loss": 0.4657, |
| "step": 307 |
| }, |
| { |
| "epoch": 0.48012470771628996, |
| "grad_norm": 0.3757724195621053, |
| "learning_rate": 4.667630057803468e-05, |
| "loss": 0.4793, |
| "step": 308 |
| }, |
| { |
| "epoch": 0.48168355416991426, |
| "grad_norm": 0.3414831861075698, |
| "learning_rate": 4.664739884393064e-05, |
| "loss": 0.4636, |
| "step": 309 |
| }, |
| { |
| "epoch": 0.48324240062353857, |
| "grad_norm": 0.41270011789282307, |
| "learning_rate": 4.661849710982659e-05, |
| "loss": 0.4508, |
| "step": 310 |
| }, |
| { |
| "epoch": 0.4848012470771629, |
| "grad_norm": 0.40583792569372235, |
| "learning_rate": 4.658959537572254e-05, |
| "loss": 0.4849, |
| "step": 311 |
| }, |
| { |
| "epoch": 0.48636009353078724, |
| "grad_norm": 0.44731637705198873, |
| "learning_rate": 4.6560693641618496e-05, |
| "loss": 0.4956, |
| "step": 312 |
| }, |
| { |
| "epoch": 0.48791893998441155, |
| "grad_norm": 0.39572009537399316, |
| "learning_rate": 4.653179190751446e-05, |
| "loss": 0.4752, |
| "step": 313 |
| }, |
| { |
| "epoch": 0.48947778643803586, |
| "grad_norm": 0.39239985837197133, |
| "learning_rate": 4.650289017341041e-05, |
| "loss": 0.4448, |
| "step": 314 |
| }, |
| { |
| "epoch": 0.49103663289166016, |
| "grad_norm": 0.47254520649722037, |
| "learning_rate": 4.647398843930636e-05, |
| "loss": 0.4424, |
| "step": 315 |
| }, |
| { |
| "epoch": 0.49259547934528447, |
| "grad_norm": 0.3655766693795544, |
| "learning_rate": 4.644508670520231e-05, |
| "loss": 0.4585, |
| "step": 316 |
| }, |
| { |
| "epoch": 0.49415432579890883, |
| "grad_norm": 0.5024262207840765, |
| "learning_rate": 4.641618497109827e-05, |
| "loss": 0.4711, |
| "step": 317 |
| }, |
| { |
| "epoch": 0.49571317225253314, |
| "grad_norm": 0.41554970235113214, |
| "learning_rate": 4.6387283236994224e-05, |
| "loss": 0.4606, |
| "step": 318 |
| }, |
| { |
| "epoch": 0.49727201870615745, |
| "grad_norm": 0.4597741099913984, |
| "learning_rate": 4.635838150289018e-05, |
| "loss": 0.4527, |
| "step": 319 |
| }, |
| { |
| "epoch": 0.49883086515978176, |
| "grad_norm": 0.4425235361753541, |
| "learning_rate": 4.6329479768786124e-05, |
| "loss": 0.4769, |
| "step": 320 |
| }, |
| { |
| "epoch": 0.5003897116134061, |
| "grad_norm": 0.38187283401074307, |
| "learning_rate": 4.6300578034682084e-05, |
| "loss": 0.4594, |
| "step": 321 |
| }, |
| { |
| "epoch": 0.5019485580670304, |
| "grad_norm": 0.3797910266033653, |
| "learning_rate": 4.627167630057804e-05, |
| "loss": 0.462, |
| "step": 322 |
| }, |
| { |
| "epoch": 0.5035074045206547, |
| "grad_norm": 0.39511922436954366, |
| "learning_rate": 4.624277456647399e-05, |
| "loss": 0.4703, |
| "step": 323 |
| }, |
| { |
| "epoch": 0.505066250974279, |
| "grad_norm": 0.4061473825369823, |
| "learning_rate": 4.6213872832369945e-05, |
| "loss": 0.4497, |
| "step": 324 |
| }, |
| { |
| "epoch": 0.5066250974279034, |
| "grad_norm": 0.36187280391895393, |
| "learning_rate": 4.61849710982659e-05, |
| "loss": 0.4536, |
| "step": 325 |
| }, |
| { |
| "epoch": 0.5081839438815277, |
| "grad_norm": 0.4400520460188613, |
| "learning_rate": 4.615606936416185e-05, |
| "loss": 0.4608, |
| "step": 326 |
| }, |
| { |
| "epoch": 0.509742790335152, |
| "grad_norm": 0.3663575693429021, |
| "learning_rate": 4.6127167630057805e-05, |
| "loss": 0.4626, |
| "step": 327 |
| }, |
| { |
| "epoch": 0.5113016367887763, |
| "grad_norm": 0.4766184153960843, |
| "learning_rate": 4.609826589595376e-05, |
| "loss": 0.4773, |
| "step": 328 |
| }, |
| { |
| "epoch": 0.5128604832424006, |
| "grad_norm": 0.45206345178046475, |
| "learning_rate": 4.606936416184972e-05, |
| "loss": 0.4469, |
| "step": 329 |
| }, |
| { |
| "epoch": 0.514419329696025, |
| "grad_norm": 0.39026528576069053, |
| "learning_rate": 4.6040462427745665e-05, |
| "loss": 0.4604, |
| "step": 330 |
| }, |
| { |
| "epoch": 0.5159781761496492, |
| "grad_norm": 0.5361204288029048, |
| "learning_rate": 4.601156069364162e-05, |
| "loss": 0.4613, |
| "step": 331 |
| }, |
| { |
| "epoch": 0.5175370226032736, |
| "grad_norm": 0.43012235247767305, |
| "learning_rate": 4.598265895953757e-05, |
| "loss": 0.4579, |
| "step": 332 |
| }, |
| { |
| "epoch": 0.5190958690568979, |
| "grad_norm": 0.527243694253446, |
| "learning_rate": 4.595375722543353e-05, |
| "loss": 0.4602, |
| "step": 333 |
| }, |
| { |
| "epoch": 0.5206547155105222, |
| "grad_norm": 0.4179764821236328, |
| "learning_rate": 4.5924855491329486e-05, |
| "loss": 0.4585, |
| "step": 334 |
| }, |
| { |
| "epoch": 0.5222135619641465, |
| "grad_norm": 0.4510997406406127, |
| "learning_rate": 4.589595375722543e-05, |
| "loss": 0.4636, |
| "step": 335 |
| }, |
| { |
| "epoch": 0.5237724084177708, |
| "grad_norm": 0.3187587883976425, |
| "learning_rate": 4.5867052023121386e-05, |
| "loss": 0.4612, |
| "step": 336 |
| }, |
| { |
| "epoch": 0.5253312548713952, |
| "grad_norm": 0.46286631819853313, |
| "learning_rate": 4.5838150289017346e-05, |
| "loss": 0.4455, |
| "step": 337 |
| }, |
| { |
| "epoch": 0.5268901013250195, |
| "grad_norm": 0.43393995615442665, |
| "learning_rate": 4.58092485549133e-05, |
| "loss": 0.481, |
| "step": 338 |
| }, |
| { |
| "epoch": 0.5284489477786438, |
| "grad_norm": 0.5297318671699833, |
| "learning_rate": 4.578034682080925e-05, |
| "loss": 0.464, |
| "step": 339 |
| }, |
| { |
| "epoch": 0.5300077942322681, |
| "grad_norm": 0.42416355410040585, |
| "learning_rate": 4.57514450867052e-05, |
| "loss": 0.4644, |
| "step": 340 |
| }, |
| { |
| "epoch": 0.5315666406858924, |
| "grad_norm": 0.3712827662390051, |
| "learning_rate": 4.572254335260116e-05, |
| "loss": 0.4675, |
| "step": 341 |
| }, |
| { |
| "epoch": 0.5331254871395168, |
| "grad_norm": 0.39762495205102294, |
| "learning_rate": 4.569364161849711e-05, |
| "loss": 0.469, |
| "step": 342 |
| }, |
| { |
| "epoch": 0.534684333593141, |
| "grad_norm": 0.3254212965910093, |
| "learning_rate": 4.566473988439307e-05, |
| "loss": 0.4534, |
| "step": 343 |
| }, |
| { |
| "epoch": 0.5362431800467654, |
| "grad_norm": 0.4848016190354553, |
| "learning_rate": 4.563583815028902e-05, |
| "loss": 0.4616, |
| "step": 344 |
| }, |
| { |
| "epoch": 0.5378020265003897, |
| "grad_norm": 0.31661190588364, |
| "learning_rate": 4.5606936416184974e-05, |
| "loss": 0.4568, |
| "step": 345 |
| }, |
| { |
| "epoch": 0.539360872954014, |
| "grad_norm": 0.4676445060911043, |
| "learning_rate": 4.557803468208093e-05, |
| "loss": 0.472, |
| "step": 346 |
| }, |
| { |
| "epoch": 0.5409197194076384, |
| "grad_norm": 0.3160669772415641, |
| "learning_rate": 4.554913294797688e-05, |
| "loss": 0.444, |
| "step": 347 |
| }, |
| { |
| "epoch": 0.5424785658612626, |
| "grad_norm": 0.4323013049552334, |
| "learning_rate": 4.5520231213872834e-05, |
| "loss": 0.4392, |
| "step": 348 |
| }, |
| { |
| "epoch": 0.544037412314887, |
| "grad_norm": 0.4523674768546962, |
| "learning_rate": 4.549132947976879e-05, |
| "loss": 0.4682, |
| "step": 349 |
| }, |
| { |
| "epoch": 0.5455962587685113, |
| "grad_norm": 0.3575280341982122, |
| "learning_rate": 4.546242774566474e-05, |
| "loss": 0.4483, |
| "step": 350 |
| }, |
| { |
| "epoch": 0.5471551052221356, |
| "grad_norm": 0.4418729569254676, |
| "learning_rate": 4.5433526011560694e-05, |
| "loss": 0.4489, |
| "step": 351 |
| }, |
| { |
| "epoch": 0.54871395167576, |
| "grad_norm": 0.3280270922434657, |
| "learning_rate": 4.540462427745665e-05, |
| "loss": 0.4616, |
| "step": 352 |
| }, |
| { |
| "epoch": 0.5502727981293842, |
| "grad_norm": 0.4873805692806027, |
| "learning_rate": 4.537572254335261e-05, |
| "loss": 0.4802, |
| "step": 353 |
| }, |
| { |
| "epoch": 0.5518316445830086, |
| "grad_norm": 0.36573476600102434, |
| "learning_rate": 4.5346820809248555e-05, |
| "loss": 0.475, |
| "step": 354 |
| }, |
| { |
| "epoch": 0.5533904910366328, |
| "grad_norm": 0.41050124588561904, |
| "learning_rate": 4.531791907514451e-05, |
| "loss": 0.4535, |
| "step": 355 |
| }, |
| { |
| "epoch": 0.5549493374902572, |
| "grad_norm": 0.3620545249321528, |
| "learning_rate": 4.528901734104046e-05, |
| "loss": 0.4428, |
| "step": 356 |
| }, |
| { |
| "epoch": 0.5565081839438816, |
| "grad_norm": 0.467231893264037, |
| "learning_rate": 4.526011560693642e-05, |
| "loss": 0.4566, |
| "step": 357 |
| }, |
| { |
| "epoch": 0.5580670303975058, |
| "grad_norm": 0.4030396125824727, |
| "learning_rate": 4.5231213872832375e-05, |
| "loss": 0.4717, |
| "step": 358 |
| }, |
| { |
| "epoch": 0.5596258768511302, |
| "grad_norm": 0.34726035881210865, |
| "learning_rate": 4.520231213872833e-05, |
| "loss": 0.4497, |
| "step": 359 |
| }, |
| { |
| "epoch": 0.5611847233047544, |
| "grad_norm": 0.39289264377072153, |
| "learning_rate": 4.5173410404624275e-05, |
| "loss": 0.4318, |
| "step": 360 |
| }, |
| { |
| "epoch": 0.5627435697583788, |
| "grad_norm": 0.3890869555190299, |
| "learning_rate": 4.5144508670520236e-05, |
| "loss": 0.4569, |
| "step": 361 |
| }, |
| { |
| "epoch": 0.5643024162120032, |
| "grad_norm": 0.39781174379277523, |
| "learning_rate": 4.511560693641619e-05, |
| "loss": 0.4638, |
| "step": 362 |
| }, |
| { |
| "epoch": 0.5658612626656274, |
| "grad_norm": 0.36372440090872343, |
| "learning_rate": 4.508670520231214e-05, |
| "loss": 0.441, |
| "step": 363 |
| }, |
| { |
| "epoch": 0.5674201091192518, |
| "grad_norm": 0.4284734591617738, |
| "learning_rate": 4.5057803468208096e-05, |
| "loss": 0.4745, |
| "step": 364 |
| }, |
| { |
| "epoch": 0.568978955572876, |
| "grad_norm": 0.3838382539954402, |
| "learning_rate": 4.502890173410405e-05, |
| "loss": 0.4538, |
| "step": 365 |
| }, |
| { |
| "epoch": 0.5705378020265004, |
| "grad_norm": 0.4837580518409696, |
| "learning_rate": 4.5e-05, |
| "loss": 0.4291, |
| "step": 366 |
| }, |
| { |
| "epoch": 0.5720966484801248, |
| "grad_norm": 0.3807661371797664, |
| "learning_rate": 4.4971098265895956e-05, |
| "loss": 0.4681, |
| "step": 367 |
| }, |
| { |
| "epoch": 0.573655494933749, |
| "grad_norm": 0.4409972991204447, |
| "learning_rate": 4.494219653179191e-05, |
| "loss": 0.4425, |
| "step": 368 |
| }, |
| { |
| "epoch": 0.5752143413873734, |
| "grad_norm": 0.371972546132041, |
| "learning_rate": 4.491329479768786e-05, |
| "loss": 0.4543, |
| "step": 369 |
| }, |
| { |
| "epoch": 0.5767731878409976, |
| "grad_norm": 0.37683721417276733, |
| "learning_rate": 4.488439306358382e-05, |
| "loss": 0.4575, |
| "step": 370 |
| }, |
| { |
| "epoch": 0.578332034294622, |
| "grad_norm": 0.45855850398477666, |
| "learning_rate": 4.485549132947977e-05, |
| "loss": 0.4847, |
| "step": 371 |
| }, |
| { |
| "epoch": 0.5798908807482464, |
| "grad_norm": 0.4223420944336656, |
| "learning_rate": 4.4826589595375724e-05, |
| "loss": 0.4649, |
| "step": 372 |
| }, |
| { |
| "epoch": 0.5814497272018706, |
| "grad_norm": 0.4597887227283376, |
| "learning_rate": 4.4797687861271684e-05, |
| "loss": 0.4828, |
| "step": 373 |
| }, |
| { |
| "epoch": 0.583008573655495, |
| "grad_norm": 0.4998303192690309, |
| "learning_rate": 4.476878612716763e-05, |
| "loss": 0.452, |
| "step": 374 |
| }, |
| { |
| "epoch": 0.5845674201091192, |
| "grad_norm": 0.3756104589899762, |
| "learning_rate": 4.4739884393063584e-05, |
| "loss": 0.4411, |
| "step": 375 |
| }, |
| { |
| "epoch": 0.5861262665627436, |
| "grad_norm": 0.4224811548039192, |
| "learning_rate": 4.471098265895954e-05, |
| "loss": 0.4564, |
| "step": 376 |
| }, |
| { |
| "epoch": 0.5876851130163678, |
| "grad_norm": 0.4569199095589889, |
| "learning_rate": 4.46820809248555e-05, |
| "loss": 0.4664, |
| "step": 377 |
| }, |
| { |
| "epoch": 0.5892439594699922, |
| "grad_norm": 0.3608347158374211, |
| "learning_rate": 4.465317919075145e-05, |
| "loss": 0.4734, |
| "step": 378 |
| }, |
| { |
| "epoch": 0.5908028059236166, |
| "grad_norm": 0.4552865050320062, |
| "learning_rate": 4.46242774566474e-05, |
| "loss": 0.4723, |
| "step": 379 |
| }, |
| { |
| "epoch": 0.5923616523772408, |
| "grad_norm": 0.3539125767256611, |
| "learning_rate": 4.459537572254335e-05, |
| "loss": 0.4762, |
| "step": 380 |
| }, |
| { |
| "epoch": 0.5939204988308652, |
| "grad_norm": 0.3880352014434992, |
| "learning_rate": 4.456647398843931e-05, |
| "loss": 0.4388, |
| "step": 381 |
| }, |
| { |
| "epoch": 0.5954793452844894, |
| "grad_norm": 0.37410095368321145, |
| "learning_rate": 4.4537572254335265e-05, |
| "loss": 0.4533, |
| "step": 382 |
| }, |
| { |
| "epoch": 0.5970381917381138, |
| "grad_norm": 0.3133270680014226, |
| "learning_rate": 4.450867052023122e-05, |
| "loss": 0.4525, |
| "step": 383 |
| }, |
| { |
| "epoch": 0.5985970381917382, |
| "grad_norm": 0.41920980039661443, |
| "learning_rate": 4.447976878612717e-05, |
| "loss": 0.4333, |
| "step": 384 |
| }, |
| { |
| "epoch": 0.6001558846453624, |
| "grad_norm": 0.3774275243895145, |
| "learning_rate": 4.4450867052023125e-05, |
| "loss": 0.4459, |
| "step": 385 |
| }, |
| { |
| "epoch": 0.6017147310989868, |
| "grad_norm": 0.369753586583569, |
| "learning_rate": 4.442196531791908e-05, |
| "loss": 0.4628, |
| "step": 386 |
| }, |
| { |
| "epoch": 0.603273577552611, |
| "grad_norm": 0.43933813534803284, |
| "learning_rate": 4.439306358381503e-05, |
| "loss": 0.4866, |
| "step": 387 |
| }, |
| { |
| "epoch": 0.6048324240062354, |
| "grad_norm": 0.36877162976354194, |
| "learning_rate": 4.4364161849710985e-05, |
| "loss": 0.4489, |
| "step": 388 |
| }, |
| { |
| "epoch": 0.6063912704598597, |
| "grad_norm": 0.36797218493960576, |
| "learning_rate": 4.433526011560694e-05, |
| "loss": 0.4681, |
| "step": 389 |
| }, |
| { |
| "epoch": 0.607950116913484, |
| "grad_norm": 0.3598201801544274, |
| "learning_rate": 4.430635838150289e-05, |
| "loss": 0.4451, |
| "step": 390 |
| }, |
| { |
| "epoch": 0.6095089633671084, |
| "grad_norm": 0.37415526691302536, |
| "learning_rate": 4.4277456647398846e-05, |
| "loss": 0.4649, |
| "step": 391 |
| }, |
| { |
| "epoch": 0.6110678098207326, |
| "grad_norm": 0.3217861980835781, |
| "learning_rate": 4.42485549132948e-05, |
| "loss": 0.4579, |
| "step": 392 |
| }, |
| { |
| "epoch": 0.612626656274357, |
| "grad_norm": 0.4455765725783138, |
| "learning_rate": 4.421965317919075e-05, |
| "loss": 0.4603, |
| "step": 393 |
| }, |
| { |
| "epoch": 0.6141855027279813, |
| "grad_norm": 0.37219414271910367, |
| "learning_rate": 4.4190751445086706e-05, |
| "loss": 0.4548, |
| "step": 394 |
| }, |
| { |
| "epoch": 0.6157443491816056, |
| "grad_norm": 0.45080978894718876, |
| "learning_rate": 4.416184971098266e-05, |
| "loss": 0.4504, |
| "step": 395 |
| }, |
| { |
| "epoch": 0.61730319563523, |
| "grad_norm": 0.4358441423072049, |
| "learning_rate": 4.413294797687861e-05, |
| "loss": 0.4446, |
| "step": 396 |
| }, |
| { |
| "epoch": 0.6188620420888542, |
| "grad_norm": 0.37738329645269036, |
| "learning_rate": 4.4104046242774566e-05, |
| "loss": 0.4436, |
| "step": 397 |
| }, |
| { |
| "epoch": 0.6204208885424786, |
| "grad_norm": 0.4204646254215696, |
| "learning_rate": 4.407514450867053e-05, |
| "loss": 0.4512, |
| "step": 398 |
| }, |
| { |
| "epoch": 0.6219797349961029, |
| "grad_norm": 0.39901225883937147, |
| "learning_rate": 4.404624277456647e-05, |
| "loss": 0.463, |
| "step": 399 |
| }, |
| { |
| "epoch": 0.6235385814497272, |
| "grad_norm": 0.320265409159274, |
| "learning_rate": 4.401734104046243e-05, |
| "loss": 0.4329, |
| "step": 400 |
| }, |
| { |
| "epoch": 0.6250974279033515, |
| "grad_norm": 0.49013995892880263, |
| "learning_rate": 4.398843930635838e-05, |
| "loss": 0.4542, |
| "step": 401 |
| }, |
| { |
| "epoch": 0.6266562743569758, |
| "grad_norm": 0.3510400983584625, |
| "learning_rate": 4.395953757225434e-05, |
| "loss": 0.4589, |
| "step": 402 |
| }, |
| { |
| "epoch": 0.6282151208106002, |
| "grad_norm": 0.4402933914634823, |
| "learning_rate": 4.3930635838150294e-05, |
| "loss": 0.4593, |
| "step": 403 |
| }, |
| { |
| "epoch": 0.6297739672642245, |
| "grad_norm": 0.47377011043879347, |
| "learning_rate": 4.390173410404624e-05, |
| "loss": 0.4675, |
| "step": 404 |
| }, |
| { |
| "epoch": 0.6313328137178488, |
| "grad_norm": 0.37916907989218646, |
| "learning_rate": 4.3872832369942194e-05, |
| "loss": 0.4412, |
| "step": 405 |
| }, |
| { |
| "epoch": 0.6328916601714731, |
| "grad_norm": 0.35621367384404673, |
| "learning_rate": 4.3843930635838154e-05, |
| "loss": 0.4618, |
| "step": 406 |
| }, |
| { |
| "epoch": 0.6344505066250974, |
| "grad_norm": 0.3834554558658467, |
| "learning_rate": 4.381502890173411e-05, |
| "loss": 0.4591, |
| "step": 407 |
| }, |
| { |
| "epoch": 0.6360093530787218, |
| "grad_norm": 0.3273576509892712, |
| "learning_rate": 4.378612716763006e-05, |
| "loss": 0.4307, |
| "step": 408 |
| }, |
| { |
| "epoch": 0.6375681995323461, |
| "grad_norm": 0.3260428886276838, |
| "learning_rate": 4.3757225433526015e-05, |
| "loss": 0.4315, |
| "step": 409 |
| }, |
| { |
| "epoch": 0.6391270459859704, |
| "grad_norm": 0.3828677152566117, |
| "learning_rate": 4.372832369942197e-05, |
| "loss": 0.431, |
| "step": 410 |
| }, |
| { |
| "epoch": 0.6406858924395947, |
| "grad_norm": 0.3428782387615178, |
| "learning_rate": 4.369942196531792e-05, |
| "loss": 0.4535, |
| "step": 411 |
| }, |
| { |
| "epoch": 0.642244738893219, |
| "grad_norm": 0.3568689766352524, |
| "learning_rate": 4.3670520231213875e-05, |
| "loss": 0.4471, |
| "step": 412 |
| }, |
| { |
| "epoch": 0.6438035853468433, |
| "grad_norm": 0.3200198497931682, |
| "learning_rate": 4.364161849710983e-05, |
| "loss": 0.4675, |
| "step": 413 |
| }, |
| { |
| "epoch": 0.6453624318004677, |
| "grad_norm": 0.3439904016830579, |
| "learning_rate": 4.361271676300578e-05, |
| "loss": 0.4434, |
| "step": 414 |
| }, |
| { |
| "epoch": 0.646921278254092, |
| "grad_norm": 0.3910285780339108, |
| "learning_rate": 4.3583815028901735e-05, |
| "loss": 0.4291, |
| "step": 415 |
| }, |
| { |
| "epoch": 0.6484801247077163, |
| "grad_norm": 0.35633702898935654, |
| "learning_rate": 4.355491329479769e-05, |
| "loss": 0.4465, |
| "step": 416 |
| }, |
| { |
| "epoch": 0.6500389711613406, |
| "grad_norm": 0.4280187254199856, |
| "learning_rate": 4.352601156069364e-05, |
| "loss": 0.4467, |
| "step": 417 |
| }, |
| { |
| "epoch": 0.6515978176149649, |
| "grad_norm": 0.3860941239745589, |
| "learning_rate": 4.34971098265896e-05, |
| "loss": 0.4443, |
| "step": 418 |
| }, |
| { |
| "epoch": 0.6531566640685893, |
| "grad_norm": 0.44940034037874554, |
| "learning_rate": 4.346820809248555e-05, |
| "loss": 0.48, |
| "step": 419 |
| }, |
| { |
| "epoch": 0.6547155105222136, |
| "grad_norm": 0.4625797028642205, |
| "learning_rate": 4.34393063583815e-05, |
| "loss": 0.4568, |
| "step": 420 |
| }, |
| { |
| "epoch": 0.6562743569758379, |
| "grad_norm": 0.36493835250776474, |
| "learning_rate": 4.3410404624277456e-05, |
| "loss": 0.4622, |
| "step": 421 |
| }, |
| { |
| "epoch": 0.6578332034294622, |
| "grad_norm": 0.4451390677090527, |
| "learning_rate": 4.3381502890173416e-05, |
| "loss": 0.4399, |
| "step": 422 |
| }, |
| { |
| "epoch": 0.6593920498830865, |
| "grad_norm": 0.40509517400153716, |
| "learning_rate": 4.335260115606937e-05, |
| "loss": 0.4548, |
| "step": 423 |
| }, |
| { |
| "epoch": 0.6609508963367108, |
| "grad_norm": 0.4745439842667765, |
| "learning_rate": 4.3323699421965316e-05, |
| "loss": 0.4409, |
| "step": 424 |
| }, |
| { |
| "epoch": 0.6625097427903351, |
| "grad_norm": 0.40589601301353223, |
| "learning_rate": 4.329479768786127e-05, |
| "loss": 0.4386, |
| "step": 425 |
| }, |
| { |
| "epoch": 0.6640685892439595, |
| "grad_norm": 0.410367648733596, |
| "learning_rate": 4.326589595375723e-05, |
| "loss": 0.4359, |
| "step": 426 |
| }, |
| { |
| "epoch": 0.6656274356975838, |
| "grad_norm": 0.39329540338329594, |
| "learning_rate": 4.3236994219653183e-05, |
| "loss": 0.4503, |
| "step": 427 |
| }, |
| { |
| "epoch": 0.6671862821512081, |
| "grad_norm": 0.4110086370635056, |
| "learning_rate": 4.320809248554914e-05, |
| "loss": 0.4447, |
| "step": 428 |
| }, |
| { |
| "epoch": 0.6687451286048324, |
| "grad_norm": 0.47772877851625145, |
| "learning_rate": 4.3179190751445084e-05, |
| "loss": 0.4687, |
| "step": 429 |
| }, |
| { |
| "epoch": 0.6703039750584567, |
| "grad_norm": 0.3866102449562328, |
| "learning_rate": 4.3150289017341044e-05, |
| "loss": 0.4393, |
| "step": 430 |
| }, |
| { |
| "epoch": 0.6718628215120811, |
| "grad_norm": 0.45739240597886627, |
| "learning_rate": 4.3121387283237e-05, |
| "loss": 0.4668, |
| "step": 431 |
| }, |
| { |
| "epoch": 0.6734216679657054, |
| "grad_norm": 0.4314476402559539, |
| "learning_rate": 4.309248554913295e-05, |
| "loss": 0.4649, |
| "step": 432 |
| }, |
| { |
| "epoch": 0.6749805144193297, |
| "grad_norm": 0.45634509121859473, |
| "learning_rate": 4.3063583815028904e-05, |
| "loss": 0.4441, |
| "step": 433 |
| }, |
| { |
| "epoch": 0.676539360872954, |
| "grad_norm": 0.4811305736485706, |
| "learning_rate": 4.303468208092486e-05, |
| "loss": 0.4529, |
| "step": 434 |
| }, |
| { |
| "epoch": 0.6780982073265783, |
| "grad_norm": 0.39207564033387, |
| "learning_rate": 4.300578034682081e-05, |
| "loss": 0.4588, |
| "step": 435 |
| }, |
| { |
| "epoch": 0.6796570537802027, |
| "grad_norm": 0.4658600522478725, |
| "learning_rate": 4.2976878612716764e-05, |
| "loss": 0.4379, |
| "step": 436 |
| }, |
| { |
| "epoch": 0.6812159002338269, |
| "grad_norm": 0.41119750867971183, |
| "learning_rate": 4.294797687861272e-05, |
| "loss": 0.4418, |
| "step": 437 |
| }, |
| { |
| "epoch": 0.6827747466874513, |
| "grad_norm": 0.4230535920977332, |
| "learning_rate": 4.291907514450868e-05, |
| "loss": 0.4536, |
| "step": 438 |
| }, |
| { |
| "epoch": 0.6843335931410756, |
| "grad_norm": 0.5092465838859024, |
| "learning_rate": 4.2890173410404625e-05, |
| "loss": 0.4584, |
| "step": 439 |
| }, |
| { |
| "epoch": 0.6858924395946999, |
| "grad_norm": 0.3601271310299026, |
| "learning_rate": 4.286127167630058e-05, |
| "loss": 0.4368, |
| "step": 440 |
| }, |
| { |
| "epoch": 0.6874512860483243, |
| "grad_norm": 0.5033625655714722, |
| "learning_rate": 4.283236994219653e-05, |
| "loss": 0.4484, |
| "step": 441 |
| }, |
| { |
| "epoch": 0.6890101325019485, |
| "grad_norm": 0.3994249544123872, |
| "learning_rate": 4.280346820809249e-05, |
| "loss": 0.4418, |
| "step": 442 |
| }, |
| { |
| "epoch": 0.6905689789555729, |
| "grad_norm": 0.45539241489062715, |
| "learning_rate": 4.2774566473988445e-05, |
| "loss": 0.4473, |
| "step": 443 |
| }, |
| { |
| "epoch": 0.6921278254091972, |
| "grad_norm": 0.4417751638358608, |
| "learning_rate": 4.274566473988439e-05, |
| "loss": 0.4496, |
| "step": 444 |
| }, |
| { |
| "epoch": 0.6936866718628215, |
| "grad_norm": 0.40882073328312935, |
| "learning_rate": 4.2716763005780345e-05, |
| "loss": 0.4622, |
| "step": 445 |
| }, |
| { |
| "epoch": 0.6952455183164459, |
| "grad_norm": 0.39444852000587594, |
| "learning_rate": 4.2687861271676306e-05, |
| "loss": 0.4769, |
| "step": 446 |
| }, |
| { |
| "epoch": 0.6968043647700701, |
| "grad_norm": 0.37669038630165647, |
| "learning_rate": 4.265895953757226e-05, |
| "loss": 0.4463, |
| "step": 447 |
| }, |
| { |
| "epoch": 0.6983632112236945, |
| "grad_norm": 0.4180560176644135, |
| "learning_rate": 4.263005780346821e-05, |
| "loss": 0.4502, |
| "step": 448 |
| }, |
| { |
| "epoch": 0.6999220576773187, |
| "grad_norm": 0.3606230739957135, |
| "learning_rate": 4.260115606936416e-05, |
| "loss": 0.4686, |
| "step": 449 |
| }, |
| { |
| "epoch": 0.7014809041309431, |
| "grad_norm": 0.4763884079373346, |
| "learning_rate": 4.257225433526012e-05, |
| "loss": 0.4465, |
| "step": 450 |
| }, |
| { |
| "epoch": 0.7030397505845675, |
| "grad_norm": 0.36030780349125946, |
| "learning_rate": 4.254335260115607e-05, |
| "loss": 0.4475, |
| "step": 451 |
| }, |
| { |
| "epoch": 0.7045985970381917, |
| "grad_norm": 0.4451176525342345, |
| "learning_rate": 4.2514450867052026e-05, |
| "loss": 0.4268, |
| "step": 452 |
| }, |
| { |
| "epoch": 0.7061574434918161, |
| "grad_norm": 0.33875598529236145, |
| "learning_rate": 4.248554913294798e-05, |
| "loss": 0.4499, |
| "step": 453 |
| }, |
| { |
| "epoch": 0.7077162899454403, |
| "grad_norm": 0.5071769434422747, |
| "learning_rate": 4.245664739884393e-05, |
| "loss": 0.4537, |
| "step": 454 |
| }, |
| { |
| "epoch": 0.7092751363990647, |
| "grad_norm": 0.3488438534196029, |
| "learning_rate": 4.242774566473989e-05, |
| "loss": 0.4623, |
| "step": 455 |
| }, |
| { |
| "epoch": 0.7108339828526891, |
| "grad_norm": 0.4454725104594618, |
| "learning_rate": 4.239884393063584e-05, |
| "loss": 0.4516, |
| "step": 456 |
| }, |
| { |
| "epoch": 0.7123928293063133, |
| "grad_norm": 0.3583891995078908, |
| "learning_rate": 4.2369942196531794e-05, |
| "loss": 0.4581, |
| "step": 457 |
| }, |
| { |
| "epoch": 0.7139516757599377, |
| "grad_norm": 0.36757937011684644, |
| "learning_rate": 4.234104046242775e-05, |
| "loss": 0.4505, |
| "step": 458 |
| }, |
| { |
| "epoch": 0.7155105222135619, |
| "grad_norm": 0.39437787323637835, |
| "learning_rate": 4.23121387283237e-05, |
| "loss": 0.4555, |
| "step": 459 |
| }, |
| { |
| "epoch": 0.7170693686671863, |
| "grad_norm": 0.3526260112578523, |
| "learning_rate": 4.2283236994219654e-05, |
| "loss": 0.4462, |
| "step": 460 |
| }, |
| { |
| "epoch": 0.7186282151208107, |
| "grad_norm": 0.3610517367016722, |
| "learning_rate": 4.225433526011561e-05, |
| "loss": 0.4439, |
| "step": 461 |
| }, |
| { |
| "epoch": 0.7201870615744349, |
| "grad_norm": 0.33046116660611863, |
| "learning_rate": 4.222543352601157e-05, |
| "loss": 0.4553, |
| "step": 462 |
| }, |
| { |
| "epoch": 0.7217459080280593, |
| "grad_norm": 0.3479382421778036, |
| "learning_rate": 4.2196531791907514e-05, |
| "loss": 0.4615, |
| "step": 463 |
| }, |
| { |
| "epoch": 0.7233047544816835, |
| "grad_norm": 0.3563546046165941, |
| "learning_rate": 4.216763005780347e-05, |
| "loss": 0.4502, |
| "step": 464 |
| }, |
| { |
| "epoch": 0.7248636009353079, |
| "grad_norm": 0.33104574079840804, |
| "learning_rate": 4.213872832369942e-05, |
| "loss": 0.4522, |
| "step": 465 |
| }, |
| { |
| "epoch": 0.7264224473889321, |
| "grad_norm": 0.3615689785424514, |
| "learning_rate": 4.210982658959538e-05, |
| "loss": 0.4456, |
| "step": 466 |
| }, |
| { |
| "epoch": 0.7279812938425565, |
| "grad_norm": 0.33245430385705754, |
| "learning_rate": 4.2080924855491335e-05, |
| "loss": 0.451, |
| "step": 467 |
| }, |
| { |
| "epoch": 0.7295401402961809, |
| "grad_norm": 0.31570139553177473, |
| "learning_rate": 4.205202312138729e-05, |
| "loss": 0.4387, |
| "step": 468 |
| }, |
| { |
| "epoch": 0.7310989867498051, |
| "grad_norm": 0.38612290372770497, |
| "learning_rate": 4.2023121387283235e-05, |
| "loss": 0.4391, |
| "step": 469 |
| }, |
| { |
| "epoch": 0.7326578332034295, |
| "grad_norm": 0.4573667515885595, |
| "learning_rate": 4.1994219653179195e-05, |
| "loss": 0.4689, |
| "step": 470 |
| }, |
| { |
| "epoch": 0.7342166796570537, |
| "grad_norm": 0.3807562323188079, |
| "learning_rate": 4.196531791907515e-05, |
| "loss": 0.4632, |
| "step": 471 |
| }, |
| { |
| "epoch": 0.7357755261106781, |
| "grad_norm": 0.3525392715340891, |
| "learning_rate": 4.19364161849711e-05, |
| "loss": 0.4381, |
| "step": 472 |
| }, |
| { |
| "epoch": 0.7373343725643025, |
| "grad_norm": 0.3782760064170116, |
| "learning_rate": 4.1907514450867055e-05, |
| "loss": 0.4477, |
| "step": 473 |
| }, |
| { |
| "epoch": 0.7388932190179267, |
| "grad_norm": 0.41115398176210943, |
| "learning_rate": 4.187861271676301e-05, |
| "loss": 0.4602, |
| "step": 474 |
| }, |
| { |
| "epoch": 0.7404520654715511, |
| "grad_norm": 0.32624650584367754, |
| "learning_rate": 4.184971098265896e-05, |
| "loss": 0.4569, |
| "step": 475 |
| }, |
| { |
| "epoch": 0.7420109119251753, |
| "grad_norm": 0.378893068450349, |
| "learning_rate": 4.1820809248554916e-05, |
| "loss": 0.4489, |
| "step": 476 |
| }, |
| { |
| "epoch": 0.7435697583787997, |
| "grad_norm": 0.3829792535253302, |
| "learning_rate": 4.179190751445087e-05, |
| "loss": 0.4466, |
| "step": 477 |
| }, |
| { |
| "epoch": 0.745128604832424, |
| "grad_norm": 0.33863233675349563, |
| "learning_rate": 4.176300578034682e-05, |
| "loss": 0.4383, |
| "step": 478 |
| }, |
| { |
| "epoch": 0.7466874512860483, |
| "grad_norm": 0.40164735104812754, |
| "learning_rate": 4.1734104046242776e-05, |
| "loss": 0.4203, |
| "step": 479 |
| }, |
| { |
| "epoch": 0.7482462977396727, |
| "grad_norm": 0.35580789823430714, |
| "learning_rate": 4.170520231213873e-05, |
| "loss": 0.4744, |
| "step": 480 |
| }, |
| { |
| "epoch": 0.7498051441932969, |
| "grad_norm": 0.31246598578051804, |
| "learning_rate": 4.167630057803468e-05, |
| "loss": 0.4219, |
| "step": 481 |
| }, |
| { |
| "epoch": 0.7513639906469213, |
| "grad_norm": 0.329237795782184, |
| "learning_rate": 4.1647398843930636e-05, |
| "loss": 0.4462, |
| "step": 482 |
| }, |
| { |
| "epoch": 0.7529228371005456, |
| "grad_norm": 0.32134761939251955, |
| "learning_rate": 4.161849710982659e-05, |
| "loss": 0.4533, |
| "step": 483 |
| }, |
| { |
| "epoch": 0.7544816835541699, |
| "grad_norm": 0.34349515426855415, |
| "learning_rate": 4.1589595375722543e-05, |
| "loss": 0.4381, |
| "step": 484 |
| }, |
| { |
| "epoch": 0.7560405300077943, |
| "grad_norm": 0.3706250923736234, |
| "learning_rate": 4.15606936416185e-05, |
| "loss": 0.4406, |
| "step": 485 |
| }, |
| { |
| "epoch": 0.7575993764614185, |
| "grad_norm": 0.30867896526600563, |
| "learning_rate": 4.153179190751445e-05, |
| "loss": 0.4328, |
| "step": 486 |
| }, |
| { |
| "epoch": 0.7591582229150429, |
| "grad_norm": 0.3756083443390548, |
| "learning_rate": 4.150289017341041e-05, |
| "loss": 0.4593, |
| "step": 487 |
| }, |
| { |
| "epoch": 0.7607170693686672, |
| "grad_norm": 0.3359590028525238, |
| "learning_rate": 4.147398843930636e-05, |
| "loss": 0.4452, |
| "step": 488 |
| }, |
| { |
| "epoch": 0.7622759158222915, |
| "grad_norm": 0.3805120865629876, |
| "learning_rate": 4.144508670520231e-05, |
| "loss": 0.448, |
| "step": 489 |
| }, |
| { |
| "epoch": 0.7638347622759158, |
| "grad_norm": 0.36856409628466563, |
| "learning_rate": 4.1416184971098264e-05, |
| "loss": 0.4361, |
| "step": 490 |
| }, |
| { |
| "epoch": 0.7653936087295401, |
| "grad_norm": 0.3130789506998712, |
| "learning_rate": 4.1387283236994224e-05, |
| "loss": 0.4422, |
| "step": 491 |
| }, |
| { |
| "epoch": 0.7669524551831645, |
| "grad_norm": 0.3420964053109533, |
| "learning_rate": 4.135838150289018e-05, |
| "loss": 0.44, |
| "step": 492 |
| }, |
| { |
| "epoch": 0.7685113016367888, |
| "grad_norm": 0.3761751528633877, |
| "learning_rate": 4.132947976878613e-05, |
| "loss": 0.4512, |
| "step": 493 |
| }, |
| { |
| "epoch": 0.7700701480904131, |
| "grad_norm": 0.3217947372011256, |
| "learning_rate": 4.130057803468208e-05, |
| "loss": 0.4542, |
| "step": 494 |
| }, |
| { |
| "epoch": 0.7716289945440374, |
| "grad_norm": 0.36411196358092746, |
| "learning_rate": 4.127167630057804e-05, |
| "loss": 0.4453, |
| "step": 495 |
| }, |
| { |
| "epoch": 0.7731878409976617, |
| "grad_norm": 0.36062847920871005, |
| "learning_rate": 4.124277456647399e-05, |
| "loss": 0.432, |
| "step": 496 |
| }, |
| { |
| "epoch": 0.774746687451286, |
| "grad_norm": 0.31936162560311365, |
| "learning_rate": 4.1213872832369945e-05, |
| "loss": 0.4477, |
| "step": 497 |
| }, |
| { |
| "epoch": 0.7763055339049104, |
| "grad_norm": 0.3761636426405107, |
| "learning_rate": 4.11849710982659e-05, |
| "loss": 0.4353, |
| "step": 498 |
| }, |
| { |
| "epoch": 0.7778643803585347, |
| "grad_norm": 0.32932231835478515, |
| "learning_rate": 4.115606936416185e-05, |
| "loss": 0.4467, |
| "step": 499 |
| }, |
| { |
| "epoch": 0.779423226812159, |
| "grad_norm": 0.38070437317284483, |
| "learning_rate": 4.1127167630057805e-05, |
| "loss": 0.4395, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.7809820732657833, |
| "grad_norm": 0.3246837506466452, |
| "learning_rate": 4.109826589595376e-05, |
| "loss": 0.4447, |
| "step": 501 |
| }, |
| { |
| "epoch": 0.7825409197194076, |
| "grad_norm": 0.34953071175085054, |
| "learning_rate": 4.106936416184971e-05, |
| "loss": 0.4333, |
| "step": 502 |
| }, |
| { |
| "epoch": 0.784099766173032, |
| "grad_norm": 0.37305833228096014, |
| "learning_rate": 4.1040462427745666e-05, |
| "loss": 0.4314, |
| "step": 503 |
| }, |
| { |
| "epoch": 0.7856586126266563, |
| "grad_norm": 0.3472419499146332, |
| "learning_rate": 4.101156069364162e-05, |
| "loss": 0.4363, |
| "step": 504 |
| }, |
| { |
| "epoch": 0.7872174590802806, |
| "grad_norm": 0.38866158249187055, |
| "learning_rate": 4.098265895953757e-05, |
| "loss": 0.4429, |
| "step": 505 |
| }, |
| { |
| "epoch": 0.7887763055339049, |
| "grad_norm": 0.330824609948283, |
| "learning_rate": 4.0953757225433526e-05, |
| "loss": 0.4442, |
| "step": 506 |
| }, |
| { |
| "epoch": 0.7903351519875292, |
| "grad_norm": 0.39663219339611827, |
| "learning_rate": 4.0924855491329486e-05, |
| "loss": 0.4434, |
| "step": 507 |
| }, |
| { |
| "epoch": 0.7918939984411536, |
| "grad_norm": 0.3619324831325355, |
| "learning_rate": 4.089595375722543e-05, |
| "loss": 0.4416, |
| "step": 508 |
| }, |
| { |
| "epoch": 0.7934528448947779, |
| "grad_norm": 0.35246995122654845, |
| "learning_rate": 4.0867052023121386e-05, |
| "loss": 0.4265, |
| "step": 509 |
| }, |
| { |
| "epoch": 0.7950116913484022, |
| "grad_norm": 0.3612965001810029, |
| "learning_rate": 4.083815028901734e-05, |
| "loss": 0.433, |
| "step": 510 |
| }, |
| { |
| "epoch": 0.7965705378020265, |
| "grad_norm": 0.38138895157014485, |
| "learning_rate": 4.08092485549133e-05, |
| "loss": 0.4341, |
| "step": 511 |
| }, |
| { |
| "epoch": 0.7981293842556508, |
| "grad_norm": 0.3449144364761348, |
| "learning_rate": 4.0780346820809253e-05, |
| "loss": 0.457, |
| "step": 512 |
| }, |
| { |
| "epoch": 0.7996882307092751, |
| "grad_norm": 0.4015780437671286, |
| "learning_rate": 4.07514450867052e-05, |
| "loss": 0.43, |
| "step": 513 |
| }, |
| { |
| "epoch": 0.8012470771628994, |
| "grad_norm": 0.31072059103382504, |
| "learning_rate": 4.0722543352601154e-05, |
| "loss": 0.4424, |
| "step": 514 |
| }, |
| { |
| "epoch": 0.8028059236165238, |
| "grad_norm": 0.3758869241826082, |
| "learning_rate": 4.0693641618497114e-05, |
| "loss": 0.4568, |
| "step": 515 |
| }, |
| { |
| "epoch": 0.8043647700701481, |
| "grad_norm": 0.3383461562057852, |
| "learning_rate": 4.066473988439307e-05, |
| "loss": 0.4513, |
| "step": 516 |
| }, |
| { |
| "epoch": 0.8059236165237724, |
| "grad_norm": 0.35697829825213634, |
| "learning_rate": 4.063583815028902e-05, |
| "loss": 0.4512, |
| "step": 517 |
| }, |
| { |
| "epoch": 0.8074824629773967, |
| "grad_norm": 0.3930071983033836, |
| "learning_rate": 4.0606936416184974e-05, |
| "loss": 0.4386, |
| "step": 518 |
| }, |
| { |
| "epoch": 0.809041309431021, |
| "grad_norm": 0.34936814110167913, |
| "learning_rate": 4.057803468208093e-05, |
| "loss": 0.4249, |
| "step": 519 |
| }, |
| { |
| "epoch": 0.8106001558846454, |
| "grad_norm": 0.3477156813236279, |
| "learning_rate": 4.054913294797688e-05, |
| "loss": 0.4421, |
| "step": 520 |
| }, |
| { |
| "epoch": 0.8121590023382697, |
| "grad_norm": 0.3771551756284452, |
| "learning_rate": 4.0520231213872834e-05, |
| "loss": 0.4422, |
| "step": 521 |
| }, |
| { |
| "epoch": 0.813717848791894, |
| "grad_norm": 0.3915428314100825, |
| "learning_rate": 4.049132947976879e-05, |
| "loss": 0.4489, |
| "step": 522 |
| }, |
| { |
| "epoch": 0.8152766952455183, |
| "grad_norm": 0.3438944943929536, |
| "learning_rate": 4.046242774566474e-05, |
| "loss": 0.4403, |
| "step": 523 |
| }, |
| { |
| "epoch": 0.8168355416991426, |
| "grad_norm": 0.41104576558641404, |
| "learning_rate": 4.0433526011560695e-05, |
| "loss": 0.4606, |
| "step": 524 |
| }, |
| { |
| "epoch": 0.818394388152767, |
| "grad_norm": 0.37712289992365605, |
| "learning_rate": 4.040462427745665e-05, |
| "loss": 0.4325, |
| "step": 525 |
| }, |
| { |
| "epoch": 0.8199532346063912, |
| "grad_norm": 0.3008825974501281, |
| "learning_rate": 4.03757225433526e-05, |
| "loss": 0.4348, |
| "step": 526 |
| }, |
| { |
| "epoch": 0.8215120810600156, |
| "grad_norm": 0.3701724452827897, |
| "learning_rate": 4.034682080924856e-05, |
| "loss": 0.444, |
| "step": 527 |
| }, |
| { |
| "epoch": 0.8230709275136399, |
| "grad_norm": 0.4810485926905224, |
| "learning_rate": 4.031791907514451e-05, |
| "loss": 0.4608, |
| "step": 528 |
| }, |
| { |
| "epoch": 0.8246297739672642, |
| "grad_norm": 0.3549215989456713, |
| "learning_rate": 4.028901734104046e-05, |
| "loss": 0.4539, |
| "step": 529 |
| }, |
| { |
| "epoch": 0.8261886204208886, |
| "grad_norm": 0.37028937018443525, |
| "learning_rate": 4.0260115606936415e-05, |
| "loss": 0.4485, |
| "step": 530 |
| }, |
| { |
| "epoch": 0.8277474668745128, |
| "grad_norm": 0.4323084487068229, |
| "learning_rate": 4.0231213872832376e-05, |
| "loss": 0.4622, |
| "step": 531 |
| }, |
| { |
| "epoch": 0.8293063133281372, |
| "grad_norm": 0.37099996438121063, |
| "learning_rate": 4.020231213872833e-05, |
| "loss": 0.4389, |
| "step": 532 |
| }, |
| { |
| "epoch": 0.8308651597817615, |
| "grad_norm": 0.4485476616523096, |
| "learning_rate": 4.0173410404624276e-05, |
| "loss": 0.458, |
| "step": 533 |
| }, |
| { |
| "epoch": 0.8324240062353858, |
| "grad_norm": 0.47843701934513816, |
| "learning_rate": 4.014450867052023e-05, |
| "loss": 0.4313, |
| "step": 534 |
| }, |
| { |
| "epoch": 0.8339828526890102, |
| "grad_norm": 0.39097267691711673, |
| "learning_rate": 4.011560693641619e-05, |
| "loss": 0.457, |
| "step": 535 |
| }, |
| { |
| "epoch": 0.8355416991426344, |
| "grad_norm": 0.44705834158808144, |
| "learning_rate": 4.008670520231214e-05, |
| "loss": 0.4411, |
| "step": 536 |
| }, |
| { |
| "epoch": 0.8371005455962588, |
| "grad_norm": 0.39009820610398244, |
| "learning_rate": 4.0057803468208096e-05, |
| "loss": 0.4666, |
| "step": 537 |
| }, |
| { |
| "epoch": 0.838659392049883, |
| "grad_norm": 0.40676152233104423, |
| "learning_rate": 4.002890173410404e-05, |
| "loss": 0.4464, |
| "step": 538 |
| }, |
| { |
| "epoch": 0.8402182385035074, |
| "grad_norm": 0.38980475885919486, |
| "learning_rate": 4e-05, |
| "loss": 0.4287, |
| "step": 539 |
| }, |
| { |
| "epoch": 0.8417770849571318, |
| "grad_norm": 0.4155553631446244, |
| "learning_rate": 3.997109826589596e-05, |
| "loss": 0.4592, |
| "step": 540 |
| }, |
| { |
| "epoch": 0.843335931410756, |
| "grad_norm": 0.4683782615674356, |
| "learning_rate": 3.994219653179191e-05, |
| "loss": 0.4434, |
| "step": 541 |
| }, |
| { |
| "epoch": 0.8448947778643804, |
| "grad_norm": 0.4270570741139377, |
| "learning_rate": 3.9913294797687864e-05, |
| "loss": 0.4471, |
| "step": 542 |
| }, |
| { |
| "epoch": 0.8464536243180046, |
| "grad_norm": 0.3328227432824558, |
| "learning_rate": 3.988439306358382e-05, |
| "loss": 0.4342, |
| "step": 543 |
| }, |
| { |
| "epoch": 0.848012470771629, |
| "grad_norm": 0.383218659320037, |
| "learning_rate": 3.985549132947977e-05, |
| "loss": 0.4418, |
| "step": 544 |
| }, |
| { |
| "epoch": 0.8495713172252534, |
| "grad_norm": 0.41491251710244376, |
| "learning_rate": 3.9826589595375724e-05, |
| "loss": 0.4562, |
| "step": 545 |
| }, |
| { |
| "epoch": 0.8511301636788776, |
| "grad_norm": 0.4581749465782902, |
| "learning_rate": 3.979768786127168e-05, |
| "loss": 0.4543, |
| "step": 546 |
| }, |
| { |
| "epoch": 0.852689010132502, |
| "grad_norm": 0.39063683965855545, |
| "learning_rate": 3.976878612716764e-05, |
| "loss": 0.4403, |
| "step": 547 |
| }, |
| { |
| "epoch": 0.8542478565861262, |
| "grad_norm": 0.4827779522726547, |
| "learning_rate": 3.9739884393063584e-05, |
| "loss": 0.4371, |
| "step": 548 |
| }, |
| { |
| "epoch": 0.8558067030397506, |
| "grad_norm": 0.4619643532977416, |
| "learning_rate": 3.971098265895954e-05, |
| "loss": 0.4481, |
| "step": 549 |
| }, |
| { |
| "epoch": 0.857365549493375, |
| "grad_norm": 0.38833379166687454, |
| "learning_rate": 3.968208092485549e-05, |
| "loss": 0.4298, |
| "step": 550 |
| }, |
| { |
| "epoch": 0.8589243959469992, |
| "grad_norm": 0.47500496297331074, |
| "learning_rate": 3.965317919075145e-05, |
| "loss": 0.4242, |
| "step": 551 |
| }, |
| { |
| "epoch": 0.8604832424006236, |
| "grad_norm": 0.43102309998132354, |
| "learning_rate": 3.9624277456647405e-05, |
| "loss": 0.4417, |
| "step": 552 |
| }, |
| { |
| "epoch": 0.8620420888542478, |
| "grad_norm": 0.5161140253561636, |
| "learning_rate": 3.959537572254335e-05, |
| "loss": 0.4497, |
| "step": 553 |
| }, |
| { |
| "epoch": 0.8636009353078722, |
| "grad_norm": 0.5456731985809448, |
| "learning_rate": 3.9566473988439305e-05, |
| "loss": 0.4179, |
| "step": 554 |
| }, |
| { |
| "epoch": 0.8651597817614964, |
| "grad_norm": 0.4207485633634238, |
| "learning_rate": 3.9537572254335265e-05, |
| "loss": 0.4427, |
| "step": 555 |
| }, |
| { |
| "epoch": 0.8667186282151208, |
| "grad_norm": 0.5759488662135421, |
| "learning_rate": 3.950867052023122e-05, |
| "loss": 0.4477, |
| "step": 556 |
| }, |
| { |
| "epoch": 0.8682774746687452, |
| "grad_norm": 0.4021704450074726, |
| "learning_rate": 3.947976878612717e-05, |
| "loss": 0.4463, |
| "step": 557 |
| }, |
| { |
| "epoch": 0.8698363211223694, |
| "grad_norm": 0.42177083348273137, |
| "learning_rate": 3.945086705202312e-05, |
| "loss": 0.4598, |
| "step": 558 |
| }, |
| { |
| "epoch": 0.8713951675759938, |
| "grad_norm": 0.4701952158517483, |
| "learning_rate": 3.942196531791908e-05, |
| "loss": 0.4394, |
| "step": 559 |
| }, |
| { |
| "epoch": 0.872954014029618, |
| "grad_norm": 0.44080844830392035, |
| "learning_rate": 3.939306358381503e-05, |
| "loss": 0.4493, |
| "step": 560 |
| }, |
| { |
| "epoch": 0.8745128604832424, |
| "grad_norm": 0.3977306328304877, |
| "learning_rate": 3.9364161849710986e-05, |
| "loss": 0.4524, |
| "step": 561 |
| }, |
| { |
| "epoch": 0.8760717069368668, |
| "grad_norm": 0.43018670105627926, |
| "learning_rate": 3.933526011560694e-05, |
| "loss": 0.4267, |
| "step": 562 |
| }, |
| { |
| "epoch": 0.877630553390491, |
| "grad_norm": 0.4268384770791736, |
| "learning_rate": 3.930635838150289e-05, |
| "loss": 0.4607, |
| "step": 563 |
| }, |
| { |
| "epoch": 0.8791893998441154, |
| "grad_norm": 0.38783009673051294, |
| "learning_rate": 3.9277456647398846e-05, |
| "loss": 0.4476, |
| "step": 564 |
| }, |
| { |
| "epoch": 0.8807482462977396, |
| "grad_norm": 0.35687832758423854, |
| "learning_rate": 3.92485549132948e-05, |
| "loss": 0.435, |
| "step": 565 |
| }, |
| { |
| "epoch": 0.882307092751364, |
| "grad_norm": 0.43764488183272643, |
| "learning_rate": 3.921965317919075e-05, |
| "loss": 0.4466, |
| "step": 566 |
| }, |
| { |
| "epoch": 0.8838659392049883, |
| "grad_norm": 0.40005960295573123, |
| "learning_rate": 3.9190751445086707e-05, |
| "loss": 0.4288, |
| "step": 567 |
| }, |
| { |
| "epoch": 0.8854247856586126, |
| "grad_norm": 0.46289540585836514, |
| "learning_rate": 3.916184971098266e-05, |
| "loss": 0.4545, |
| "step": 568 |
| }, |
| { |
| "epoch": 0.886983632112237, |
| "grad_norm": 0.3824987032253999, |
| "learning_rate": 3.9132947976878613e-05, |
| "loss": 0.4414, |
| "step": 569 |
| }, |
| { |
| "epoch": 0.8885424785658612, |
| "grad_norm": 0.47993501337835254, |
| "learning_rate": 3.910404624277457e-05, |
| "loss": 0.4411, |
| "step": 570 |
| }, |
| { |
| "epoch": 0.8901013250194856, |
| "grad_norm": 0.3194097378426562, |
| "learning_rate": 3.907514450867053e-05, |
| "loss": 0.4532, |
| "step": 571 |
| }, |
| { |
| "epoch": 0.8916601714731099, |
| "grad_norm": 0.5232202230976029, |
| "learning_rate": 3.904624277456648e-05, |
| "loss": 0.423, |
| "step": 572 |
| }, |
| { |
| "epoch": 0.8932190179267342, |
| "grad_norm": 0.3126690343426845, |
| "learning_rate": 3.901734104046243e-05, |
| "loss": 0.4333, |
| "step": 573 |
| }, |
| { |
| "epoch": 0.8947778643803586, |
| "grad_norm": 0.45718182082215625, |
| "learning_rate": 3.898843930635838e-05, |
| "loss": 0.4579, |
| "step": 574 |
| }, |
| { |
| "epoch": 0.8963367108339828, |
| "grad_norm": 0.3578668564536035, |
| "learning_rate": 3.895953757225434e-05, |
| "loss": 0.4365, |
| "step": 575 |
| }, |
| { |
| "epoch": 0.8978955572876072, |
| "grad_norm": 0.39528003293735503, |
| "learning_rate": 3.8930635838150294e-05, |
| "loss": 0.4373, |
| "step": 576 |
| }, |
| { |
| "epoch": 0.8994544037412315, |
| "grad_norm": 0.4144241684007145, |
| "learning_rate": 3.890173410404625e-05, |
| "loss": 0.4261, |
| "step": 577 |
| }, |
| { |
| "epoch": 0.9010132501948558, |
| "grad_norm": 0.37062244540768974, |
| "learning_rate": 3.8872832369942194e-05, |
| "loss": 0.4495, |
| "step": 578 |
| }, |
| { |
| "epoch": 0.9025720966484801, |
| "grad_norm": 0.38183987870941294, |
| "learning_rate": 3.884393063583815e-05, |
| "loss": 0.4338, |
| "step": 579 |
| }, |
| { |
| "epoch": 0.9041309431021044, |
| "grad_norm": 0.39082808942712405, |
| "learning_rate": 3.881502890173411e-05, |
| "loss": 0.4444, |
| "step": 580 |
| }, |
| { |
| "epoch": 0.9056897895557288, |
| "grad_norm": 0.4477498201218956, |
| "learning_rate": 3.878612716763006e-05, |
| "loss": 0.4519, |
| "step": 581 |
| }, |
| { |
| "epoch": 0.9072486360093531, |
| "grad_norm": 0.44239045295490875, |
| "learning_rate": 3.8757225433526015e-05, |
| "loss": 0.4481, |
| "step": 582 |
| }, |
| { |
| "epoch": 0.9088074824629774, |
| "grad_norm": 0.3680081228407661, |
| "learning_rate": 3.872832369942196e-05, |
| "loss": 0.44, |
| "step": 583 |
| }, |
| { |
| "epoch": 0.9103663289166017, |
| "grad_norm": 0.49017226032397, |
| "learning_rate": 3.869942196531792e-05, |
| "loss": 0.4523, |
| "step": 584 |
| }, |
| { |
| "epoch": 0.911925175370226, |
| "grad_norm": 0.36889936915401544, |
| "learning_rate": 3.8670520231213875e-05, |
| "loss": 0.4252, |
| "step": 585 |
| }, |
| { |
| "epoch": 0.9134840218238504, |
| "grad_norm": 0.42106930607963194, |
| "learning_rate": 3.864161849710983e-05, |
| "loss": 0.4366, |
| "step": 586 |
| }, |
| { |
| "epoch": 0.9150428682774747, |
| "grad_norm": 0.33426016889258103, |
| "learning_rate": 3.861271676300578e-05, |
| "loss": 0.4364, |
| "step": 587 |
| }, |
| { |
| "epoch": 0.916601714731099, |
| "grad_norm": 0.38283532007129306, |
| "learning_rate": 3.8583815028901736e-05, |
| "loss": 0.4299, |
| "step": 588 |
| }, |
| { |
| "epoch": 0.9181605611847233, |
| "grad_norm": 0.3843431490728921, |
| "learning_rate": 3.855491329479769e-05, |
| "loss": 0.4423, |
| "step": 589 |
| }, |
| { |
| "epoch": 0.9197194076383476, |
| "grad_norm": 0.39391225219289805, |
| "learning_rate": 3.852601156069364e-05, |
| "loss": 0.4553, |
| "step": 590 |
| }, |
| { |
| "epoch": 0.921278254091972, |
| "grad_norm": 0.3921483022893861, |
| "learning_rate": 3.8497109826589596e-05, |
| "loss": 0.4374, |
| "step": 591 |
| }, |
| { |
| "epoch": 0.9228371005455963, |
| "grad_norm": 0.4586348772859173, |
| "learning_rate": 3.846820809248555e-05, |
| "loss": 0.4391, |
| "step": 592 |
| }, |
| { |
| "epoch": 0.9243959469992206, |
| "grad_norm": 0.3870941468579518, |
| "learning_rate": 3.84393063583815e-05, |
| "loss": 0.4588, |
| "step": 593 |
| }, |
| { |
| "epoch": 0.9259547934528449, |
| "grad_norm": 0.4121790906090321, |
| "learning_rate": 3.8410404624277456e-05, |
| "loss": 0.4192, |
| "step": 594 |
| }, |
| { |
| "epoch": 0.9275136399064692, |
| "grad_norm": 0.3287491996044717, |
| "learning_rate": 3.838150289017341e-05, |
| "loss": 0.4376, |
| "step": 595 |
| }, |
| { |
| "epoch": 0.9290724863600935, |
| "grad_norm": 0.4668763195586917, |
| "learning_rate": 3.835260115606937e-05, |
| "loss": 0.4513, |
| "step": 596 |
| }, |
| { |
| "epoch": 0.9306313328137179, |
| "grad_norm": 0.39213339962767724, |
| "learning_rate": 3.832369942196532e-05, |
| "loss": 0.4349, |
| "step": 597 |
| }, |
| { |
| "epoch": 0.9321901792673422, |
| "grad_norm": 0.42578108338194876, |
| "learning_rate": 3.829479768786127e-05, |
| "loss": 0.4365, |
| "step": 598 |
| }, |
| { |
| "epoch": 0.9337490257209665, |
| "grad_norm": 0.3949037922002061, |
| "learning_rate": 3.8265895953757224e-05, |
| "loss": 0.4366, |
| "step": 599 |
| }, |
| { |
| "epoch": 0.9353078721745908, |
| "grad_norm": 0.3826228697949301, |
| "learning_rate": 3.8236994219653184e-05, |
| "loss": 0.4426, |
| "step": 600 |
| }, |
| { |
| "epoch": 0.9368667186282151, |
| "grad_norm": 0.3650038749614227, |
| "learning_rate": 3.820809248554914e-05, |
| "loss": 0.4339, |
| "step": 601 |
| }, |
| { |
| "epoch": 0.9384255650818394, |
| "grad_norm": 0.40293156157563836, |
| "learning_rate": 3.817919075144509e-05, |
| "loss": 0.4744, |
| "step": 602 |
| }, |
| { |
| "epoch": 0.9399844115354637, |
| "grad_norm": 0.40210846514825743, |
| "learning_rate": 3.815028901734104e-05, |
| "loss": 0.4404, |
| "step": 603 |
| }, |
| { |
| "epoch": 0.9415432579890881, |
| "grad_norm": 0.39097895288839746, |
| "learning_rate": 3.8121387283237e-05, |
| "loss": 0.4401, |
| "step": 604 |
| }, |
| { |
| "epoch": 0.9431021044427124, |
| "grad_norm": 0.3227109176868314, |
| "learning_rate": 3.809248554913295e-05, |
| "loss": 0.4312, |
| "step": 605 |
| }, |
| { |
| "epoch": 0.9446609508963367, |
| "grad_norm": 0.4608466312979325, |
| "learning_rate": 3.8063583815028905e-05, |
| "loss": 0.4591, |
| "step": 606 |
| }, |
| { |
| "epoch": 0.946219797349961, |
| "grad_norm": 0.34756293281666123, |
| "learning_rate": 3.803468208092486e-05, |
| "loss": 0.4566, |
| "step": 607 |
| }, |
| { |
| "epoch": 0.9477786438035853, |
| "grad_norm": 0.37558636760339875, |
| "learning_rate": 3.800578034682081e-05, |
| "loss": 0.4205, |
| "step": 608 |
| }, |
| { |
| "epoch": 0.9493374902572097, |
| "grad_norm": 0.4205365075958774, |
| "learning_rate": 3.7976878612716765e-05, |
| "loss": 0.4388, |
| "step": 609 |
| }, |
| { |
| "epoch": 0.950896336710834, |
| "grad_norm": 0.45096900527646666, |
| "learning_rate": 3.794797687861272e-05, |
| "loss": 0.4316, |
| "step": 610 |
| }, |
| { |
| "epoch": 0.9524551831644583, |
| "grad_norm": 0.3812977384877062, |
| "learning_rate": 3.791907514450867e-05, |
| "loss": 0.4363, |
| "step": 611 |
| }, |
| { |
| "epoch": 0.9540140296180826, |
| "grad_norm": 0.48866253778453445, |
| "learning_rate": 3.7890173410404625e-05, |
| "loss": 0.428, |
| "step": 612 |
| }, |
| { |
| "epoch": 0.9555728760717069, |
| "grad_norm": 0.40499141402747296, |
| "learning_rate": 3.786127167630058e-05, |
| "loss": 0.4381, |
| "step": 613 |
| }, |
| { |
| "epoch": 0.9571317225253313, |
| "grad_norm": 0.46430129913008983, |
| "learning_rate": 3.783236994219653e-05, |
| "loss": 0.4429, |
| "step": 614 |
| }, |
| { |
| "epoch": 0.9586905689789555, |
| "grad_norm": 0.34966737213764454, |
| "learning_rate": 3.7803468208092486e-05, |
| "loss": 0.4381, |
| "step": 615 |
| }, |
| { |
| "epoch": 0.9602494154325799, |
| "grad_norm": 0.4138090253112475, |
| "learning_rate": 3.7774566473988446e-05, |
| "loss": 0.4258, |
| "step": 616 |
| }, |
| { |
| "epoch": 0.9618082618862042, |
| "grad_norm": 0.3297979382576517, |
| "learning_rate": 3.774566473988439e-05, |
| "loss": 0.4413, |
| "step": 617 |
| }, |
| { |
| "epoch": 0.9633671083398285, |
| "grad_norm": 0.3863735923761598, |
| "learning_rate": 3.7716763005780346e-05, |
| "loss": 0.431, |
| "step": 618 |
| }, |
| { |
| "epoch": 0.9649259547934529, |
| "grad_norm": 0.33528959794283103, |
| "learning_rate": 3.76878612716763e-05, |
| "loss": 0.4377, |
| "step": 619 |
| }, |
| { |
| "epoch": 0.9664848012470771, |
| "grad_norm": 0.3292524974297176, |
| "learning_rate": 3.765895953757226e-05, |
| "loss": 0.4326, |
| "step": 620 |
| }, |
| { |
| "epoch": 0.9680436477007015, |
| "grad_norm": 0.3156599159357699, |
| "learning_rate": 3.763005780346821e-05, |
| "loss": 0.4236, |
| "step": 621 |
| }, |
| { |
| "epoch": 0.9696024941543258, |
| "grad_norm": 0.349891638695045, |
| "learning_rate": 3.760115606936416e-05, |
| "loss": 0.4226, |
| "step": 622 |
| }, |
| { |
| "epoch": 0.9711613406079501, |
| "grad_norm": 0.3027036773941601, |
| "learning_rate": 3.757225433526011e-05, |
| "loss": 0.4357, |
| "step": 623 |
| }, |
| { |
| "epoch": 0.9727201870615745, |
| "grad_norm": 0.3418983655828593, |
| "learning_rate": 3.754335260115607e-05, |
| "loss": 0.4072, |
| "step": 624 |
| }, |
| { |
| "epoch": 0.9742790335151987, |
| "grad_norm": 0.32424716914194174, |
| "learning_rate": 3.751445086705203e-05, |
| "loss": 0.4229, |
| "step": 625 |
| }, |
| { |
| "epoch": 0.9758378799688231, |
| "grad_norm": 0.35397027714852347, |
| "learning_rate": 3.748554913294798e-05, |
| "loss": 0.443, |
| "step": 626 |
| }, |
| { |
| "epoch": 0.9773967264224473, |
| "grad_norm": 0.35983171780105105, |
| "learning_rate": 3.7456647398843934e-05, |
| "loss": 0.4218, |
| "step": 627 |
| }, |
| { |
| "epoch": 0.9789555728760717, |
| "grad_norm": 0.36410259602213924, |
| "learning_rate": 3.742774566473989e-05, |
| "loss": 0.4293, |
| "step": 628 |
| }, |
| { |
| "epoch": 0.9805144193296961, |
| "grad_norm": 0.3865124494448919, |
| "learning_rate": 3.739884393063584e-05, |
| "loss": 0.4229, |
| "step": 629 |
| }, |
| { |
| "epoch": 0.9820732657833203, |
| "grad_norm": 0.3646826145021347, |
| "learning_rate": 3.7369942196531794e-05, |
| "loss": 0.4308, |
| "step": 630 |
| }, |
| { |
| "epoch": 0.9836321122369447, |
| "grad_norm": 0.35897147135714, |
| "learning_rate": 3.734104046242775e-05, |
| "loss": 0.4407, |
| "step": 631 |
| }, |
| { |
| "epoch": 0.9851909586905689, |
| "grad_norm": 0.3391861968595338, |
| "learning_rate": 3.73121387283237e-05, |
| "loss": 0.4354, |
| "step": 632 |
| }, |
| { |
| "epoch": 0.9867498051441933, |
| "grad_norm": 0.43647757995986053, |
| "learning_rate": 3.7283236994219654e-05, |
| "loss": 0.4274, |
| "step": 633 |
| }, |
| { |
| "epoch": 0.9883086515978177, |
| "grad_norm": 0.3618260493378971, |
| "learning_rate": 3.725433526011561e-05, |
| "loss": 0.4203, |
| "step": 634 |
| }, |
| { |
| "epoch": 0.9898674980514419, |
| "grad_norm": 0.40188293786147516, |
| "learning_rate": 3.722543352601156e-05, |
| "loss": 0.4306, |
| "step": 635 |
| }, |
| { |
| "epoch": 0.9914263445050663, |
| "grad_norm": 0.4129221508285703, |
| "learning_rate": 3.719653179190752e-05, |
| "loss": 0.4248, |
| "step": 636 |
| }, |
| { |
| "epoch": 0.9929851909586905, |
| "grad_norm": 0.39766604692773766, |
| "learning_rate": 3.716763005780347e-05, |
| "loss": 0.4574, |
| "step": 637 |
| }, |
| { |
| "epoch": 0.9945440374123149, |
| "grad_norm": 0.5559578417632596, |
| "learning_rate": 3.713872832369942e-05, |
| "loss": 0.469, |
| "step": 638 |
| }, |
| { |
| "epoch": 0.9961028838659393, |
| "grad_norm": 0.3580889923606767, |
| "learning_rate": 3.7109826589595375e-05, |
| "loss": 0.4091, |
| "step": 639 |
| }, |
| { |
| "epoch": 0.9976617303195635, |
| "grad_norm": 0.5076402218866569, |
| "learning_rate": 3.7080924855491335e-05, |
| "loss": 0.4305, |
| "step": 640 |
| }, |
| { |
| "epoch": 0.9992205767731879, |
| "grad_norm": 0.3789119376045301, |
| "learning_rate": 3.705202312138729e-05, |
| "loss": 0.4306, |
| "step": 641 |
| }, |
| { |
| "epoch": 1.0, |
| "grad_norm": 0.3789119376045301, |
| "learning_rate": 3.7023121387283235e-05, |
| "loss": 0.4336, |
| "step": 642 |
| }, |
| { |
| "epoch": 1.0015588464536243, |
| "grad_norm": 0.628371201501632, |
| "learning_rate": 3.699421965317919e-05, |
| "loss": 0.3748, |
| "step": 643 |
| }, |
| { |
| "epoch": 1.0031176929072487, |
| "grad_norm": 0.49964431541022886, |
| "learning_rate": 3.696531791907515e-05, |
| "loss": 0.3842, |
| "step": 644 |
| }, |
| { |
| "epoch": 1.004676539360873, |
| "grad_norm": 0.4115388078983886, |
| "learning_rate": 3.69364161849711e-05, |
| "loss": 0.3921, |
| "step": 645 |
| }, |
| { |
| "epoch": 1.0062353858144972, |
| "grad_norm": 0.36847896294616717, |
| "learning_rate": 3.6907514450867056e-05, |
| "loss": 0.3917, |
| "step": 646 |
| }, |
| { |
| "epoch": 1.0077942322681215, |
| "grad_norm": 0.41235306439266933, |
| "learning_rate": 3.6878612716763e-05, |
| "loss": 0.3661, |
| "step": 647 |
| }, |
| { |
| "epoch": 1.009353078721746, |
| "grad_norm": 0.36043638686025053, |
| "learning_rate": 3.684971098265896e-05, |
| "loss": 0.3793, |
| "step": 648 |
| }, |
| { |
| "epoch": 1.0109119251753702, |
| "grad_norm": 0.5072805021746171, |
| "learning_rate": 3.6820809248554916e-05, |
| "loss": 0.3731, |
| "step": 649 |
| }, |
| { |
| "epoch": 1.0124707716289945, |
| "grad_norm": 0.3774967084990004, |
| "learning_rate": 3.679190751445087e-05, |
| "loss": 0.3837, |
| "step": 650 |
| }, |
| { |
| "epoch": 1.014029618082619, |
| "grad_norm": 0.47148649785731656, |
| "learning_rate": 3.676300578034682e-05, |
| "loss": 0.3655, |
| "step": 651 |
| }, |
| { |
| "epoch": 1.0155884645362432, |
| "grad_norm": 0.37322483271463347, |
| "learning_rate": 3.6734104046242777e-05, |
| "loss": 0.3955, |
| "step": 652 |
| }, |
| { |
| "epoch": 1.0171473109898674, |
| "grad_norm": 0.3705138885107383, |
| "learning_rate": 3.670520231213873e-05, |
| "loss": 0.3753, |
| "step": 653 |
| }, |
| { |
| "epoch": 1.018706157443492, |
| "grad_norm": 0.5310690376603894, |
| "learning_rate": 3.6676300578034683e-05, |
| "loss": 0.365, |
| "step": 654 |
| }, |
| { |
| "epoch": 1.0202650038971162, |
| "grad_norm": 0.3622397954678426, |
| "learning_rate": 3.664739884393064e-05, |
| "loss": 0.3691, |
| "step": 655 |
| }, |
| { |
| "epoch": 1.0218238503507404, |
| "grad_norm": 0.48489876341543203, |
| "learning_rate": 3.66184971098266e-05, |
| "loss": 0.3757, |
| "step": 656 |
| }, |
| { |
| "epoch": 1.0233826968043647, |
| "grad_norm": 0.3846528970782493, |
| "learning_rate": 3.6589595375722544e-05, |
| "loss": 0.3766, |
| "step": 657 |
| }, |
| { |
| "epoch": 1.0249415432579891, |
| "grad_norm": 0.4581460525204425, |
| "learning_rate": 3.65606936416185e-05, |
| "loss": 0.3513, |
| "step": 658 |
| }, |
| { |
| "epoch": 1.0265003897116134, |
| "grad_norm": 0.3465258787798717, |
| "learning_rate": 3.653179190751445e-05, |
| "loss": 0.3624, |
| "step": 659 |
| }, |
| { |
| "epoch": 1.0280592361652376, |
| "grad_norm": 0.336442810655498, |
| "learning_rate": 3.650289017341041e-05, |
| "loss": 0.3572, |
| "step": 660 |
| }, |
| { |
| "epoch": 1.0296180826188621, |
| "grad_norm": 0.3003091419390878, |
| "learning_rate": 3.6473988439306364e-05, |
| "loss": 0.367, |
| "step": 661 |
| }, |
| { |
| "epoch": 1.0311769290724864, |
| "grad_norm": 0.32146010358650395, |
| "learning_rate": 3.644508670520231e-05, |
| "loss": 0.3758, |
| "step": 662 |
| }, |
| { |
| "epoch": 1.0327357755261106, |
| "grad_norm": 0.3233773848232951, |
| "learning_rate": 3.6416184971098265e-05, |
| "loss": 0.3602, |
| "step": 663 |
| }, |
| { |
| "epoch": 1.034294621979735, |
| "grad_norm": 0.3511338330800115, |
| "learning_rate": 3.6387283236994225e-05, |
| "loss": 0.3496, |
| "step": 664 |
| }, |
| { |
| "epoch": 1.0358534684333593, |
| "grad_norm": 0.2996717533713208, |
| "learning_rate": 3.635838150289018e-05, |
| "loss": 0.3462, |
| "step": 665 |
| }, |
| { |
| "epoch": 1.0374123148869836, |
| "grad_norm": 0.32342558777316216, |
| "learning_rate": 3.632947976878613e-05, |
| "loss": 0.3747, |
| "step": 666 |
| }, |
| { |
| "epoch": 1.0389711613406079, |
| "grad_norm": 0.3367570922014961, |
| "learning_rate": 3.630057803468208e-05, |
| "loss": 0.3614, |
| "step": 667 |
| }, |
| { |
| "epoch": 1.0405300077942323, |
| "grad_norm": 0.3441547440118924, |
| "learning_rate": 3.627167630057804e-05, |
| "loss": 0.3917, |
| "step": 668 |
| }, |
| { |
| "epoch": 1.0420888542478566, |
| "grad_norm": 0.3361181158280621, |
| "learning_rate": 3.624277456647399e-05, |
| "loss": 0.3832, |
| "step": 669 |
| }, |
| { |
| "epoch": 1.0436477007014808, |
| "grad_norm": 0.3291404615432134, |
| "learning_rate": 3.6213872832369945e-05, |
| "loss": 0.3622, |
| "step": 670 |
| }, |
| { |
| "epoch": 1.0452065471551053, |
| "grad_norm": 0.30771644853510227, |
| "learning_rate": 3.61849710982659e-05, |
| "loss": 0.3796, |
| "step": 671 |
| }, |
| { |
| "epoch": 1.0467653936087296, |
| "grad_norm": 0.33176245015646244, |
| "learning_rate": 3.615606936416185e-05, |
| "loss": 0.3788, |
| "step": 672 |
| }, |
| { |
| "epoch": 1.0483242400623538, |
| "grad_norm": 0.9925708666951382, |
| "learning_rate": 3.6127167630057806e-05, |
| "loss": 0.4087, |
| "step": 673 |
| }, |
| { |
| "epoch": 1.0498830865159783, |
| "grad_norm": 0.3681749130556859, |
| "learning_rate": 3.609826589595376e-05, |
| "loss": 0.3647, |
| "step": 674 |
| }, |
| { |
| "epoch": 1.0514419329696025, |
| "grad_norm": 0.3804125438000747, |
| "learning_rate": 3.606936416184971e-05, |
| "loss": 0.387, |
| "step": 675 |
| }, |
| { |
| "epoch": 1.0530007794232268, |
| "grad_norm": 0.3792677722995949, |
| "learning_rate": 3.6040462427745666e-05, |
| "loss": 0.3806, |
| "step": 676 |
| }, |
| { |
| "epoch": 1.054559625876851, |
| "grad_norm": 0.3546135860064871, |
| "learning_rate": 3.601156069364162e-05, |
| "loss": 0.3858, |
| "step": 677 |
| }, |
| { |
| "epoch": 1.0561184723304755, |
| "grad_norm": 0.3603149324311687, |
| "learning_rate": 3.598265895953757e-05, |
| "loss": 0.3744, |
| "step": 678 |
| }, |
| { |
| "epoch": 1.0576773187840998, |
| "grad_norm": 0.34480487604338456, |
| "learning_rate": 3.5953757225433526e-05, |
| "loss": 0.3642, |
| "step": 679 |
| }, |
| { |
| "epoch": 1.059236165237724, |
| "grad_norm": 0.3405204986814321, |
| "learning_rate": 3.592485549132948e-05, |
| "loss": 0.364, |
| "step": 680 |
| }, |
| { |
| "epoch": 1.0607950116913485, |
| "grad_norm": 0.3605005587786407, |
| "learning_rate": 3.589595375722544e-05, |
| "loss": 0.3619, |
| "step": 681 |
| }, |
| { |
| "epoch": 1.0623538581449727, |
| "grad_norm": 0.4006257143609181, |
| "learning_rate": 3.586705202312139e-05, |
| "loss": 0.3818, |
| "step": 682 |
| }, |
| { |
| "epoch": 1.063912704598597, |
| "grad_norm": 0.29455634443366796, |
| "learning_rate": 3.583815028901734e-05, |
| "loss": 0.3673, |
| "step": 683 |
| }, |
| { |
| "epoch": 1.0654715510522212, |
| "grad_norm": 0.36050845424709793, |
| "learning_rate": 3.5809248554913294e-05, |
| "loss": 0.367, |
| "step": 684 |
| }, |
| { |
| "epoch": 1.0670303975058457, |
| "grad_norm": 0.3404885010186498, |
| "learning_rate": 3.5780346820809254e-05, |
| "loss": 0.3931, |
| "step": 685 |
| }, |
| { |
| "epoch": 1.06858924395947, |
| "grad_norm": 0.3209534029364564, |
| "learning_rate": 3.575144508670521e-05, |
| "loss": 0.371, |
| "step": 686 |
| }, |
| { |
| "epoch": 1.0701480904130942, |
| "grad_norm": 0.33657202823646476, |
| "learning_rate": 3.5722543352601154e-05, |
| "loss": 0.3789, |
| "step": 687 |
| }, |
| { |
| "epoch": 1.0717069368667187, |
| "grad_norm": 0.3056381970556524, |
| "learning_rate": 3.569364161849711e-05, |
| "loss": 0.3781, |
| "step": 688 |
| }, |
| { |
| "epoch": 1.073265783320343, |
| "grad_norm": 0.2869272425111672, |
| "learning_rate": 3.566473988439307e-05, |
| "loss": 0.3795, |
| "step": 689 |
| }, |
| { |
| "epoch": 1.0748246297739672, |
| "grad_norm": 0.3423224557400581, |
| "learning_rate": 3.563583815028902e-05, |
| "loss": 0.3569, |
| "step": 690 |
| }, |
| { |
| "epoch": 1.0763834762275917, |
| "grad_norm": 0.3620299786614207, |
| "learning_rate": 3.5606936416184975e-05, |
| "loss": 0.3813, |
| "step": 691 |
| }, |
| { |
| "epoch": 1.077942322681216, |
| "grad_norm": 0.3164910374888722, |
| "learning_rate": 3.557803468208092e-05, |
| "loss": 0.3548, |
| "step": 692 |
| }, |
| { |
| "epoch": 1.0795011691348402, |
| "grad_norm": 0.32077378022129677, |
| "learning_rate": 3.554913294797688e-05, |
| "loss": 0.3714, |
| "step": 693 |
| }, |
| { |
| "epoch": 1.0810600155884647, |
| "grad_norm": 0.3449220187992264, |
| "learning_rate": 3.5520231213872835e-05, |
| "loss": 0.363, |
| "step": 694 |
| }, |
| { |
| "epoch": 1.082618862042089, |
| "grad_norm": 0.3342998444359934, |
| "learning_rate": 3.549132947976879e-05, |
| "loss": 0.3482, |
| "step": 695 |
| }, |
| { |
| "epoch": 1.0841777084957132, |
| "grad_norm": 0.35266941801482377, |
| "learning_rate": 3.546242774566474e-05, |
| "loss": 0.3784, |
| "step": 696 |
| }, |
| { |
| "epoch": 1.0857365549493374, |
| "grad_norm": 0.4488757216839682, |
| "learning_rate": 3.5433526011560695e-05, |
| "loss": 0.3788, |
| "step": 697 |
| }, |
| { |
| "epoch": 1.0872954014029619, |
| "grad_norm": 0.3288290304532232, |
| "learning_rate": 3.540462427745665e-05, |
| "loss": 0.3812, |
| "step": 698 |
| }, |
| { |
| "epoch": 1.0888542478565861, |
| "grad_norm": 0.4417200363539964, |
| "learning_rate": 3.53757225433526e-05, |
| "loss": 0.377, |
| "step": 699 |
| }, |
| { |
| "epoch": 1.0904130943102104, |
| "grad_norm": 0.31742949476273763, |
| "learning_rate": 3.5346820809248556e-05, |
| "loss": 0.3639, |
| "step": 700 |
| }, |
| { |
| "epoch": 1.0919719407638349, |
| "grad_norm": 0.3308705930780861, |
| "learning_rate": 3.531791907514451e-05, |
| "loss": 0.3634, |
| "step": 701 |
| }, |
| { |
| "epoch": 1.093530787217459, |
| "grad_norm": 0.3382413793786327, |
| "learning_rate": 3.528901734104046e-05, |
| "loss": 0.3598, |
| "step": 702 |
| }, |
| { |
| "epoch": 1.0950896336710834, |
| "grad_norm": 0.3621055356977116, |
| "learning_rate": 3.5260115606936416e-05, |
| "loss": 0.3657, |
| "step": 703 |
| }, |
| { |
| "epoch": 1.0966484801247076, |
| "grad_norm": 0.30903567078078115, |
| "learning_rate": 3.523121387283237e-05, |
| "loss": 0.3746, |
| "step": 704 |
| }, |
| { |
| "epoch": 1.098207326578332, |
| "grad_norm": 0.43168788302923716, |
| "learning_rate": 3.520231213872833e-05, |
| "loss": 0.3786, |
| "step": 705 |
| }, |
| { |
| "epoch": 1.0997661730319563, |
| "grad_norm": 0.3735078603768068, |
| "learning_rate": 3.5173410404624276e-05, |
| "loss": 0.3554, |
| "step": 706 |
| }, |
| { |
| "epoch": 1.1013250194855806, |
| "grad_norm": 0.3705144355540284, |
| "learning_rate": 3.514450867052023e-05, |
| "loss": 0.3491, |
| "step": 707 |
| }, |
| { |
| "epoch": 1.102883865939205, |
| "grad_norm": 0.36212715939080664, |
| "learning_rate": 3.511560693641618e-05, |
| "loss": 0.3665, |
| "step": 708 |
| }, |
| { |
| "epoch": 1.1044427123928293, |
| "grad_norm": 0.3844205403442548, |
| "learning_rate": 3.508670520231214e-05, |
| "loss": 0.369, |
| "step": 709 |
| }, |
| { |
| "epoch": 1.1060015588464536, |
| "grad_norm": 0.3434945710583639, |
| "learning_rate": 3.50578034682081e-05, |
| "loss": 0.3724, |
| "step": 710 |
| }, |
| { |
| "epoch": 1.107560405300078, |
| "grad_norm": 0.4376944680100341, |
| "learning_rate": 3.502890173410405e-05, |
| "loss": 0.3829, |
| "step": 711 |
| }, |
| { |
| "epoch": 1.1091192517537023, |
| "grad_norm": 0.3162790873067193, |
| "learning_rate": 3.5e-05, |
| "loss": 0.3794, |
| "step": 712 |
| }, |
| { |
| "epoch": 1.1106780982073265, |
| "grad_norm": 0.30611808166388454, |
| "learning_rate": 3.497109826589596e-05, |
| "loss": 0.3652, |
| "step": 713 |
| }, |
| { |
| "epoch": 1.1122369446609508, |
| "grad_norm": 0.3100967615655972, |
| "learning_rate": 3.494219653179191e-05, |
| "loss": 0.3715, |
| "step": 714 |
| }, |
| { |
| "epoch": 1.1137957911145753, |
| "grad_norm": 0.29998053632892707, |
| "learning_rate": 3.4913294797687864e-05, |
| "loss": 0.385, |
| "step": 715 |
| }, |
| { |
| "epoch": 1.1153546375681995, |
| "grad_norm": 0.3531103029098729, |
| "learning_rate": 3.488439306358382e-05, |
| "loss": 0.3852, |
| "step": 716 |
| }, |
| { |
| "epoch": 1.1169134840218238, |
| "grad_norm": 0.28836989657125967, |
| "learning_rate": 3.485549132947977e-05, |
| "loss": 0.369, |
| "step": 717 |
| }, |
| { |
| "epoch": 1.1184723304754483, |
| "grad_norm": 0.3726105295277464, |
| "learning_rate": 3.4826589595375724e-05, |
| "loss": 0.3901, |
| "step": 718 |
| }, |
| { |
| "epoch": 1.1200311769290725, |
| "grad_norm": 0.30082206554969804, |
| "learning_rate": 3.479768786127168e-05, |
| "loss": 0.3672, |
| "step": 719 |
| }, |
| { |
| "epoch": 1.1215900233826968, |
| "grad_norm": 0.3229975948532406, |
| "learning_rate": 3.476878612716763e-05, |
| "loss": 0.3741, |
| "step": 720 |
| }, |
| { |
| "epoch": 1.1231488698363212, |
| "grad_norm": 0.30690351827295265, |
| "learning_rate": 3.4739884393063585e-05, |
| "loss": 0.3612, |
| "step": 721 |
| }, |
| { |
| "epoch": 1.1247077162899455, |
| "grad_norm": 0.3558269454615068, |
| "learning_rate": 3.471098265895954e-05, |
| "loss": 0.3887, |
| "step": 722 |
| }, |
| { |
| "epoch": 1.1262665627435697, |
| "grad_norm": 0.34137813880164614, |
| "learning_rate": 3.468208092485549e-05, |
| "loss": 0.3835, |
| "step": 723 |
| }, |
| { |
| "epoch": 1.127825409197194, |
| "grad_norm": 0.3060779272980073, |
| "learning_rate": 3.4653179190751445e-05, |
| "loss": 0.3825, |
| "step": 724 |
| }, |
| { |
| "epoch": 1.1293842556508185, |
| "grad_norm": 0.34107259706747284, |
| "learning_rate": 3.4624277456647405e-05, |
| "loss": 0.3716, |
| "step": 725 |
| }, |
| { |
| "epoch": 1.1309431021044427, |
| "grad_norm": 0.28915982893470976, |
| "learning_rate": 3.459537572254335e-05, |
| "loss": 0.3658, |
| "step": 726 |
| }, |
| { |
| "epoch": 1.132501948558067, |
| "grad_norm": 0.2927164690689312, |
| "learning_rate": 3.4566473988439305e-05, |
| "loss": 0.3604, |
| "step": 727 |
| }, |
| { |
| "epoch": 1.1340607950116914, |
| "grad_norm": 0.3191085857306229, |
| "learning_rate": 3.453757225433526e-05, |
| "loss": 0.3837, |
| "step": 728 |
| }, |
| { |
| "epoch": 1.1356196414653157, |
| "grad_norm": 0.3079933409669794, |
| "learning_rate": 3.450867052023122e-05, |
| "loss": 0.374, |
| "step": 729 |
| }, |
| { |
| "epoch": 1.13717848791894, |
| "grad_norm": 0.2904492631644862, |
| "learning_rate": 3.447976878612717e-05, |
| "loss": 0.3778, |
| "step": 730 |
| }, |
| { |
| "epoch": 1.1387373343725642, |
| "grad_norm": 0.29735218541832303, |
| "learning_rate": 3.445086705202312e-05, |
| "loss": 0.3573, |
| "step": 731 |
| }, |
| { |
| "epoch": 1.1402961808261887, |
| "grad_norm": 0.3152575212565156, |
| "learning_rate": 3.442196531791907e-05, |
| "loss": 0.373, |
| "step": 732 |
| }, |
| { |
| "epoch": 1.141855027279813, |
| "grad_norm": 0.32039618688552635, |
| "learning_rate": 3.439306358381503e-05, |
| "loss": 0.3779, |
| "step": 733 |
| }, |
| { |
| "epoch": 1.1434138737334372, |
| "grad_norm": 0.3168459279894005, |
| "learning_rate": 3.4364161849710986e-05, |
| "loss": 0.3612, |
| "step": 734 |
| }, |
| { |
| "epoch": 1.1449727201870616, |
| "grad_norm": 0.3382435631010463, |
| "learning_rate": 3.433526011560694e-05, |
| "loss": 0.375, |
| "step": 735 |
| }, |
| { |
| "epoch": 1.146531566640686, |
| "grad_norm": 0.3451916527844319, |
| "learning_rate": 3.430635838150289e-05, |
| "loss": 0.3656, |
| "step": 736 |
| }, |
| { |
| "epoch": 1.1480904130943101, |
| "grad_norm": 0.29795458770948763, |
| "learning_rate": 3.427745664739885e-05, |
| "loss": 0.3517, |
| "step": 737 |
| }, |
| { |
| "epoch": 1.1496492595479346, |
| "grad_norm": 0.3204048119901522, |
| "learning_rate": 3.42485549132948e-05, |
| "loss": 0.3753, |
| "step": 738 |
| }, |
| { |
| "epoch": 1.1512081060015589, |
| "grad_norm": 0.3509132683834904, |
| "learning_rate": 3.4219653179190754e-05, |
| "loss": 0.3718, |
| "step": 739 |
| }, |
| { |
| "epoch": 1.1527669524551831, |
| "grad_norm": 0.3252836365240409, |
| "learning_rate": 3.419075144508671e-05, |
| "loss": 0.3884, |
| "step": 740 |
| }, |
| { |
| "epoch": 1.1543257989088076, |
| "grad_norm": 0.2938786860494216, |
| "learning_rate": 3.416184971098266e-05, |
| "loss": 0.3508, |
| "step": 741 |
| }, |
| { |
| "epoch": 1.1558846453624319, |
| "grad_norm": 0.3286847180631177, |
| "learning_rate": 3.4132947976878614e-05, |
| "loss": 0.3887, |
| "step": 742 |
| }, |
| { |
| "epoch": 1.157443491816056, |
| "grad_norm": 0.293702845880183, |
| "learning_rate": 3.410404624277457e-05, |
| "loss": 0.3625, |
| "step": 743 |
| }, |
| { |
| "epoch": 1.1590023382696804, |
| "grad_norm": 0.3637581698851474, |
| "learning_rate": 3.407514450867052e-05, |
| "loss": 0.3807, |
| "step": 744 |
| }, |
| { |
| "epoch": 1.1605611847233048, |
| "grad_norm": 0.33030454774450496, |
| "learning_rate": 3.404624277456648e-05, |
| "loss": 0.3625, |
| "step": 745 |
| }, |
| { |
| "epoch": 1.162120031176929, |
| "grad_norm": 0.3413072692858561, |
| "learning_rate": 3.401734104046243e-05, |
| "loss": 0.3799, |
| "step": 746 |
| }, |
| { |
| "epoch": 1.1636788776305533, |
| "grad_norm": 0.3282042861228878, |
| "learning_rate": 3.398843930635838e-05, |
| "loss": 0.3744, |
| "step": 747 |
| }, |
| { |
| "epoch": 1.1652377240841778, |
| "grad_norm": 0.4051993980268251, |
| "learning_rate": 3.3959537572254335e-05, |
| "loss": 0.3658, |
| "step": 748 |
| }, |
| { |
| "epoch": 1.166796570537802, |
| "grad_norm": 0.3513826597230518, |
| "learning_rate": 3.3930635838150295e-05, |
| "loss": 0.3641, |
| "step": 749 |
| }, |
| { |
| "epoch": 1.1683554169914263, |
| "grad_norm": 0.35299247249790444, |
| "learning_rate": 3.390173410404625e-05, |
| "loss": 0.3654, |
| "step": 750 |
| }, |
| { |
| "epoch": 1.1699142634450506, |
| "grad_norm": 0.3014386293388838, |
| "learning_rate": 3.3872832369942195e-05, |
| "loss": 0.3765, |
| "step": 751 |
| }, |
| { |
| "epoch": 1.171473109898675, |
| "grad_norm": 0.3603523634257503, |
| "learning_rate": 3.384393063583815e-05, |
| "loss": 0.3845, |
| "step": 752 |
| }, |
| { |
| "epoch": 1.1730319563522993, |
| "grad_norm": 0.29665311038360115, |
| "learning_rate": 3.381502890173411e-05, |
| "loss": 0.3797, |
| "step": 753 |
| }, |
| { |
| "epoch": 1.1745908028059235, |
| "grad_norm": 0.2935340087785823, |
| "learning_rate": 3.378612716763006e-05, |
| "loss": 0.3602, |
| "step": 754 |
| }, |
| { |
| "epoch": 1.176149649259548, |
| "grad_norm": 0.33634845355876714, |
| "learning_rate": 3.3757225433526015e-05, |
| "loss": 0.3796, |
| "step": 755 |
| }, |
| { |
| "epoch": 1.1777084957131723, |
| "grad_norm": 0.33117541570348524, |
| "learning_rate": 3.372832369942196e-05, |
| "loss": 0.4078, |
| "step": 756 |
| }, |
| { |
| "epoch": 1.1792673421667965, |
| "grad_norm": 0.2734330693617624, |
| "learning_rate": 3.369942196531792e-05, |
| "loss": 0.3731, |
| "step": 757 |
| }, |
| { |
| "epoch": 1.1808261886204208, |
| "grad_norm": 0.35008178961287134, |
| "learning_rate": 3.3670520231213876e-05, |
| "loss": 0.3764, |
| "step": 758 |
| }, |
| { |
| "epoch": 1.1823850350740452, |
| "grad_norm": 0.3094977957875163, |
| "learning_rate": 3.364161849710983e-05, |
| "loss": 0.3565, |
| "step": 759 |
| }, |
| { |
| "epoch": 1.1839438815276695, |
| "grad_norm": 0.29463076564571844, |
| "learning_rate": 3.361271676300578e-05, |
| "loss": 0.3705, |
| "step": 760 |
| }, |
| { |
| "epoch": 1.1855027279812937, |
| "grad_norm": 0.3164209650580971, |
| "learning_rate": 3.3583815028901736e-05, |
| "loss": 0.3764, |
| "step": 761 |
| }, |
| { |
| "epoch": 1.1870615744349182, |
| "grad_norm": 0.28906263199367943, |
| "learning_rate": 3.355491329479769e-05, |
| "loss": 0.3889, |
| "step": 762 |
| }, |
| { |
| "epoch": 1.1886204208885425, |
| "grad_norm": 0.3138486170125581, |
| "learning_rate": 3.352601156069364e-05, |
| "loss": 0.3781, |
| "step": 763 |
| }, |
| { |
| "epoch": 1.1901792673421667, |
| "grad_norm": 0.31679716557668985, |
| "learning_rate": 3.3497109826589596e-05, |
| "loss": 0.3725, |
| "step": 764 |
| }, |
| { |
| "epoch": 1.1917381137957912, |
| "grad_norm": 0.2819822969115435, |
| "learning_rate": 3.346820809248556e-05, |
| "loss": 0.3621, |
| "step": 765 |
| }, |
| { |
| "epoch": 1.1932969602494155, |
| "grad_norm": 0.2900191750020783, |
| "learning_rate": 3.34393063583815e-05, |
| "loss": 0.3864, |
| "step": 766 |
| }, |
| { |
| "epoch": 1.1948558067030397, |
| "grad_norm": 0.3475437886006249, |
| "learning_rate": 3.341040462427746e-05, |
| "loss": 0.3501, |
| "step": 767 |
| }, |
| { |
| "epoch": 1.1964146531566642, |
| "grad_norm": 0.27289955081057704, |
| "learning_rate": 3.338150289017341e-05, |
| "loss": 0.3738, |
| "step": 768 |
| }, |
| { |
| "epoch": 1.1979734996102884, |
| "grad_norm": 0.37715118746357845, |
| "learning_rate": 3.335260115606937e-05, |
| "loss": 0.3566, |
| "step": 769 |
| }, |
| { |
| "epoch": 1.1995323460639127, |
| "grad_norm": 0.30510592253569463, |
| "learning_rate": 3.3323699421965324e-05, |
| "loss": 0.3579, |
| "step": 770 |
| }, |
| { |
| "epoch": 1.201091192517537, |
| "grad_norm": 0.3705292948456225, |
| "learning_rate": 3.329479768786127e-05, |
| "loss": 0.3655, |
| "step": 771 |
| }, |
| { |
| "epoch": 1.2026500389711614, |
| "grad_norm": 0.32470886193713566, |
| "learning_rate": 3.3265895953757224e-05, |
| "loss": 0.3512, |
| "step": 772 |
| }, |
| { |
| "epoch": 1.2042088854247857, |
| "grad_norm": 0.3267984083578711, |
| "learning_rate": 3.323699421965318e-05, |
| "loss": 0.3559, |
| "step": 773 |
| }, |
| { |
| "epoch": 1.20576773187841, |
| "grad_norm": 0.316676369330289, |
| "learning_rate": 3.320809248554914e-05, |
| "loss": 0.3504, |
| "step": 774 |
| }, |
| { |
| "epoch": 1.2073265783320344, |
| "grad_norm": 0.3083068143586948, |
| "learning_rate": 3.317919075144509e-05, |
| "loss": 0.3497, |
| "step": 775 |
| }, |
| { |
| "epoch": 1.2088854247856586, |
| "grad_norm": 0.3112543283121555, |
| "learning_rate": 3.315028901734104e-05, |
| "loss": 0.3626, |
| "step": 776 |
| }, |
| { |
| "epoch": 1.2104442712392829, |
| "grad_norm": 0.2930169607949705, |
| "learning_rate": 3.312138728323699e-05, |
| "loss": 0.3767, |
| "step": 777 |
| }, |
| { |
| "epoch": 1.2120031176929071, |
| "grad_norm": 0.3740917898852477, |
| "learning_rate": 3.309248554913295e-05, |
| "loss": 0.3623, |
| "step": 778 |
| }, |
| { |
| "epoch": 1.2135619641465316, |
| "grad_norm": 0.2909666222558865, |
| "learning_rate": 3.3063583815028905e-05, |
| "loss": 0.3571, |
| "step": 779 |
| }, |
| { |
| "epoch": 1.2151208106001559, |
| "grad_norm": 0.3349084531099002, |
| "learning_rate": 3.303468208092486e-05, |
| "loss": 0.3809, |
| "step": 780 |
| }, |
| { |
| "epoch": 1.2166796570537801, |
| "grad_norm": 0.32395231786814505, |
| "learning_rate": 3.3005780346820805e-05, |
| "loss": 0.3701, |
| "step": 781 |
| }, |
| { |
| "epoch": 1.2182385035074046, |
| "grad_norm": 0.35641499162371804, |
| "learning_rate": 3.2976878612716765e-05, |
| "loss": 0.3767, |
| "step": 782 |
| }, |
| { |
| "epoch": 1.2197973499610288, |
| "grad_norm": 0.371466143629439, |
| "learning_rate": 3.294797687861272e-05, |
| "loss": 0.3716, |
| "step": 783 |
| }, |
| { |
| "epoch": 1.221356196414653, |
| "grad_norm": 0.3246192454526884, |
| "learning_rate": 3.291907514450867e-05, |
| "loss": 0.3742, |
| "step": 784 |
| }, |
| { |
| "epoch": 1.2229150428682776, |
| "grad_norm": 0.35868229918432093, |
| "learning_rate": 3.2890173410404626e-05, |
| "loss": 0.38, |
| "step": 785 |
| }, |
| { |
| "epoch": 1.2244738893219018, |
| "grad_norm": 0.353282741567031, |
| "learning_rate": 3.286127167630058e-05, |
| "loss": 0.3621, |
| "step": 786 |
| }, |
| { |
| "epoch": 1.226032735775526, |
| "grad_norm": 0.34468589401361616, |
| "learning_rate": 3.283236994219653e-05, |
| "loss": 0.3739, |
| "step": 787 |
| }, |
| { |
| "epoch": 1.2275915822291505, |
| "grad_norm": 0.2790582877348234, |
| "learning_rate": 3.2803468208092486e-05, |
| "loss": 0.3759, |
| "step": 788 |
| }, |
| { |
| "epoch": 1.2291504286827748, |
| "grad_norm": 0.455365887136131, |
| "learning_rate": 3.277456647398844e-05, |
| "loss": 0.371, |
| "step": 789 |
| }, |
| { |
| "epoch": 1.230709275136399, |
| "grad_norm": 0.32167141704971974, |
| "learning_rate": 3.27456647398844e-05, |
| "loss": 0.3728, |
| "step": 790 |
| }, |
| { |
| "epoch": 1.2322681215900233, |
| "grad_norm": 0.3938144396135692, |
| "learning_rate": 3.2716763005780346e-05, |
| "loss": 0.3613, |
| "step": 791 |
| }, |
| { |
| "epoch": 1.2338269680436478, |
| "grad_norm": 0.35340743429724525, |
| "learning_rate": 3.26878612716763e-05, |
| "loss": 0.362, |
| "step": 792 |
| }, |
| { |
| "epoch": 1.235385814497272, |
| "grad_norm": 0.3887479835449903, |
| "learning_rate": 3.265895953757225e-05, |
| "loss": 0.3534, |
| "step": 793 |
| }, |
| { |
| "epoch": 1.2369446609508963, |
| "grad_norm": 0.3810148179704415, |
| "learning_rate": 3.2630057803468213e-05, |
| "loss": 0.3708, |
| "step": 794 |
| }, |
| { |
| "epoch": 1.2385035074045208, |
| "grad_norm": 0.45308168447987623, |
| "learning_rate": 3.260115606936417e-05, |
| "loss": 0.363, |
| "step": 795 |
| }, |
| { |
| "epoch": 1.240062353858145, |
| "grad_norm": 0.3530892057764186, |
| "learning_rate": 3.2572254335260114e-05, |
| "loss": 0.3748, |
| "step": 796 |
| }, |
| { |
| "epoch": 1.2416212003117693, |
| "grad_norm": 0.4323443583689573, |
| "learning_rate": 3.254335260115607e-05, |
| "loss": 0.3727, |
| "step": 797 |
| }, |
| { |
| "epoch": 1.2431800467653935, |
| "grad_norm": 0.3213425990151126, |
| "learning_rate": 3.251445086705203e-05, |
| "loss": 0.391, |
| "step": 798 |
| }, |
| { |
| "epoch": 1.244738893219018, |
| "grad_norm": 0.33159050533895373, |
| "learning_rate": 3.248554913294798e-05, |
| "loss": 0.378, |
| "step": 799 |
| }, |
| { |
| "epoch": 1.2462977396726422, |
| "grad_norm": 0.35918958392112355, |
| "learning_rate": 3.2456647398843934e-05, |
| "loss": 0.372, |
| "step": 800 |
| }, |
| { |
| "epoch": 1.2478565861262665, |
| "grad_norm": 0.3065526765937574, |
| "learning_rate": 3.242774566473988e-05, |
| "loss": 0.3678, |
| "step": 801 |
| }, |
| { |
| "epoch": 1.249415432579891, |
| "grad_norm": 0.3162819465443684, |
| "learning_rate": 3.239884393063584e-05, |
| "loss": 0.3554, |
| "step": 802 |
| }, |
| { |
| "epoch": 1.2509742790335152, |
| "grad_norm": 0.3623636030150004, |
| "learning_rate": 3.2369942196531794e-05, |
| "loss": 0.3742, |
| "step": 803 |
| }, |
| { |
| "epoch": 1.2525331254871395, |
| "grad_norm": 0.2955704493514888, |
| "learning_rate": 3.234104046242775e-05, |
| "loss": 0.3704, |
| "step": 804 |
| }, |
| { |
| "epoch": 1.2540919719407637, |
| "grad_norm": 0.36701054340991607, |
| "learning_rate": 3.23121387283237e-05, |
| "loss": 0.3728, |
| "step": 805 |
| }, |
| { |
| "epoch": 1.2556508183943882, |
| "grad_norm": 0.3328636144558375, |
| "learning_rate": 3.2283236994219655e-05, |
| "loss": 0.3816, |
| "step": 806 |
| }, |
| { |
| "epoch": 1.2572096648480124, |
| "grad_norm": 0.3578865290981359, |
| "learning_rate": 3.225433526011561e-05, |
| "loss": 0.3667, |
| "step": 807 |
| }, |
| { |
| "epoch": 1.258768511301637, |
| "grad_norm": 0.3408336723453489, |
| "learning_rate": 3.222543352601156e-05, |
| "loss": 0.3817, |
| "step": 808 |
| }, |
| { |
| "epoch": 1.2603273577552612, |
| "grad_norm": 0.3216030123089057, |
| "learning_rate": 3.2196531791907515e-05, |
| "loss": 0.3691, |
| "step": 809 |
| }, |
| { |
| "epoch": 1.2618862042088854, |
| "grad_norm": 0.37724572418313224, |
| "learning_rate": 3.216763005780347e-05, |
| "loss": 0.3824, |
| "step": 810 |
| }, |
| { |
| "epoch": 1.2634450506625097, |
| "grad_norm": 0.3056707439719446, |
| "learning_rate": 3.213872832369942e-05, |
| "loss": 0.3718, |
| "step": 811 |
| }, |
| { |
| "epoch": 1.265003897116134, |
| "grad_norm": 0.3614010471408866, |
| "learning_rate": 3.2109826589595375e-05, |
| "loss": 0.378, |
| "step": 812 |
| }, |
| { |
| "epoch": 1.2665627435697584, |
| "grad_norm": 0.30433373829385135, |
| "learning_rate": 3.208092485549133e-05, |
| "loss": 0.4059, |
| "step": 813 |
| }, |
| { |
| "epoch": 1.2681215900233826, |
| "grad_norm": 0.3789679238319672, |
| "learning_rate": 3.205202312138729e-05, |
| "loss": 0.3739, |
| "step": 814 |
| }, |
| { |
| "epoch": 1.2696804364770071, |
| "grad_norm": 0.31090210614669855, |
| "learning_rate": 3.202312138728324e-05, |
| "loss": 0.3796, |
| "step": 815 |
| }, |
| { |
| "epoch": 1.2712392829306314, |
| "grad_norm": 0.3395673581732934, |
| "learning_rate": 3.199421965317919e-05, |
| "loss": 0.3842, |
| "step": 816 |
| }, |
| { |
| "epoch": 1.2727981293842556, |
| "grad_norm": 0.3814013963708837, |
| "learning_rate": 3.196531791907514e-05, |
| "loss": 0.3818, |
| "step": 817 |
| }, |
| { |
| "epoch": 1.2743569758378799, |
| "grad_norm": 0.2881312269831677, |
| "learning_rate": 3.19364161849711e-05, |
| "loss": 0.3755, |
| "step": 818 |
| }, |
| { |
| "epoch": 1.2759158222915044, |
| "grad_norm": 0.3386831635441485, |
| "learning_rate": 3.1907514450867056e-05, |
| "loss": 0.3666, |
| "step": 819 |
| }, |
| { |
| "epoch": 1.2774746687451286, |
| "grad_norm": 0.30697028027634937, |
| "learning_rate": 3.187861271676301e-05, |
| "loss": 0.3843, |
| "step": 820 |
| }, |
| { |
| "epoch": 1.2790335151987529, |
| "grad_norm": 0.29980913900639733, |
| "learning_rate": 3.1849710982658956e-05, |
| "loss": 0.3666, |
| "step": 821 |
| }, |
| { |
| "epoch": 1.2805923616523773, |
| "grad_norm": 0.3327607375144203, |
| "learning_rate": 3.182080924855492e-05, |
| "loss": 0.3858, |
| "step": 822 |
| }, |
| { |
| "epoch": 1.2821512081060016, |
| "grad_norm": 0.29432860827743634, |
| "learning_rate": 3.179190751445087e-05, |
| "loss": 0.3614, |
| "step": 823 |
| }, |
| { |
| "epoch": 1.2837100545596258, |
| "grad_norm": 0.3621120310764712, |
| "learning_rate": 3.1763005780346824e-05, |
| "loss": 0.3737, |
| "step": 824 |
| }, |
| { |
| "epoch": 1.28526890101325, |
| "grad_norm": 0.3445821791263814, |
| "learning_rate": 3.173410404624278e-05, |
| "loss": 0.3672, |
| "step": 825 |
| }, |
| { |
| "epoch": 1.2868277474668746, |
| "grad_norm": 0.2954327384154805, |
| "learning_rate": 3.170520231213873e-05, |
| "loss": 0.3607, |
| "step": 826 |
| }, |
| { |
| "epoch": 1.2883865939204988, |
| "grad_norm": 0.328451959411728, |
| "learning_rate": 3.1676300578034684e-05, |
| "loss": 0.3702, |
| "step": 827 |
| }, |
| { |
| "epoch": 1.2899454403741233, |
| "grad_norm": 0.2944126016476037, |
| "learning_rate": 3.164739884393064e-05, |
| "loss": 0.3889, |
| "step": 828 |
| }, |
| { |
| "epoch": 1.2915042868277475, |
| "grad_norm": 0.3969777724956226, |
| "learning_rate": 3.161849710982659e-05, |
| "loss": 0.3547, |
| "step": 829 |
| }, |
| { |
| "epoch": 1.2930631332813718, |
| "grad_norm": 0.2951235605441101, |
| "learning_rate": 3.1589595375722544e-05, |
| "loss": 0.378, |
| "step": 830 |
| }, |
| { |
| "epoch": 1.294621979734996, |
| "grad_norm": 0.28308239600029, |
| "learning_rate": 3.15606936416185e-05, |
| "loss": 0.3828, |
| "step": 831 |
| }, |
| { |
| "epoch": 1.2961808261886203, |
| "grad_norm": 0.3901401668691733, |
| "learning_rate": 3.153179190751445e-05, |
| "loss": 0.3842, |
| "step": 832 |
| }, |
| { |
| "epoch": 1.2977396726422448, |
| "grad_norm": 0.3137907326756719, |
| "learning_rate": 3.1502890173410405e-05, |
| "loss": 0.3517, |
| "step": 833 |
| }, |
| { |
| "epoch": 1.299298519095869, |
| "grad_norm": 0.30348942495737735, |
| "learning_rate": 3.1473988439306365e-05, |
| "loss": 0.3584, |
| "step": 834 |
| }, |
| { |
| "epoch": 1.3008573655494935, |
| "grad_norm": 0.3134452203812044, |
| "learning_rate": 3.144508670520231e-05, |
| "loss": 0.3668, |
| "step": 835 |
| }, |
| { |
| "epoch": 1.3024162120031177, |
| "grad_norm": 0.3330164303286052, |
| "learning_rate": 3.1416184971098265e-05, |
| "loss": 0.3791, |
| "step": 836 |
| }, |
| { |
| "epoch": 1.303975058456742, |
| "grad_norm": 0.3242958410056007, |
| "learning_rate": 3.138728323699422e-05, |
| "loss": 0.3644, |
| "step": 837 |
| }, |
| { |
| "epoch": 1.3055339049103662, |
| "grad_norm": 0.3372448607214704, |
| "learning_rate": 3.135838150289018e-05, |
| "loss": 0.3755, |
| "step": 838 |
| }, |
| { |
| "epoch": 1.3070927513639907, |
| "grad_norm": 0.34805951436795246, |
| "learning_rate": 3.132947976878613e-05, |
| "loss": 0.4029, |
| "step": 839 |
| }, |
| { |
| "epoch": 1.308651597817615, |
| "grad_norm": 0.3375892298162743, |
| "learning_rate": 3.130057803468208e-05, |
| "loss": 0.3688, |
| "step": 840 |
| }, |
| { |
| "epoch": 1.3102104442712392, |
| "grad_norm": 0.3427372693638577, |
| "learning_rate": 3.127167630057803e-05, |
| "loss": 0.3583, |
| "step": 841 |
| }, |
| { |
| "epoch": 1.3117692907248637, |
| "grad_norm": 0.39528017078512534, |
| "learning_rate": 3.124277456647399e-05, |
| "loss": 0.3815, |
| "step": 842 |
| }, |
| { |
| "epoch": 1.313328137178488, |
| "grad_norm": 0.300797549857878, |
| "learning_rate": 3.1213872832369946e-05, |
| "loss": 0.3789, |
| "step": 843 |
| }, |
| { |
| "epoch": 1.3148869836321122, |
| "grad_norm": 0.37253735153697626, |
| "learning_rate": 3.11849710982659e-05, |
| "loss": 0.38, |
| "step": 844 |
| }, |
| { |
| "epoch": 1.3164458300857365, |
| "grad_norm": 0.3636396680715121, |
| "learning_rate": 3.115606936416185e-05, |
| "loss": 0.3584, |
| "step": 845 |
| }, |
| { |
| "epoch": 1.318004676539361, |
| "grad_norm": 0.3225603467896192, |
| "learning_rate": 3.1127167630057806e-05, |
| "loss": 0.3801, |
| "step": 846 |
| }, |
| { |
| "epoch": 1.3195635229929852, |
| "grad_norm": 0.3923444106986942, |
| "learning_rate": 3.109826589595376e-05, |
| "loss": 0.3838, |
| "step": 847 |
| }, |
| { |
| "epoch": 1.3211223694466094, |
| "grad_norm": 0.29792571705489457, |
| "learning_rate": 3.106936416184971e-05, |
| "loss": 0.3877, |
| "step": 848 |
| }, |
| { |
| "epoch": 1.322681215900234, |
| "grad_norm": 0.41101804688404436, |
| "learning_rate": 3.1040462427745667e-05, |
| "loss": 0.3711, |
| "step": 849 |
| }, |
| { |
| "epoch": 1.3242400623538582, |
| "grad_norm": 0.31527820005688345, |
| "learning_rate": 3.101156069364162e-05, |
| "loss": 0.3672, |
| "step": 850 |
| }, |
| { |
| "epoch": 1.3257989088074824, |
| "grad_norm": 0.3247891491953755, |
| "learning_rate": 3.0982658959537573e-05, |
| "loss": 0.3875, |
| "step": 851 |
| }, |
| { |
| "epoch": 1.3273577552611067, |
| "grad_norm": 0.31678580564634845, |
| "learning_rate": 3.095375722543353e-05, |
| "loss": 0.3767, |
| "step": 852 |
| }, |
| { |
| "epoch": 1.3289166017147311, |
| "grad_norm": 0.42289684401021704, |
| "learning_rate": 3.092485549132948e-05, |
| "loss": 0.3896, |
| "step": 853 |
| }, |
| { |
| "epoch": 1.3304754481683554, |
| "grad_norm": 0.34344843446611606, |
| "learning_rate": 3.089595375722544e-05, |
| "loss": 0.3837, |
| "step": 854 |
| }, |
| { |
| "epoch": 1.3320342946219799, |
| "grad_norm": 0.46291047151845677, |
| "learning_rate": 3.086705202312139e-05, |
| "loss": 0.3757, |
| "step": 855 |
| }, |
| { |
| "epoch": 1.3335931410756041, |
| "grad_norm": 0.30760449980458326, |
| "learning_rate": 3.083815028901734e-05, |
| "loss": 0.3784, |
| "step": 856 |
| }, |
| { |
| "epoch": 1.3351519875292284, |
| "grad_norm": 0.36618124968476307, |
| "learning_rate": 3.0809248554913294e-05, |
| "loss": 0.3646, |
| "step": 857 |
| }, |
| { |
| "epoch": 1.3367108339828526, |
| "grad_norm": 0.3517633452852137, |
| "learning_rate": 3.0780346820809254e-05, |
| "loss": 0.3678, |
| "step": 858 |
| }, |
| { |
| "epoch": 1.3382696804364769, |
| "grad_norm": 0.36650573052741386, |
| "learning_rate": 3.075144508670521e-05, |
| "loss": 0.3666, |
| "step": 859 |
| }, |
| { |
| "epoch": 1.3398285268901013, |
| "grad_norm": 0.32309525776364156, |
| "learning_rate": 3.0722543352601154e-05, |
| "loss": 0.3596, |
| "step": 860 |
| }, |
| { |
| "epoch": 1.3413873733437256, |
| "grad_norm": 0.2918364251771976, |
| "learning_rate": 3.069364161849711e-05, |
| "loss": 0.363, |
| "step": 861 |
| }, |
| { |
| "epoch": 1.34294621979735, |
| "grad_norm": 0.34546628975016486, |
| "learning_rate": 3.066473988439307e-05, |
| "loss": 0.3553, |
| "step": 862 |
| }, |
| { |
| "epoch": 1.3445050662509743, |
| "grad_norm": 0.2923967277989431, |
| "learning_rate": 3.063583815028902e-05, |
| "loss": 0.3489, |
| "step": 863 |
| }, |
| { |
| "epoch": 1.3460639127045986, |
| "grad_norm": 0.29914296381783523, |
| "learning_rate": 3.0606936416184975e-05, |
| "loss": 0.3787, |
| "step": 864 |
| }, |
| { |
| "epoch": 1.3476227591582228, |
| "grad_norm": 0.31943444292620105, |
| "learning_rate": 3.057803468208092e-05, |
| "loss": 0.3846, |
| "step": 865 |
| }, |
| { |
| "epoch": 1.3491816056118473, |
| "grad_norm": 0.29039633067671383, |
| "learning_rate": 3.0549132947976875e-05, |
| "loss": 0.3639, |
| "step": 866 |
| }, |
| { |
| "epoch": 1.3507404520654716, |
| "grad_norm": 0.3421893491096786, |
| "learning_rate": 3.0520231213872835e-05, |
| "loss": 0.3734, |
| "step": 867 |
| }, |
| { |
| "epoch": 1.3522992985190958, |
| "grad_norm": 0.3170017953585186, |
| "learning_rate": 3.049132947976879e-05, |
| "loss": 0.369, |
| "step": 868 |
| }, |
| { |
| "epoch": 1.3538581449727203, |
| "grad_norm": 0.31067447367056183, |
| "learning_rate": 3.046242774566474e-05, |
| "loss": 0.3879, |
| "step": 869 |
| }, |
| { |
| "epoch": 1.3554169914263445, |
| "grad_norm": 0.3212256602786557, |
| "learning_rate": 3.0433526011560692e-05, |
| "loss": 0.3805, |
| "step": 870 |
| }, |
| { |
| "epoch": 1.3569758378799688, |
| "grad_norm": 0.32052080705580777, |
| "learning_rate": 3.040462427745665e-05, |
| "loss": 0.3879, |
| "step": 871 |
| }, |
| { |
| "epoch": 1.358534684333593, |
| "grad_norm": 0.27323028010679573, |
| "learning_rate": 3.0375722543352603e-05, |
| "loss": 0.3753, |
| "step": 872 |
| }, |
| { |
| "epoch": 1.3600935307872175, |
| "grad_norm": 0.31472694270289026, |
| "learning_rate": 3.0346820809248556e-05, |
| "loss": 0.3676, |
| "step": 873 |
| }, |
| { |
| "epoch": 1.3616523772408418, |
| "grad_norm": 0.2817828326023501, |
| "learning_rate": 3.0317919075144506e-05, |
| "loss": 0.3602, |
| "step": 874 |
| }, |
| { |
| "epoch": 1.363211223694466, |
| "grad_norm": 0.31483550680330125, |
| "learning_rate": 3.0289017341040466e-05, |
| "loss": 0.3801, |
| "step": 875 |
| }, |
| { |
| "epoch": 1.3647700701480905, |
| "grad_norm": 0.31361863279521, |
| "learning_rate": 3.0260115606936416e-05, |
| "loss": 0.3574, |
| "step": 876 |
| }, |
| { |
| "epoch": 1.3663289166017147, |
| "grad_norm": 0.3795186696828694, |
| "learning_rate": 3.023121387283237e-05, |
| "loss": 0.3608, |
| "step": 877 |
| }, |
| { |
| "epoch": 1.367887763055339, |
| "grad_norm": 0.3024681697597954, |
| "learning_rate": 3.0202312138728323e-05, |
| "loss": 0.3597, |
| "step": 878 |
| }, |
| { |
| "epoch": 1.3694466095089632, |
| "grad_norm": 0.2839484436314578, |
| "learning_rate": 3.017341040462428e-05, |
| "loss": 0.354, |
| "step": 879 |
| }, |
| { |
| "epoch": 1.3710054559625877, |
| "grad_norm": 0.2989185656953038, |
| "learning_rate": 3.0144508670520234e-05, |
| "loss": 0.3997, |
| "step": 880 |
| }, |
| { |
| "epoch": 1.372564302416212, |
| "grad_norm": 0.3332125838479124, |
| "learning_rate": 3.0115606936416184e-05, |
| "loss": 0.3866, |
| "step": 881 |
| }, |
| { |
| "epoch": 1.3741231488698364, |
| "grad_norm": 0.32196595663758937, |
| "learning_rate": 3.0086705202312137e-05, |
| "loss": 0.3703, |
| "step": 882 |
| }, |
| { |
| "epoch": 1.3756819953234607, |
| "grad_norm": 0.3218885635026921, |
| "learning_rate": 3.0057803468208097e-05, |
| "loss": 0.3644, |
| "step": 883 |
| }, |
| { |
| "epoch": 1.377240841777085, |
| "grad_norm": 0.3653050901483605, |
| "learning_rate": 3.0028901734104047e-05, |
| "loss": 0.3589, |
| "step": 884 |
| }, |
| { |
| "epoch": 1.3787996882307092, |
| "grad_norm": 0.3686597692797899, |
| "learning_rate": 3e-05, |
| "loss": 0.3691, |
| "step": 885 |
| }, |
| { |
| "epoch": 1.3803585346843337, |
| "grad_norm": 0.31536889278803354, |
| "learning_rate": 2.9971098265895954e-05, |
| "loss": 0.3719, |
| "step": 886 |
| }, |
| { |
| "epoch": 1.381917381137958, |
| "grad_norm": 0.33299163893566386, |
| "learning_rate": 2.994219653179191e-05, |
| "loss": 0.376, |
| "step": 887 |
| }, |
| { |
| "epoch": 1.3834762275915822, |
| "grad_norm": 0.3637154017248889, |
| "learning_rate": 2.9913294797687864e-05, |
| "loss": 0.3763, |
| "step": 888 |
| }, |
| { |
| "epoch": 1.3850350740452066, |
| "grad_norm": 0.31459018536275674, |
| "learning_rate": 2.9884393063583815e-05, |
| "loss": 0.3719, |
| "step": 889 |
| }, |
| { |
| "epoch": 1.386593920498831, |
| "grad_norm": 0.38863024038763483, |
| "learning_rate": 2.9855491329479768e-05, |
| "loss": 0.3897, |
| "step": 890 |
| }, |
| { |
| "epoch": 1.3881527669524552, |
| "grad_norm": 0.2713739072144362, |
| "learning_rate": 2.9826589595375725e-05, |
| "loss": 0.3774, |
| "step": 891 |
| }, |
| { |
| "epoch": 1.3897116134060794, |
| "grad_norm": 0.3161296579386695, |
| "learning_rate": 2.9797687861271678e-05, |
| "loss": 0.367, |
| "step": 892 |
| }, |
| { |
| "epoch": 1.3912704598597039, |
| "grad_norm": 0.32321623162651764, |
| "learning_rate": 2.9768786127167632e-05, |
| "loss": 0.3908, |
| "step": 893 |
| }, |
| { |
| "epoch": 1.3928293063133281, |
| "grad_norm": 0.28969209370393095, |
| "learning_rate": 2.9739884393063582e-05, |
| "loss": 0.3715, |
| "step": 894 |
| }, |
| { |
| "epoch": 1.3943881527669524, |
| "grad_norm": 0.35841207954661103, |
| "learning_rate": 2.9710982658959542e-05, |
| "loss": 0.3714, |
| "step": 895 |
| }, |
| { |
| "epoch": 1.3959469992205769, |
| "grad_norm": 0.3324679248139433, |
| "learning_rate": 2.9682080924855492e-05, |
| "loss": 0.3717, |
| "step": 896 |
| }, |
| { |
| "epoch": 1.397505845674201, |
| "grad_norm": 0.43923218037881906, |
| "learning_rate": 2.9653179190751446e-05, |
| "loss": 0.3619, |
| "step": 897 |
| }, |
| { |
| "epoch": 1.3990646921278254, |
| "grad_norm": 0.31489100441245577, |
| "learning_rate": 2.96242774566474e-05, |
| "loss": 0.3523, |
| "step": 898 |
| }, |
| { |
| "epoch": 1.4006235385814496, |
| "grad_norm": 0.3292376441511829, |
| "learning_rate": 2.9595375722543356e-05, |
| "loss": 0.3661, |
| "step": 899 |
| }, |
| { |
| "epoch": 1.402182385035074, |
| "grad_norm": 0.2988144575593582, |
| "learning_rate": 2.956647398843931e-05, |
| "loss": 0.3749, |
| "step": 900 |
| }, |
| { |
| "epoch": 1.4037412314886983, |
| "grad_norm": 0.41020723022893746, |
| "learning_rate": 2.953757225433526e-05, |
| "loss": 0.366, |
| "step": 901 |
| }, |
| { |
| "epoch": 1.4053000779423228, |
| "grad_norm": 0.310023862219356, |
| "learning_rate": 2.9508670520231213e-05, |
| "loss": 0.3884, |
| "step": 902 |
| }, |
| { |
| "epoch": 1.406858924395947, |
| "grad_norm": 0.3454601431005727, |
| "learning_rate": 2.947976878612717e-05, |
| "loss": 0.3634, |
| "step": 903 |
| }, |
| { |
| "epoch": 1.4084177708495713, |
| "grad_norm": 0.34356963535549423, |
| "learning_rate": 2.9450867052023123e-05, |
| "loss": 0.3588, |
| "step": 904 |
| }, |
| { |
| "epoch": 1.4099766173031956, |
| "grad_norm": 0.3283997995226211, |
| "learning_rate": 2.9421965317919076e-05, |
| "loss": 0.3637, |
| "step": 905 |
| }, |
| { |
| "epoch": 1.4115354637568198, |
| "grad_norm": 0.3554228840104719, |
| "learning_rate": 2.9393063583815027e-05, |
| "loss": 0.343, |
| "step": 906 |
| }, |
| { |
| "epoch": 1.4130943102104443, |
| "grad_norm": 0.36071645685077747, |
| "learning_rate": 2.9364161849710987e-05, |
| "loss": 0.3596, |
| "step": 907 |
| }, |
| { |
| "epoch": 1.4146531566640685, |
| "grad_norm": 0.3112359765865261, |
| "learning_rate": 2.9335260115606937e-05, |
| "loss": 0.3526, |
| "step": 908 |
| }, |
| { |
| "epoch": 1.416212003117693, |
| "grad_norm": 0.34359020030558024, |
| "learning_rate": 2.930635838150289e-05, |
| "loss": 0.3844, |
| "step": 909 |
| }, |
| { |
| "epoch": 1.4177708495713173, |
| "grad_norm": 0.3008878572757736, |
| "learning_rate": 2.9277456647398844e-05, |
| "loss": 0.3772, |
| "step": 910 |
| }, |
| { |
| "epoch": 1.4193296960249415, |
| "grad_norm": 0.349466144708206, |
| "learning_rate": 2.92485549132948e-05, |
| "loss": 0.3473, |
| "step": 911 |
| }, |
| { |
| "epoch": 1.4208885424785658, |
| "grad_norm": 0.3382202790340046, |
| "learning_rate": 2.9219653179190754e-05, |
| "loss": 0.3757, |
| "step": 912 |
| }, |
| { |
| "epoch": 1.4224473889321902, |
| "grad_norm": 0.32188902141101255, |
| "learning_rate": 2.9190751445086707e-05, |
| "loss": 0.3712, |
| "step": 913 |
| }, |
| { |
| "epoch": 1.4240062353858145, |
| "grad_norm": 0.29754261721596836, |
| "learning_rate": 2.9161849710982657e-05, |
| "loss": 0.3674, |
| "step": 914 |
| }, |
| { |
| "epoch": 1.4255650818394388, |
| "grad_norm": 0.41389196935864453, |
| "learning_rate": 2.9132947976878618e-05, |
| "loss": 0.3835, |
| "step": 915 |
| }, |
| { |
| "epoch": 1.4271239282930632, |
| "grad_norm": 0.2943029270951012, |
| "learning_rate": 2.9104046242774568e-05, |
| "loss": 0.359, |
| "step": 916 |
| }, |
| { |
| "epoch": 1.4286827747466875, |
| "grad_norm": 0.28723479498577376, |
| "learning_rate": 2.907514450867052e-05, |
| "loss": 0.3695, |
| "step": 917 |
| }, |
| { |
| "epoch": 1.4302416212003117, |
| "grad_norm": 0.35137358020533505, |
| "learning_rate": 2.9046242774566475e-05, |
| "loss": 0.3626, |
| "step": 918 |
| }, |
| { |
| "epoch": 1.431800467653936, |
| "grad_norm": 0.3198068865556259, |
| "learning_rate": 2.901734104046243e-05, |
| "loss": 0.3855, |
| "step": 919 |
| }, |
| { |
| "epoch": 1.4333593141075605, |
| "grad_norm": 0.30362892326696816, |
| "learning_rate": 2.8988439306358385e-05, |
| "loss": 0.3882, |
| "step": 920 |
| }, |
| { |
| "epoch": 1.4349181605611847, |
| "grad_norm": 0.3191735883892925, |
| "learning_rate": 2.8959537572254335e-05, |
| "loss": 0.381, |
| "step": 921 |
| }, |
| { |
| "epoch": 1.436477007014809, |
| "grad_norm": 0.3034703934282239, |
| "learning_rate": 2.893063583815029e-05, |
| "loss": 0.3756, |
| "step": 922 |
| }, |
| { |
| "epoch": 1.4380358534684334, |
| "grad_norm": 0.32215086435520635, |
| "learning_rate": 2.8901734104046245e-05, |
| "loss": 0.3505, |
| "step": 923 |
| }, |
| { |
| "epoch": 1.4395946999220577, |
| "grad_norm": 0.3038326438989388, |
| "learning_rate": 2.88728323699422e-05, |
| "loss": 0.3578, |
| "step": 924 |
| }, |
| { |
| "epoch": 1.441153546375682, |
| "grad_norm": 0.32683148429237635, |
| "learning_rate": 2.8843930635838152e-05, |
| "loss": 0.358, |
| "step": 925 |
| }, |
| { |
| "epoch": 1.4427123928293062, |
| "grad_norm": 0.3682668995271643, |
| "learning_rate": 2.8815028901734102e-05, |
| "loss": 0.3693, |
| "step": 926 |
| }, |
| { |
| "epoch": 1.4442712392829307, |
| "grad_norm": 0.2743528467933646, |
| "learning_rate": 2.8786127167630062e-05, |
| "loss": 0.366, |
| "step": 927 |
| }, |
| { |
| "epoch": 1.445830085736555, |
| "grad_norm": 0.3966807244623201, |
| "learning_rate": 2.8757225433526013e-05, |
| "loss": 0.386, |
| "step": 928 |
| }, |
| { |
| "epoch": 1.4473889321901794, |
| "grad_norm": 0.32237328497617107, |
| "learning_rate": 2.8728323699421966e-05, |
| "loss": 0.3659, |
| "step": 929 |
| }, |
| { |
| "epoch": 1.4489477786438036, |
| "grad_norm": 0.31054470894310937, |
| "learning_rate": 2.869942196531792e-05, |
| "loss": 0.3512, |
| "step": 930 |
| }, |
| { |
| "epoch": 1.450506625097428, |
| "grad_norm": 0.43587348795942593, |
| "learning_rate": 2.8670520231213876e-05, |
| "loss": 0.3673, |
| "step": 931 |
| }, |
| { |
| "epoch": 1.4520654715510521, |
| "grad_norm": 0.2889720776490319, |
| "learning_rate": 2.864161849710983e-05, |
| "loss": 0.3834, |
| "step": 932 |
| }, |
| { |
| "epoch": 1.4536243180046766, |
| "grad_norm": 0.41243764294739627, |
| "learning_rate": 2.861271676300578e-05, |
| "loss": 0.3718, |
| "step": 933 |
| }, |
| { |
| "epoch": 1.4551831644583009, |
| "grad_norm": 0.3609717686516069, |
| "learning_rate": 2.8583815028901733e-05, |
| "loss": 0.3666, |
| "step": 934 |
| }, |
| { |
| "epoch": 1.4567420109119251, |
| "grad_norm": 0.3210455895295045, |
| "learning_rate": 2.855491329479769e-05, |
| "loss": 0.3512, |
| "step": 935 |
| }, |
| { |
| "epoch": 1.4583008573655496, |
| "grad_norm": 0.36374442891877706, |
| "learning_rate": 2.8526011560693643e-05, |
| "loss": 0.3789, |
| "step": 936 |
| }, |
| { |
| "epoch": 1.4598597038191738, |
| "grad_norm": 0.302661164884665, |
| "learning_rate": 2.8497109826589597e-05, |
| "loss": 0.3849, |
| "step": 937 |
| }, |
| { |
| "epoch": 1.461418550272798, |
| "grad_norm": 0.2922986064549059, |
| "learning_rate": 2.846820809248555e-05, |
| "loss": 0.3799, |
| "step": 938 |
| }, |
| { |
| "epoch": 1.4629773967264224, |
| "grad_norm": 0.3106937568151618, |
| "learning_rate": 2.8439306358381507e-05, |
| "loss": 0.3934, |
| "step": 939 |
| }, |
| { |
| "epoch": 1.4645362431800468, |
| "grad_norm": 0.3894062278825112, |
| "learning_rate": 2.841040462427746e-05, |
| "loss": 0.366, |
| "step": 940 |
| }, |
| { |
| "epoch": 1.466095089633671, |
| "grad_norm": 0.3094641338569837, |
| "learning_rate": 2.838150289017341e-05, |
| "loss": 0.3667, |
| "step": 941 |
| }, |
| { |
| "epoch": 1.4676539360872953, |
| "grad_norm": 0.2993216786356107, |
| "learning_rate": 2.8352601156069364e-05, |
| "loss": 0.3778, |
| "step": 942 |
| }, |
| { |
| "epoch": 1.4692127825409198, |
| "grad_norm": 0.290935367620464, |
| "learning_rate": 2.832369942196532e-05, |
| "loss": 0.3705, |
| "step": 943 |
| }, |
| { |
| "epoch": 1.470771628994544, |
| "grad_norm": 0.3157318710559096, |
| "learning_rate": 2.8294797687861274e-05, |
| "loss": 0.3595, |
| "step": 944 |
| }, |
| { |
| "epoch": 1.4723304754481683, |
| "grad_norm": 0.2890101359777515, |
| "learning_rate": 2.8265895953757228e-05, |
| "loss": 0.3773, |
| "step": 945 |
| }, |
| { |
| "epoch": 1.4738893219017926, |
| "grad_norm": 0.3502615396853657, |
| "learning_rate": 2.8236994219653178e-05, |
| "loss": 0.3605, |
| "step": 946 |
| }, |
| { |
| "epoch": 1.475448168355417, |
| "grad_norm": 0.34510628572007923, |
| "learning_rate": 2.8208092485549138e-05, |
| "loss": 0.3567, |
| "step": 947 |
| }, |
| { |
| "epoch": 1.4770070148090413, |
| "grad_norm": 0.30987576563699276, |
| "learning_rate": 2.8179190751445088e-05, |
| "loss": 0.3876, |
| "step": 948 |
| }, |
| { |
| "epoch": 1.4785658612626658, |
| "grad_norm": 0.38023748103458505, |
| "learning_rate": 2.815028901734104e-05, |
| "loss": 0.3692, |
| "step": 949 |
| }, |
| { |
| "epoch": 1.48012470771629, |
| "grad_norm": 0.30194848904733007, |
| "learning_rate": 2.8121387283236995e-05, |
| "loss": 0.3639, |
| "step": 950 |
| }, |
| { |
| "epoch": 1.4816835541699143, |
| "grad_norm": 0.36247398033072276, |
| "learning_rate": 2.8092485549132952e-05, |
| "loss": 0.3647, |
| "step": 951 |
| }, |
| { |
| "epoch": 1.4832424006235385, |
| "grad_norm": 0.3225418884761915, |
| "learning_rate": 2.8063583815028905e-05, |
| "loss": 0.3728, |
| "step": 952 |
| }, |
| { |
| "epoch": 1.4848012470771628, |
| "grad_norm": 0.33143640861599943, |
| "learning_rate": 2.8034682080924855e-05, |
| "loss": 0.3786, |
| "step": 953 |
| }, |
| { |
| "epoch": 1.4863600935307872, |
| "grad_norm": 0.37859874084366085, |
| "learning_rate": 2.800578034682081e-05, |
| "loss": 0.3676, |
| "step": 954 |
| }, |
| { |
| "epoch": 1.4879189399844115, |
| "grad_norm": 0.376572202242789, |
| "learning_rate": 2.7976878612716766e-05, |
| "loss": 0.3741, |
| "step": 955 |
| }, |
| { |
| "epoch": 1.489477786438036, |
| "grad_norm": 0.33305139704253905, |
| "learning_rate": 2.794797687861272e-05, |
| "loss": 0.3676, |
| "step": 956 |
| }, |
| { |
| "epoch": 1.4910366328916602, |
| "grad_norm": 0.5151154703791708, |
| "learning_rate": 2.7919075144508673e-05, |
| "loss": 0.3712, |
| "step": 957 |
| }, |
| { |
| "epoch": 1.4925954793452845, |
| "grad_norm": 0.289014937897802, |
| "learning_rate": 2.7890173410404623e-05, |
| "loss": 0.3522, |
| "step": 958 |
| }, |
| { |
| "epoch": 1.4941543257989087, |
| "grad_norm": 0.4050759220300978, |
| "learning_rate": 2.7861271676300583e-05, |
| "loss": 0.3695, |
| "step": 959 |
| }, |
| { |
| "epoch": 1.4957131722525332, |
| "grad_norm": 0.4162682771540926, |
| "learning_rate": 2.7832369942196533e-05, |
| "loss": 0.3864, |
| "step": 960 |
| }, |
| { |
| "epoch": 1.4972720187061574, |
| "grad_norm": 0.3267115752346114, |
| "learning_rate": 2.7803468208092486e-05, |
| "loss": 0.398, |
| "step": 961 |
| }, |
| { |
| "epoch": 1.4988308651597817, |
| "grad_norm": 0.46629831946057043, |
| "learning_rate": 2.777456647398844e-05, |
| "loss": 0.3667, |
| "step": 962 |
| }, |
| { |
| "epoch": 1.5003897116134062, |
| "grad_norm": 0.3126837897461622, |
| "learning_rate": 2.7745664739884393e-05, |
| "loss": 0.3903, |
| "step": 963 |
| }, |
| { |
| "epoch": 1.5019485580670304, |
| "grad_norm": 0.3025185534818139, |
| "learning_rate": 2.771676300578035e-05, |
| "loss": 0.4, |
| "step": 964 |
| }, |
| { |
| "epoch": 1.5035074045206547, |
| "grad_norm": 0.4008151783936587, |
| "learning_rate": 2.7687861271676304e-05, |
| "loss": 0.3839, |
| "step": 965 |
| }, |
| { |
| "epoch": 1.505066250974279, |
| "grad_norm": 0.26063162497897896, |
| "learning_rate": 2.7658959537572254e-05, |
| "loss": 0.361, |
| "step": 966 |
| }, |
| { |
| "epoch": 1.5066250974279034, |
| "grad_norm": 0.2888143269712675, |
| "learning_rate": 2.7630057803468207e-05, |
| "loss": 0.365, |
| "step": 967 |
| }, |
| { |
| "epoch": 1.5081839438815277, |
| "grad_norm": 0.3227561551129935, |
| "learning_rate": 2.7601156069364164e-05, |
| "loss": 0.3867, |
| "step": 968 |
| }, |
| { |
| "epoch": 1.5097427903351521, |
| "grad_norm": 0.29415161701639214, |
| "learning_rate": 2.7572254335260117e-05, |
| "loss": 0.3766, |
| "step": 969 |
| }, |
| { |
| "epoch": 1.5113016367887764, |
| "grad_norm": 0.30622334937549417, |
| "learning_rate": 2.754335260115607e-05, |
| "loss": 0.362, |
| "step": 970 |
| }, |
| { |
| "epoch": 1.5128604832424006, |
| "grad_norm": 0.29765107975268507, |
| "learning_rate": 2.751445086705202e-05, |
| "loss": 0.3691, |
| "step": 971 |
| }, |
| { |
| "epoch": 1.5144193296960249, |
| "grad_norm": 0.28887375199419957, |
| "learning_rate": 2.748554913294798e-05, |
| "loss": 0.376, |
| "step": 972 |
| }, |
| { |
| "epoch": 1.5159781761496491, |
| "grad_norm": 0.30491806233389507, |
| "learning_rate": 2.745664739884393e-05, |
| "loss": 0.3767, |
| "step": 973 |
| }, |
| { |
| "epoch": 1.5175370226032736, |
| "grad_norm": 0.2645879031934002, |
| "learning_rate": 2.7427745664739885e-05, |
| "loss": 0.3812, |
| "step": 974 |
| }, |
| { |
| "epoch": 1.5190958690568979, |
| "grad_norm": 0.28565659154478995, |
| "learning_rate": 2.7398843930635838e-05, |
| "loss": 0.3785, |
| "step": 975 |
| }, |
| { |
| "epoch": 1.5206547155105223, |
| "grad_norm": 0.28811620089387, |
| "learning_rate": 2.7369942196531795e-05, |
| "loss": 0.3537, |
| "step": 976 |
| }, |
| { |
| "epoch": 1.5222135619641466, |
| "grad_norm": 0.2577090443673922, |
| "learning_rate": 2.734104046242775e-05, |
| "loss": 0.355, |
| "step": 977 |
| }, |
| { |
| "epoch": 1.5237724084177708, |
| "grad_norm": 0.2624546459669209, |
| "learning_rate": 2.73121387283237e-05, |
| "loss": 0.3665, |
| "step": 978 |
| }, |
| { |
| "epoch": 1.525331254871395, |
| "grad_norm": 0.29221496113302586, |
| "learning_rate": 2.7283236994219652e-05, |
| "loss": 0.3683, |
| "step": 979 |
| }, |
| { |
| "epoch": 1.5268901013250193, |
| "grad_norm": 0.2680361810204351, |
| "learning_rate": 2.725433526011561e-05, |
| "loss": 0.3699, |
| "step": 980 |
| }, |
| { |
| "epoch": 1.5284489477786438, |
| "grad_norm": 0.3207752203807436, |
| "learning_rate": 2.7225433526011562e-05, |
| "loss": 0.3741, |
| "step": 981 |
| }, |
| { |
| "epoch": 1.530007794232268, |
| "grad_norm": 0.282031401318972, |
| "learning_rate": 2.7196531791907516e-05, |
| "loss": 0.3853, |
| "step": 982 |
| }, |
| { |
| "epoch": 1.5315666406858925, |
| "grad_norm": 0.27982574114644004, |
| "learning_rate": 2.7167630057803466e-05, |
| "loss": 0.3832, |
| "step": 983 |
| }, |
| { |
| "epoch": 1.5331254871395168, |
| "grad_norm": 0.2804241252514004, |
| "learning_rate": 2.7138728323699426e-05, |
| "loss": 0.3593, |
| "step": 984 |
| }, |
| { |
| "epoch": 1.534684333593141, |
| "grad_norm": 0.2628827070596599, |
| "learning_rate": 2.7109826589595376e-05, |
| "loss": 0.3669, |
| "step": 985 |
| }, |
| { |
| "epoch": 1.5362431800467653, |
| "grad_norm": 0.29615449790999526, |
| "learning_rate": 2.708092485549133e-05, |
| "loss": 0.3559, |
| "step": 986 |
| }, |
| { |
| "epoch": 1.5378020265003896, |
| "grad_norm": 0.28549992629405135, |
| "learning_rate": 2.7052023121387283e-05, |
| "loss": 0.3859, |
| "step": 987 |
| }, |
| { |
| "epoch": 1.539360872954014, |
| "grad_norm": 0.34007692401926715, |
| "learning_rate": 2.702312138728324e-05, |
| "loss": 0.3715, |
| "step": 988 |
| }, |
| { |
| "epoch": 1.5409197194076385, |
| "grad_norm": 0.40294192708679744, |
| "learning_rate": 2.6994219653179193e-05, |
| "loss": 0.3821, |
| "step": 989 |
| }, |
| { |
| "epoch": 1.5424785658612628, |
| "grad_norm": 0.265218381447006, |
| "learning_rate": 2.6965317919075143e-05, |
| "loss": 0.369, |
| "step": 990 |
| }, |
| { |
| "epoch": 1.544037412314887, |
| "grad_norm": 0.37011390060826654, |
| "learning_rate": 2.6936416184971097e-05, |
| "loss": 0.3643, |
| "step": 991 |
| }, |
| { |
| "epoch": 1.5455962587685113, |
| "grad_norm": 0.31157711298908103, |
| "learning_rate": 2.6907514450867057e-05, |
| "loss": 0.3688, |
| "step": 992 |
| }, |
| { |
| "epoch": 1.5471551052221355, |
| "grad_norm": 0.30367205453380675, |
| "learning_rate": 2.6878612716763007e-05, |
| "loss": 0.362, |
| "step": 993 |
| }, |
| { |
| "epoch": 1.54871395167576, |
| "grad_norm": 0.40306120286710007, |
| "learning_rate": 2.684971098265896e-05, |
| "loss": 0.3679, |
| "step": 994 |
| }, |
| { |
| "epoch": 1.5502727981293842, |
| "grad_norm": 0.3044105968804601, |
| "learning_rate": 2.6820809248554914e-05, |
| "loss": 0.3719, |
| "step": 995 |
| }, |
| { |
| "epoch": 1.5518316445830087, |
| "grad_norm": 0.3384424306040368, |
| "learning_rate": 2.679190751445087e-05, |
| "loss": 0.3669, |
| "step": 996 |
| }, |
| { |
| "epoch": 1.553390491036633, |
| "grad_norm": 0.2879848276257589, |
| "learning_rate": 2.6763005780346824e-05, |
| "loss": 0.3698, |
| "step": 997 |
| }, |
| { |
| "epoch": 1.5549493374902572, |
| "grad_norm": 0.3215450830190318, |
| "learning_rate": 2.6734104046242774e-05, |
| "loss": 0.3687, |
| "step": 998 |
| }, |
| { |
| "epoch": 1.5565081839438815, |
| "grad_norm": 0.2820349630596858, |
| "learning_rate": 2.6705202312138728e-05, |
| "loss": 0.3665, |
| "step": 999 |
| }, |
| { |
| "epoch": 1.5580670303975057, |
| "grad_norm": 0.3606469331809056, |
| "learning_rate": 2.6676300578034684e-05, |
| "loss": 0.351, |
| "step": 1000 |
| }, |
| { |
| "epoch": 1.5596258768511302, |
| "grad_norm": 0.3028843154170325, |
| "learning_rate": 2.6647398843930638e-05, |
| "loss": 0.3736, |
| "step": 1001 |
| }, |
| { |
| "epoch": 1.5611847233047544, |
| "grad_norm": 0.3174824654872977, |
| "learning_rate": 2.661849710982659e-05, |
| "loss": 0.3629, |
| "step": 1002 |
| }, |
| { |
| "epoch": 1.562743569758379, |
| "grad_norm": 0.34721718744253455, |
| "learning_rate": 2.658959537572254e-05, |
| "loss": 0.379, |
| "step": 1003 |
| }, |
| { |
| "epoch": 1.5643024162120032, |
| "grad_norm": 0.2978398739891535, |
| "learning_rate": 2.65606936416185e-05, |
| "loss": 0.3683, |
| "step": 1004 |
| }, |
| { |
| "epoch": 1.5658612626656274, |
| "grad_norm": 0.39334684334782377, |
| "learning_rate": 2.653179190751445e-05, |
| "loss": 0.3569, |
| "step": 1005 |
| }, |
| { |
| "epoch": 1.5674201091192517, |
| "grad_norm": 0.2717962848929705, |
| "learning_rate": 2.6502890173410405e-05, |
| "loss": 0.3561, |
| "step": 1006 |
| }, |
| { |
| "epoch": 1.568978955572876, |
| "grad_norm": 0.3736251973751076, |
| "learning_rate": 2.647398843930636e-05, |
| "loss": 0.3669, |
| "step": 1007 |
| }, |
| { |
| "epoch": 1.5705378020265004, |
| "grad_norm": 0.29899355625093155, |
| "learning_rate": 2.6445086705202315e-05, |
| "loss": 0.3604, |
| "step": 1008 |
| }, |
| { |
| "epoch": 1.5720966484801249, |
| "grad_norm": 0.38876556071538754, |
| "learning_rate": 2.641618497109827e-05, |
| "loss": 0.3605, |
| "step": 1009 |
| }, |
| { |
| "epoch": 1.5736554949337491, |
| "grad_norm": 0.325884443622183, |
| "learning_rate": 2.638728323699422e-05, |
| "loss": 0.3858, |
| "step": 1010 |
| }, |
| { |
| "epoch": 1.5752143413873734, |
| "grad_norm": 0.3267871100560373, |
| "learning_rate": 2.6358381502890172e-05, |
| "loss": 0.3504, |
| "step": 1011 |
| }, |
| { |
| "epoch": 1.5767731878409976, |
| "grad_norm": 0.33526147496951697, |
| "learning_rate": 2.632947976878613e-05, |
| "loss": 0.3559, |
| "step": 1012 |
| }, |
| { |
| "epoch": 1.5783320342946219, |
| "grad_norm": 0.2944668930671466, |
| "learning_rate": 2.6300578034682083e-05, |
| "loss": 0.3755, |
| "step": 1013 |
| }, |
| { |
| "epoch": 1.5798908807482464, |
| "grad_norm": 0.3138451746470972, |
| "learning_rate": 2.6271676300578036e-05, |
| "loss": 0.3643, |
| "step": 1014 |
| }, |
| { |
| "epoch": 1.5814497272018706, |
| "grad_norm": 0.26776940795700943, |
| "learning_rate": 2.6242774566473986e-05, |
| "loss": 0.3466, |
| "step": 1015 |
| }, |
| { |
| "epoch": 1.583008573655495, |
| "grad_norm": 0.26784179313482087, |
| "learning_rate": 2.6213872832369946e-05, |
| "loss": 0.3486, |
| "step": 1016 |
| }, |
| { |
| "epoch": 1.5845674201091193, |
| "grad_norm": 0.38607190770403105, |
| "learning_rate": 2.6184971098265896e-05, |
| "loss": 0.353, |
| "step": 1017 |
| }, |
| { |
| "epoch": 1.5861262665627436, |
| "grad_norm": 0.3065474352566968, |
| "learning_rate": 2.615606936416185e-05, |
| "loss": 0.3589, |
| "step": 1018 |
| }, |
| { |
| "epoch": 1.5876851130163678, |
| "grad_norm": 0.2832505582830171, |
| "learning_rate": 2.6127167630057803e-05, |
| "loss": 0.3678, |
| "step": 1019 |
| }, |
| { |
| "epoch": 1.589243959469992, |
| "grad_norm": 0.36412176076022207, |
| "learning_rate": 2.609826589595376e-05, |
| "loss": 0.3798, |
| "step": 1020 |
| }, |
| { |
| "epoch": 1.5908028059236166, |
| "grad_norm": 0.28000224882500524, |
| "learning_rate": 2.6069364161849714e-05, |
| "loss": 0.3497, |
| "step": 1021 |
| }, |
| { |
| "epoch": 1.5923616523772408, |
| "grad_norm": 1.8214222618798932, |
| "learning_rate": 2.6040462427745667e-05, |
| "loss": 0.4108, |
| "step": 1022 |
| }, |
| { |
| "epoch": 1.5939204988308653, |
| "grad_norm": 0.44865333521920003, |
| "learning_rate": 2.6011560693641617e-05, |
| "loss": 0.3772, |
| "step": 1023 |
| }, |
| { |
| "epoch": 1.5954793452844895, |
| "grad_norm": 0.25598641910972036, |
| "learning_rate": 2.5982658959537577e-05, |
| "loss": 0.3657, |
| "step": 1024 |
| }, |
| { |
| "epoch": 1.5970381917381138, |
| "grad_norm": 0.33942270049254686, |
| "learning_rate": 2.5953757225433527e-05, |
| "loss": 0.3995, |
| "step": 1025 |
| }, |
| { |
| "epoch": 1.598597038191738, |
| "grad_norm": 0.3657115461703349, |
| "learning_rate": 2.592485549132948e-05, |
| "loss": 0.3941, |
| "step": 1026 |
| }, |
| { |
| "epoch": 1.6001558846453623, |
| "grad_norm": 0.28531237255153097, |
| "learning_rate": 2.5895953757225434e-05, |
| "loss": 0.3806, |
| "step": 1027 |
| }, |
| { |
| "epoch": 1.6017147310989868, |
| "grad_norm": 0.35592372312382153, |
| "learning_rate": 2.586705202312139e-05, |
| "loss": 0.3781, |
| "step": 1028 |
| }, |
| { |
| "epoch": 1.603273577552611, |
| "grad_norm": 0.29099766422855, |
| "learning_rate": 2.5838150289017344e-05, |
| "loss": 0.3638, |
| "step": 1029 |
| }, |
| { |
| "epoch": 1.6048324240062355, |
| "grad_norm": 0.3174734446542449, |
| "learning_rate": 2.5809248554913295e-05, |
| "loss": 0.3435, |
| "step": 1030 |
| }, |
| { |
| "epoch": 1.6063912704598597, |
| "grad_norm": 0.4203285741115712, |
| "learning_rate": 2.5780346820809248e-05, |
| "loss": 0.3788, |
| "step": 1031 |
| }, |
| { |
| "epoch": 1.607950116913484, |
| "grad_norm": 0.32251687798177475, |
| "learning_rate": 2.5751445086705205e-05, |
| "loss": 0.3632, |
| "step": 1032 |
| }, |
| { |
| "epoch": 1.6095089633671082, |
| "grad_norm": 0.3531877282131029, |
| "learning_rate": 2.5722543352601158e-05, |
| "loss": 0.366, |
| "step": 1033 |
| }, |
| { |
| "epoch": 1.6110678098207325, |
| "grad_norm": 0.34051504848522857, |
| "learning_rate": 2.5693641618497112e-05, |
| "loss": 0.3591, |
| "step": 1034 |
| }, |
| { |
| "epoch": 1.612626656274357, |
| "grad_norm": 0.3072876546153309, |
| "learning_rate": 2.5664739884393062e-05, |
| "loss": 0.3807, |
| "step": 1035 |
| }, |
| { |
| "epoch": 1.6141855027279814, |
| "grad_norm": 0.3229272762875624, |
| "learning_rate": 2.5635838150289022e-05, |
| "loss": 0.3707, |
| "step": 1036 |
| }, |
| { |
| "epoch": 1.6157443491816057, |
| "grad_norm": 0.3309237615111176, |
| "learning_rate": 2.5606936416184972e-05, |
| "loss": 0.3612, |
| "step": 1037 |
| }, |
| { |
| "epoch": 1.61730319563523, |
| "grad_norm": 0.3111215230340758, |
| "learning_rate": 2.5578034682080925e-05, |
| "loss": 0.3803, |
| "step": 1038 |
| }, |
| { |
| "epoch": 1.6188620420888542, |
| "grad_norm": 0.32805176015176535, |
| "learning_rate": 2.554913294797688e-05, |
| "loss": 0.3695, |
| "step": 1039 |
| }, |
| { |
| "epoch": 1.6204208885424785, |
| "grad_norm": 0.2835660539290897, |
| "learning_rate": 2.5520231213872836e-05, |
| "loss": 0.3807, |
| "step": 1040 |
| }, |
| { |
| "epoch": 1.621979734996103, |
| "grad_norm": 0.3032998483914643, |
| "learning_rate": 2.549132947976879e-05, |
| "loss": 0.3695, |
| "step": 1041 |
| }, |
| { |
| "epoch": 1.6235385814497272, |
| "grad_norm": 0.27617969523468905, |
| "learning_rate": 2.546242774566474e-05, |
| "loss": 0.3723, |
| "step": 1042 |
| }, |
| { |
| "epoch": 1.6250974279033517, |
| "grad_norm": 0.32157009299636713, |
| "learning_rate": 2.5433526011560693e-05, |
| "loss": 0.3572, |
| "step": 1043 |
| }, |
| { |
| "epoch": 1.626656274356976, |
| "grad_norm": 0.2842521187908904, |
| "learning_rate": 2.540462427745665e-05, |
| "loss": 0.3656, |
| "step": 1044 |
| }, |
| { |
| "epoch": 1.6282151208106002, |
| "grad_norm": 0.3288780675289122, |
| "learning_rate": 2.5375722543352603e-05, |
| "loss": 0.385, |
| "step": 1045 |
| }, |
| { |
| "epoch": 1.6297739672642244, |
| "grad_norm": 0.32678871102435747, |
| "learning_rate": 2.5346820809248556e-05, |
| "loss": 0.3844, |
| "step": 1046 |
| }, |
| { |
| "epoch": 1.6313328137178487, |
| "grad_norm": 0.39119637546888175, |
| "learning_rate": 2.531791907514451e-05, |
| "loss": 0.3542, |
| "step": 1047 |
| }, |
| { |
| "epoch": 1.6328916601714731, |
| "grad_norm": 0.288052529248597, |
| "learning_rate": 2.5289017341040467e-05, |
| "loss": 0.3572, |
| "step": 1048 |
| }, |
| { |
| "epoch": 1.6344505066250974, |
| "grad_norm": 0.3487713869931321, |
| "learning_rate": 2.526011560693642e-05, |
| "loss": 0.3542, |
| "step": 1049 |
| }, |
| { |
| "epoch": 1.6360093530787219, |
| "grad_norm": 0.26513201445656637, |
| "learning_rate": 2.523121387283237e-05, |
| "loss": 0.3731, |
| "step": 1050 |
| }, |
| { |
| "epoch": 1.6375681995323461, |
| "grad_norm": 0.32844205151640604, |
| "learning_rate": 2.5202312138728324e-05, |
| "loss": 0.3691, |
| "step": 1051 |
| }, |
| { |
| "epoch": 1.6391270459859704, |
| "grad_norm": 0.30490163955005906, |
| "learning_rate": 2.517341040462428e-05, |
| "loss": 0.3813, |
| "step": 1052 |
| }, |
| { |
| "epoch": 1.6406858924395946, |
| "grad_norm": 0.4555429114132785, |
| "learning_rate": 2.5144508670520234e-05, |
| "loss": 0.3818, |
| "step": 1053 |
| }, |
| { |
| "epoch": 1.6422447388932189, |
| "grad_norm": 0.29026515692355687, |
| "learning_rate": 2.5115606936416187e-05, |
| "loss": 0.3626, |
| "step": 1054 |
| }, |
| { |
| "epoch": 1.6438035853468433, |
| "grad_norm": 0.39179865186142615, |
| "learning_rate": 2.5086705202312137e-05, |
| "loss": 0.3502, |
| "step": 1055 |
| }, |
| { |
| "epoch": 1.6453624318004678, |
| "grad_norm": 0.32433692268192194, |
| "learning_rate": 2.5057803468208098e-05, |
| "loss": 0.3781, |
| "step": 1056 |
| }, |
| { |
| "epoch": 1.646921278254092, |
| "grad_norm": 0.33527613308127024, |
| "learning_rate": 2.5028901734104048e-05, |
| "loss": 0.3619, |
| "step": 1057 |
| }, |
| { |
| "epoch": 1.6484801247077163, |
| "grad_norm": 0.3159961287138508, |
| "learning_rate": 2.5e-05, |
| "loss": 0.3632, |
| "step": 1058 |
| }, |
| { |
| "epoch": 1.6500389711613406, |
| "grad_norm": 0.31940038082116945, |
| "learning_rate": 2.4971098265895955e-05, |
| "loss": 0.3633, |
| "step": 1059 |
| }, |
| { |
| "epoch": 1.6515978176149648, |
| "grad_norm": 0.3303935987168502, |
| "learning_rate": 2.4942196531791908e-05, |
| "loss": 0.3745, |
| "step": 1060 |
| }, |
| { |
| "epoch": 1.6531566640685893, |
| "grad_norm": 0.34507486262396847, |
| "learning_rate": 2.491329479768786e-05, |
| "loss": 0.3507, |
| "step": 1061 |
| }, |
| { |
| "epoch": 1.6547155105222136, |
| "grad_norm": 0.2898972340965827, |
| "learning_rate": 2.4884393063583815e-05, |
| "loss": 0.3583, |
| "step": 1062 |
| }, |
| { |
| "epoch": 1.656274356975838, |
| "grad_norm": 0.29250387172577985, |
| "learning_rate": 2.485549132947977e-05, |
| "loss": 0.3754, |
| "step": 1063 |
| }, |
| { |
| "epoch": 1.6578332034294623, |
| "grad_norm": 0.3144587373068325, |
| "learning_rate": 2.4826589595375725e-05, |
| "loss": 0.3675, |
| "step": 1064 |
| }, |
| { |
| "epoch": 1.6593920498830865, |
| "grad_norm": 0.28878104517546993, |
| "learning_rate": 2.4797687861271675e-05, |
| "loss": 0.3535, |
| "step": 1065 |
| }, |
| { |
| "epoch": 1.6609508963367108, |
| "grad_norm": 0.2915446431012894, |
| "learning_rate": 2.4768786127167632e-05, |
| "loss": 0.3461, |
| "step": 1066 |
| }, |
| { |
| "epoch": 1.662509742790335, |
| "grad_norm": 0.3180916504846977, |
| "learning_rate": 2.4739884393063582e-05, |
| "loss": 0.3482, |
| "step": 1067 |
| }, |
| { |
| "epoch": 1.6640685892439595, |
| "grad_norm": 0.35825594962772983, |
| "learning_rate": 2.471098265895954e-05, |
| "loss": 0.3822, |
| "step": 1068 |
| }, |
| { |
| "epoch": 1.6656274356975838, |
| "grad_norm": 0.2716382477039464, |
| "learning_rate": 2.4682080924855492e-05, |
| "loss": 0.3712, |
| "step": 1069 |
| }, |
| { |
| "epoch": 1.6671862821512082, |
| "grad_norm": 0.3832340579480356, |
| "learning_rate": 2.4653179190751446e-05, |
| "loss": 0.3744, |
| "step": 1070 |
| }, |
| { |
| "epoch": 1.6687451286048325, |
| "grad_norm": 0.30678572957271294, |
| "learning_rate": 2.46242774566474e-05, |
| "loss": 0.3579, |
| "step": 1071 |
| }, |
| { |
| "epoch": 1.6703039750584567, |
| "grad_norm": 0.2843857794151713, |
| "learning_rate": 2.4595375722543353e-05, |
| "loss": 0.3826, |
| "step": 1072 |
| }, |
| { |
| "epoch": 1.671862821512081, |
| "grad_norm": 0.34064253973495867, |
| "learning_rate": 2.4566473988439306e-05, |
| "loss": 0.3761, |
| "step": 1073 |
| }, |
| { |
| "epoch": 1.6734216679657052, |
| "grad_norm": 0.29119739353949486, |
| "learning_rate": 2.4537572254335263e-05, |
| "loss": 0.3755, |
| "step": 1074 |
| }, |
| { |
| "epoch": 1.6749805144193297, |
| "grad_norm": 0.2816956584902144, |
| "learning_rate": 2.4508670520231213e-05, |
| "loss": 0.3741, |
| "step": 1075 |
| }, |
| { |
| "epoch": 1.676539360872954, |
| "grad_norm": 0.3111411062453413, |
| "learning_rate": 2.447976878612717e-05, |
| "loss": 0.3825, |
| "step": 1076 |
| }, |
| { |
| "epoch": 1.6780982073265784, |
| "grad_norm": 0.2726926618664558, |
| "learning_rate": 2.445086705202312e-05, |
| "loss": 0.3879, |
| "step": 1077 |
| }, |
| { |
| "epoch": 1.6796570537802027, |
| "grad_norm": 0.2834300086794794, |
| "learning_rate": 2.4421965317919077e-05, |
| "loss": 0.3705, |
| "step": 1078 |
| }, |
| { |
| "epoch": 1.681215900233827, |
| "grad_norm": 0.31189081369621346, |
| "learning_rate": 2.439306358381503e-05, |
| "loss": 0.3663, |
| "step": 1079 |
| }, |
| { |
| "epoch": 1.6827747466874512, |
| "grad_norm": 0.2829807568606232, |
| "learning_rate": 2.4364161849710984e-05, |
| "loss": 0.3626, |
| "step": 1080 |
| }, |
| { |
| "epoch": 1.6843335931410754, |
| "grad_norm": 0.32489614588339644, |
| "learning_rate": 2.4335260115606937e-05, |
| "loss": 0.3733, |
| "step": 1081 |
| }, |
| { |
| "epoch": 1.6858924395947, |
| "grad_norm": 0.3379997934308957, |
| "learning_rate": 2.430635838150289e-05, |
| "loss": 0.3843, |
| "step": 1082 |
| }, |
| { |
| "epoch": 1.6874512860483244, |
| "grad_norm": 0.2730933075226193, |
| "learning_rate": 2.4277456647398844e-05, |
| "loss": 0.3825, |
| "step": 1083 |
| }, |
| { |
| "epoch": 1.6890101325019486, |
| "grad_norm": 0.3404758440705323, |
| "learning_rate": 2.4248554913294798e-05, |
| "loss": 0.3692, |
| "step": 1084 |
| }, |
| { |
| "epoch": 1.690568978955573, |
| "grad_norm": 0.2678166339094811, |
| "learning_rate": 2.421965317919075e-05, |
| "loss": 0.358, |
| "step": 1085 |
| }, |
| { |
| "epoch": 1.6921278254091972, |
| "grad_norm": 0.3179761438396162, |
| "learning_rate": 2.4190751445086708e-05, |
| "loss": 0.3615, |
| "step": 1086 |
| }, |
| { |
| "epoch": 1.6936866718628214, |
| "grad_norm": 0.30230071697163113, |
| "learning_rate": 2.4161849710982658e-05, |
| "loss": 0.3778, |
| "step": 1087 |
| }, |
| { |
| "epoch": 1.6952455183164459, |
| "grad_norm": 0.291898879189332, |
| "learning_rate": 2.4132947976878615e-05, |
| "loss": 0.373, |
| "step": 1088 |
| }, |
| { |
| "epoch": 1.6968043647700701, |
| "grad_norm": 0.2503981788139211, |
| "learning_rate": 2.4104046242774568e-05, |
| "loss": 0.3568, |
| "step": 1089 |
| }, |
| { |
| "epoch": 1.6983632112236946, |
| "grad_norm": 0.28111682299046925, |
| "learning_rate": 2.407514450867052e-05, |
| "loss": 0.352, |
| "step": 1090 |
| }, |
| { |
| "epoch": 1.6999220576773189, |
| "grad_norm": 0.2448550241175499, |
| "learning_rate": 2.4046242774566475e-05, |
| "loss": 0.3657, |
| "step": 1091 |
| }, |
| { |
| "epoch": 1.701480904130943, |
| "grad_norm": 0.3065398908103042, |
| "learning_rate": 2.401734104046243e-05, |
| "loss": 0.3632, |
| "step": 1092 |
| }, |
| { |
| "epoch": 1.7030397505845674, |
| "grad_norm": 0.2822693214761903, |
| "learning_rate": 2.3988439306358382e-05, |
| "loss": 0.401, |
| "step": 1093 |
| }, |
| { |
| "epoch": 1.7045985970381916, |
| "grad_norm": 0.29360065434429594, |
| "learning_rate": 2.3959537572254335e-05, |
| "loss": 0.3682, |
| "step": 1094 |
| }, |
| { |
| "epoch": 1.706157443491816, |
| "grad_norm": 0.24618104109851893, |
| "learning_rate": 2.393063583815029e-05, |
| "loss": 0.3653, |
| "step": 1095 |
| }, |
| { |
| "epoch": 1.7077162899454403, |
| "grad_norm": 0.27658727281555634, |
| "learning_rate": 2.3901734104046246e-05, |
| "loss": 0.3703, |
| "step": 1096 |
| }, |
| { |
| "epoch": 1.7092751363990648, |
| "grad_norm": 0.2777982792931725, |
| "learning_rate": 2.3872832369942196e-05, |
| "loss": 0.382, |
| "step": 1097 |
| }, |
| { |
| "epoch": 1.710833982852689, |
| "grad_norm": 0.30238858038905553, |
| "learning_rate": 2.3843930635838153e-05, |
| "loss": 0.3646, |
| "step": 1098 |
| }, |
| { |
| "epoch": 1.7123928293063133, |
| "grad_norm": 0.27806470539498046, |
| "learning_rate": 2.3815028901734106e-05, |
| "loss": 0.3745, |
| "step": 1099 |
| }, |
| { |
| "epoch": 1.7139516757599376, |
| "grad_norm": 0.29767876896518725, |
| "learning_rate": 2.378612716763006e-05, |
| "loss": 0.3758, |
| "step": 1100 |
| }, |
| { |
| "epoch": 1.7155105222135618, |
| "grad_norm": 0.2829737040629793, |
| "learning_rate": 2.3757225433526013e-05, |
| "loss": 0.3666, |
| "step": 1101 |
| }, |
| { |
| "epoch": 1.7170693686671863, |
| "grad_norm": 0.3578161092071975, |
| "learning_rate": 2.3728323699421966e-05, |
| "loss": 0.3886, |
| "step": 1102 |
| }, |
| { |
| "epoch": 1.7186282151208108, |
| "grad_norm": 0.29906079097318256, |
| "learning_rate": 2.369942196531792e-05, |
| "loss": 0.3671, |
| "step": 1103 |
| }, |
| { |
| "epoch": 1.720187061574435, |
| "grad_norm": 0.28648360330595535, |
| "learning_rate": 2.3670520231213873e-05, |
| "loss": 0.3552, |
| "step": 1104 |
| }, |
| { |
| "epoch": 1.7217459080280593, |
| "grad_norm": 0.2690083195767578, |
| "learning_rate": 2.3641618497109827e-05, |
| "loss": 0.3642, |
| "step": 1105 |
| }, |
| { |
| "epoch": 1.7233047544816835, |
| "grad_norm": 0.3412710940527978, |
| "learning_rate": 2.3612716763005784e-05, |
| "loss": 0.3851, |
| "step": 1106 |
| }, |
| { |
| "epoch": 1.7248636009353078, |
| "grad_norm": 0.2945093059468343, |
| "learning_rate": 2.3583815028901734e-05, |
| "loss": 0.3623, |
| "step": 1107 |
| }, |
| { |
| "epoch": 1.726422447388932, |
| "grad_norm": 0.2745278267359284, |
| "learning_rate": 2.355491329479769e-05, |
| "loss": 0.3573, |
| "step": 1108 |
| }, |
| { |
| "epoch": 1.7279812938425565, |
| "grad_norm": 0.2718225903508495, |
| "learning_rate": 2.352601156069364e-05, |
| "loss": 0.3641, |
| "step": 1109 |
| }, |
| { |
| "epoch": 1.729540140296181, |
| "grad_norm": 0.27774676623694455, |
| "learning_rate": 2.3497109826589597e-05, |
| "loss": 0.3621, |
| "step": 1110 |
| }, |
| { |
| "epoch": 1.7310989867498052, |
| "grad_norm": 0.26445890337068356, |
| "learning_rate": 2.346820809248555e-05, |
| "loss": 0.3616, |
| "step": 1111 |
| }, |
| { |
| "epoch": 1.7326578332034295, |
| "grad_norm": 0.34740235201216224, |
| "learning_rate": 2.3439306358381504e-05, |
| "loss": 0.3549, |
| "step": 1112 |
| }, |
| { |
| "epoch": 1.7342166796570537, |
| "grad_norm": 0.27846442379370767, |
| "learning_rate": 2.3410404624277458e-05, |
| "loss": 0.3582, |
| "step": 1113 |
| }, |
| { |
| "epoch": 1.735775526110678, |
| "grad_norm": 0.3157341078739885, |
| "learning_rate": 2.338150289017341e-05, |
| "loss": 0.3777, |
| "step": 1114 |
| }, |
| { |
| "epoch": 1.7373343725643025, |
| "grad_norm": 0.27171759230918247, |
| "learning_rate": 2.3352601156069365e-05, |
| "loss": 0.3752, |
| "step": 1115 |
| }, |
| { |
| "epoch": 1.7388932190179267, |
| "grad_norm": 0.3016475132282448, |
| "learning_rate": 2.332369942196532e-05, |
| "loss": 0.3815, |
| "step": 1116 |
| }, |
| { |
| "epoch": 1.7404520654715512, |
| "grad_norm": 0.28434294314632325, |
| "learning_rate": 2.329479768786127e-05, |
| "loss": 0.3771, |
| "step": 1117 |
| }, |
| { |
| "epoch": 1.7420109119251754, |
| "grad_norm": 0.3013627601659192, |
| "learning_rate": 2.326589595375723e-05, |
| "loss": 0.3668, |
| "step": 1118 |
| }, |
| { |
| "epoch": 1.7435697583787997, |
| "grad_norm": 0.2907093179335693, |
| "learning_rate": 2.323699421965318e-05, |
| "loss": 0.3939, |
| "step": 1119 |
| }, |
| { |
| "epoch": 1.745128604832424, |
| "grad_norm": 0.2727965148056477, |
| "learning_rate": 2.3208092485549135e-05, |
| "loss": 0.3892, |
| "step": 1120 |
| }, |
| { |
| "epoch": 1.7466874512860482, |
| "grad_norm": 0.2993429686182799, |
| "learning_rate": 2.317919075144509e-05, |
| "loss": 0.3519, |
| "step": 1121 |
| }, |
| { |
| "epoch": 1.7482462977396727, |
| "grad_norm": 0.2693671384061593, |
| "learning_rate": 2.3150289017341042e-05, |
| "loss": 0.3549, |
| "step": 1122 |
| }, |
| { |
| "epoch": 1.749805144193297, |
| "grad_norm": 0.3209589746292965, |
| "learning_rate": 2.3121387283236996e-05, |
| "loss": 0.375, |
| "step": 1123 |
| }, |
| { |
| "epoch": 1.7513639906469214, |
| "grad_norm": 0.24312926167519938, |
| "learning_rate": 2.309248554913295e-05, |
| "loss": 0.3499, |
| "step": 1124 |
| }, |
| { |
| "epoch": 1.7529228371005456, |
| "grad_norm": 0.3104273862594975, |
| "learning_rate": 2.3063583815028902e-05, |
| "loss": 0.389, |
| "step": 1125 |
| }, |
| { |
| "epoch": 1.75448168355417, |
| "grad_norm": 0.2752541588528496, |
| "learning_rate": 2.303468208092486e-05, |
| "loss": 0.3646, |
| "step": 1126 |
| }, |
| { |
| "epoch": 1.7560405300077941, |
| "grad_norm": 0.27374708814696136, |
| "learning_rate": 2.300578034682081e-05, |
| "loss": 0.3576, |
| "step": 1127 |
| }, |
| { |
| "epoch": 1.7575993764614184, |
| "grad_norm": 0.31492934426283914, |
| "learning_rate": 2.2976878612716766e-05, |
| "loss": 0.3872, |
| "step": 1128 |
| }, |
| { |
| "epoch": 1.7591582229150429, |
| "grad_norm": 0.28256761554526544, |
| "learning_rate": 2.2947976878612716e-05, |
| "loss": 0.3659, |
| "step": 1129 |
| }, |
| { |
| "epoch": 1.7607170693686673, |
| "grad_norm": 0.2543705987468934, |
| "learning_rate": 2.2919075144508673e-05, |
| "loss": 0.3607, |
| "step": 1130 |
| }, |
| { |
| "epoch": 1.7622759158222916, |
| "grad_norm": 0.298933402272287, |
| "learning_rate": 2.2890173410404627e-05, |
| "loss": 0.3683, |
| "step": 1131 |
| }, |
| { |
| "epoch": 1.7638347622759158, |
| "grad_norm": 0.311816355725973, |
| "learning_rate": 2.286127167630058e-05, |
| "loss": 0.3597, |
| "step": 1132 |
| }, |
| { |
| "epoch": 1.76539360872954, |
| "grad_norm": 0.3067218252259407, |
| "learning_rate": 2.2832369942196533e-05, |
| "loss": 0.3786, |
| "step": 1133 |
| }, |
| { |
| "epoch": 1.7669524551831643, |
| "grad_norm": 0.2861975440862343, |
| "learning_rate": 2.2803468208092487e-05, |
| "loss": 0.3731, |
| "step": 1134 |
| }, |
| { |
| "epoch": 1.7685113016367888, |
| "grad_norm": 0.316953066205446, |
| "learning_rate": 2.277456647398844e-05, |
| "loss": 0.3518, |
| "step": 1135 |
| }, |
| { |
| "epoch": 1.770070148090413, |
| "grad_norm": 0.2708691409104658, |
| "learning_rate": 2.2745664739884394e-05, |
| "loss": 0.373, |
| "step": 1136 |
| }, |
| { |
| "epoch": 1.7716289945440375, |
| "grad_norm": 0.26797311923183315, |
| "learning_rate": 2.2716763005780347e-05, |
| "loss": 0.3665, |
| "step": 1137 |
| }, |
| { |
| "epoch": 1.7731878409976618, |
| "grad_norm": 0.3099497339980096, |
| "learning_rate": 2.2687861271676304e-05, |
| "loss": 0.3951, |
| "step": 1138 |
| }, |
| { |
| "epoch": 1.774746687451286, |
| "grad_norm": 0.34054924476007226, |
| "learning_rate": 2.2658959537572254e-05, |
| "loss": 0.3594, |
| "step": 1139 |
| }, |
| { |
| "epoch": 1.7763055339049103, |
| "grad_norm": 0.25223898072450013, |
| "learning_rate": 2.263005780346821e-05, |
| "loss": 0.361, |
| "step": 1140 |
| }, |
| { |
| "epoch": 1.7778643803585346, |
| "grad_norm": 0.29284462313911747, |
| "learning_rate": 2.2601156069364164e-05, |
| "loss": 0.3575, |
| "step": 1141 |
| }, |
| { |
| "epoch": 1.779423226812159, |
| "grad_norm": 0.2960130777592313, |
| "learning_rate": 2.2572254335260118e-05, |
| "loss": 0.3715, |
| "step": 1142 |
| }, |
| { |
| "epoch": 1.7809820732657833, |
| "grad_norm": 0.26708445021070565, |
| "learning_rate": 2.254335260115607e-05, |
| "loss": 0.3685, |
| "step": 1143 |
| }, |
| { |
| "epoch": 1.7825409197194078, |
| "grad_norm": 0.23850951752764862, |
| "learning_rate": 2.2514450867052025e-05, |
| "loss": 0.3528, |
| "step": 1144 |
| }, |
| { |
| "epoch": 1.784099766173032, |
| "grad_norm": 0.28725957396773283, |
| "learning_rate": 2.2485549132947978e-05, |
| "loss": 0.3575, |
| "step": 1145 |
| }, |
| { |
| "epoch": 1.7856586126266563, |
| "grad_norm": 0.27977866068073226, |
| "learning_rate": 2.245664739884393e-05, |
| "loss": 0.3605, |
| "step": 1146 |
| }, |
| { |
| "epoch": 1.7872174590802805, |
| "grad_norm": 0.27572937920147894, |
| "learning_rate": 2.2427745664739885e-05, |
| "loss": 0.3797, |
| "step": 1147 |
| }, |
| { |
| "epoch": 1.7887763055339048, |
| "grad_norm": 0.3044698433558107, |
| "learning_rate": 2.2398843930635842e-05, |
| "loss": 0.3561, |
| "step": 1148 |
| }, |
| { |
| "epoch": 1.7903351519875292, |
| "grad_norm": 0.3034254736946843, |
| "learning_rate": 2.2369942196531792e-05, |
| "loss": 0.3729, |
| "step": 1149 |
| }, |
| { |
| "epoch": 1.7918939984411537, |
| "grad_norm": 0.29184309753197535, |
| "learning_rate": 2.234104046242775e-05, |
| "loss": 0.3602, |
| "step": 1150 |
| }, |
| { |
| "epoch": 1.793452844894778, |
| "grad_norm": 0.2752185880115312, |
| "learning_rate": 2.23121387283237e-05, |
| "loss": 0.3705, |
| "step": 1151 |
| }, |
| { |
| "epoch": 1.7950116913484022, |
| "grad_norm": 0.2809903038728107, |
| "learning_rate": 2.2283236994219656e-05, |
| "loss": 0.3545, |
| "step": 1152 |
| }, |
| { |
| "epoch": 1.7965705378020265, |
| "grad_norm": 0.3185758597066887, |
| "learning_rate": 2.225433526011561e-05, |
| "loss": 0.3775, |
| "step": 1153 |
| }, |
| { |
| "epoch": 1.7981293842556507, |
| "grad_norm": 0.32522837307739677, |
| "learning_rate": 2.2225433526011563e-05, |
| "loss": 0.3762, |
| "step": 1154 |
| }, |
| { |
| "epoch": 1.799688230709275, |
| "grad_norm": 0.29829669655375685, |
| "learning_rate": 2.2196531791907516e-05, |
| "loss": 0.3431, |
| "step": 1155 |
| }, |
| { |
| "epoch": 1.8012470771628994, |
| "grad_norm": 0.2923880518129942, |
| "learning_rate": 2.216763005780347e-05, |
| "loss": 0.3625, |
| "step": 1156 |
| }, |
| { |
| "epoch": 1.802805923616524, |
| "grad_norm": 0.31193429426470687, |
| "learning_rate": 2.2138728323699423e-05, |
| "loss": 0.3687, |
| "step": 1157 |
| }, |
| { |
| "epoch": 1.8043647700701482, |
| "grad_norm": 0.3028214816745831, |
| "learning_rate": 2.2109826589595376e-05, |
| "loss": 0.3835, |
| "step": 1158 |
| }, |
| { |
| "epoch": 1.8059236165237724, |
| "grad_norm": 0.35099054840606936, |
| "learning_rate": 2.208092485549133e-05, |
| "loss": 0.3785, |
| "step": 1159 |
| }, |
| { |
| "epoch": 1.8074824629773967, |
| "grad_norm": 0.3153451160744472, |
| "learning_rate": 2.2052023121387283e-05, |
| "loss": 0.367, |
| "step": 1160 |
| }, |
| { |
| "epoch": 1.809041309431021, |
| "grad_norm": 0.3253785936574865, |
| "learning_rate": 2.2023121387283237e-05, |
| "loss": 0.3877, |
| "step": 1161 |
| }, |
| { |
| "epoch": 1.8106001558846454, |
| "grad_norm": 0.30760323974470033, |
| "learning_rate": 2.199421965317919e-05, |
| "loss": 0.3643, |
| "step": 1162 |
| }, |
| { |
| "epoch": 1.8121590023382697, |
| "grad_norm": 0.3598054851684999, |
| "learning_rate": 2.1965317919075147e-05, |
| "loss": 0.3586, |
| "step": 1163 |
| }, |
| { |
| "epoch": 1.8137178487918941, |
| "grad_norm": 0.26304147816154894, |
| "learning_rate": 2.1936416184971097e-05, |
| "loss": 0.3621, |
| "step": 1164 |
| }, |
| { |
| "epoch": 1.8152766952455184, |
| "grad_norm": 0.3322641073371267, |
| "learning_rate": 2.1907514450867054e-05, |
| "loss": 0.3787, |
| "step": 1165 |
| }, |
| { |
| "epoch": 1.8168355416991426, |
| "grad_norm": 0.2972445280530977, |
| "learning_rate": 2.1878612716763007e-05, |
| "loss": 0.3623, |
| "step": 1166 |
| }, |
| { |
| "epoch": 1.8183943881527669, |
| "grad_norm": 0.29860325366761714, |
| "learning_rate": 2.184971098265896e-05, |
| "loss": 0.3693, |
| "step": 1167 |
| }, |
| { |
| "epoch": 1.8199532346063911, |
| "grad_norm": 0.31932300688288, |
| "learning_rate": 2.1820809248554914e-05, |
| "loss": 0.3687, |
| "step": 1168 |
| }, |
| { |
| "epoch": 1.8215120810600156, |
| "grad_norm": 0.25058398882971866, |
| "learning_rate": 2.1791907514450868e-05, |
| "loss": 0.3688, |
| "step": 1169 |
| }, |
| { |
| "epoch": 1.8230709275136399, |
| "grad_norm": 0.30451618236243205, |
| "learning_rate": 2.176300578034682e-05, |
| "loss": 0.3604, |
| "step": 1170 |
| }, |
| { |
| "epoch": 1.8246297739672643, |
| "grad_norm": 0.30692079739121414, |
| "learning_rate": 2.1734104046242775e-05, |
| "loss": 0.3515, |
| "step": 1171 |
| }, |
| { |
| "epoch": 1.8261886204208886, |
| "grad_norm": 0.2807778315885177, |
| "learning_rate": 2.1705202312138728e-05, |
| "loss": 0.3509, |
| "step": 1172 |
| }, |
| { |
| "epoch": 1.8277474668745128, |
| "grad_norm": 0.2905244115992112, |
| "learning_rate": 2.1676300578034685e-05, |
| "loss": 0.3661, |
| "step": 1173 |
| }, |
| { |
| "epoch": 1.829306313328137, |
| "grad_norm": 0.3105261582464057, |
| "learning_rate": 2.1647398843930635e-05, |
| "loss": 0.372, |
| "step": 1174 |
| }, |
| { |
| "epoch": 1.8308651597817613, |
| "grad_norm": 0.3095524114344275, |
| "learning_rate": 2.1618497109826592e-05, |
| "loss": 0.3683, |
| "step": 1175 |
| }, |
| { |
| "epoch": 1.8324240062353858, |
| "grad_norm": 0.3224707852323462, |
| "learning_rate": 2.1589595375722542e-05, |
| "loss": 0.3941, |
| "step": 1176 |
| }, |
| { |
| "epoch": 1.8339828526890103, |
| "grad_norm": 0.37265316733469883, |
| "learning_rate": 2.15606936416185e-05, |
| "loss": 0.3729, |
| "step": 1177 |
| }, |
| { |
| "epoch": 1.8355416991426345, |
| "grad_norm": 0.3484805036172097, |
| "learning_rate": 2.1531791907514452e-05, |
| "loss": 0.3695, |
| "step": 1178 |
| }, |
| { |
| "epoch": 1.8371005455962588, |
| "grad_norm": 0.3062185230582236, |
| "learning_rate": 2.1502890173410405e-05, |
| "loss": 0.3722, |
| "step": 1179 |
| }, |
| { |
| "epoch": 1.838659392049883, |
| "grad_norm": 0.3514017844930539, |
| "learning_rate": 2.147398843930636e-05, |
| "loss": 0.3716, |
| "step": 1180 |
| }, |
| { |
| "epoch": 1.8402182385035073, |
| "grad_norm": 0.29370679610653583, |
| "learning_rate": 2.1445086705202312e-05, |
| "loss": 0.3511, |
| "step": 1181 |
| }, |
| { |
| "epoch": 1.8417770849571318, |
| "grad_norm": 0.3650311060921678, |
| "learning_rate": 2.1416184971098266e-05, |
| "loss": 0.3746, |
| "step": 1182 |
| }, |
| { |
| "epoch": 1.843335931410756, |
| "grad_norm": 0.28361922423446095, |
| "learning_rate": 2.1387283236994223e-05, |
| "loss": 0.354, |
| "step": 1183 |
| }, |
| { |
| "epoch": 1.8448947778643805, |
| "grad_norm": 0.2915388311697381, |
| "learning_rate": 2.1358381502890173e-05, |
| "loss": 0.36, |
| "step": 1184 |
| }, |
| { |
| "epoch": 1.8464536243180047, |
| "grad_norm": 0.33038140947084105, |
| "learning_rate": 2.132947976878613e-05, |
| "loss": 0.3554, |
| "step": 1185 |
| }, |
| { |
| "epoch": 1.848012470771629, |
| "grad_norm": 0.419071588239709, |
| "learning_rate": 2.130057803468208e-05, |
| "loss": 0.3834, |
| "step": 1186 |
| }, |
| { |
| "epoch": 1.8495713172252533, |
| "grad_norm": 0.3514728435923063, |
| "learning_rate": 2.1271676300578036e-05, |
| "loss": 0.3627, |
| "step": 1187 |
| }, |
| { |
| "epoch": 1.8511301636788775, |
| "grad_norm": 0.35707340267403825, |
| "learning_rate": 2.124277456647399e-05, |
| "loss": 0.3874, |
| "step": 1188 |
| }, |
| { |
| "epoch": 1.852689010132502, |
| "grad_norm": 0.37099787424788894, |
| "learning_rate": 2.1213872832369943e-05, |
| "loss": 0.3726, |
| "step": 1189 |
| }, |
| { |
| "epoch": 1.8542478565861262, |
| "grad_norm": 0.330071756650617, |
| "learning_rate": 2.1184971098265897e-05, |
| "loss": 0.3715, |
| "step": 1190 |
| }, |
| { |
| "epoch": 1.8558067030397507, |
| "grad_norm": 0.37948140477016307, |
| "learning_rate": 2.115606936416185e-05, |
| "loss": 0.3561, |
| "step": 1191 |
| }, |
| { |
| "epoch": 1.857365549493375, |
| "grad_norm": 0.27396995420812964, |
| "learning_rate": 2.1127167630057804e-05, |
| "loss": 0.38, |
| "step": 1192 |
| }, |
| { |
| "epoch": 1.8589243959469992, |
| "grad_norm": 0.35094585580789595, |
| "learning_rate": 2.1098265895953757e-05, |
| "loss": 0.3646, |
| "step": 1193 |
| }, |
| { |
| "epoch": 1.8604832424006235, |
| "grad_norm": 0.3047789152628225, |
| "learning_rate": 2.106936416184971e-05, |
| "loss": 0.3778, |
| "step": 1194 |
| }, |
| { |
| "epoch": 1.8620420888542477, |
| "grad_norm": 0.33234879657650934, |
| "learning_rate": 2.1040462427745667e-05, |
| "loss": 0.3713, |
| "step": 1195 |
| }, |
| { |
| "epoch": 1.8636009353078722, |
| "grad_norm": 0.32766164310333334, |
| "learning_rate": 2.1011560693641617e-05, |
| "loss": 0.3736, |
| "step": 1196 |
| }, |
| { |
| "epoch": 1.8651597817614964, |
| "grad_norm": 0.2759861630025658, |
| "learning_rate": 2.0982658959537574e-05, |
| "loss": 0.3796, |
| "step": 1197 |
| }, |
| { |
| "epoch": 1.866718628215121, |
| "grad_norm": 0.32521839552334825, |
| "learning_rate": 2.0953757225433528e-05, |
| "loss": 0.362, |
| "step": 1198 |
| }, |
| { |
| "epoch": 1.8682774746687452, |
| "grad_norm": 0.2931283862783075, |
| "learning_rate": 2.092485549132948e-05, |
| "loss": 0.3502, |
| "step": 1199 |
| }, |
| { |
| "epoch": 1.8698363211223694, |
| "grad_norm": 0.2991343395943882, |
| "learning_rate": 2.0895953757225435e-05, |
| "loss": 0.3714, |
| "step": 1200 |
| }, |
| { |
| "epoch": 1.8713951675759937, |
| "grad_norm": 0.2932605788220294, |
| "learning_rate": 2.0867052023121388e-05, |
| "loss": 0.3819, |
| "step": 1201 |
| }, |
| { |
| "epoch": 1.872954014029618, |
| "grad_norm": 0.30785885245721334, |
| "learning_rate": 2.083815028901734e-05, |
| "loss": 0.3471, |
| "step": 1202 |
| }, |
| { |
| "epoch": 1.8745128604832424, |
| "grad_norm": 0.32014124433096525, |
| "learning_rate": 2.0809248554913295e-05, |
| "loss": 0.3561, |
| "step": 1203 |
| }, |
| { |
| "epoch": 1.8760717069368669, |
| "grad_norm": 0.3235541208435597, |
| "learning_rate": 2.078034682080925e-05, |
| "loss": 0.3866, |
| "step": 1204 |
| }, |
| { |
| "epoch": 1.8776305533904911, |
| "grad_norm": 0.30949299738115854, |
| "learning_rate": 2.0751445086705205e-05, |
| "loss": 0.3639, |
| "step": 1205 |
| }, |
| { |
| "epoch": 1.8791893998441154, |
| "grad_norm": 0.2817005798429569, |
| "learning_rate": 2.0722543352601155e-05, |
| "loss": 0.3597, |
| "step": 1206 |
| }, |
| { |
| "epoch": 1.8807482462977396, |
| "grad_norm": 0.30497938827386945, |
| "learning_rate": 2.0693641618497112e-05, |
| "loss": 0.383, |
| "step": 1207 |
| }, |
| { |
| "epoch": 1.8823070927513639, |
| "grad_norm": 0.25436146038693197, |
| "learning_rate": 2.0664739884393066e-05, |
| "loss": 0.3692, |
| "step": 1208 |
| }, |
| { |
| "epoch": 1.8838659392049883, |
| "grad_norm": 0.3056404459036848, |
| "learning_rate": 2.063583815028902e-05, |
| "loss": 0.3704, |
| "step": 1209 |
| }, |
| { |
| "epoch": 1.8854247856586126, |
| "grad_norm": 0.26875488415904514, |
| "learning_rate": 2.0606936416184972e-05, |
| "loss": 0.3594, |
| "step": 1210 |
| }, |
| { |
| "epoch": 1.886983632112237, |
| "grad_norm": 0.3511619874207967, |
| "learning_rate": 2.0578034682080926e-05, |
| "loss": 0.3798, |
| "step": 1211 |
| }, |
| { |
| "epoch": 1.8885424785658613, |
| "grad_norm": 0.26158426756531344, |
| "learning_rate": 2.054913294797688e-05, |
| "loss": 0.3856, |
| "step": 1212 |
| }, |
| { |
| "epoch": 1.8901013250194856, |
| "grad_norm": 0.3122928786540747, |
| "learning_rate": 2.0520231213872833e-05, |
| "loss": 0.3677, |
| "step": 1213 |
| }, |
| { |
| "epoch": 1.8916601714731098, |
| "grad_norm": 0.2684224099385814, |
| "learning_rate": 2.0491329479768786e-05, |
| "loss": 0.3534, |
| "step": 1214 |
| }, |
| { |
| "epoch": 1.893219017926734, |
| "grad_norm": 0.2571315731383385, |
| "learning_rate": 2.0462427745664743e-05, |
| "loss": 0.3722, |
| "step": 1215 |
| }, |
| { |
| "epoch": 1.8947778643803586, |
| "grad_norm": 0.25588546444512905, |
| "learning_rate": 2.0433526011560693e-05, |
| "loss": 0.3681, |
| "step": 1216 |
| }, |
| { |
| "epoch": 1.8963367108339828, |
| "grad_norm": 0.3187680200863735, |
| "learning_rate": 2.040462427745665e-05, |
| "loss": 0.3634, |
| "step": 1217 |
| }, |
| { |
| "epoch": 1.8978955572876073, |
| "grad_norm": 0.2611861721574427, |
| "learning_rate": 2.03757225433526e-05, |
| "loss": 0.3879, |
| "step": 1218 |
| }, |
| { |
| "epoch": 1.8994544037412315, |
| "grad_norm": 0.30654388499949137, |
| "learning_rate": 2.0346820809248557e-05, |
| "loss": 0.363, |
| "step": 1219 |
| }, |
| { |
| "epoch": 1.9010132501948558, |
| "grad_norm": 0.32726906906242276, |
| "learning_rate": 2.031791907514451e-05, |
| "loss": 0.369, |
| "step": 1220 |
| }, |
| { |
| "epoch": 1.90257209664848, |
| "grad_norm": 0.28441311387650836, |
| "learning_rate": 2.0289017341040464e-05, |
| "loss": 0.3559, |
| "step": 1221 |
| }, |
| { |
| "epoch": 1.9041309431021043, |
| "grad_norm": 0.2657998910454343, |
| "learning_rate": 2.0260115606936417e-05, |
| "loss": 0.3794, |
| "step": 1222 |
| }, |
| { |
| "epoch": 1.9056897895557288, |
| "grad_norm": 0.31053785670596307, |
| "learning_rate": 2.023121387283237e-05, |
| "loss": 0.3658, |
| "step": 1223 |
| }, |
| { |
| "epoch": 1.9072486360093532, |
| "grad_norm": 0.30798708262091334, |
| "learning_rate": 2.0202312138728324e-05, |
| "loss": 0.3603, |
| "step": 1224 |
| }, |
| { |
| "epoch": 1.9088074824629775, |
| "grad_norm": 0.2523008465363555, |
| "learning_rate": 2.017341040462428e-05, |
| "loss": 0.3724, |
| "step": 1225 |
| }, |
| { |
| "epoch": 1.9103663289166017, |
| "grad_norm": 0.2763374659384913, |
| "learning_rate": 2.014450867052023e-05, |
| "loss": 0.3548, |
| "step": 1226 |
| }, |
| { |
| "epoch": 1.911925175370226, |
| "grad_norm": 0.24964103816057576, |
| "learning_rate": 2.0115606936416188e-05, |
| "loss": 0.3694, |
| "step": 1227 |
| }, |
| { |
| "epoch": 1.9134840218238502, |
| "grad_norm": 0.28584599942242367, |
| "learning_rate": 2.0086705202312138e-05, |
| "loss": 0.3644, |
| "step": 1228 |
| }, |
| { |
| "epoch": 1.9150428682774747, |
| "grad_norm": 0.253224234190821, |
| "learning_rate": 2.0057803468208095e-05, |
| "loss": 0.3651, |
| "step": 1229 |
| }, |
| { |
| "epoch": 1.916601714731099, |
| "grad_norm": 0.2853460997991044, |
| "learning_rate": 2.0028901734104048e-05, |
| "loss": 0.3865, |
| "step": 1230 |
| }, |
| { |
| "epoch": 1.9181605611847234, |
| "grad_norm": 0.2914953043711197, |
| "learning_rate": 2e-05, |
| "loss": 0.3705, |
| "step": 1231 |
| }, |
| { |
| "epoch": 1.9197194076383477, |
| "grad_norm": 0.2703628033375394, |
| "learning_rate": 1.9971098265895955e-05, |
| "loss": 0.35, |
| "step": 1232 |
| }, |
| { |
| "epoch": 1.921278254091972, |
| "grad_norm": 0.3403993339455389, |
| "learning_rate": 1.994219653179191e-05, |
| "loss": 0.367, |
| "step": 1233 |
| }, |
| { |
| "epoch": 1.9228371005455962, |
| "grad_norm": 0.2403088129031985, |
| "learning_rate": 1.9913294797687862e-05, |
| "loss": 0.3554, |
| "step": 1234 |
| }, |
| { |
| "epoch": 1.9243959469992205, |
| "grad_norm": 0.2811341273215568, |
| "learning_rate": 1.988439306358382e-05, |
| "loss": 0.3617, |
| "step": 1235 |
| }, |
| { |
| "epoch": 1.925954793452845, |
| "grad_norm": 0.2878197944500182, |
| "learning_rate": 1.985549132947977e-05, |
| "loss": 0.3574, |
| "step": 1236 |
| }, |
| { |
| "epoch": 1.9275136399064692, |
| "grad_norm": 0.2774310293587101, |
| "learning_rate": 1.9826589595375726e-05, |
| "loss": 0.3748, |
| "step": 1237 |
| }, |
| { |
| "epoch": 1.9290724863600937, |
| "grad_norm": 0.3115605490343856, |
| "learning_rate": 1.9797687861271676e-05, |
| "loss": 0.353, |
| "step": 1238 |
| }, |
| { |
| "epoch": 1.930631332813718, |
| "grad_norm": 0.25590953954693557, |
| "learning_rate": 1.9768786127167633e-05, |
| "loss": 0.3662, |
| "step": 1239 |
| }, |
| { |
| "epoch": 1.9321901792673422, |
| "grad_norm": 0.2555007229781606, |
| "learning_rate": 1.9739884393063586e-05, |
| "loss": 0.3494, |
| "step": 1240 |
| }, |
| { |
| "epoch": 1.9337490257209664, |
| "grad_norm": 0.2691775864628584, |
| "learning_rate": 1.971098265895954e-05, |
| "loss": 0.3695, |
| "step": 1241 |
| }, |
| { |
| "epoch": 1.9353078721745907, |
| "grad_norm": 0.27904072318741047, |
| "learning_rate": 1.9682080924855493e-05, |
| "loss": 0.3553, |
| "step": 1242 |
| }, |
| { |
| "epoch": 1.9368667186282151, |
| "grad_norm": 0.2940422073737695, |
| "learning_rate": 1.9653179190751446e-05, |
| "loss": 0.3732, |
| "step": 1243 |
| }, |
| { |
| "epoch": 1.9384255650818394, |
| "grad_norm": 0.28761825900324317, |
| "learning_rate": 1.96242774566474e-05, |
| "loss": 0.3739, |
| "step": 1244 |
| }, |
| { |
| "epoch": 1.9399844115354639, |
| "grad_norm": 0.30744221511987735, |
| "learning_rate": 1.9595375722543353e-05, |
| "loss": 0.3598, |
| "step": 1245 |
| }, |
| { |
| "epoch": 1.9415432579890881, |
| "grad_norm": 0.2669547151465641, |
| "learning_rate": 1.9566473988439307e-05, |
| "loss": 0.3625, |
| "step": 1246 |
| }, |
| { |
| "epoch": 1.9431021044427124, |
| "grad_norm": 0.3163867104214392, |
| "learning_rate": 1.9537572254335264e-05, |
| "loss": 0.3527, |
| "step": 1247 |
| }, |
| { |
| "epoch": 1.9446609508963366, |
| "grad_norm": 0.2724508920694651, |
| "learning_rate": 1.9508670520231214e-05, |
| "loss": 0.349, |
| "step": 1248 |
| }, |
| { |
| "epoch": 1.9462197973499609, |
| "grad_norm": 0.31987516157242796, |
| "learning_rate": 1.947976878612717e-05, |
| "loss": 0.3578, |
| "step": 1249 |
| }, |
| { |
| "epoch": 1.9477786438035853, |
| "grad_norm": 0.27194855512008154, |
| "learning_rate": 1.9450867052023124e-05, |
| "loss": 0.3717, |
| "step": 1250 |
| }, |
| { |
| "epoch": 1.9493374902572098, |
| "grad_norm": 0.42931537141861814, |
| "learning_rate": 1.9421965317919074e-05, |
| "loss": 0.3762, |
| "step": 1251 |
| }, |
| { |
| "epoch": 1.950896336710834, |
| "grad_norm": 0.263639511648232, |
| "learning_rate": 1.939306358381503e-05, |
| "loss": 0.37, |
| "step": 1252 |
| }, |
| { |
| "epoch": 1.9524551831644583, |
| "grad_norm": 0.3046013215628072, |
| "learning_rate": 1.936416184971098e-05, |
| "loss": 0.3657, |
| "step": 1253 |
| }, |
| { |
| "epoch": 1.9540140296180826, |
| "grad_norm": 0.27822048356909573, |
| "learning_rate": 1.9335260115606938e-05, |
| "loss": 0.3702, |
| "step": 1254 |
| }, |
| { |
| "epoch": 1.9555728760717068, |
| "grad_norm": 0.2503963888170451, |
| "learning_rate": 1.930635838150289e-05, |
| "loss": 0.3533, |
| "step": 1255 |
| }, |
| { |
| "epoch": 1.9571317225253313, |
| "grad_norm": 0.3608440008147506, |
| "learning_rate": 1.9277456647398845e-05, |
| "loss": 0.3617, |
| "step": 1256 |
| }, |
| { |
| "epoch": 1.9586905689789555, |
| "grad_norm": 0.2618137124636811, |
| "learning_rate": 1.9248554913294798e-05, |
| "loss": 0.3536, |
| "step": 1257 |
| }, |
| { |
| "epoch": 1.96024941543258, |
| "grad_norm": 0.31638615142182935, |
| "learning_rate": 1.921965317919075e-05, |
| "loss": 0.3646, |
| "step": 1258 |
| }, |
| { |
| "epoch": 1.9618082618862043, |
| "grad_norm": 0.2902981949466536, |
| "learning_rate": 1.9190751445086705e-05, |
| "loss": 0.3576, |
| "step": 1259 |
| }, |
| { |
| "epoch": 1.9633671083398285, |
| "grad_norm": 0.24681016366367486, |
| "learning_rate": 1.916184971098266e-05, |
| "loss": 0.37, |
| "step": 1260 |
| }, |
| { |
| "epoch": 1.9649259547934528, |
| "grad_norm": 0.30886035269615025, |
| "learning_rate": 1.9132947976878612e-05, |
| "loss": 0.3735, |
| "step": 1261 |
| }, |
| { |
| "epoch": 1.966484801247077, |
| "grad_norm": 0.3261707681560328, |
| "learning_rate": 1.910404624277457e-05, |
| "loss": 0.3802, |
| "step": 1262 |
| }, |
| { |
| "epoch": 1.9680436477007015, |
| "grad_norm": 0.27343056543520194, |
| "learning_rate": 1.907514450867052e-05, |
| "loss": 0.3689, |
| "step": 1263 |
| }, |
| { |
| "epoch": 1.9696024941543258, |
| "grad_norm": 0.2919468083028249, |
| "learning_rate": 1.9046242774566476e-05, |
| "loss": 0.3599, |
| "step": 1264 |
| }, |
| { |
| "epoch": 1.9711613406079502, |
| "grad_norm": 0.2994649798188123, |
| "learning_rate": 1.901734104046243e-05, |
| "loss": 0.3713, |
| "step": 1265 |
| }, |
| { |
| "epoch": 1.9727201870615745, |
| "grad_norm": 0.3564440516448399, |
| "learning_rate": 1.8988439306358382e-05, |
| "loss": 0.3451, |
| "step": 1266 |
| }, |
| { |
| "epoch": 1.9742790335151987, |
| "grad_norm": 0.28761498604656505, |
| "learning_rate": 1.8959537572254336e-05, |
| "loss": 0.3691, |
| "step": 1267 |
| }, |
| { |
| "epoch": 1.975837879968823, |
| "grad_norm": 0.29458672500997857, |
| "learning_rate": 1.893063583815029e-05, |
| "loss": 0.3694, |
| "step": 1268 |
| }, |
| { |
| "epoch": 1.9773967264224472, |
| "grad_norm": 0.28968659606971875, |
| "learning_rate": 1.8901734104046243e-05, |
| "loss": 0.3757, |
| "step": 1269 |
| }, |
| { |
| "epoch": 1.9789555728760717, |
| "grad_norm": 0.25074130760016594, |
| "learning_rate": 1.8872832369942196e-05, |
| "loss": 0.3671, |
| "step": 1270 |
| }, |
| { |
| "epoch": 1.9805144193296962, |
| "grad_norm": 0.2727775661900619, |
| "learning_rate": 1.884393063583815e-05, |
| "loss": 0.3763, |
| "step": 1271 |
| }, |
| { |
| "epoch": 1.9820732657833204, |
| "grad_norm": 0.2540694971749628, |
| "learning_rate": 1.8815028901734106e-05, |
| "loss": 0.361, |
| "step": 1272 |
| }, |
| { |
| "epoch": 1.9836321122369447, |
| "grad_norm": 0.26620323560623, |
| "learning_rate": 1.8786127167630057e-05, |
| "loss": 0.3804, |
| "step": 1273 |
| }, |
| { |
| "epoch": 1.985190958690569, |
| "grad_norm": 0.2770753239201809, |
| "learning_rate": 1.8757225433526013e-05, |
| "loss": 0.3511, |
| "step": 1274 |
| }, |
| { |
| "epoch": 1.9867498051441932, |
| "grad_norm": 0.28316981515390427, |
| "learning_rate": 1.8728323699421967e-05, |
| "loss": 0.3527, |
| "step": 1275 |
| }, |
| { |
| "epoch": 1.9883086515978177, |
| "grad_norm": 0.24806656551525558, |
| "learning_rate": 1.869942196531792e-05, |
| "loss": 0.3888, |
| "step": 1276 |
| }, |
| { |
| "epoch": 1.989867498051442, |
| "grad_norm": 0.2751522114384385, |
| "learning_rate": 1.8670520231213874e-05, |
| "loss": 0.3591, |
| "step": 1277 |
| }, |
| { |
| "epoch": 1.9914263445050664, |
| "grad_norm": 0.29543750469737046, |
| "learning_rate": 1.8641618497109827e-05, |
| "loss": 0.3604, |
| "step": 1278 |
| }, |
| { |
| "epoch": 1.9929851909586906, |
| "grad_norm": 0.27104672954069825, |
| "learning_rate": 1.861271676300578e-05, |
| "loss": 0.3686, |
| "step": 1279 |
| }, |
| { |
| "epoch": 1.994544037412315, |
| "grad_norm": 0.26770020711721565, |
| "learning_rate": 1.8583815028901734e-05, |
| "loss": 0.3919, |
| "step": 1280 |
| }, |
| { |
| "epoch": 1.9961028838659391, |
| "grad_norm": 0.3002772154980313, |
| "learning_rate": 1.8554913294797688e-05, |
| "loss": 0.378, |
| "step": 1281 |
| }, |
| { |
| "epoch": 1.9976617303195634, |
| "grad_norm": 0.2934928063887189, |
| "learning_rate": 1.8526011560693644e-05, |
| "loss": 0.3851, |
| "step": 1282 |
| }, |
| { |
| "epoch": 1.9992205767731879, |
| "grad_norm": 0.26071286159069573, |
| "learning_rate": 1.8497109826589594e-05, |
| "loss": 0.3661, |
| "step": 1283 |
| }, |
| { |
| "epoch": 2.0, |
| "grad_norm": 1.2407353049559013, |
| "learning_rate": 1.846820809248555e-05, |
| "loss": 0.3315, |
| "step": 1284 |
| }, |
| { |
| "epoch": 2.0015588464536243, |
| "grad_norm": 0.3019354121209205, |
| "learning_rate": 1.84393063583815e-05, |
| "loss": 0.3132, |
| "step": 1285 |
| }, |
| { |
| "epoch": 2.0031176929072485, |
| "grad_norm": 0.3221654485813362, |
| "learning_rate": 1.8410404624277458e-05, |
| "loss": 0.3049, |
| "step": 1286 |
| }, |
| { |
| "epoch": 2.0046765393608728, |
| "grad_norm": 0.3107212962226372, |
| "learning_rate": 1.838150289017341e-05, |
| "loss": 0.2962, |
| "step": 1287 |
| }, |
| { |
| "epoch": 2.0062353858144975, |
| "grad_norm": 0.2652451153347466, |
| "learning_rate": 1.8352601156069365e-05, |
| "loss": 0.2848, |
| "step": 1288 |
| }, |
| { |
| "epoch": 2.0077942322681217, |
| "grad_norm": 0.29115331241478903, |
| "learning_rate": 1.832369942196532e-05, |
| "loss": 0.2974, |
| "step": 1289 |
| }, |
| { |
| "epoch": 2.009353078721746, |
| "grad_norm": 0.29937191649859674, |
| "learning_rate": 1.8294797687861272e-05, |
| "loss": 0.28, |
| "step": 1290 |
| }, |
| { |
| "epoch": 2.01091192517537, |
| "grad_norm": 0.3181952793594262, |
| "learning_rate": 1.8265895953757225e-05, |
| "loss": 0.2795, |
| "step": 1291 |
| }, |
| { |
| "epoch": 2.0124707716289945, |
| "grad_norm": 0.3030873579967208, |
| "learning_rate": 1.8236994219653182e-05, |
| "loss": 0.3004, |
| "step": 1292 |
| }, |
| { |
| "epoch": 2.0140296180826187, |
| "grad_norm": 0.30093086334228963, |
| "learning_rate": 1.8208092485549132e-05, |
| "loss": 0.2987, |
| "step": 1293 |
| }, |
| { |
| "epoch": 2.015588464536243, |
| "grad_norm": 0.31992474845831625, |
| "learning_rate": 1.817919075144509e-05, |
| "loss": 0.2921, |
| "step": 1294 |
| }, |
| { |
| "epoch": 2.0171473109898677, |
| "grad_norm": 0.31971945976797334, |
| "learning_rate": 1.815028901734104e-05, |
| "loss": 0.2938, |
| "step": 1295 |
| }, |
| { |
| "epoch": 2.018706157443492, |
| "grad_norm": 0.2784528999275469, |
| "learning_rate": 1.8121387283236996e-05, |
| "loss": 0.2969, |
| "step": 1296 |
| }, |
| { |
| "epoch": 2.020265003897116, |
| "grad_norm": 0.28547739386324084, |
| "learning_rate": 1.809248554913295e-05, |
| "loss": 0.2819, |
| "step": 1297 |
| }, |
| { |
| "epoch": 2.0218238503507404, |
| "grad_norm": 0.35064000763654246, |
| "learning_rate": 1.8063583815028903e-05, |
| "loss": 0.2886, |
| "step": 1298 |
| }, |
| { |
| "epoch": 2.0233826968043647, |
| "grad_norm": 0.298377942201609, |
| "learning_rate": 1.8034682080924856e-05, |
| "loss": 0.2888, |
| "step": 1299 |
| }, |
| { |
| "epoch": 2.024941543257989, |
| "grad_norm": 0.2741254018613014, |
| "learning_rate": 1.800578034682081e-05, |
| "loss": 0.2846, |
| "step": 1300 |
| }, |
| { |
| "epoch": 2.0265003897116136, |
| "grad_norm": 0.3278384877642372, |
| "learning_rate": 1.7976878612716763e-05, |
| "loss": 0.2996, |
| "step": 1301 |
| }, |
| { |
| "epoch": 2.028059236165238, |
| "grad_norm": 0.31062098942293614, |
| "learning_rate": 1.794797687861272e-05, |
| "loss": 0.3004, |
| "step": 1302 |
| }, |
| { |
| "epoch": 2.029618082618862, |
| "grad_norm": 0.2840675944649226, |
| "learning_rate": 1.791907514450867e-05, |
| "loss": 0.2915, |
| "step": 1303 |
| }, |
| { |
| "epoch": 2.0311769290724864, |
| "grad_norm": 0.3066133656454931, |
| "learning_rate": 1.7890173410404627e-05, |
| "loss": 0.2887, |
| "step": 1304 |
| }, |
| { |
| "epoch": 2.0327357755261106, |
| "grad_norm": 0.2729351456670734, |
| "learning_rate": 1.7861271676300577e-05, |
| "loss": 0.2984, |
| "step": 1305 |
| }, |
| { |
| "epoch": 2.034294621979735, |
| "grad_norm": 0.2636535310871996, |
| "learning_rate": 1.7832369942196534e-05, |
| "loss": 0.291, |
| "step": 1306 |
| }, |
| { |
| "epoch": 2.035853468433359, |
| "grad_norm": 0.2964005719112254, |
| "learning_rate": 1.7803468208092487e-05, |
| "loss": 0.2878, |
| "step": 1307 |
| }, |
| { |
| "epoch": 2.037412314886984, |
| "grad_norm": 0.2362770470391929, |
| "learning_rate": 1.777456647398844e-05, |
| "loss": 0.2889, |
| "step": 1308 |
| }, |
| { |
| "epoch": 2.038971161340608, |
| "grad_norm": 0.2877360765498249, |
| "learning_rate": 1.7745664739884394e-05, |
| "loss": 0.306, |
| "step": 1309 |
| }, |
| { |
| "epoch": 2.0405300077942323, |
| "grad_norm": 0.26635846301125965, |
| "learning_rate": 1.7716763005780348e-05, |
| "loss": 0.2835, |
| "step": 1310 |
| }, |
| { |
| "epoch": 2.0420888542478566, |
| "grad_norm": 0.2364083261820846, |
| "learning_rate": 1.76878612716763e-05, |
| "loss": 0.2813, |
| "step": 1311 |
| }, |
| { |
| "epoch": 2.043647700701481, |
| "grad_norm": 0.25071829512152555, |
| "learning_rate": 1.7658959537572255e-05, |
| "loss": 0.2722, |
| "step": 1312 |
| }, |
| { |
| "epoch": 2.045206547155105, |
| "grad_norm": 0.2640068234739541, |
| "learning_rate": 1.7630057803468208e-05, |
| "loss": 0.3078, |
| "step": 1313 |
| }, |
| { |
| "epoch": 2.0467653936087293, |
| "grad_norm": 0.2733838421400439, |
| "learning_rate": 1.7601156069364165e-05, |
| "loss": 0.2936, |
| "step": 1314 |
| }, |
| { |
| "epoch": 2.048324240062354, |
| "grad_norm": 0.2550755795463892, |
| "learning_rate": 1.7572254335260115e-05, |
| "loss": 0.2819, |
| "step": 1315 |
| }, |
| { |
| "epoch": 2.0498830865159783, |
| "grad_norm": 0.25545376074388176, |
| "learning_rate": 1.754335260115607e-05, |
| "loss": 0.2823, |
| "step": 1316 |
| }, |
| { |
| "epoch": 2.0514419329696025, |
| "grad_norm": 0.22661427370917822, |
| "learning_rate": 1.7514450867052025e-05, |
| "loss": 0.2824, |
| "step": 1317 |
| }, |
| { |
| "epoch": 2.053000779423227, |
| "grad_norm": 0.29774507660120497, |
| "learning_rate": 1.748554913294798e-05, |
| "loss": 0.2838, |
| "step": 1318 |
| }, |
| { |
| "epoch": 2.054559625876851, |
| "grad_norm": 0.2598671788363091, |
| "learning_rate": 1.7456647398843932e-05, |
| "loss": 0.2903, |
| "step": 1319 |
| }, |
| { |
| "epoch": 2.0561184723304753, |
| "grad_norm": 0.2519961479463723, |
| "learning_rate": 1.7427745664739885e-05, |
| "loss": 0.2571, |
| "step": 1320 |
| }, |
| { |
| "epoch": 2.0576773187841, |
| "grad_norm": 0.25326241935804067, |
| "learning_rate": 1.739884393063584e-05, |
| "loss": 0.2977, |
| "step": 1321 |
| }, |
| { |
| "epoch": 2.0592361652377242, |
| "grad_norm": 0.24057089058554867, |
| "learning_rate": 1.7369942196531792e-05, |
| "loss": 0.2866, |
| "step": 1322 |
| }, |
| { |
| "epoch": 2.0607950116913485, |
| "grad_norm": 0.22295403456879342, |
| "learning_rate": 1.7341040462427746e-05, |
| "loss": 0.28, |
| "step": 1323 |
| }, |
| { |
| "epoch": 2.0623538581449727, |
| "grad_norm": 0.2868118439567236, |
| "learning_rate": 1.7312138728323703e-05, |
| "loss": 0.3044, |
| "step": 1324 |
| }, |
| { |
| "epoch": 2.063912704598597, |
| "grad_norm": 0.2708711985497848, |
| "learning_rate": 1.7283236994219653e-05, |
| "loss": 0.2823, |
| "step": 1325 |
| }, |
| { |
| "epoch": 2.0654715510522212, |
| "grad_norm": 0.42537603462611046, |
| "learning_rate": 1.725433526011561e-05, |
| "loss": 0.2984, |
| "step": 1326 |
| }, |
| { |
| "epoch": 2.0670303975058455, |
| "grad_norm": 0.25936897272013576, |
| "learning_rate": 1.722543352601156e-05, |
| "loss": 0.2996, |
| "step": 1327 |
| }, |
| { |
| "epoch": 2.06858924395947, |
| "grad_norm": 0.29205530294187826, |
| "learning_rate": 1.7196531791907516e-05, |
| "loss": 0.2815, |
| "step": 1328 |
| }, |
| { |
| "epoch": 2.0701480904130944, |
| "grad_norm": 0.2498440090336756, |
| "learning_rate": 1.716763005780347e-05, |
| "loss": 0.2908, |
| "step": 1329 |
| }, |
| { |
| "epoch": 2.0717069368667187, |
| "grad_norm": 0.2803332912261779, |
| "learning_rate": 1.7138728323699423e-05, |
| "loss": 0.2919, |
| "step": 1330 |
| }, |
| { |
| "epoch": 2.073265783320343, |
| "grad_norm": 0.2714375808140227, |
| "learning_rate": 1.7109826589595377e-05, |
| "loss": 0.285, |
| "step": 1331 |
| }, |
| { |
| "epoch": 2.074824629773967, |
| "grad_norm": 0.23047443561287922, |
| "learning_rate": 1.708092485549133e-05, |
| "loss": 0.2982, |
| "step": 1332 |
| }, |
| { |
| "epoch": 2.0763834762275915, |
| "grad_norm": 0.27974401775468655, |
| "learning_rate": 1.7052023121387284e-05, |
| "loss": 0.2816, |
| "step": 1333 |
| }, |
| { |
| "epoch": 2.0779423226812157, |
| "grad_norm": 0.27728883446303665, |
| "learning_rate": 1.702312138728324e-05, |
| "loss": 0.2874, |
| "step": 1334 |
| }, |
| { |
| "epoch": 2.0795011691348404, |
| "grad_norm": 0.22428181416464069, |
| "learning_rate": 1.699421965317919e-05, |
| "loss": 0.2959, |
| "step": 1335 |
| }, |
| { |
| "epoch": 2.0810600155884647, |
| "grad_norm": 0.4070173071535244, |
| "learning_rate": 1.6965317919075147e-05, |
| "loss": 0.2853, |
| "step": 1336 |
| }, |
| { |
| "epoch": 2.082618862042089, |
| "grad_norm": 0.33199185273932175, |
| "learning_rate": 1.6936416184971097e-05, |
| "loss": 0.2825, |
| "step": 1337 |
| }, |
| { |
| "epoch": 2.084177708495713, |
| "grad_norm": 0.23889026155636925, |
| "learning_rate": 1.6907514450867054e-05, |
| "loss": 0.2878, |
| "step": 1338 |
| }, |
| { |
| "epoch": 2.0857365549493374, |
| "grad_norm": 0.2588106933535315, |
| "learning_rate": 1.6878612716763008e-05, |
| "loss": 0.291, |
| "step": 1339 |
| }, |
| { |
| "epoch": 2.0872954014029617, |
| "grad_norm": 0.2521130177780321, |
| "learning_rate": 1.684971098265896e-05, |
| "loss": 0.2927, |
| "step": 1340 |
| }, |
| { |
| "epoch": 2.0888542478565864, |
| "grad_norm": 0.28556085649359164, |
| "learning_rate": 1.6820809248554915e-05, |
| "loss": 0.3141, |
| "step": 1341 |
| }, |
| { |
| "epoch": 2.0904130943102106, |
| "grad_norm": 0.23613222180841584, |
| "learning_rate": 1.6791907514450868e-05, |
| "loss": 0.2861, |
| "step": 1342 |
| }, |
| { |
| "epoch": 2.091971940763835, |
| "grad_norm": 0.24684587451326986, |
| "learning_rate": 1.676300578034682e-05, |
| "loss": 0.2853, |
| "step": 1343 |
| }, |
| { |
| "epoch": 2.093530787217459, |
| "grad_norm": 0.23325395667135312, |
| "learning_rate": 1.673410404624278e-05, |
| "loss": 0.2908, |
| "step": 1344 |
| }, |
| { |
| "epoch": 2.0950896336710834, |
| "grad_norm": 0.2594871779004086, |
| "learning_rate": 1.670520231213873e-05, |
| "loss": 0.3087, |
| "step": 1345 |
| }, |
| { |
| "epoch": 2.0966484801247076, |
| "grad_norm": 0.23321943315628127, |
| "learning_rate": 1.6676300578034685e-05, |
| "loss": 0.2958, |
| "step": 1346 |
| }, |
| { |
| "epoch": 2.098207326578332, |
| "grad_norm": 0.23482451299773394, |
| "learning_rate": 1.6647398843930635e-05, |
| "loss": 0.3037, |
| "step": 1347 |
| }, |
| { |
| "epoch": 2.0997661730319566, |
| "grad_norm": 0.2237656119837914, |
| "learning_rate": 1.661849710982659e-05, |
| "loss": 0.2788, |
| "step": 1348 |
| }, |
| { |
| "epoch": 2.101325019485581, |
| "grad_norm": 0.25698435105220185, |
| "learning_rate": 1.6589595375722546e-05, |
| "loss": 0.2747, |
| "step": 1349 |
| }, |
| { |
| "epoch": 2.102883865939205, |
| "grad_norm": 0.23963767268717012, |
| "learning_rate": 1.6560693641618496e-05, |
| "loss": 0.292, |
| "step": 1350 |
| }, |
| { |
| "epoch": 2.1044427123928293, |
| "grad_norm": 0.2588056696749546, |
| "learning_rate": 1.6531791907514452e-05, |
| "loss": 0.3035, |
| "step": 1351 |
| }, |
| { |
| "epoch": 2.1060015588464536, |
| "grad_norm": 0.25164368263083836, |
| "learning_rate": 1.6502890173410403e-05, |
| "loss": 0.297, |
| "step": 1352 |
| }, |
| { |
| "epoch": 2.107560405300078, |
| "grad_norm": 0.2570592630746674, |
| "learning_rate": 1.647398843930636e-05, |
| "loss": 0.2982, |
| "step": 1353 |
| }, |
| { |
| "epoch": 2.109119251753702, |
| "grad_norm": 0.2848315307917638, |
| "learning_rate": 1.6445086705202313e-05, |
| "loss": 0.3007, |
| "step": 1354 |
| }, |
| { |
| "epoch": 2.1106780982073268, |
| "grad_norm": 0.2634643134825361, |
| "learning_rate": 1.6416184971098266e-05, |
| "loss": 0.2818, |
| "step": 1355 |
| }, |
| { |
| "epoch": 2.112236944660951, |
| "grad_norm": 0.25838881698032484, |
| "learning_rate": 1.638728323699422e-05, |
| "loss": 0.2675, |
| "step": 1356 |
| }, |
| { |
| "epoch": 2.1137957911145753, |
| "grad_norm": 0.2438857581178678, |
| "learning_rate": 1.6358381502890173e-05, |
| "loss": 0.2931, |
| "step": 1357 |
| }, |
| { |
| "epoch": 2.1153546375681995, |
| "grad_norm": 0.23647456464356711, |
| "learning_rate": 1.6329479768786127e-05, |
| "loss": 0.2758, |
| "step": 1358 |
| }, |
| { |
| "epoch": 2.1169134840218238, |
| "grad_norm": 0.245819384339159, |
| "learning_rate": 1.6300578034682083e-05, |
| "loss": 0.2935, |
| "step": 1359 |
| }, |
| { |
| "epoch": 2.118472330475448, |
| "grad_norm": 0.21611647939557374, |
| "learning_rate": 1.6271676300578033e-05, |
| "loss": 0.2761, |
| "step": 1360 |
| }, |
| { |
| "epoch": 2.1200311769290723, |
| "grad_norm": 0.24308227386319844, |
| "learning_rate": 1.624277456647399e-05, |
| "loss": 0.2911, |
| "step": 1361 |
| }, |
| { |
| "epoch": 2.121590023382697, |
| "grad_norm": 0.28166564601668537, |
| "learning_rate": 1.621387283236994e-05, |
| "loss": 0.291, |
| "step": 1362 |
| }, |
| { |
| "epoch": 2.1231488698363212, |
| "grad_norm": 0.2333910024555345, |
| "learning_rate": 1.6184971098265897e-05, |
| "loss": 0.282, |
| "step": 1363 |
| }, |
| { |
| "epoch": 2.1247077162899455, |
| "grad_norm": 0.24801232636156478, |
| "learning_rate": 1.615606936416185e-05, |
| "loss": 0.286, |
| "step": 1364 |
| }, |
| { |
| "epoch": 2.1262665627435697, |
| "grad_norm": 0.2529976131043813, |
| "learning_rate": 1.6127167630057804e-05, |
| "loss": 0.2907, |
| "step": 1365 |
| }, |
| { |
| "epoch": 2.127825409197194, |
| "grad_norm": 0.24496746678348746, |
| "learning_rate": 1.6098265895953758e-05, |
| "loss": 0.3096, |
| "step": 1366 |
| }, |
| { |
| "epoch": 2.1293842556508182, |
| "grad_norm": 0.22826596743843666, |
| "learning_rate": 1.606936416184971e-05, |
| "loss": 0.2837, |
| "step": 1367 |
| }, |
| { |
| "epoch": 2.1309431021044425, |
| "grad_norm": 0.22953989118587162, |
| "learning_rate": 1.6040462427745664e-05, |
| "loss": 0.3016, |
| "step": 1368 |
| }, |
| { |
| "epoch": 2.132501948558067, |
| "grad_norm": 0.24938076633148093, |
| "learning_rate": 1.601156069364162e-05, |
| "loss": 0.2938, |
| "step": 1369 |
| }, |
| { |
| "epoch": 2.1340607950116914, |
| "grad_norm": 0.21892102115489234, |
| "learning_rate": 1.598265895953757e-05, |
| "loss": 0.2815, |
| "step": 1370 |
| }, |
| { |
| "epoch": 2.1356196414653157, |
| "grad_norm": 0.2590092923549659, |
| "learning_rate": 1.5953757225433528e-05, |
| "loss": 0.2954, |
| "step": 1371 |
| }, |
| { |
| "epoch": 2.13717848791894, |
| "grad_norm": 0.24890666085552407, |
| "learning_rate": 1.5924855491329478e-05, |
| "loss": 0.2906, |
| "step": 1372 |
| }, |
| { |
| "epoch": 2.138737334372564, |
| "grad_norm": 0.2266359560410207, |
| "learning_rate": 1.5895953757225435e-05, |
| "loss": 0.2817, |
| "step": 1373 |
| }, |
| { |
| "epoch": 2.1402961808261884, |
| "grad_norm": 0.24930550949426433, |
| "learning_rate": 1.586705202312139e-05, |
| "loss": 0.2902, |
| "step": 1374 |
| }, |
| { |
| "epoch": 2.141855027279813, |
| "grad_norm": 0.22270650770572073, |
| "learning_rate": 1.5838150289017342e-05, |
| "loss": 0.2963, |
| "step": 1375 |
| }, |
| { |
| "epoch": 2.1434138737334374, |
| "grad_norm": 0.21082837821856862, |
| "learning_rate": 1.5809248554913295e-05, |
| "loss": 0.2823, |
| "step": 1376 |
| }, |
| { |
| "epoch": 2.1449727201870616, |
| "grad_norm": 0.2506283104261599, |
| "learning_rate": 1.578034682080925e-05, |
| "loss": 0.3003, |
| "step": 1377 |
| }, |
| { |
| "epoch": 2.146531566640686, |
| "grad_norm": 0.2389054933616818, |
| "learning_rate": 1.5751445086705202e-05, |
| "loss": 0.2885, |
| "step": 1378 |
| }, |
| { |
| "epoch": 2.14809041309431, |
| "grad_norm": 0.2334569505339403, |
| "learning_rate": 1.5722543352601156e-05, |
| "loss": 0.3052, |
| "step": 1379 |
| }, |
| { |
| "epoch": 2.1496492595479344, |
| "grad_norm": 0.23175590298956983, |
| "learning_rate": 1.569364161849711e-05, |
| "loss": 0.2739, |
| "step": 1380 |
| }, |
| { |
| "epoch": 2.1512081060015587, |
| "grad_norm": 0.23033041297872123, |
| "learning_rate": 1.5664739884393066e-05, |
| "loss": 0.286, |
| "step": 1381 |
| }, |
| { |
| "epoch": 2.1527669524551833, |
| "grad_norm": 0.2465779921606415, |
| "learning_rate": 1.5635838150289016e-05, |
| "loss": 0.3009, |
| "step": 1382 |
| }, |
| { |
| "epoch": 2.1543257989088076, |
| "grad_norm": 0.21179227741678436, |
| "learning_rate": 1.5606936416184973e-05, |
| "loss": 0.2847, |
| "step": 1383 |
| }, |
| { |
| "epoch": 2.155884645362432, |
| "grad_norm": 0.25428536535374435, |
| "learning_rate": 1.5578034682080926e-05, |
| "loss": 0.2954, |
| "step": 1384 |
| }, |
| { |
| "epoch": 2.157443491816056, |
| "grad_norm": 0.22930157749520183, |
| "learning_rate": 1.554913294797688e-05, |
| "loss": 0.2818, |
| "step": 1385 |
| }, |
| { |
| "epoch": 2.1590023382696804, |
| "grad_norm": 0.23105202022376808, |
| "learning_rate": 1.5520231213872833e-05, |
| "loss": 0.3053, |
| "step": 1386 |
| }, |
| { |
| "epoch": 2.1605611847233046, |
| "grad_norm": 0.23246069929983043, |
| "learning_rate": 1.5491329479768787e-05, |
| "loss": 0.2803, |
| "step": 1387 |
| }, |
| { |
| "epoch": 2.1621200311769293, |
| "grad_norm": 0.22704083693916596, |
| "learning_rate": 1.546242774566474e-05, |
| "loss": 0.2891, |
| "step": 1388 |
| }, |
| { |
| "epoch": 2.1636788776305536, |
| "grad_norm": 0.2540369139980788, |
| "learning_rate": 1.5433526011560694e-05, |
| "loss": 0.2847, |
| "step": 1389 |
| }, |
| { |
| "epoch": 2.165237724084178, |
| "grad_norm": 0.21737272977125724, |
| "learning_rate": 1.5404624277456647e-05, |
| "loss": 0.2681, |
| "step": 1390 |
| }, |
| { |
| "epoch": 2.166796570537802, |
| "grad_norm": 0.2515481263822816, |
| "learning_rate": 1.5375722543352604e-05, |
| "loss": 0.2763, |
| "step": 1391 |
| }, |
| { |
| "epoch": 2.1683554169914263, |
| "grad_norm": 0.26477321713964047, |
| "learning_rate": 1.5346820809248554e-05, |
| "loss": 0.2808, |
| "step": 1392 |
| }, |
| { |
| "epoch": 2.1699142634450506, |
| "grad_norm": 0.22854862492021247, |
| "learning_rate": 1.531791907514451e-05, |
| "loss": 0.2943, |
| "step": 1393 |
| }, |
| { |
| "epoch": 2.171473109898675, |
| "grad_norm": 0.27158405015797865, |
| "learning_rate": 1.528901734104046e-05, |
| "loss": 0.2925, |
| "step": 1394 |
| }, |
| { |
| "epoch": 2.1730319563522995, |
| "grad_norm": 0.2347337281042923, |
| "learning_rate": 1.5260115606936418e-05, |
| "loss": 0.2943, |
| "step": 1395 |
| }, |
| { |
| "epoch": 2.1745908028059238, |
| "grad_norm": 0.23975916222494428, |
| "learning_rate": 1.523121387283237e-05, |
| "loss": 0.2804, |
| "step": 1396 |
| }, |
| { |
| "epoch": 2.176149649259548, |
| "grad_norm": 0.22779116644041048, |
| "learning_rate": 1.5202312138728325e-05, |
| "loss": 0.2847, |
| "step": 1397 |
| }, |
| { |
| "epoch": 2.1777084957131723, |
| "grad_norm": 0.2576587147864288, |
| "learning_rate": 1.5173410404624278e-05, |
| "loss": 0.2996, |
| "step": 1398 |
| }, |
| { |
| "epoch": 2.1792673421667965, |
| "grad_norm": 0.21115000700822473, |
| "learning_rate": 1.5144508670520233e-05, |
| "loss": 0.2787, |
| "step": 1399 |
| }, |
| { |
| "epoch": 2.1808261886204208, |
| "grad_norm": 0.24108973804933587, |
| "learning_rate": 1.5115606936416185e-05, |
| "loss": 0.287, |
| "step": 1400 |
| }, |
| { |
| "epoch": 2.182385035074045, |
| "grad_norm": 0.24288289350228465, |
| "learning_rate": 1.508670520231214e-05, |
| "loss": 0.2852, |
| "step": 1401 |
| }, |
| { |
| "epoch": 2.1839438815276697, |
| "grad_norm": 0.23706474437913794, |
| "learning_rate": 1.5057803468208092e-05, |
| "loss": 0.2968, |
| "step": 1402 |
| }, |
| { |
| "epoch": 2.185502727981294, |
| "grad_norm": 0.2183208225549947, |
| "learning_rate": 1.5028901734104049e-05, |
| "loss": 0.2865, |
| "step": 1403 |
| }, |
| { |
| "epoch": 2.187061574434918, |
| "grad_norm": 0.24253894248682953, |
| "learning_rate": 1.5e-05, |
| "loss": 0.2847, |
| "step": 1404 |
| }, |
| { |
| "epoch": 2.1886204208885425, |
| "grad_norm": 0.22598056970641814, |
| "learning_rate": 1.4971098265895956e-05, |
| "loss": 0.2982, |
| "step": 1405 |
| }, |
| { |
| "epoch": 2.1901792673421667, |
| "grad_norm": 0.2525617645303132, |
| "learning_rate": 1.4942196531791907e-05, |
| "loss": 0.2861, |
| "step": 1406 |
| }, |
| { |
| "epoch": 2.191738113795791, |
| "grad_norm": 0.2192754370403558, |
| "learning_rate": 1.4913294797687862e-05, |
| "loss": 0.2872, |
| "step": 1407 |
| }, |
| { |
| "epoch": 2.1932969602494152, |
| "grad_norm": 0.22900250201312447, |
| "learning_rate": 1.4884393063583816e-05, |
| "loss": 0.2845, |
| "step": 1408 |
| }, |
| { |
| "epoch": 2.19485580670304, |
| "grad_norm": 0.30054519236450217, |
| "learning_rate": 1.4855491329479771e-05, |
| "loss": 0.2943, |
| "step": 1409 |
| }, |
| { |
| "epoch": 2.196414653156664, |
| "grad_norm": 0.24441363373477734, |
| "learning_rate": 1.4826589595375723e-05, |
| "loss": 0.2975, |
| "step": 1410 |
| }, |
| { |
| "epoch": 2.1979734996102884, |
| "grad_norm": 0.2267613912682233, |
| "learning_rate": 1.4797687861271678e-05, |
| "loss": 0.3009, |
| "step": 1411 |
| }, |
| { |
| "epoch": 2.1995323460639127, |
| "grad_norm": 0.21100387037469506, |
| "learning_rate": 1.476878612716763e-05, |
| "loss": 0.2971, |
| "step": 1412 |
| }, |
| { |
| "epoch": 2.201091192517537, |
| "grad_norm": 0.21361711019848872, |
| "learning_rate": 1.4739884393063585e-05, |
| "loss": 0.2986, |
| "step": 1413 |
| }, |
| { |
| "epoch": 2.202650038971161, |
| "grad_norm": 0.23486378364056842, |
| "learning_rate": 1.4710982658959538e-05, |
| "loss": 0.2983, |
| "step": 1414 |
| }, |
| { |
| "epoch": 2.2042088854247854, |
| "grad_norm": 0.2194691571773862, |
| "learning_rate": 1.4682080924855493e-05, |
| "loss": 0.2805, |
| "step": 1415 |
| }, |
| { |
| "epoch": 2.20576773187841, |
| "grad_norm": 0.22839671067672832, |
| "learning_rate": 1.4653179190751445e-05, |
| "loss": 0.2821, |
| "step": 1416 |
| }, |
| { |
| "epoch": 2.2073265783320344, |
| "grad_norm": 0.22721582376446814, |
| "learning_rate": 1.46242774566474e-05, |
| "loss": 0.2883, |
| "step": 1417 |
| }, |
| { |
| "epoch": 2.2088854247856586, |
| "grad_norm": 0.21273654291505045, |
| "learning_rate": 1.4595375722543354e-05, |
| "loss": 0.2871, |
| "step": 1418 |
| }, |
| { |
| "epoch": 2.210444271239283, |
| "grad_norm": 0.22181506285423208, |
| "learning_rate": 1.4566473988439309e-05, |
| "loss": 0.2937, |
| "step": 1419 |
| }, |
| { |
| "epoch": 2.212003117692907, |
| "grad_norm": 0.21682833270568624, |
| "learning_rate": 1.453757225433526e-05, |
| "loss": 0.295, |
| "step": 1420 |
| }, |
| { |
| "epoch": 2.2135619641465314, |
| "grad_norm": 0.2544117406843311, |
| "learning_rate": 1.4508670520231216e-05, |
| "loss": 0.2959, |
| "step": 1421 |
| }, |
| { |
| "epoch": 2.215120810600156, |
| "grad_norm": 0.2496480472797846, |
| "learning_rate": 1.4479768786127167e-05, |
| "loss": 0.2955, |
| "step": 1422 |
| }, |
| { |
| "epoch": 2.2166796570537803, |
| "grad_norm": 0.25230712728942756, |
| "learning_rate": 1.4450867052023123e-05, |
| "loss": 0.2834, |
| "step": 1423 |
| }, |
| { |
| "epoch": 2.2182385035074046, |
| "grad_norm": 0.22497012216327553, |
| "learning_rate": 1.4421965317919076e-05, |
| "loss": 0.292, |
| "step": 1424 |
| }, |
| { |
| "epoch": 2.219797349961029, |
| "grad_norm": 0.2374016981159356, |
| "learning_rate": 1.4393063583815031e-05, |
| "loss": 0.2825, |
| "step": 1425 |
| }, |
| { |
| "epoch": 2.221356196414653, |
| "grad_norm": 0.25465184050475065, |
| "learning_rate": 1.4364161849710983e-05, |
| "loss": 0.2903, |
| "step": 1426 |
| }, |
| { |
| "epoch": 2.2229150428682773, |
| "grad_norm": 0.23870397107279917, |
| "learning_rate": 1.4335260115606938e-05, |
| "loss": 0.2936, |
| "step": 1427 |
| }, |
| { |
| "epoch": 2.2244738893219016, |
| "grad_norm": 0.2318933878119134, |
| "learning_rate": 1.430635838150289e-05, |
| "loss": 0.2849, |
| "step": 1428 |
| }, |
| { |
| "epoch": 2.2260327357755263, |
| "grad_norm": 0.23210851294757337, |
| "learning_rate": 1.4277456647398845e-05, |
| "loss": 0.2919, |
| "step": 1429 |
| }, |
| { |
| "epoch": 2.2275915822291505, |
| "grad_norm": 0.20510591201151088, |
| "learning_rate": 1.4248554913294798e-05, |
| "loss": 0.291, |
| "step": 1430 |
| }, |
| { |
| "epoch": 2.229150428682775, |
| "grad_norm": 0.2254853152020144, |
| "learning_rate": 1.4219653179190754e-05, |
| "loss": 0.2855, |
| "step": 1431 |
| }, |
| { |
| "epoch": 2.230709275136399, |
| "grad_norm": 0.22024099508735234, |
| "learning_rate": 1.4190751445086705e-05, |
| "loss": 0.2927, |
| "step": 1432 |
| }, |
| { |
| "epoch": 2.2322681215900233, |
| "grad_norm": 0.26043420023310093, |
| "learning_rate": 1.416184971098266e-05, |
| "loss": 0.3052, |
| "step": 1433 |
| }, |
| { |
| "epoch": 2.2338269680436476, |
| "grad_norm": 0.2133056223198868, |
| "learning_rate": 1.4132947976878614e-05, |
| "loss": 0.2927, |
| "step": 1434 |
| }, |
| { |
| "epoch": 2.2353858144972722, |
| "grad_norm": 0.2259340299782071, |
| "learning_rate": 1.4104046242774569e-05, |
| "loss": 0.2857, |
| "step": 1435 |
| }, |
| { |
| "epoch": 2.2369446609508965, |
| "grad_norm": 0.22200399124864398, |
| "learning_rate": 1.407514450867052e-05, |
| "loss": 0.2833, |
| "step": 1436 |
| }, |
| { |
| "epoch": 2.2385035074045208, |
| "grad_norm": 0.22962299042832857, |
| "learning_rate": 1.4046242774566476e-05, |
| "loss": 0.2799, |
| "step": 1437 |
| }, |
| { |
| "epoch": 2.240062353858145, |
| "grad_norm": 0.21438991547193448, |
| "learning_rate": 1.4017341040462428e-05, |
| "loss": 0.2859, |
| "step": 1438 |
| }, |
| { |
| "epoch": 2.2416212003117693, |
| "grad_norm": 0.22069076358373807, |
| "learning_rate": 1.3988439306358383e-05, |
| "loss": 0.2902, |
| "step": 1439 |
| }, |
| { |
| "epoch": 2.2431800467653935, |
| "grad_norm": 0.21552495322148416, |
| "learning_rate": 1.3959537572254336e-05, |
| "loss": 0.2717, |
| "step": 1440 |
| }, |
| { |
| "epoch": 2.2447388932190178, |
| "grad_norm": 0.22292189496523734, |
| "learning_rate": 1.3930635838150291e-05, |
| "loss": 0.2837, |
| "step": 1441 |
| }, |
| { |
| "epoch": 2.2462977396726425, |
| "grad_norm": 0.2235003322526891, |
| "learning_rate": 1.3901734104046243e-05, |
| "loss": 0.2785, |
| "step": 1442 |
| }, |
| { |
| "epoch": 2.2478565861262667, |
| "grad_norm": 0.21304911072284627, |
| "learning_rate": 1.3872832369942197e-05, |
| "loss": 0.2879, |
| "step": 1443 |
| }, |
| { |
| "epoch": 2.249415432579891, |
| "grad_norm": 0.25029914205006026, |
| "learning_rate": 1.3843930635838152e-05, |
| "loss": 0.2988, |
| "step": 1444 |
| }, |
| { |
| "epoch": 2.250974279033515, |
| "grad_norm": 0.2165276488818481, |
| "learning_rate": 1.3815028901734104e-05, |
| "loss": 0.272, |
| "step": 1445 |
| }, |
| { |
| "epoch": 2.2525331254871395, |
| "grad_norm": 0.24444489582018986, |
| "learning_rate": 1.3786127167630059e-05, |
| "loss": 0.3014, |
| "step": 1446 |
| }, |
| { |
| "epoch": 2.2540919719407637, |
| "grad_norm": 0.24771789944118036, |
| "learning_rate": 1.375722543352601e-05, |
| "loss": 0.2984, |
| "step": 1447 |
| }, |
| { |
| "epoch": 2.255650818394388, |
| "grad_norm": 0.23706904927685987, |
| "learning_rate": 1.3728323699421966e-05, |
| "loss": 0.3033, |
| "step": 1448 |
| }, |
| { |
| "epoch": 2.2572096648480127, |
| "grad_norm": 0.23835195294951741, |
| "learning_rate": 1.3699421965317919e-05, |
| "loss": 0.2774, |
| "step": 1449 |
| }, |
| { |
| "epoch": 2.258768511301637, |
| "grad_norm": 0.2492838137926293, |
| "learning_rate": 1.3670520231213874e-05, |
| "loss": 0.2843, |
| "step": 1450 |
| }, |
| { |
| "epoch": 2.260327357755261, |
| "grad_norm": 0.22427981907836916, |
| "learning_rate": 1.3641618497109826e-05, |
| "loss": 0.2862, |
| "step": 1451 |
| }, |
| { |
| "epoch": 2.2618862042088854, |
| "grad_norm": 0.2425076666411249, |
| "learning_rate": 1.3612716763005781e-05, |
| "loss": 0.2908, |
| "step": 1452 |
| }, |
| { |
| "epoch": 2.2634450506625097, |
| "grad_norm": 0.2686851435498773, |
| "learning_rate": 1.3583815028901733e-05, |
| "loss": 0.2853, |
| "step": 1453 |
| }, |
| { |
| "epoch": 2.265003897116134, |
| "grad_norm": 0.23258321364166276, |
| "learning_rate": 1.3554913294797688e-05, |
| "loss": 0.292, |
| "step": 1454 |
| }, |
| { |
| "epoch": 2.266562743569758, |
| "grad_norm": 0.2178252049659836, |
| "learning_rate": 1.3526011560693641e-05, |
| "loss": 0.2798, |
| "step": 1455 |
| }, |
| { |
| "epoch": 2.268121590023383, |
| "grad_norm": 0.23464644401450185, |
| "learning_rate": 1.3497109826589597e-05, |
| "loss": 0.279, |
| "step": 1456 |
| }, |
| { |
| "epoch": 2.269680436477007, |
| "grad_norm": 0.25551325960953597, |
| "learning_rate": 1.3468208092485548e-05, |
| "loss": 0.2997, |
| "step": 1457 |
| }, |
| { |
| "epoch": 2.2712392829306314, |
| "grad_norm": 0.2352887691568654, |
| "learning_rate": 1.3439306358381503e-05, |
| "loss": 0.2843, |
| "step": 1458 |
| }, |
| { |
| "epoch": 2.2727981293842556, |
| "grad_norm": 0.24160307618137192, |
| "learning_rate": 1.3410404624277457e-05, |
| "loss": 0.2979, |
| "step": 1459 |
| }, |
| { |
| "epoch": 2.27435697583788, |
| "grad_norm": 0.247635002623451, |
| "learning_rate": 1.3381502890173412e-05, |
| "loss": 0.2819, |
| "step": 1460 |
| }, |
| { |
| "epoch": 2.275915822291504, |
| "grad_norm": 0.2335426458405105, |
| "learning_rate": 1.3352601156069364e-05, |
| "loss": 0.2813, |
| "step": 1461 |
| }, |
| { |
| "epoch": 2.2774746687451284, |
| "grad_norm": 0.22663182041299806, |
| "learning_rate": 1.3323699421965319e-05, |
| "loss": 0.2981, |
| "step": 1462 |
| }, |
| { |
| "epoch": 2.279033515198753, |
| "grad_norm": 0.21171630873754166, |
| "learning_rate": 1.329479768786127e-05, |
| "loss": 0.2826, |
| "step": 1463 |
| }, |
| { |
| "epoch": 2.2805923616523773, |
| "grad_norm": 0.22757568106572626, |
| "learning_rate": 1.3265895953757226e-05, |
| "loss": 0.2923, |
| "step": 1464 |
| }, |
| { |
| "epoch": 2.2821512081060016, |
| "grad_norm": 0.22020055614811698, |
| "learning_rate": 1.323699421965318e-05, |
| "loss": 0.2807, |
| "step": 1465 |
| }, |
| { |
| "epoch": 2.283710054559626, |
| "grad_norm": 0.23965818423420435, |
| "learning_rate": 1.3208092485549134e-05, |
| "loss": 0.295, |
| "step": 1466 |
| }, |
| { |
| "epoch": 2.28526890101325, |
| "grad_norm": 0.21933495891348634, |
| "learning_rate": 1.3179190751445086e-05, |
| "loss": 0.2808, |
| "step": 1467 |
| }, |
| { |
| "epoch": 2.2868277474668743, |
| "grad_norm": 0.22394396080019968, |
| "learning_rate": 1.3150289017341041e-05, |
| "loss": 0.2828, |
| "step": 1468 |
| }, |
| { |
| "epoch": 2.2883865939204986, |
| "grad_norm": 0.2341244557636366, |
| "learning_rate": 1.3121387283236993e-05, |
| "loss": 0.2872, |
| "step": 1469 |
| }, |
| { |
| "epoch": 2.2899454403741233, |
| "grad_norm": 0.24597918258046544, |
| "learning_rate": 1.3092485549132948e-05, |
| "loss": 0.2854, |
| "step": 1470 |
| }, |
| { |
| "epoch": 2.2915042868277475, |
| "grad_norm": 0.23301804978413262, |
| "learning_rate": 1.3063583815028902e-05, |
| "loss": 0.3053, |
| "step": 1471 |
| }, |
| { |
| "epoch": 2.293063133281372, |
| "grad_norm": 0.2360765813557192, |
| "learning_rate": 1.3034682080924857e-05, |
| "loss": 0.2831, |
| "step": 1472 |
| }, |
| { |
| "epoch": 2.294621979734996, |
| "grad_norm": 0.2658423968018713, |
| "learning_rate": 1.3005780346820809e-05, |
| "loss": 0.2869, |
| "step": 1473 |
| }, |
| { |
| "epoch": 2.2961808261886203, |
| "grad_norm": 0.25016881594307117, |
| "learning_rate": 1.2976878612716764e-05, |
| "loss": 0.2763, |
| "step": 1474 |
| }, |
| { |
| "epoch": 2.2977396726422445, |
| "grad_norm": 0.2382698683460339, |
| "learning_rate": 1.2947976878612717e-05, |
| "loss": 0.3028, |
| "step": 1475 |
| }, |
| { |
| "epoch": 2.2992985190958692, |
| "grad_norm": 0.24221989883612682, |
| "learning_rate": 1.2919075144508672e-05, |
| "loss": 0.3088, |
| "step": 1476 |
| }, |
| { |
| "epoch": 2.3008573655494935, |
| "grad_norm": 0.25017938488171704, |
| "learning_rate": 1.2890173410404624e-05, |
| "loss": 0.2869, |
| "step": 1477 |
| }, |
| { |
| "epoch": 2.3024162120031177, |
| "grad_norm": 0.23057606820343765, |
| "learning_rate": 1.2861271676300579e-05, |
| "loss": 0.2812, |
| "step": 1478 |
| }, |
| { |
| "epoch": 2.303975058456742, |
| "grad_norm": 0.22851951981455979, |
| "learning_rate": 1.2832369942196531e-05, |
| "loss": 0.2903, |
| "step": 1479 |
| }, |
| { |
| "epoch": 2.3055339049103662, |
| "grad_norm": 0.25023954290485484, |
| "learning_rate": 1.2803468208092486e-05, |
| "loss": 0.2995, |
| "step": 1480 |
| }, |
| { |
| "epoch": 2.3070927513639905, |
| "grad_norm": 0.22267748488279954, |
| "learning_rate": 1.277456647398844e-05, |
| "loss": 0.2946, |
| "step": 1481 |
| }, |
| { |
| "epoch": 2.308651597817615, |
| "grad_norm": 0.2433472732667005, |
| "learning_rate": 1.2745664739884395e-05, |
| "loss": 0.2805, |
| "step": 1482 |
| }, |
| { |
| "epoch": 2.3102104442712394, |
| "grad_norm": 0.22084205335592666, |
| "learning_rate": 1.2716763005780346e-05, |
| "loss": 0.2718, |
| "step": 1483 |
| }, |
| { |
| "epoch": 2.3117692907248637, |
| "grad_norm": 0.24304148596183653, |
| "learning_rate": 1.2687861271676302e-05, |
| "loss": 0.2988, |
| "step": 1484 |
| }, |
| { |
| "epoch": 2.313328137178488, |
| "grad_norm": 0.2424544214432883, |
| "learning_rate": 1.2658959537572255e-05, |
| "loss": 0.2899, |
| "step": 1485 |
| }, |
| { |
| "epoch": 2.314886983632112, |
| "grad_norm": 0.2328910388765159, |
| "learning_rate": 1.263005780346821e-05, |
| "loss": 0.3127, |
| "step": 1486 |
| }, |
| { |
| "epoch": 2.3164458300857365, |
| "grad_norm": 0.23286533311552443, |
| "learning_rate": 1.2601156069364162e-05, |
| "loss": 0.2995, |
| "step": 1487 |
| }, |
| { |
| "epoch": 2.3180046765393607, |
| "grad_norm": 0.2549382777228154, |
| "learning_rate": 1.2572254335260117e-05, |
| "loss": 0.2928, |
| "step": 1488 |
| }, |
| { |
| "epoch": 2.3195635229929854, |
| "grad_norm": 0.2559163608878674, |
| "learning_rate": 1.2543352601156069e-05, |
| "loss": 0.3113, |
| "step": 1489 |
| }, |
| { |
| "epoch": 2.3211223694466097, |
| "grad_norm": 0.2265685017446726, |
| "learning_rate": 1.2514450867052024e-05, |
| "loss": 0.295, |
| "step": 1490 |
| }, |
| { |
| "epoch": 2.322681215900234, |
| "grad_norm": 0.22273564838191307, |
| "learning_rate": 1.2485549132947977e-05, |
| "loss": 0.2717, |
| "step": 1491 |
| }, |
| { |
| "epoch": 2.324240062353858, |
| "grad_norm": 0.21674653038960545, |
| "learning_rate": 1.245664739884393e-05, |
| "loss": 0.2845, |
| "step": 1492 |
| }, |
| { |
| "epoch": 2.3257989088074824, |
| "grad_norm": 0.21909928945574206, |
| "learning_rate": 1.2427745664739884e-05, |
| "loss": 0.284, |
| "step": 1493 |
| }, |
| { |
| "epoch": 2.3273577552611067, |
| "grad_norm": 0.2750007906217208, |
| "learning_rate": 1.2398843930635838e-05, |
| "loss": 0.2862, |
| "step": 1494 |
| }, |
| { |
| "epoch": 2.328916601714731, |
| "grad_norm": 0.21703802020708915, |
| "learning_rate": 1.2369942196531791e-05, |
| "loss": 0.2829, |
| "step": 1495 |
| }, |
| { |
| "epoch": 2.3304754481683556, |
| "grad_norm": 0.22950831232595262, |
| "learning_rate": 1.2341040462427746e-05, |
| "loss": 0.2883, |
| "step": 1496 |
| }, |
| { |
| "epoch": 2.33203429462198, |
| "grad_norm": 0.24426647745397967, |
| "learning_rate": 1.23121387283237e-05, |
| "loss": 0.2955, |
| "step": 1497 |
| }, |
| { |
| "epoch": 2.333593141075604, |
| "grad_norm": 0.23949518800459763, |
| "learning_rate": 1.2283236994219653e-05, |
| "loss": 0.2771, |
| "step": 1498 |
| }, |
| { |
| "epoch": 2.3351519875292284, |
| "grad_norm": 0.21894296552516587, |
| "learning_rate": 1.2254335260115607e-05, |
| "loss": 0.2899, |
| "step": 1499 |
| }, |
| { |
| "epoch": 2.3367108339828526, |
| "grad_norm": 0.22130870442542108, |
| "learning_rate": 1.222543352601156e-05, |
| "loss": 0.2836, |
| "step": 1500 |
| }, |
| { |
| "epoch": 2.338269680436477, |
| "grad_norm": 0.2659075059435682, |
| "learning_rate": 1.2196531791907515e-05, |
| "loss": 0.2838, |
| "step": 1501 |
| }, |
| { |
| "epoch": 2.339828526890101, |
| "grad_norm": 0.23607600423290415, |
| "learning_rate": 1.2167630057803469e-05, |
| "loss": 0.2972, |
| "step": 1502 |
| }, |
| { |
| "epoch": 2.341387373343726, |
| "grad_norm": 0.2352342145609656, |
| "learning_rate": 1.2138728323699422e-05, |
| "loss": 0.3003, |
| "step": 1503 |
| }, |
| { |
| "epoch": 2.34294621979735, |
| "grad_norm": 0.23180782759300908, |
| "learning_rate": 1.2109826589595376e-05, |
| "loss": 0.2925, |
| "step": 1504 |
| }, |
| { |
| "epoch": 2.3445050662509743, |
| "grad_norm": 0.2421492426629086, |
| "learning_rate": 1.2080924855491329e-05, |
| "loss": 0.3153, |
| "step": 1505 |
| }, |
| { |
| "epoch": 2.3460639127045986, |
| "grad_norm": 0.220047149003109, |
| "learning_rate": 1.2052023121387284e-05, |
| "loss": 0.2915, |
| "step": 1506 |
| }, |
| { |
| "epoch": 2.347622759158223, |
| "grad_norm": 0.2081371282315123, |
| "learning_rate": 1.2023121387283238e-05, |
| "loss": 0.2786, |
| "step": 1507 |
| }, |
| { |
| "epoch": 2.349181605611847, |
| "grad_norm": 0.2209132364079271, |
| "learning_rate": 1.1994219653179191e-05, |
| "loss": 0.2919, |
| "step": 1508 |
| }, |
| { |
| "epoch": 2.3507404520654713, |
| "grad_norm": 0.24203159776015792, |
| "learning_rate": 1.1965317919075144e-05, |
| "loss": 0.2944, |
| "step": 1509 |
| }, |
| { |
| "epoch": 2.352299298519096, |
| "grad_norm": 0.21125490785458723, |
| "learning_rate": 1.1936416184971098e-05, |
| "loss": 0.2819, |
| "step": 1510 |
| }, |
| { |
| "epoch": 2.3538581449727203, |
| "grad_norm": 0.21084827155820668, |
| "learning_rate": 1.1907514450867053e-05, |
| "loss": 0.2735, |
| "step": 1511 |
| }, |
| { |
| "epoch": 2.3554169914263445, |
| "grad_norm": 0.2341059203258637, |
| "learning_rate": 1.1878612716763006e-05, |
| "loss": 0.2991, |
| "step": 1512 |
| }, |
| { |
| "epoch": 2.356975837879969, |
| "grad_norm": 0.21296853713090008, |
| "learning_rate": 1.184971098265896e-05, |
| "loss": 0.2833, |
| "step": 1513 |
| }, |
| { |
| "epoch": 2.358534684333593, |
| "grad_norm": 0.2567119485230043, |
| "learning_rate": 1.1820809248554913e-05, |
| "loss": 0.2947, |
| "step": 1514 |
| }, |
| { |
| "epoch": 2.3600935307872173, |
| "grad_norm": 0.2333031479163138, |
| "learning_rate": 1.1791907514450867e-05, |
| "loss": 0.2864, |
| "step": 1515 |
| }, |
| { |
| "epoch": 2.3616523772408415, |
| "grad_norm": 0.24056595254646906, |
| "learning_rate": 1.176300578034682e-05, |
| "loss": 0.2802, |
| "step": 1516 |
| }, |
| { |
| "epoch": 2.3632112236944662, |
| "grad_norm": 0.23247035275872605, |
| "learning_rate": 1.1734104046242775e-05, |
| "loss": 0.2821, |
| "step": 1517 |
| }, |
| { |
| "epoch": 2.3647700701480905, |
| "grad_norm": 0.23551357842178408, |
| "learning_rate": 1.1705202312138729e-05, |
| "loss": 0.2799, |
| "step": 1518 |
| }, |
| { |
| "epoch": 2.3663289166017147, |
| "grad_norm": 0.22199252143905965, |
| "learning_rate": 1.1676300578034682e-05, |
| "loss": 0.2921, |
| "step": 1519 |
| }, |
| { |
| "epoch": 2.367887763055339, |
| "grad_norm": 0.2557073653528457, |
| "learning_rate": 1.1647398843930636e-05, |
| "loss": 0.2819, |
| "step": 1520 |
| }, |
| { |
| "epoch": 2.3694466095089632, |
| "grad_norm": 0.23177632971310883, |
| "learning_rate": 1.161849710982659e-05, |
| "loss": 0.3, |
| "step": 1521 |
| }, |
| { |
| "epoch": 2.3710054559625875, |
| "grad_norm": 0.2141949825685525, |
| "learning_rate": 1.1589595375722544e-05, |
| "loss": 0.2804, |
| "step": 1522 |
| }, |
| { |
| "epoch": 2.372564302416212, |
| "grad_norm": 0.22456594982131964, |
| "learning_rate": 1.1560693641618498e-05, |
| "loss": 0.2919, |
| "step": 1523 |
| }, |
| { |
| "epoch": 2.3741231488698364, |
| "grad_norm": 0.2328422263169891, |
| "learning_rate": 1.1531791907514451e-05, |
| "loss": 0.298, |
| "step": 1524 |
| }, |
| { |
| "epoch": 2.3756819953234607, |
| "grad_norm": 0.23696530509869898, |
| "learning_rate": 1.1502890173410405e-05, |
| "loss": 0.2826, |
| "step": 1525 |
| }, |
| { |
| "epoch": 2.377240841777085, |
| "grad_norm": 0.23471588381502256, |
| "learning_rate": 1.1473988439306358e-05, |
| "loss": 0.2954, |
| "step": 1526 |
| }, |
| { |
| "epoch": 2.378799688230709, |
| "grad_norm": 0.24324817997977416, |
| "learning_rate": 1.1445086705202313e-05, |
| "loss": 0.3002, |
| "step": 1527 |
| }, |
| { |
| "epoch": 2.3803585346843334, |
| "grad_norm": 0.23818184986570196, |
| "learning_rate": 1.1416184971098267e-05, |
| "loss": 0.2901, |
| "step": 1528 |
| }, |
| { |
| "epoch": 2.381917381137958, |
| "grad_norm": 0.2177316843513184, |
| "learning_rate": 1.138728323699422e-05, |
| "loss": 0.2837, |
| "step": 1529 |
| }, |
| { |
| "epoch": 2.3834762275915824, |
| "grad_norm": 0.2230616433485158, |
| "learning_rate": 1.1358381502890174e-05, |
| "loss": 0.2926, |
| "step": 1530 |
| }, |
| { |
| "epoch": 2.3850350740452066, |
| "grad_norm": 0.2092816344234883, |
| "learning_rate": 1.1329479768786127e-05, |
| "loss": 0.2771, |
| "step": 1531 |
| }, |
| { |
| "epoch": 2.386593920498831, |
| "grad_norm": 0.24426299651005548, |
| "learning_rate": 1.1300578034682082e-05, |
| "loss": 0.2893, |
| "step": 1532 |
| }, |
| { |
| "epoch": 2.388152766952455, |
| "grad_norm": 0.2257044731930893, |
| "learning_rate": 1.1271676300578036e-05, |
| "loss": 0.3099, |
| "step": 1533 |
| }, |
| { |
| "epoch": 2.3897116134060794, |
| "grad_norm": 0.22709943701947127, |
| "learning_rate": 1.1242774566473989e-05, |
| "loss": 0.2815, |
| "step": 1534 |
| }, |
| { |
| "epoch": 2.3912704598597037, |
| "grad_norm": 0.23081380246295977, |
| "learning_rate": 1.1213872832369943e-05, |
| "loss": 0.2892, |
| "step": 1535 |
| }, |
| { |
| "epoch": 2.3928293063133284, |
| "grad_norm": 0.2404518884149201, |
| "learning_rate": 1.1184971098265896e-05, |
| "loss": 0.2784, |
| "step": 1536 |
| }, |
| { |
| "epoch": 2.3943881527669526, |
| "grad_norm": 0.21874834698362006, |
| "learning_rate": 1.115606936416185e-05, |
| "loss": 0.2838, |
| "step": 1537 |
| }, |
| { |
| "epoch": 2.395946999220577, |
| "grad_norm": 0.2170285592301821, |
| "learning_rate": 1.1127167630057805e-05, |
| "loss": 0.2836, |
| "step": 1538 |
| }, |
| { |
| "epoch": 2.397505845674201, |
| "grad_norm": 0.22151366255017815, |
| "learning_rate": 1.1098265895953758e-05, |
| "loss": 0.2945, |
| "step": 1539 |
| }, |
| { |
| "epoch": 2.3990646921278254, |
| "grad_norm": 0.2272145173574314, |
| "learning_rate": 1.1069364161849711e-05, |
| "loss": 0.2836, |
| "step": 1540 |
| }, |
| { |
| "epoch": 2.4006235385814496, |
| "grad_norm": 0.22611588795882234, |
| "learning_rate": 1.1040462427745665e-05, |
| "loss": 0.3038, |
| "step": 1541 |
| }, |
| { |
| "epoch": 2.402182385035074, |
| "grad_norm": 0.21786269575794123, |
| "learning_rate": 1.1011560693641618e-05, |
| "loss": 0.2955, |
| "step": 1542 |
| }, |
| { |
| "epoch": 2.4037412314886986, |
| "grad_norm": 0.2081735505823338, |
| "learning_rate": 1.0982658959537573e-05, |
| "loss": 0.2845, |
| "step": 1543 |
| }, |
| { |
| "epoch": 2.405300077942323, |
| "grad_norm": 0.21791564332112892, |
| "learning_rate": 1.0953757225433527e-05, |
| "loss": 0.2888, |
| "step": 1544 |
| }, |
| { |
| "epoch": 2.406858924395947, |
| "grad_norm": 0.21343814806147593, |
| "learning_rate": 1.092485549132948e-05, |
| "loss": 0.3027, |
| "step": 1545 |
| }, |
| { |
| "epoch": 2.4084177708495713, |
| "grad_norm": 0.24305021560110593, |
| "learning_rate": 1.0895953757225434e-05, |
| "loss": 0.3096, |
| "step": 1546 |
| }, |
| { |
| "epoch": 2.4099766173031956, |
| "grad_norm": 0.2428823537320873, |
| "learning_rate": 1.0867052023121387e-05, |
| "loss": 0.2787, |
| "step": 1547 |
| }, |
| { |
| "epoch": 2.41153546375682, |
| "grad_norm": 0.22533176874540484, |
| "learning_rate": 1.0838150289017342e-05, |
| "loss": 0.3003, |
| "step": 1548 |
| }, |
| { |
| "epoch": 2.413094310210444, |
| "grad_norm": 0.23052788059445348, |
| "learning_rate": 1.0809248554913296e-05, |
| "loss": 0.2937, |
| "step": 1549 |
| }, |
| { |
| "epoch": 2.4146531566640688, |
| "grad_norm": 0.21873125390687612, |
| "learning_rate": 1.078034682080925e-05, |
| "loss": 0.2852, |
| "step": 1550 |
| }, |
| { |
| "epoch": 2.416212003117693, |
| "grad_norm": 0.23262040078023866, |
| "learning_rate": 1.0751445086705203e-05, |
| "loss": 0.2867, |
| "step": 1551 |
| }, |
| { |
| "epoch": 2.4177708495713173, |
| "grad_norm": 0.2061110656033279, |
| "learning_rate": 1.0722543352601156e-05, |
| "loss": 0.2834, |
| "step": 1552 |
| }, |
| { |
| "epoch": 2.4193296960249415, |
| "grad_norm": 0.21742353821980845, |
| "learning_rate": 1.0693641618497111e-05, |
| "loss": 0.286, |
| "step": 1553 |
| }, |
| { |
| "epoch": 2.4208885424785658, |
| "grad_norm": 0.21761958606143322, |
| "learning_rate": 1.0664739884393065e-05, |
| "loss": 0.2934, |
| "step": 1554 |
| }, |
| { |
| "epoch": 2.42244738893219, |
| "grad_norm": 0.22413107731125181, |
| "learning_rate": 1.0635838150289018e-05, |
| "loss": 0.3017, |
| "step": 1555 |
| }, |
| { |
| "epoch": 2.4240062353858143, |
| "grad_norm": 0.21834093708352184, |
| "learning_rate": 1.0606936416184972e-05, |
| "loss": 0.304, |
| "step": 1556 |
| }, |
| { |
| "epoch": 2.425565081839439, |
| "grad_norm": 0.22975540033681002, |
| "learning_rate": 1.0578034682080925e-05, |
| "loss": 0.2926, |
| "step": 1557 |
| }, |
| { |
| "epoch": 2.4271239282930632, |
| "grad_norm": 0.2274868161114814, |
| "learning_rate": 1.0549132947976879e-05, |
| "loss": 0.2999, |
| "step": 1558 |
| }, |
| { |
| "epoch": 2.4286827747466875, |
| "grad_norm": 0.22082603952846616, |
| "learning_rate": 1.0520231213872834e-05, |
| "loss": 0.2782, |
| "step": 1559 |
| }, |
| { |
| "epoch": 2.4302416212003117, |
| "grad_norm": 0.23461317288098227, |
| "learning_rate": 1.0491329479768787e-05, |
| "loss": 0.2842, |
| "step": 1560 |
| }, |
| { |
| "epoch": 2.431800467653936, |
| "grad_norm": 0.20440782512445527, |
| "learning_rate": 1.046242774566474e-05, |
| "loss": 0.2853, |
| "step": 1561 |
| }, |
| { |
| "epoch": 2.4333593141075602, |
| "grad_norm": 0.23611049627402994, |
| "learning_rate": 1.0433526011560694e-05, |
| "loss": 0.2841, |
| "step": 1562 |
| }, |
| { |
| "epoch": 2.4349181605611845, |
| "grad_norm": 0.23437404672528528, |
| "learning_rate": 1.0404624277456647e-05, |
| "loss": 0.2963, |
| "step": 1563 |
| }, |
| { |
| "epoch": 2.436477007014809, |
| "grad_norm": 0.2204494699300127, |
| "learning_rate": 1.0375722543352603e-05, |
| "loss": 0.2806, |
| "step": 1564 |
| }, |
| { |
| "epoch": 2.4380358534684334, |
| "grad_norm": 0.20490594460203101, |
| "learning_rate": 1.0346820809248556e-05, |
| "loss": 0.2761, |
| "step": 1565 |
| }, |
| { |
| "epoch": 2.4395946999220577, |
| "grad_norm": 0.6956010247527492, |
| "learning_rate": 1.031791907514451e-05, |
| "loss": 0.2901, |
| "step": 1566 |
| }, |
| { |
| "epoch": 2.441153546375682, |
| "grad_norm": 0.26224821751440014, |
| "learning_rate": 1.0289017341040463e-05, |
| "loss": 0.3034, |
| "step": 1567 |
| }, |
| { |
| "epoch": 2.442712392829306, |
| "grad_norm": 0.25126259259462486, |
| "learning_rate": 1.0260115606936416e-05, |
| "loss": 0.2899, |
| "step": 1568 |
| }, |
| { |
| "epoch": 2.4442712392829304, |
| "grad_norm": 0.22870053657745457, |
| "learning_rate": 1.0231213872832372e-05, |
| "loss": 0.2822, |
| "step": 1569 |
| }, |
| { |
| "epoch": 2.445830085736555, |
| "grad_norm": 0.23587258742639972, |
| "learning_rate": 1.0202312138728325e-05, |
| "loss": 0.2593, |
| "step": 1570 |
| }, |
| { |
| "epoch": 2.4473889321901794, |
| "grad_norm": 0.22897162723371334, |
| "learning_rate": 1.0173410404624278e-05, |
| "loss": 0.2912, |
| "step": 1571 |
| }, |
| { |
| "epoch": 2.4489477786438036, |
| "grad_norm": 0.24149402834986947, |
| "learning_rate": 1.0144508670520232e-05, |
| "loss": 0.2878, |
| "step": 1572 |
| }, |
| { |
| "epoch": 2.450506625097428, |
| "grad_norm": 0.23350074250406747, |
| "learning_rate": 1.0115606936416185e-05, |
| "loss": 0.2869, |
| "step": 1573 |
| }, |
| { |
| "epoch": 2.452065471551052, |
| "grad_norm": 0.22195416376987834, |
| "learning_rate": 1.008670520231214e-05, |
| "loss": 0.2956, |
| "step": 1574 |
| }, |
| { |
| "epoch": 2.4536243180046764, |
| "grad_norm": 1.3487509546561793, |
| "learning_rate": 1.0057803468208094e-05, |
| "loss": 0.3012, |
| "step": 1575 |
| }, |
| { |
| "epoch": 2.455183164458301, |
| "grad_norm": 0.2873786952181812, |
| "learning_rate": 1.0028901734104047e-05, |
| "loss": 0.284, |
| "step": 1576 |
| }, |
| { |
| "epoch": 2.4567420109119253, |
| "grad_norm": 0.281096191572859, |
| "learning_rate": 1e-05, |
| "loss": 0.2839, |
| "step": 1577 |
| }, |
| { |
| "epoch": 2.4583008573655496, |
| "grad_norm": 0.26582966306416833, |
| "learning_rate": 9.971098265895954e-06, |
| "loss": 0.2868, |
| "step": 1578 |
| }, |
| { |
| "epoch": 2.459859703819174, |
| "grad_norm": 0.23786440274496637, |
| "learning_rate": 9.94219653179191e-06, |
| "loss": 0.2874, |
| "step": 1579 |
| }, |
| { |
| "epoch": 2.461418550272798, |
| "grad_norm": 0.26380977501613667, |
| "learning_rate": 9.913294797687863e-06, |
| "loss": 0.298, |
| "step": 1580 |
| }, |
| { |
| "epoch": 2.4629773967264224, |
| "grad_norm": 0.28090632623520245, |
| "learning_rate": 9.884393063583816e-06, |
| "loss": 0.2824, |
| "step": 1581 |
| }, |
| { |
| "epoch": 2.4645362431800466, |
| "grad_norm": 0.24654785691378972, |
| "learning_rate": 9.85549132947977e-06, |
| "loss": 0.2842, |
| "step": 1582 |
| }, |
| { |
| "epoch": 2.4660950896336713, |
| "grad_norm": 0.22185993368845872, |
| "learning_rate": 9.826589595375723e-06, |
| "loss": 0.2784, |
| "step": 1583 |
| }, |
| { |
| "epoch": 2.4676539360872956, |
| "grad_norm": 0.25759773055873747, |
| "learning_rate": 9.797687861271677e-06, |
| "loss": 0.3065, |
| "step": 1584 |
| }, |
| { |
| "epoch": 2.46921278254092, |
| "grad_norm": 0.2448676371644166, |
| "learning_rate": 9.768786127167632e-06, |
| "loss": 0.2951, |
| "step": 1585 |
| }, |
| { |
| "epoch": 2.470771628994544, |
| "grad_norm": 0.22038630080496294, |
| "learning_rate": 9.739884393063585e-06, |
| "loss": 0.2766, |
| "step": 1586 |
| }, |
| { |
| "epoch": 2.4723304754481683, |
| "grad_norm": 0.21067846046967328, |
| "learning_rate": 9.710982658959537e-06, |
| "loss": 0.2772, |
| "step": 1587 |
| }, |
| { |
| "epoch": 2.4738893219017926, |
| "grad_norm": 0.2196571148859021, |
| "learning_rate": 9.68208092485549e-06, |
| "loss": 0.2944, |
| "step": 1588 |
| }, |
| { |
| "epoch": 2.475448168355417, |
| "grad_norm": 0.20661464746790095, |
| "learning_rate": 9.653179190751446e-06, |
| "loss": 0.3023, |
| "step": 1589 |
| }, |
| { |
| "epoch": 2.4770070148090415, |
| "grad_norm": 0.20946455345772869, |
| "learning_rate": 9.624277456647399e-06, |
| "loss": 0.2768, |
| "step": 1590 |
| }, |
| { |
| "epoch": 2.4785658612626658, |
| "grad_norm": 0.21800665843244352, |
| "learning_rate": 9.595375722543352e-06, |
| "loss": 0.3014, |
| "step": 1591 |
| }, |
| { |
| "epoch": 2.48012470771629, |
| "grad_norm": 0.21469256658865496, |
| "learning_rate": 9.566473988439306e-06, |
| "loss": 0.2865, |
| "step": 1592 |
| }, |
| { |
| "epoch": 2.4816835541699143, |
| "grad_norm": 0.20483902813627355, |
| "learning_rate": 9.53757225433526e-06, |
| "loss": 0.2734, |
| "step": 1593 |
| }, |
| { |
| "epoch": 2.4832424006235385, |
| "grad_norm": 0.22797586254976032, |
| "learning_rate": 9.508670520231214e-06, |
| "loss": 0.2839, |
| "step": 1594 |
| }, |
| { |
| "epoch": 2.4848012470771628, |
| "grad_norm": 0.20269734125823377, |
| "learning_rate": 9.479768786127168e-06, |
| "loss": 0.2853, |
| "step": 1595 |
| }, |
| { |
| "epoch": 2.486360093530787, |
| "grad_norm": 0.21209747614140953, |
| "learning_rate": 9.450867052023121e-06, |
| "loss": 0.2846, |
| "step": 1596 |
| }, |
| { |
| "epoch": 2.4879189399844117, |
| "grad_norm": 0.20886949964376036, |
| "learning_rate": 9.421965317919075e-06, |
| "loss": 0.2768, |
| "step": 1597 |
| }, |
| { |
| "epoch": 2.489477786438036, |
| "grad_norm": 0.21987185193696723, |
| "learning_rate": 9.393063583815028e-06, |
| "loss": 0.289, |
| "step": 1598 |
| }, |
| { |
| "epoch": 2.49103663289166, |
| "grad_norm": 0.22992122520081743, |
| "learning_rate": 9.364161849710983e-06, |
| "loss": 0.317, |
| "step": 1599 |
| }, |
| { |
| "epoch": 2.4925954793452845, |
| "grad_norm": 0.2216415692962526, |
| "learning_rate": 9.335260115606937e-06, |
| "loss": 0.2872, |
| "step": 1600 |
| }, |
| { |
| "epoch": 2.4941543257989087, |
| "grad_norm": 0.22333309804066181, |
| "learning_rate": 9.30635838150289e-06, |
| "loss": 0.2762, |
| "step": 1601 |
| }, |
| { |
| "epoch": 2.495713172252533, |
| "grad_norm": 0.2088510278237382, |
| "learning_rate": 9.277456647398844e-06, |
| "loss": 0.2849, |
| "step": 1602 |
| }, |
| { |
| "epoch": 2.4972720187061572, |
| "grad_norm": 0.21834916127206458, |
| "learning_rate": 9.248554913294797e-06, |
| "loss": 0.2945, |
| "step": 1603 |
| }, |
| { |
| "epoch": 2.498830865159782, |
| "grad_norm": 0.23784512272220631, |
| "learning_rate": 9.21965317919075e-06, |
| "loss": 0.29, |
| "step": 1604 |
| }, |
| { |
| "epoch": 2.500389711613406, |
| "grad_norm": 0.22622134161606003, |
| "learning_rate": 9.190751445086706e-06, |
| "loss": 0.2855, |
| "step": 1605 |
| }, |
| { |
| "epoch": 2.5019485580670304, |
| "grad_norm": 0.20987648910752202, |
| "learning_rate": 9.16184971098266e-06, |
| "loss": 0.2886, |
| "step": 1606 |
| }, |
| { |
| "epoch": 2.5035074045206547, |
| "grad_norm": 0.2271896769680405, |
| "learning_rate": 9.132947976878613e-06, |
| "loss": 0.2849, |
| "step": 1607 |
| }, |
| { |
| "epoch": 2.505066250974279, |
| "grad_norm": 0.20703729358511513, |
| "learning_rate": 9.104046242774566e-06, |
| "loss": 0.285, |
| "step": 1608 |
| }, |
| { |
| "epoch": 2.5066250974279036, |
| "grad_norm": 0.2210043646064696, |
| "learning_rate": 9.07514450867052e-06, |
| "loss": 0.2793, |
| "step": 1609 |
| }, |
| { |
| "epoch": 2.5081839438815274, |
| "grad_norm": 0.20832438994217642, |
| "learning_rate": 9.046242774566475e-06, |
| "loss": 0.2706, |
| "step": 1610 |
| }, |
| { |
| "epoch": 2.509742790335152, |
| "grad_norm": 0.22663700329890946, |
| "learning_rate": 9.017341040462428e-06, |
| "loss": 0.2926, |
| "step": 1611 |
| }, |
| { |
| "epoch": 2.5113016367887764, |
| "grad_norm": 0.2321310919459722, |
| "learning_rate": 8.988439306358382e-06, |
| "loss": 0.2929, |
| "step": 1612 |
| }, |
| { |
| "epoch": 2.5128604832424006, |
| "grad_norm": 0.20367730242743393, |
| "learning_rate": 8.959537572254335e-06, |
| "loss": 0.273, |
| "step": 1613 |
| }, |
| { |
| "epoch": 2.514419329696025, |
| "grad_norm": 0.2573291858309685, |
| "learning_rate": 8.930635838150288e-06, |
| "loss": 0.2937, |
| "step": 1614 |
| }, |
| { |
| "epoch": 2.515978176149649, |
| "grad_norm": 0.21765613776271725, |
| "learning_rate": 8.901734104046244e-06, |
| "loss": 0.3041, |
| "step": 1615 |
| }, |
| { |
| "epoch": 2.517537022603274, |
| "grad_norm": 0.22719439248378498, |
| "learning_rate": 8.872832369942197e-06, |
| "loss": 0.3025, |
| "step": 1616 |
| }, |
| { |
| "epoch": 2.5190958690568976, |
| "grad_norm": 0.2233584630382556, |
| "learning_rate": 8.84393063583815e-06, |
| "loss": 0.2901, |
| "step": 1617 |
| }, |
| { |
| "epoch": 2.5206547155105223, |
| "grad_norm": 0.2756546250082013, |
| "learning_rate": 8.815028901734104e-06, |
| "loss": 0.299, |
| "step": 1618 |
| }, |
| { |
| "epoch": 2.5222135619641466, |
| "grad_norm": 0.2471624019129573, |
| "learning_rate": 8.786127167630057e-06, |
| "loss": 0.2856, |
| "step": 1619 |
| }, |
| { |
| "epoch": 2.523772408417771, |
| "grad_norm": 0.22928484558281476, |
| "learning_rate": 8.757225433526013e-06, |
| "loss": 0.2885, |
| "step": 1620 |
| }, |
| { |
| "epoch": 2.525331254871395, |
| "grad_norm": 0.22619496618870039, |
| "learning_rate": 8.728323699421966e-06, |
| "loss": 0.2858, |
| "step": 1621 |
| }, |
| { |
| "epoch": 2.5268901013250193, |
| "grad_norm": 0.23523904557731568, |
| "learning_rate": 8.69942196531792e-06, |
| "loss": 0.2879, |
| "step": 1622 |
| }, |
| { |
| "epoch": 2.528448947778644, |
| "grad_norm": 0.23102152485027722, |
| "learning_rate": 8.670520231213873e-06, |
| "loss": 0.2978, |
| "step": 1623 |
| }, |
| { |
| "epoch": 2.530007794232268, |
| "grad_norm": 0.23596999054388898, |
| "learning_rate": 8.641618497109826e-06, |
| "loss": 0.2955, |
| "step": 1624 |
| }, |
| { |
| "epoch": 2.5315666406858925, |
| "grad_norm": 0.23775289137632036, |
| "learning_rate": 8.61271676300578e-06, |
| "loss": 0.303, |
| "step": 1625 |
| }, |
| { |
| "epoch": 2.533125487139517, |
| "grad_norm": 0.22544643461170122, |
| "learning_rate": 8.583815028901735e-06, |
| "loss": 0.2907, |
| "step": 1626 |
| }, |
| { |
| "epoch": 2.534684333593141, |
| "grad_norm": 0.2598040215067987, |
| "learning_rate": 8.554913294797688e-06, |
| "loss": 0.2946, |
| "step": 1627 |
| }, |
| { |
| "epoch": 2.5362431800467653, |
| "grad_norm": 0.21413673157343954, |
| "learning_rate": 8.526011560693642e-06, |
| "loss": 0.2836, |
| "step": 1628 |
| }, |
| { |
| "epoch": 2.5378020265003896, |
| "grad_norm": 0.4266061422700911, |
| "learning_rate": 8.497109826589595e-06, |
| "loss": 0.2864, |
| "step": 1629 |
| }, |
| { |
| "epoch": 2.5393608729540142, |
| "grad_norm": 0.21799108371646567, |
| "learning_rate": 8.468208092485549e-06, |
| "loss": 0.2813, |
| "step": 1630 |
| }, |
| { |
| "epoch": 2.5409197194076385, |
| "grad_norm": 0.2322583381309059, |
| "learning_rate": 8.439306358381504e-06, |
| "loss": 0.3071, |
| "step": 1631 |
| }, |
| { |
| "epoch": 2.5424785658612628, |
| "grad_norm": 0.2202382316089639, |
| "learning_rate": 8.410404624277457e-06, |
| "loss": 0.2926, |
| "step": 1632 |
| }, |
| { |
| "epoch": 2.544037412314887, |
| "grad_norm": 0.22009637471013366, |
| "learning_rate": 8.38150289017341e-06, |
| "loss": 0.2963, |
| "step": 1633 |
| }, |
| { |
| "epoch": 2.5455962587685113, |
| "grad_norm": 0.20540911119232894, |
| "learning_rate": 8.352601156069364e-06, |
| "loss": 0.275, |
| "step": 1634 |
| }, |
| { |
| "epoch": 2.5471551052221355, |
| "grad_norm": 0.21777131689422363, |
| "learning_rate": 8.323699421965318e-06, |
| "loss": 0.2996, |
| "step": 1635 |
| }, |
| { |
| "epoch": 2.5487139516757598, |
| "grad_norm": 0.20908427654761658, |
| "learning_rate": 8.294797687861273e-06, |
| "loss": 0.2742, |
| "step": 1636 |
| }, |
| { |
| "epoch": 2.5502727981293845, |
| "grad_norm": 0.21275798202193402, |
| "learning_rate": 8.265895953757226e-06, |
| "loss": 0.2921, |
| "step": 1637 |
| }, |
| { |
| "epoch": 2.5518316445830087, |
| "grad_norm": 0.22079763984850975, |
| "learning_rate": 8.23699421965318e-06, |
| "loss": 0.3023, |
| "step": 1638 |
| }, |
| { |
| "epoch": 2.553390491036633, |
| "grad_norm": 0.20167698447825708, |
| "learning_rate": 8.208092485549133e-06, |
| "loss": 0.3018, |
| "step": 1639 |
| }, |
| { |
| "epoch": 2.554949337490257, |
| "grad_norm": 0.204370882797588, |
| "learning_rate": 8.179190751445087e-06, |
| "loss": 0.2934, |
| "step": 1640 |
| }, |
| { |
| "epoch": 2.5565081839438815, |
| "grad_norm": 0.20933271636814302, |
| "learning_rate": 8.150289017341042e-06, |
| "loss": 0.2884, |
| "step": 1641 |
| }, |
| { |
| "epoch": 2.5580670303975057, |
| "grad_norm": 0.2364244855094284, |
| "learning_rate": 8.121387283236995e-06, |
| "loss": 0.291, |
| "step": 1642 |
| }, |
| { |
| "epoch": 2.55962587685113, |
| "grad_norm": 0.21280205055543394, |
| "learning_rate": 8.092485549132949e-06, |
| "loss": 0.2953, |
| "step": 1643 |
| }, |
| { |
| "epoch": 2.5611847233047547, |
| "grad_norm": 0.2225834531719899, |
| "learning_rate": 8.063583815028902e-06, |
| "loss": 0.2931, |
| "step": 1644 |
| }, |
| { |
| "epoch": 2.562743569758379, |
| "grad_norm": 0.20003247002848737, |
| "learning_rate": 8.034682080924856e-06, |
| "loss": 0.2769, |
| "step": 1645 |
| }, |
| { |
| "epoch": 2.564302416212003, |
| "grad_norm": 0.21372713472537674, |
| "learning_rate": 8.00578034682081e-06, |
| "loss": 0.2832, |
| "step": 1646 |
| }, |
| { |
| "epoch": 2.5658612626656274, |
| "grad_norm": 0.21988186302060084, |
| "learning_rate": 7.976878612716764e-06, |
| "loss": 0.2816, |
| "step": 1647 |
| }, |
| { |
| "epoch": 2.5674201091192517, |
| "grad_norm": 0.2114883685070932, |
| "learning_rate": 7.947976878612718e-06, |
| "loss": 0.2716, |
| "step": 1648 |
| }, |
| { |
| "epoch": 2.568978955572876, |
| "grad_norm": 0.22479614996029462, |
| "learning_rate": 7.919075144508671e-06, |
| "loss": 0.2895, |
| "step": 1649 |
| }, |
| { |
| "epoch": 2.5705378020265, |
| "grad_norm": 0.20995160083973538, |
| "learning_rate": 7.890173410404624e-06, |
| "loss": 0.2938, |
| "step": 1650 |
| }, |
| { |
| "epoch": 2.572096648480125, |
| "grad_norm": 0.19074058893303877, |
| "learning_rate": 7.861271676300578e-06, |
| "loss": 0.2879, |
| "step": 1651 |
| }, |
| { |
| "epoch": 2.573655494933749, |
| "grad_norm": 0.21433796239796818, |
| "learning_rate": 7.832369942196533e-06, |
| "loss": 0.2801, |
| "step": 1652 |
| }, |
| { |
| "epoch": 2.5752143413873734, |
| "grad_norm": 0.23148461464412992, |
| "learning_rate": 7.803468208092486e-06, |
| "loss": 0.3049, |
| "step": 1653 |
| }, |
| { |
| "epoch": 2.5767731878409976, |
| "grad_norm": 0.22404297663771336, |
| "learning_rate": 7.77456647398844e-06, |
| "loss": 0.2844, |
| "step": 1654 |
| }, |
| { |
| "epoch": 2.578332034294622, |
| "grad_norm": 0.22573478963343901, |
| "learning_rate": 7.745664739884393e-06, |
| "loss": 0.2907, |
| "step": 1655 |
| }, |
| { |
| "epoch": 2.5798908807482466, |
| "grad_norm": 0.19465572962012764, |
| "learning_rate": 7.716763005780347e-06, |
| "loss": 0.2786, |
| "step": 1656 |
| }, |
| { |
| "epoch": 2.5814497272018704, |
| "grad_norm": 0.21095454192122828, |
| "learning_rate": 7.687861271676302e-06, |
| "loss": 0.2845, |
| "step": 1657 |
| }, |
| { |
| "epoch": 2.583008573655495, |
| "grad_norm": 0.20373213583538066, |
| "learning_rate": 7.658959537572255e-06, |
| "loss": 0.2809, |
| "step": 1658 |
| }, |
| { |
| "epoch": 2.5845674201091193, |
| "grad_norm": 0.20909977447044456, |
| "learning_rate": 7.630057803468209e-06, |
| "loss": 0.2918, |
| "step": 1659 |
| }, |
| { |
| "epoch": 2.5861262665627436, |
| "grad_norm": 0.20642027240366984, |
| "learning_rate": 7.601156069364162e-06, |
| "loss": 0.2951, |
| "step": 1660 |
| }, |
| { |
| "epoch": 2.587685113016368, |
| "grad_norm": 0.20983785591383902, |
| "learning_rate": 7.5722543352601166e-06, |
| "loss": 0.2958, |
| "step": 1661 |
| }, |
| { |
| "epoch": 2.589243959469992, |
| "grad_norm": 0.23042841089901345, |
| "learning_rate": 7.54335260115607e-06, |
| "loss": 0.3101, |
| "step": 1662 |
| }, |
| { |
| "epoch": 2.590802805923617, |
| "grad_norm": 0.24194824337916648, |
| "learning_rate": 7.514450867052024e-06, |
| "loss": 0.3112, |
| "step": 1663 |
| }, |
| { |
| "epoch": 2.5923616523772406, |
| "grad_norm": 0.21401069536687056, |
| "learning_rate": 7.485549132947978e-06, |
| "loss": 0.2846, |
| "step": 1664 |
| }, |
| { |
| "epoch": 2.5939204988308653, |
| "grad_norm": 0.20182835176068856, |
| "learning_rate": 7.456647398843931e-06, |
| "loss": 0.2884, |
| "step": 1665 |
| }, |
| { |
| "epoch": 2.5954793452844895, |
| "grad_norm": 0.2438186993870951, |
| "learning_rate": 7.4277456647398855e-06, |
| "loss": 0.2914, |
| "step": 1666 |
| }, |
| { |
| "epoch": 2.597038191738114, |
| "grad_norm": 0.24739122146108788, |
| "learning_rate": 7.398843930635839e-06, |
| "loss": 0.2981, |
| "step": 1667 |
| }, |
| { |
| "epoch": 2.598597038191738, |
| "grad_norm": 0.25582873700332, |
| "learning_rate": 7.369942196531792e-06, |
| "loss": 0.2857, |
| "step": 1668 |
| }, |
| { |
| "epoch": 2.6001558846453623, |
| "grad_norm": 0.23788093624422235, |
| "learning_rate": 7.341040462427747e-06, |
| "loss": 0.2992, |
| "step": 1669 |
| }, |
| { |
| "epoch": 2.601714731098987, |
| "grad_norm": 0.2200261335858021, |
| "learning_rate": 7.3121387283237e-06, |
| "loss": 0.2869, |
| "step": 1670 |
| }, |
| { |
| "epoch": 2.603273577552611, |
| "grad_norm": 0.22579760963949985, |
| "learning_rate": 7.2832369942196544e-06, |
| "loss": 0.2982, |
| "step": 1671 |
| }, |
| { |
| "epoch": 2.6048324240062355, |
| "grad_norm": 0.23297289522530723, |
| "learning_rate": 7.254335260115608e-06, |
| "loss": 0.2825, |
| "step": 1672 |
| }, |
| { |
| "epoch": 2.6063912704598597, |
| "grad_norm": 0.20686966606749702, |
| "learning_rate": 7.225433526011561e-06, |
| "loss": 0.2852, |
| "step": 1673 |
| }, |
| { |
| "epoch": 2.607950116913484, |
| "grad_norm": 0.2266441955874942, |
| "learning_rate": 7.196531791907516e-06, |
| "loss": 0.3128, |
| "step": 1674 |
| }, |
| { |
| "epoch": 2.6095089633671082, |
| "grad_norm": 0.23213362522888187, |
| "learning_rate": 7.167630057803469e-06, |
| "loss": 0.2865, |
| "step": 1675 |
| }, |
| { |
| "epoch": 2.6110678098207325, |
| "grad_norm": 0.22468659409132077, |
| "learning_rate": 7.1387283236994225e-06, |
| "loss": 0.2654, |
| "step": 1676 |
| }, |
| { |
| "epoch": 2.612626656274357, |
| "grad_norm": 0.22695879901341415, |
| "learning_rate": 7.109826589595377e-06, |
| "loss": 0.2846, |
| "step": 1677 |
| }, |
| { |
| "epoch": 2.6141855027279814, |
| "grad_norm": 0.2055809379257179, |
| "learning_rate": 7.08092485549133e-06, |
| "loss": 0.2799, |
| "step": 1678 |
| }, |
| { |
| "epoch": 2.6157443491816057, |
| "grad_norm": 0.21550005636993283, |
| "learning_rate": 7.0520231213872845e-06, |
| "loss": 0.276, |
| "step": 1679 |
| }, |
| { |
| "epoch": 2.61730319563523, |
| "grad_norm": 0.2057285652964607, |
| "learning_rate": 7.023121387283238e-06, |
| "loss": 0.2871, |
| "step": 1680 |
| }, |
| { |
| "epoch": 2.618862042088854, |
| "grad_norm": 0.2329408962798249, |
| "learning_rate": 6.9942196531791914e-06, |
| "loss": 0.2969, |
| "step": 1681 |
| }, |
| { |
| "epoch": 2.6204208885424785, |
| "grad_norm": 0.21413261205015083, |
| "learning_rate": 6.965317919075146e-06, |
| "loss": 0.2887, |
| "step": 1682 |
| }, |
| { |
| "epoch": 2.6219797349961027, |
| "grad_norm": 0.20032603391697887, |
| "learning_rate": 6.936416184971098e-06, |
| "loss": 0.268, |
| "step": 1683 |
| }, |
| { |
| "epoch": 2.6235385814497274, |
| "grad_norm": 0.22915281553263556, |
| "learning_rate": 6.907514450867052e-06, |
| "loss": 0.2961, |
| "step": 1684 |
| }, |
| { |
| "epoch": 2.6250974279033517, |
| "grad_norm": 0.22923520682538653, |
| "learning_rate": 6.878612716763005e-06, |
| "loss": 0.2789, |
| "step": 1685 |
| }, |
| { |
| "epoch": 2.626656274356976, |
| "grad_norm": 0.20610407028884006, |
| "learning_rate": 6.8497109826589595e-06, |
| "loss": 0.2793, |
| "step": 1686 |
| }, |
| { |
| "epoch": 2.6282151208106, |
| "grad_norm": 0.21052444570238973, |
| "learning_rate": 6.820809248554913e-06, |
| "loss": 0.2864, |
| "step": 1687 |
| }, |
| { |
| "epoch": 2.6297739672642244, |
| "grad_norm": 0.2116958939607887, |
| "learning_rate": 6.791907514450866e-06, |
| "loss": 0.2893, |
| "step": 1688 |
| }, |
| { |
| "epoch": 2.6313328137178487, |
| "grad_norm": 0.2277230086812389, |
| "learning_rate": 6.763005780346821e-06, |
| "loss": 0.2912, |
| "step": 1689 |
| }, |
| { |
| "epoch": 2.632891660171473, |
| "grad_norm": 0.22320147334511584, |
| "learning_rate": 6.734104046242774e-06, |
| "loss": 0.3009, |
| "step": 1690 |
| }, |
| { |
| "epoch": 2.6344505066250976, |
| "grad_norm": 0.22390700490954193, |
| "learning_rate": 6.7052023121387284e-06, |
| "loss": 0.2818, |
| "step": 1691 |
| }, |
| { |
| "epoch": 2.636009353078722, |
| "grad_norm": 0.22080083950392762, |
| "learning_rate": 6.676300578034682e-06, |
| "loss": 0.3002, |
| "step": 1692 |
| }, |
| { |
| "epoch": 2.637568199532346, |
| "grad_norm": 0.2045566499206754, |
| "learning_rate": 6.647398843930635e-06, |
| "loss": 0.2845, |
| "step": 1693 |
| }, |
| { |
| "epoch": 2.6391270459859704, |
| "grad_norm": 0.2058593680536001, |
| "learning_rate": 6.61849710982659e-06, |
| "loss": 0.2963, |
| "step": 1694 |
| }, |
| { |
| "epoch": 2.6406858924395946, |
| "grad_norm": 0.21761267846258828, |
| "learning_rate": 6.589595375722543e-06, |
| "loss": 0.2958, |
| "step": 1695 |
| }, |
| { |
| "epoch": 2.642244738893219, |
| "grad_norm": 0.21267130911051824, |
| "learning_rate": 6.5606936416184965e-06, |
| "loss": 0.282, |
| "step": 1696 |
| }, |
| { |
| "epoch": 2.643803585346843, |
| "grad_norm": 0.2725520542350063, |
| "learning_rate": 6.531791907514451e-06, |
| "loss": 0.3082, |
| "step": 1697 |
| }, |
| { |
| "epoch": 2.645362431800468, |
| "grad_norm": 0.22632776119108647, |
| "learning_rate": 6.502890173410404e-06, |
| "loss": 0.2888, |
| "step": 1698 |
| }, |
| { |
| "epoch": 2.646921278254092, |
| "grad_norm": 0.21287462722249717, |
| "learning_rate": 6.4739884393063585e-06, |
| "loss": 0.2873, |
| "step": 1699 |
| }, |
| { |
| "epoch": 2.6484801247077163, |
| "grad_norm": 0.20006610540308692, |
| "learning_rate": 6.445086705202312e-06, |
| "loss": 0.2891, |
| "step": 1700 |
| }, |
| { |
| "epoch": 2.6500389711613406, |
| "grad_norm": 0.23392897268756666, |
| "learning_rate": 6.4161849710982654e-06, |
| "loss": 0.299, |
| "step": 1701 |
| }, |
| { |
| "epoch": 2.651597817614965, |
| "grad_norm": 0.2094557655435245, |
| "learning_rate": 6.38728323699422e-06, |
| "loss": 0.2893, |
| "step": 1702 |
| }, |
| { |
| "epoch": 2.6531566640685895, |
| "grad_norm": 0.2162667697074946, |
| "learning_rate": 6.358381502890173e-06, |
| "loss": 0.3018, |
| "step": 1703 |
| }, |
| { |
| "epoch": 2.6547155105222133, |
| "grad_norm": 0.20292202156574402, |
| "learning_rate": 6.3294797687861275e-06, |
| "loss": 0.289, |
| "step": 1704 |
| }, |
| { |
| "epoch": 2.656274356975838, |
| "grad_norm": 0.19930348804964854, |
| "learning_rate": 6.300578034682081e-06, |
| "loss": 0.2911, |
| "step": 1705 |
| }, |
| { |
| "epoch": 2.6578332034294623, |
| "grad_norm": 0.21358543639853841, |
| "learning_rate": 6.271676300578034e-06, |
| "loss": 0.2961, |
| "step": 1706 |
| }, |
| { |
| "epoch": 2.6593920498830865, |
| "grad_norm": 0.20342382560761232, |
| "learning_rate": 6.242774566473989e-06, |
| "loss": 0.2964, |
| "step": 1707 |
| }, |
| { |
| "epoch": 2.660950896336711, |
| "grad_norm": 0.21527984552285181, |
| "learning_rate": 6.213872832369942e-06, |
| "loss": 0.2757, |
| "step": 1708 |
| }, |
| { |
| "epoch": 2.662509742790335, |
| "grad_norm": 0.21467701298854663, |
| "learning_rate": 6.1849710982658956e-06, |
| "loss": 0.2901, |
| "step": 1709 |
| }, |
| { |
| "epoch": 2.6640685892439597, |
| "grad_norm": 0.19947891783905247, |
| "learning_rate": 6.15606936416185e-06, |
| "loss": 0.2821, |
| "step": 1710 |
| }, |
| { |
| "epoch": 2.6656274356975835, |
| "grad_norm": 0.2082997257189345, |
| "learning_rate": 6.127167630057803e-06, |
| "loss": 0.2845, |
| "step": 1711 |
| }, |
| { |
| "epoch": 2.6671862821512082, |
| "grad_norm": 0.21230363198815436, |
| "learning_rate": 6.098265895953758e-06, |
| "loss": 0.2933, |
| "step": 1712 |
| }, |
| { |
| "epoch": 2.6687451286048325, |
| "grad_norm": 0.19933909904198954, |
| "learning_rate": 6.069364161849711e-06, |
| "loss": 0.2837, |
| "step": 1713 |
| }, |
| { |
| "epoch": 2.6703039750584567, |
| "grad_norm": 0.22579536637132155, |
| "learning_rate": 6.0404624277456645e-06, |
| "loss": 0.2944, |
| "step": 1714 |
| }, |
| { |
| "epoch": 2.671862821512081, |
| "grad_norm": 0.2083571147340846, |
| "learning_rate": 6.011560693641619e-06, |
| "loss": 0.2858, |
| "step": 1715 |
| }, |
| { |
| "epoch": 2.6734216679657052, |
| "grad_norm": 0.21447279328154586, |
| "learning_rate": 5.982658959537572e-06, |
| "loss": 0.2796, |
| "step": 1716 |
| }, |
| { |
| "epoch": 2.67498051441933, |
| "grad_norm": 0.2577104816893999, |
| "learning_rate": 5.9537572254335265e-06, |
| "loss": 0.2777, |
| "step": 1717 |
| }, |
| { |
| "epoch": 2.6765393608729537, |
| "grad_norm": 0.2134999870561008, |
| "learning_rate": 5.92485549132948e-06, |
| "loss": 0.288, |
| "step": 1718 |
| }, |
| { |
| "epoch": 2.6780982073265784, |
| "grad_norm": 0.2057246513397782, |
| "learning_rate": 5.895953757225433e-06, |
| "loss": 0.2758, |
| "step": 1719 |
| }, |
| { |
| "epoch": 2.6796570537802027, |
| "grad_norm": 0.2291012037170924, |
| "learning_rate": 5.867052023121388e-06, |
| "loss": 0.2913, |
| "step": 1720 |
| }, |
| { |
| "epoch": 2.681215900233827, |
| "grad_norm": 0.2100404936453596, |
| "learning_rate": 5.838150289017341e-06, |
| "loss": 0.2817, |
| "step": 1721 |
| }, |
| { |
| "epoch": 2.682774746687451, |
| "grad_norm": 0.20420008955113567, |
| "learning_rate": 5.809248554913295e-06, |
| "loss": 0.2963, |
| "step": 1722 |
| }, |
| { |
| "epoch": 2.6843335931410754, |
| "grad_norm": 0.21002723265147977, |
| "learning_rate": 5.780346820809249e-06, |
| "loss": 0.3049, |
| "step": 1723 |
| }, |
| { |
| "epoch": 2.6858924395947, |
| "grad_norm": 0.20340290578451115, |
| "learning_rate": 5.751445086705202e-06, |
| "loss": 0.2847, |
| "step": 1724 |
| }, |
| { |
| "epoch": 2.6874512860483244, |
| "grad_norm": 0.21104439985364004, |
| "learning_rate": 5.722543352601157e-06, |
| "loss": 0.2821, |
| "step": 1725 |
| }, |
| { |
| "epoch": 2.6890101325019486, |
| "grad_norm": 0.21696962161072067, |
| "learning_rate": 5.69364161849711e-06, |
| "loss": 0.2721, |
| "step": 1726 |
| }, |
| { |
| "epoch": 2.690568978955573, |
| "grad_norm": 0.23015064920584716, |
| "learning_rate": 5.6647398843930635e-06, |
| "loss": 0.2922, |
| "step": 1727 |
| }, |
| { |
| "epoch": 2.692127825409197, |
| "grad_norm": 0.20813991146352637, |
| "learning_rate": 5.635838150289018e-06, |
| "loss": 0.2971, |
| "step": 1728 |
| }, |
| { |
| "epoch": 2.6936866718628214, |
| "grad_norm": 0.20649336747594163, |
| "learning_rate": 5.606936416184971e-06, |
| "loss": 0.3036, |
| "step": 1729 |
| }, |
| { |
| "epoch": 2.6952455183164457, |
| "grad_norm": 0.20331720787397545, |
| "learning_rate": 5.578034682080925e-06, |
| "loss": 0.2886, |
| "step": 1730 |
| }, |
| { |
| "epoch": 2.6968043647700704, |
| "grad_norm": 0.21078699708857532, |
| "learning_rate": 5.549132947976879e-06, |
| "loss": 0.2849, |
| "step": 1731 |
| }, |
| { |
| "epoch": 2.6983632112236946, |
| "grad_norm": 0.22036177053042216, |
| "learning_rate": 5.5202312138728324e-06, |
| "loss": 0.2861, |
| "step": 1732 |
| }, |
| { |
| "epoch": 2.699922057677319, |
| "grad_norm": 0.20172166515293294, |
| "learning_rate": 5.491329479768787e-06, |
| "loss": 0.2866, |
| "step": 1733 |
| }, |
| { |
| "epoch": 2.701480904130943, |
| "grad_norm": 0.19522835656935336, |
| "learning_rate": 5.46242774566474e-06, |
| "loss": 0.2681, |
| "step": 1734 |
| }, |
| { |
| "epoch": 2.7030397505845674, |
| "grad_norm": 0.2212443755898533, |
| "learning_rate": 5.433526011560694e-06, |
| "loss": 0.2958, |
| "step": 1735 |
| }, |
| { |
| "epoch": 2.7045985970381916, |
| "grad_norm": 0.19883348976389542, |
| "learning_rate": 5.404624277456648e-06, |
| "loss": 0.283, |
| "step": 1736 |
| }, |
| { |
| "epoch": 2.706157443491816, |
| "grad_norm": 0.22774567151755043, |
| "learning_rate": 5.375722543352601e-06, |
| "loss": 0.2817, |
| "step": 1737 |
| }, |
| { |
| "epoch": 2.7077162899454406, |
| "grad_norm": 0.19653848981003447, |
| "learning_rate": 5.346820809248556e-06, |
| "loss": 0.2807, |
| "step": 1738 |
| }, |
| { |
| "epoch": 2.709275136399065, |
| "grad_norm": 0.2155411949245046, |
| "learning_rate": 5.317919075144509e-06, |
| "loss": 0.2874, |
| "step": 1739 |
| }, |
| { |
| "epoch": 2.710833982852689, |
| "grad_norm": 0.21309269576853238, |
| "learning_rate": 5.2890173410404626e-06, |
| "loss": 0.2922, |
| "step": 1740 |
| }, |
| { |
| "epoch": 2.7123928293063133, |
| "grad_norm": 0.20084602651111383, |
| "learning_rate": 5.260115606936417e-06, |
| "loss": 0.2861, |
| "step": 1741 |
| }, |
| { |
| "epoch": 2.7139516757599376, |
| "grad_norm": 0.20949324216365292, |
| "learning_rate": 5.23121387283237e-06, |
| "loss": 0.2906, |
| "step": 1742 |
| }, |
| { |
| "epoch": 2.715510522213562, |
| "grad_norm": 0.19787967361130746, |
| "learning_rate": 5.202312138728324e-06, |
| "loss": 0.2851, |
| "step": 1743 |
| }, |
| { |
| "epoch": 2.717069368667186, |
| "grad_norm": 0.19777902230004707, |
| "learning_rate": 5.173410404624278e-06, |
| "loss": 0.2724, |
| "step": 1744 |
| }, |
| { |
| "epoch": 2.7186282151208108, |
| "grad_norm": 0.21000085676745223, |
| "learning_rate": 5.1445086705202315e-06, |
| "loss": 0.2863, |
| "step": 1745 |
| }, |
| { |
| "epoch": 2.720187061574435, |
| "grad_norm": 0.20127920458103757, |
| "learning_rate": 5.115606936416186e-06, |
| "loss": 0.2857, |
| "step": 1746 |
| }, |
| { |
| "epoch": 2.7217459080280593, |
| "grad_norm": 0.2090573509510013, |
| "learning_rate": 5.086705202312139e-06, |
| "loss": 0.2866, |
| "step": 1747 |
| }, |
| { |
| "epoch": 2.7233047544816835, |
| "grad_norm": 0.21739367096274437, |
| "learning_rate": 5.057803468208093e-06, |
| "loss": 0.2814, |
| "step": 1748 |
| }, |
| { |
| "epoch": 2.7248636009353078, |
| "grad_norm": 0.20833581410322963, |
| "learning_rate": 5.028901734104047e-06, |
| "loss": 0.2888, |
| "step": 1749 |
| }, |
| { |
| "epoch": 2.726422447388932, |
| "grad_norm": 0.19909078368387395, |
| "learning_rate": 5e-06, |
| "loss": 0.2911, |
| "step": 1750 |
| }, |
| { |
| "epoch": 2.7279812938425563, |
| "grad_norm": 0.20109886859179885, |
| "learning_rate": 4.971098265895955e-06, |
| "loss": 0.2895, |
| "step": 1751 |
| }, |
| { |
| "epoch": 2.729540140296181, |
| "grad_norm": 0.20247469803387927, |
| "learning_rate": 4.942196531791908e-06, |
| "loss": 0.276, |
| "step": 1752 |
| }, |
| { |
| "epoch": 2.7310989867498052, |
| "grad_norm": 0.20421110219784483, |
| "learning_rate": 4.913294797687862e-06, |
| "loss": 0.2792, |
| "step": 1753 |
| }, |
| { |
| "epoch": 2.7326578332034295, |
| "grad_norm": 0.19858031749032556, |
| "learning_rate": 4.884393063583816e-06, |
| "loss": 0.285, |
| "step": 1754 |
| }, |
| { |
| "epoch": 2.7342166796570537, |
| "grad_norm": 0.22215186509011026, |
| "learning_rate": 4.8554913294797685e-06, |
| "loss": 0.3038, |
| "step": 1755 |
| }, |
| { |
| "epoch": 2.735775526110678, |
| "grad_norm": 0.20256389152709856, |
| "learning_rate": 4.826589595375723e-06, |
| "loss": 0.2726, |
| "step": 1756 |
| }, |
| { |
| "epoch": 2.7373343725643027, |
| "grad_norm": 0.21034046831696598, |
| "learning_rate": 4.797687861271676e-06, |
| "loss": 0.296, |
| "step": 1757 |
| }, |
| { |
| "epoch": 2.7388932190179265, |
| "grad_norm": 0.20958728755008627, |
| "learning_rate": 4.76878612716763e-06, |
| "loss": 0.2868, |
| "step": 1758 |
| }, |
| { |
| "epoch": 2.740452065471551, |
| "grad_norm": 0.20383033185004612, |
| "learning_rate": 4.739884393063584e-06, |
| "loss": 0.2854, |
| "step": 1759 |
| }, |
| { |
| "epoch": 2.7420109119251754, |
| "grad_norm": 0.19907487917140468, |
| "learning_rate": 4.710982658959537e-06, |
| "loss": 0.285, |
| "step": 1760 |
| }, |
| { |
| "epoch": 2.7435697583787997, |
| "grad_norm": 0.20044771305150624, |
| "learning_rate": 4.682080924855492e-06, |
| "loss": 0.2816, |
| "step": 1761 |
| }, |
| { |
| "epoch": 2.745128604832424, |
| "grad_norm": 0.2164138120105804, |
| "learning_rate": 4.653179190751445e-06, |
| "loss": 0.2939, |
| "step": 1762 |
| }, |
| { |
| "epoch": 2.746687451286048, |
| "grad_norm": 0.24028484525744273, |
| "learning_rate": 4.624277456647399e-06, |
| "loss": 0.3133, |
| "step": 1763 |
| }, |
| { |
| "epoch": 2.748246297739673, |
| "grad_norm": 0.19984994036225665, |
| "learning_rate": 4.595375722543353e-06, |
| "loss": 0.2868, |
| "step": 1764 |
| }, |
| { |
| "epoch": 2.7498051441932967, |
| "grad_norm": 0.21636457592977681, |
| "learning_rate": 4.566473988439306e-06, |
| "loss": 0.2871, |
| "step": 1765 |
| }, |
| { |
| "epoch": 2.7513639906469214, |
| "grad_norm": 0.19990931527574785, |
| "learning_rate": 4.53757225433526e-06, |
| "loss": 0.2708, |
| "step": 1766 |
| }, |
| { |
| "epoch": 2.7529228371005456, |
| "grad_norm": 0.217527953437702, |
| "learning_rate": 4.508670520231214e-06, |
| "loss": 0.2972, |
| "step": 1767 |
| }, |
| { |
| "epoch": 2.75448168355417, |
| "grad_norm": 0.19223678719292475, |
| "learning_rate": 4.4797687861271675e-06, |
| "loss": 0.2898, |
| "step": 1768 |
| }, |
| { |
| "epoch": 2.756040530007794, |
| "grad_norm": 0.21317778941850382, |
| "learning_rate": 4.450867052023122e-06, |
| "loss": 0.2857, |
| "step": 1769 |
| }, |
| { |
| "epoch": 2.7575993764614184, |
| "grad_norm": 0.21322464171901906, |
| "learning_rate": 4.421965317919075e-06, |
| "loss": 0.2889, |
| "step": 1770 |
| }, |
| { |
| "epoch": 2.759158222915043, |
| "grad_norm": 0.21331351232714513, |
| "learning_rate": 4.393063583815029e-06, |
| "loss": 0.2787, |
| "step": 1771 |
| }, |
| { |
| "epoch": 2.7607170693686673, |
| "grad_norm": 0.20670483032146136, |
| "learning_rate": 4.364161849710983e-06, |
| "loss": 0.2961, |
| "step": 1772 |
| }, |
| { |
| "epoch": 2.7622759158222916, |
| "grad_norm": 0.22435785586885243, |
| "learning_rate": 4.3352601156069365e-06, |
| "loss": 0.2943, |
| "step": 1773 |
| }, |
| { |
| "epoch": 2.763834762275916, |
| "grad_norm": 0.19815210166141742, |
| "learning_rate": 4.30635838150289e-06, |
| "loss": 0.2935, |
| "step": 1774 |
| }, |
| { |
| "epoch": 2.76539360872954, |
| "grad_norm": 0.20921721259317946, |
| "learning_rate": 4.277456647398844e-06, |
| "loss": 0.2711, |
| "step": 1775 |
| }, |
| { |
| "epoch": 2.7669524551831643, |
| "grad_norm": 0.2239327598983314, |
| "learning_rate": 4.248554913294798e-06, |
| "loss": 0.3043, |
| "step": 1776 |
| }, |
| { |
| "epoch": 2.7685113016367886, |
| "grad_norm": 0.2004589789574725, |
| "learning_rate": 4.219653179190752e-06, |
| "loss": 0.2964, |
| "step": 1777 |
| }, |
| { |
| "epoch": 2.7700701480904133, |
| "grad_norm": 0.20625941199900238, |
| "learning_rate": 4.190751445086705e-06, |
| "loss": 0.2907, |
| "step": 1778 |
| }, |
| { |
| "epoch": 2.7716289945440375, |
| "grad_norm": 0.20519545434025108, |
| "learning_rate": 4.161849710982659e-06, |
| "loss": 0.3068, |
| "step": 1779 |
| }, |
| { |
| "epoch": 2.773187840997662, |
| "grad_norm": 0.21554367954746417, |
| "learning_rate": 4.132947976878613e-06, |
| "loss": 0.2952, |
| "step": 1780 |
| }, |
| { |
| "epoch": 2.774746687451286, |
| "grad_norm": 0.21272563121891813, |
| "learning_rate": 4.1040462427745666e-06, |
| "loss": 0.2842, |
| "step": 1781 |
| }, |
| { |
| "epoch": 2.7763055339049103, |
| "grad_norm": 0.21611401595167964, |
| "learning_rate": 4.075144508670521e-06, |
| "loss": 0.3046, |
| "step": 1782 |
| }, |
| { |
| "epoch": 2.7778643803585346, |
| "grad_norm": 0.1997924444181988, |
| "learning_rate": 4.046242774566474e-06, |
| "loss": 0.2931, |
| "step": 1783 |
| }, |
| { |
| "epoch": 2.779423226812159, |
| "grad_norm": 0.21506298028146192, |
| "learning_rate": 4.017341040462428e-06, |
| "loss": 0.2832, |
| "step": 1784 |
| }, |
| { |
| "epoch": 2.7809820732657835, |
| "grad_norm": 0.20021184702963296, |
| "learning_rate": 3.988439306358382e-06, |
| "loss": 0.29, |
| "step": 1785 |
| }, |
| { |
| "epoch": 2.7825409197194078, |
| "grad_norm": 0.20970397205026278, |
| "learning_rate": 3.9595375722543355e-06, |
| "loss": 0.3047, |
| "step": 1786 |
| }, |
| { |
| "epoch": 2.784099766173032, |
| "grad_norm": 0.20062422216247, |
| "learning_rate": 3.930635838150289e-06, |
| "loss": 0.2844, |
| "step": 1787 |
| }, |
| { |
| "epoch": 2.7856586126266563, |
| "grad_norm": 0.20179644813019854, |
| "learning_rate": 3.901734104046243e-06, |
| "loss": 0.2842, |
| "step": 1788 |
| }, |
| { |
| "epoch": 2.7872174590802805, |
| "grad_norm": 0.20018746664410972, |
| "learning_rate": 3.872832369942197e-06, |
| "loss": 0.2892, |
| "step": 1789 |
| }, |
| { |
| "epoch": 2.7887763055339048, |
| "grad_norm": 0.1946501903871324, |
| "learning_rate": 3.843930635838151e-06, |
| "loss": 0.29, |
| "step": 1790 |
| }, |
| { |
| "epoch": 2.790335151987529, |
| "grad_norm": 0.19085065858233305, |
| "learning_rate": 3.815028901734104e-06, |
| "loss": 0.275, |
| "step": 1791 |
| }, |
| { |
| "epoch": 2.7918939984411537, |
| "grad_norm": 0.20722259921075883, |
| "learning_rate": 3.7861271676300583e-06, |
| "loss": 0.2804, |
| "step": 1792 |
| }, |
| { |
| "epoch": 2.793452844894778, |
| "grad_norm": 0.2103035874157165, |
| "learning_rate": 3.757225433526012e-06, |
| "loss": 0.2896, |
| "step": 1793 |
| }, |
| { |
| "epoch": 2.795011691348402, |
| "grad_norm": 0.2102336939252641, |
| "learning_rate": 3.7283236994219656e-06, |
| "loss": 0.2779, |
| "step": 1794 |
| }, |
| { |
| "epoch": 2.7965705378020265, |
| "grad_norm": 0.20336255858279942, |
| "learning_rate": 3.6994219653179195e-06, |
| "loss": 0.2933, |
| "step": 1795 |
| }, |
| { |
| "epoch": 2.7981293842556507, |
| "grad_norm": 0.21206172168707907, |
| "learning_rate": 3.6705202312138733e-06, |
| "loss": 0.2786, |
| "step": 1796 |
| }, |
| { |
| "epoch": 2.799688230709275, |
| "grad_norm": 0.19753691873632548, |
| "learning_rate": 3.6416184971098272e-06, |
| "loss": 0.2925, |
| "step": 1797 |
| }, |
| { |
| "epoch": 2.8012470771628992, |
| "grad_norm": 0.21025169769964883, |
| "learning_rate": 3.6127167630057807e-06, |
| "loss": 0.2968, |
| "step": 1798 |
| }, |
| { |
| "epoch": 2.802805923616524, |
| "grad_norm": 0.22618633232114826, |
| "learning_rate": 3.5838150289017345e-06, |
| "loss": 0.2959, |
| "step": 1799 |
| }, |
| { |
| "epoch": 2.804364770070148, |
| "grad_norm": 0.21822500972127065, |
| "learning_rate": 3.5549132947976884e-06, |
| "loss": 0.3092, |
| "step": 1800 |
| }, |
| { |
| "epoch": 2.8059236165237724, |
| "grad_norm": 0.220482613302332, |
| "learning_rate": 3.5260115606936423e-06, |
| "loss": 0.2955, |
| "step": 1801 |
| }, |
| { |
| "epoch": 2.8074824629773967, |
| "grad_norm": 0.20780910132598235, |
| "learning_rate": 3.4971098265895957e-06, |
| "loss": 0.2761, |
| "step": 1802 |
| }, |
| { |
| "epoch": 2.809041309431021, |
| "grad_norm": 0.2072002447217666, |
| "learning_rate": 3.468208092485549e-06, |
| "loss": 0.2915, |
| "step": 1803 |
| }, |
| { |
| "epoch": 2.8106001558846456, |
| "grad_norm": 0.20725271527567482, |
| "learning_rate": 3.4393063583815026e-06, |
| "loss": 0.2876, |
| "step": 1804 |
| }, |
| { |
| "epoch": 2.8121590023382694, |
| "grad_norm": 0.2101105745995178, |
| "learning_rate": 3.4104046242774565e-06, |
| "loss": 0.2856, |
| "step": 1805 |
| }, |
| { |
| "epoch": 2.813717848791894, |
| "grad_norm": 0.20498941201400542, |
| "learning_rate": 3.3815028901734103e-06, |
| "loss": 0.2796, |
| "step": 1806 |
| }, |
| { |
| "epoch": 2.8152766952455184, |
| "grad_norm": 0.20462911144204035, |
| "learning_rate": 3.3526011560693642e-06, |
| "loss": 0.3016, |
| "step": 1807 |
| }, |
| { |
| "epoch": 2.8168355416991426, |
| "grad_norm": 0.2019060859936445, |
| "learning_rate": 3.3236994219653177e-06, |
| "loss": 0.2805, |
| "step": 1808 |
| }, |
| { |
| "epoch": 2.818394388152767, |
| "grad_norm": 0.2124738066306522, |
| "learning_rate": 3.2947976878612715e-06, |
| "loss": 0.2893, |
| "step": 1809 |
| }, |
| { |
| "epoch": 2.819953234606391, |
| "grad_norm": 0.2088568665930041, |
| "learning_rate": 3.2658959537572254e-06, |
| "loss": 0.2844, |
| "step": 1810 |
| }, |
| { |
| "epoch": 2.821512081060016, |
| "grad_norm": 0.19645688715836024, |
| "learning_rate": 3.2369942196531793e-06, |
| "loss": 0.2749, |
| "step": 1811 |
| }, |
| { |
| "epoch": 2.8230709275136396, |
| "grad_norm": 0.19409466738213488, |
| "learning_rate": 3.2080924855491327e-06, |
| "loss": 0.2812, |
| "step": 1812 |
| }, |
| { |
| "epoch": 2.8246297739672643, |
| "grad_norm": 0.2120230001481611, |
| "learning_rate": 3.1791907514450866e-06, |
| "loss": 0.2954, |
| "step": 1813 |
| }, |
| { |
| "epoch": 2.8261886204208886, |
| "grad_norm": 0.1984563439139553, |
| "learning_rate": 3.1502890173410405e-06, |
| "loss": 0.2984, |
| "step": 1814 |
| }, |
| { |
| "epoch": 2.827747466874513, |
| "grad_norm": 0.20703019708211207, |
| "learning_rate": 3.1213872832369943e-06, |
| "loss": 0.2818, |
| "step": 1815 |
| }, |
| { |
| "epoch": 2.829306313328137, |
| "grad_norm": 0.2073483200049451, |
| "learning_rate": 3.0924855491329478e-06, |
| "loss": 0.2945, |
| "step": 1816 |
| }, |
| { |
| "epoch": 2.8308651597817613, |
| "grad_norm": 0.20322071664588748, |
| "learning_rate": 3.0635838150289016e-06, |
| "loss": 0.28, |
| "step": 1817 |
| }, |
| { |
| "epoch": 2.832424006235386, |
| "grad_norm": 0.19200180838930353, |
| "learning_rate": 3.0346820809248555e-06, |
| "loss": 0.2899, |
| "step": 1818 |
| }, |
| { |
| "epoch": 2.8339828526890103, |
| "grad_norm": 0.19563958688827382, |
| "learning_rate": 3.0057803468208094e-06, |
| "loss": 0.282, |
| "step": 1819 |
| }, |
| { |
| "epoch": 2.8355416991426345, |
| "grad_norm": 0.1999256570053609, |
| "learning_rate": 2.9768786127167633e-06, |
| "loss": 0.2915, |
| "step": 1820 |
| }, |
| { |
| "epoch": 2.837100545596259, |
| "grad_norm": 0.19601345181511165, |
| "learning_rate": 2.9479768786127167e-06, |
| "loss": 0.2846, |
| "step": 1821 |
| }, |
| { |
| "epoch": 2.838659392049883, |
| "grad_norm": 0.2099789860276315, |
| "learning_rate": 2.9190751445086706e-06, |
| "loss": 0.2807, |
| "step": 1822 |
| }, |
| { |
| "epoch": 2.8402182385035073, |
| "grad_norm": 0.20747261090512478, |
| "learning_rate": 2.8901734104046244e-06, |
| "loss": 0.279, |
| "step": 1823 |
| }, |
| { |
| "epoch": 2.8417770849571315, |
| "grad_norm": 0.22257411158836346, |
| "learning_rate": 2.8612716763005783e-06, |
| "loss": 0.2997, |
| "step": 1824 |
| }, |
| { |
| "epoch": 2.8433359314107562, |
| "grad_norm": 0.20554349028187938, |
| "learning_rate": 2.8323699421965318e-06, |
| "loss": 0.2831, |
| "step": 1825 |
| }, |
| { |
| "epoch": 2.8448947778643805, |
| "grad_norm": 0.20735600722529712, |
| "learning_rate": 2.8034682080924856e-06, |
| "loss": 0.294, |
| "step": 1826 |
| }, |
| { |
| "epoch": 2.8464536243180047, |
| "grad_norm": 0.21980476595040552, |
| "learning_rate": 2.7745664739884395e-06, |
| "loss": 0.3026, |
| "step": 1827 |
| }, |
| { |
| "epoch": 2.848012470771629, |
| "grad_norm": 0.1925143943177367, |
| "learning_rate": 2.7456647398843934e-06, |
| "loss": 0.2876, |
| "step": 1828 |
| }, |
| { |
| "epoch": 2.8495713172252533, |
| "grad_norm": 0.1957010171931748, |
| "learning_rate": 2.716763005780347e-06, |
| "loss": 0.2859, |
| "step": 1829 |
| }, |
| { |
| "epoch": 2.8511301636788775, |
| "grad_norm": 0.20358594514165249, |
| "learning_rate": 2.6878612716763007e-06, |
| "loss": 0.282, |
| "step": 1830 |
| }, |
| { |
| "epoch": 2.8526890101325018, |
| "grad_norm": 0.20541392956189472, |
| "learning_rate": 2.6589595375722546e-06, |
| "loss": 0.2795, |
| "step": 1831 |
| }, |
| { |
| "epoch": 2.8542478565861265, |
| "grad_norm": 0.1956976623325346, |
| "learning_rate": 2.6300578034682084e-06, |
| "loss": 0.2753, |
| "step": 1832 |
| }, |
| { |
| "epoch": 2.8558067030397507, |
| "grad_norm": 0.1944502240686237, |
| "learning_rate": 2.601156069364162e-06, |
| "loss": 0.2779, |
| "step": 1833 |
| }, |
| { |
| "epoch": 2.857365549493375, |
| "grad_norm": 0.1842303161830082, |
| "learning_rate": 2.5722543352601157e-06, |
| "loss": 0.2747, |
| "step": 1834 |
| }, |
| { |
| "epoch": 2.858924395946999, |
| "grad_norm": 0.19270326923177308, |
| "learning_rate": 2.5433526011560696e-06, |
| "loss": 0.2895, |
| "step": 1835 |
| }, |
| { |
| "epoch": 2.8604832424006235, |
| "grad_norm": 0.20578389463380026, |
| "learning_rate": 2.5144508670520235e-06, |
| "loss": 0.2784, |
| "step": 1836 |
| }, |
| { |
| "epoch": 2.8620420888542477, |
| "grad_norm": 0.18959748823440806, |
| "learning_rate": 2.4855491329479774e-06, |
| "loss": 0.2941, |
| "step": 1837 |
| }, |
| { |
| "epoch": 2.863600935307872, |
| "grad_norm": 0.189041300234095, |
| "learning_rate": 2.456647398843931e-06, |
| "loss": 0.2863, |
| "step": 1838 |
| }, |
| { |
| "epoch": 2.8651597817614967, |
| "grad_norm": 0.21031320630114367, |
| "learning_rate": 2.4277456647398842e-06, |
| "loss": 0.2996, |
| "step": 1839 |
| }, |
| { |
| "epoch": 2.866718628215121, |
| "grad_norm": 0.20690718603806962, |
| "learning_rate": 2.398843930635838e-06, |
| "loss": 0.2812, |
| "step": 1840 |
| }, |
| { |
| "epoch": 2.868277474668745, |
| "grad_norm": 0.2073404491146693, |
| "learning_rate": 2.369942196531792e-06, |
| "loss": 0.2947, |
| "step": 1841 |
| }, |
| { |
| "epoch": 2.8698363211223694, |
| "grad_norm": 0.19029334104582032, |
| "learning_rate": 2.341040462427746e-06, |
| "loss": 0.2747, |
| "step": 1842 |
| }, |
| { |
| "epoch": 2.8713951675759937, |
| "grad_norm": 0.19017238272079218, |
| "learning_rate": 2.3121387283236993e-06, |
| "loss": 0.2847, |
| "step": 1843 |
| }, |
| { |
| "epoch": 2.872954014029618, |
| "grad_norm": 0.1979579277272625, |
| "learning_rate": 2.283236994219653e-06, |
| "loss": 0.2966, |
| "step": 1844 |
| }, |
| { |
| "epoch": 2.874512860483242, |
| "grad_norm": 0.20597366624414118, |
| "learning_rate": 2.254335260115607e-06, |
| "loss": 0.2967, |
| "step": 1845 |
| }, |
| { |
| "epoch": 2.876071706936867, |
| "grad_norm": 0.1914660504884028, |
| "learning_rate": 2.225433526011561e-06, |
| "loss": 0.2847, |
| "step": 1846 |
| }, |
| { |
| "epoch": 2.877630553390491, |
| "grad_norm": 0.21004523817562415, |
| "learning_rate": 2.1965317919075144e-06, |
| "loss": 0.2844, |
| "step": 1847 |
| }, |
| { |
| "epoch": 2.8791893998441154, |
| "grad_norm": 0.2050510627988167, |
| "learning_rate": 2.1676300578034682e-06, |
| "loss": 0.3026, |
| "step": 1848 |
| }, |
| { |
| "epoch": 2.8807482462977396, |
| "grad_norm": 0.19875825849528217, |
| "learning_rate": 2.138728323699422e-06, |
| "loss": 0.29, |
| "step": 1849 |
| }, |
| { |
| "epoch": 2.882307092751364, |
| "grad_norm": 0.19201938735986301, |
| "learning_rate": 2.109826589595376e-06, |
| "loss": 0.2814, |
| "step": 1850 |
| }, |
| { |
| "epoch": 2.8838659392049886, |
| "grad_norm": 0.19276619456964975, |
| "learning_rate": 2.0809248554913294e-06, |
| "loss": 0.2794, |
| "step": 1851 |
| }, |
| { |
| "epoch": 2.8854247856586124, |
| "grad_norm": 0.20837326668805686, |
| "learning_rate": 2.0520231213872833e-06, |
| "loss": 0.274, |
| "step": 1852 |
| }, |
| { |
| "epoch": 2.886983632112237, |
| "grad_norm": 0.20834168208438708, |
| "learning_rate": 2.023121387283237e-06, |
| "loss": 0.2817, |
| "step": 1853 |
| }, |
| { |
| "epoch": 2.8885424785658613, |
| "grad_norm": 0.20432457017704334, |
| "learning_rate": 1.994219653179191e-06, |
| "loss": 0.2949, |
| "step": 1854 |
| }, |
| { |
| "epoch": 2.8901013250194856, |
| "grad_norm": 0.20163276098807092, |
| "learning_rate": 1.9653179190751445e-06, |
| "loss": 0.2706, |
| "step": 1855 |
| }, |
| { |
| "epoch": 2.89166017147311, |
| "grad_norm": 0.20977626122832665, |
| "learning_rate": 1.9364161849710983e-06, |
| "loss": 0.3086, |
| "step": 1856 |
| }, |
| { |
| "epoch": 2.893219017926734, |
| "grad_norm": 0.20060594756785594, |
| "learning_rate": 1.907514450867052e-06, |
| "loss": 0.2911, |
| "step": 1857 |
| }, |
| { |
| "epoch": 2.894777864380359, |
| "grad_norm": 0.20293612568917765, |
| "learning_rate": 1.878612716763006e-06, |
| "loss": 0.2911, |
| "step": 1858 |
| }, |
| { |
| "epoch": 2.8963367108339826, |
| "grad_norm": 0.2096162037257313, |
| "learning_rate": 1.8497109826589597e-06, |
| "loss": 0.2918, |
| "step": 1859 |
| }, |
| { |
| "epoch": 2.8978955572876073, |
| "grad_norm": 0.21042720816672483, |
| "learning_rate": 1.8208092485549136e-06, |
| "loss": 0.3119, |
| "step": 1860 |
| }, |
| { |
| "epoch": 2.8994544037412315, |
| "grad_norm": 0.18793473718435338, |
| "learning_rate": 1.7919075144508673e-06, |
| "loss": 0.2752, |
| "step": 1861 |
| }, |
| { |
| "epoch": 2.901013250194856, |
| "grad_norm": 0.2079973480121488, |
| "learning_rate": 1.7630057803468211e-06, |
| "loss": 0.3029, |
| "step": 1862 |
| }, |
| { |
| "epoch": 2.90257209664848, |
| "grad_norm": 0.1961639560813693, |
| "learning_rate": 1.7341040462427746e-06, |
| "loss": 0.2916, |
| "step": 1863 |
| }, |
| { |
| "epoch": 2.9041309431021043, |
| "grad_norm": 0.1907492865888087, |
| "learning_rate": 1.7052023121387282e-06, |
| "loss": 0.2789, |
| "step": 1864 |
| }, |
| { |
| "epoch": 2.905689789555729, |
| "grad_norm": 0.20235805559023728, |
| "learning_rate": 1.6763005780346821e-06, |
| "loss": 0.2849, |
| "step": 1865 |
| }, |
| { |
| "epoch": 2.9072486360093532, |
| "grad_norm": 0.20288572024690002, |
| "learning_rate": 1.6473988439306358e-06, |
| "loss": 0.2654, |
| "step": 1866 |
| }, |
| { |
| "epoch": 2.9088074824629775, |
| "grad_norm": 0.20289343651469097, |
| "learning_rate": 1.6184971098265896e-06, |
| "loss": 0.2951, |
| "step": 1867 |
| }, |
| { |
| "epoch": 2.9103663289166017, |
| "grad_norm": 0.18922720141621407, |
| "learning_rate": 1.5895953757225433e-06, |
| "loss": 0.2762, |
| "step": 1868 |
| }, |
| { |
| "epoch": 2.911925175370226, |
| "grad_norm": 0.19241261968232612, |
| "learning_rate": 1.5606936416184972e-06, |
| "loss": 0.2881, |
| "step": 1869 |
| }, |
| { |
| "epoch": 2.9134840218238502, |
| "grad_norm": 0.19674852841020815, |
| "learning_rate": 1.5317919075144508e-06, |
| "loss": 0.2832, |
| "step": 1870 |
| }, |
| { |
| "epoch": 2.9150428682774745, |
| "grad_norm": 0.18740622725667494, |
| "learning_rate": 1.5028901734104047e-06, |
| "loss": 0.283, |
| "step": 1871 |
| }, |
| { |
| "epoch": 2.916601714731099, |
| "grad_norm": 0.20669295826778195, |
| "learning_rate": 1.4739884393063584e-06, |
| "loss": 0.2808, |
| "step": 1872 |
| }, |
| { |
| "epoch": 2.9181605611847234, |
| "grad_norm": 0.18108268743966033, |
| "learning_rate": 1.4450867052023122e-06, |
| "loss": 0.2708, |
| "step": 1873 |
| }, |
| { |
| "epoch": 2.9197194076383477, |
| "grad_norm": 0.19216377105239985, |
| "learning_rate": 1.4161849710982659e-06, |
| "loss": 0.2798, |
| "step": 1874 |
| }, |
| { |
| "epoch": 2.921278254091972, |
| "grad_norm": 0.2036871353826851, |
| "learning_rate": 1.3872832369942197e-06, |
| "loss": 0.2871, |
| "step": 1875 |
| }, |
| { |
| "epoch": 2.922837100545596, |
| "grad_norm": 0.19352818374916894, |
| "learning_rate": 1.3583815028901734e-06, |
| "loss": 0.2982, |
| "step": 1876 |
| }, |
| { |
| "epoch": 2.9243959469992205, |
| "grad_norm": 0.19567432642222407, |
| "learning_rate": 1.3294797687861273e-06, |
| "loss": 0.2832, |
| "step": 1877 |
| }, |
| { |
| "epoch": 2.9259547934528447, |
| "grad_norm": 0.19512011366905269, |
| "learning_rate": 1.300578034682081e-06, |
| "loss": 0.2839, |
| "step": 1878 |
| }, |
| { |
| "epoch": 2.9275136399064694, |
| "grad_norm": 0.202990327713183, |
| "learning_rate": 1.2716763005780348e-06, |
| "loss": 0.2823, |
| "step": 1879 |
| }, |
| { |
| "epoch": 2.9290724863600937, |
| "grad_norm": 0.20583389454978893, |
| "learning_rate": 1.2427745664739887e-06, |
| "loss": 0.2988, |
| "step": 1880 |
| }, |
| { |
| "epoch": 2.930631332813718, |
| "grad_norm": 0.2014751871494088, |
| "learning_rate": 1.2138728323699421e-06, |
| "loss": 0.293, |
| "step": 1881 |
| }, |
| { |
| "epoch": 2.932190179267342, |
| "grad_norm": 0.19892906821370257, |
| "learning_rate": 1.184971098265896e-06, |
| "loss": 0.2949, |
| "step": 1882 |
| }, |
| { |
| "epoch": 2.9337490257209664, |
| "grad_norm": 0.18958700311963878, |
| "learning_rate": 1.1560693641618497e-06, |
| "loss": 0.2826, |
| "step": 1883 |
| }, |
| { |
| "epoch": 2.9353078721745907, |
| "grad_norm": 0.19062254575039358, |
| "learning_rate": 1.1271676300578035e-06, |
| "loss": 0.2734, |
| "step": 1884 |
| }, |
| { |
| "epoch": 2.936866718628215, |
| "grad_norm": 0.19081647607459015, |
| "learning_rate": 1.0982658959537572e-06, |
| "loss": 0.2719, |
| "step": 1885 |
| }, |
| { |
| "epoch": 2.9384255650818396, |
| "grad_norm": 0.20749991571237006, |
| "learning_rate": 1.069364161849711e-06, |
| "loss": 0.2902, |
| "step": 1886 |
| }, |
| { |
| "epoch": 2.939984411535464, |
| "grad_norm": 0.201338883635779, |
| "learning_rate": 1.0404624277456647e-06, |
| "loss": 0.2866, |
| "step": 1887 |
| }, |
| { |
| "epoch": 2.941543257989088, |
| "grad_norm": 0.185374324630895, |
| "learning_rate": 1.0115606936416186e-06, |
| "loss": 0.291, |
| "step": 1888 |
| }, |
| { |
| "epoch": 2.9431021044427124, |
| "grad_norm": 0.19169182164611995, |
| "learning_rate": 9.826589595375722e-07, |
| "loss": 0.2919, |
| "step": 1889 |
| }, |
| { |
| "epoch": 2.9446609508963366, |
| "grad_norm": 0.19500681675014075, |
| "learning_rate": 9.53757225433526e-07, |
| "loss": 0.289, |
| "step": 1890 |
| }, |
| { |
| "epoch": 2.946219797349961, |
| "grad_norm": 0.20336352790343418, |
| "learning_rate": 9.248554913294799e-07, |
| "loss": 0.2888, |
| "step": 1891 |
| }, |
| { |
| "epoch": 2.947778643803585, |
| "grad_norm": 0.18996694302336917, |
| "learning_rate": 8.959537572254336e-07, |
| "loss": 0.2837, |
| "step": 1892 |
| }, |
| { |
| "epoch": 2.94933749025721, |
| "grad_norm": 0.2156301542305652, |
| "learning_rate": 8.670520231213873e-07, |
| "loss": 0.2799, |
| "step": 1893 |
| }, |
| { |
| "epoch": 2.950896336710834, |
| "grad_norm": 0.19727279525402547, |
| "learning_rate": 8.381502890173411e-07, |
| "loss": 0.2906, |
| "step": 1894 |
| }, |
| { |
| "epoch": 2.9524551831644583, |
| "grad_norm": 0.19284045282449663, |
| "learning_rate": 8.092485549132948e-07, |
| "loss": 0.2795, |
| "step": 1895 |
| }, |
| { |
| "epoch": 2.9540140296180826, |
| "grad_norm": 0.20089648135420463, |
| "learning_rate": 7.803468208092486e-07, |
| "loss": 0.2857, |
| "step": 1896 |
| }, |
| { |
| "epoch": 2.955572876071707, |
| "grad_norm": 0.18636348684721002, |
| "learning_rate": 7.514450867052023e-07, |
| "loss": 0.28, |
| "step": 1897 |
| }, |
| { |
| "epoch": 2.9571317225253315, |
| "grad_norm": 0.1842759397599328, |
| "learning_rate": 7.225433526011561e-07, |
| "loss": 0.2811, |
| "step": 1898 |
| }, |
| { |
| "epoch": 2.9586905689789553, |
| "grad_norm": 0.198586721483869, |
| "learning_rate": 6.936416184971099e-07, |
| "loss": 0.2795, |
| "step": 1899 |
| }, |
| { |
| "epoch": 2.96024941543258, |
| "grad_norm": 0.20175920964436003, |
| "learning_rate": 6.647398843930636e-07, |
| "loss": 0.2963, |
| "step": 1900 |
| }, |
| { |
| "epoch": 2.9618082618862043, |
| "grad_norm": 0.1894255377343828, |
| "learning_rate": 6.358381502890174e-07, |
| "loss": 0.2786, |
| "step": 1901 |
| }, |
| { |
| "epoch": 2.9633671083398285, |
| "grad_norm": 0.18846655912423485, |
| "learning_rate": 6.069364161849711e-07, |
| "loss": 0.2755, |
| "step": 1902 |
| }, |
| { |
| "epoch": 2.964925954793453, |
| "grad_norm": 0.1928139829341696, |
| "learning_rate": 5.780346820809248e-07, |
| "loss": 0.2999, |
| "step": 1903 |
| }, |
| { |
| "epoch": 2.966484801247077, |
| "grad_norm": 0.19323847526640792, |
| "learning_rate": 5.491329479768786e-07, |
| "loss": 0.2937, |
| "step": 1904 |
| }, |
| { |
| "epoch": 2.9680436477007017, |
| "grad_norm": 0.20120482130965053, |
| "learning_rate": 5.202312138728324e-07, |
| "loss": 0.2947, |
| "step": 1905 |
| }, |
| { |
| "epoch": 2.9696024941543255, |
| "grad_norm": 0.21061414202296763, |
| "learning_rate": 4.913294797687861e-07, |
| "loss": 0.2905, |
| "step": 1906 |
| }, |
| { |
| "epoch": 2.9711613406079502, |
| "grad_norm": 0.21063326211832292, |
| "learning_rate": 4.6242774566473993e-07, |
| "loss": 0.2965, |
| "step": 1907 |
| }, |
| { |
| "epoch": 2.9727201870615745, |
| "grad_norm": 0.20410862279580308, |
| "learning_rate": 4.3352601156069365e-07, |
| "loss": 0.2891, |
| "step": 1908 |
| }, |
| { |
| "epoch": 2.9742790335151987, |
| "grad_norm": 0.19836165458820448, |
| "learning_rate": 4.046242774566474e-07, |
| "loss": 0.2962, |
| "step": 1909 |
| }, |
| { |
| "epoch": 2.975837879968823, |
| "grad_norm": 0.19040393729360364, |
| "learning_rate": 3.7572254335260117e-07, |
| "loss": 0.2986, |
| "step": 1910 |
| }, |
| { |
| "epoch": 2.9773967264224472, |
| "grad_norm": 0.18498159624839047, |
| "learning_rate": 3.4682080924855494e-07, |
| "loss": 0.2855, |
| "step": 1911 |
| }, |
| { |
| "epoch": 2.978955572876072, |
| "grad_norm": 0.19089075468074318, |
| "learning_rate": 3.179190751445087e-07, |
| "loss": 0.2913, |
| "step": 1912 |
| }, |
| { |
| "epoch": 2.980514419329696, |
| "grad_norm": 0.19143218775583679, |
| "learning_rate": 2.890173410404624e-07, |
| "loss": 0.2739, |
| "step": 1913 |
| }, |
| { |
| "epoch": 2.9820732657833204, |
| "grad_norm": 0.19760714438972718, |
| "learning_rate": 2.601156069364162e-07, |
| "loss": 0.2955, |
| "step": 1914 |
| }, |
| { |
| "epoch": 2.9836321122369447, |
| "grad_norm": 0.18341652706228365, |
| "learning_rate": 2.3121387283236997e-07, |
| "loss": 0.2713, |
| "step": 1915 |
| }, |
| { |
| "epoch": 2.985190958690569, |
| "grad_norm": 0.19428086051572635, |
| "learning_rate": 2.023121387283237e-07, |
| "loss": 0.2867, |
| "step": 1916 |
| }, |
| { |
| "epoch": 2.986749805144193, |
| "grad_norm": 0.19485137014886658, |
| "learning_rate": 1.7341040462427747e-07, |
| "loss": 0.2761, |
| "step": 1917 |
| }, |
| { |
| "epoch": 2.9883086515978174, |
| "grad_norm": 0.19777851419652856, |
| "learning_rate": 1.445086705202312e-07, |
| "loss": 0.2823, |
| "step": 1918 |
| }, |
| { |
| "epoch": 2.989867498051442, |
| "grad_norm": 0.19065668774947628, |
| "learning_rate": 1.1560693641618498e-07, |
| "loss": 0.2954, |
| "step": 1919 |
| }, |
| { |
| "epoch": 2.9914263445050664, |
| "grad_norm": 0.1974490266526018, |
| "learning_rate": 8.670520231213873e-08, |
| "loss": 0.2813, |
| "step": 1920 |
| }, |
| { |
| "epoch": 2.9929851909586906, |
| "grad_norm": 0.19818812256280618, |
| "learning_rate": 5.780346820809249e-08, |
| "loss": 0.2773, |
| "step": 1921 |
| }, |
| { |
| "epoch": 2.994544037412315, |
| "grad_norm": 0.1943618741709864, |
| "learning_rate": 2.8901734104046246e-08, |
| "loss": 0.2787, |
| "step": 1922 |
| }, |
| { |
| "epoch": 2.996102883865939, |
| "grad_norm": 0.1954427128274177, |
| "learning_rate": 0.0, |
| "loss": 0.278, |
| "step": 1923 |
| }, |
| { |
| "epoch": 2.996102883865939, |
| "step": 1923, |
| "total_flos": 1.6424334483479593e+18, |
| "train_loss": 0.4532742842497208, |
| "train_runtime": 111641.3311, |
| "train_samples_per_second": 0.276, |
| "train_steps_per_second": 0.017 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 1923, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 3, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 1.6424334483479593e+18, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |