{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 2.9964664310954063,
"eval_steps": 500,
"global_step": 2121,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0014134275618374558,
"grad_norm": 56.42226339712467,
"learning_rate": 2.347417840375587e-07,
"loss": 10.9871,
"step": 1
},
{
"epoch": 0.0028268551236749115,
"grad_norm": 55.48151168223359,
"learning_rate": 4.694835680751174e-07,
"loss": 11.0806,
"step": 2
},
{
"epoch": 0.004240282685512367,
"grad_norm": 54.726584311181135,
"learning_rate": 7.042253521126761e-07,
"loss": 11.0739,
"step": 3
},
{
"epoch": 0.005653710247349823,
"grad_norm": 53.387513209693935,
"learning_rate": 9.389671361502348e-07,
"loss": 11.1041,
"step": 4
},
{
"epoch": 0.007067137809187279,
"grad_norm": 56.72798952029813,
"learning_rate": 1.1737089201877934e-06,
"loss": 10.9757,
"step": 5
},
{
"epoch": 0.008480565371024734,
"grad_norm": 56.04107881738446,
"learning_rate": 1.4084507042253521e-06,
"loss": 10.9305,
"step": 6
},
{
"epoch": 0.009893992932862191,
"grad_norm": 58.24818705674269,
"learning_rate": 1.643192488262911e-06,
"loss": 10.9601,
"step": 7
},
{
"epoch": 0.011307420494699646,
"grad_norm": 61.02374914117528,
"learning_rate": 1.8779342723004696e-06,
"loss": 10.6065,
"step": 8
},
{
"epoch": 0.012720848056537103,
"grad_norm": 65.37883124738032,
"learning_rate": 2.112676056338028e-06,
"loss": 10.5378,
"step": 9
},
{
"epoch": 0.014134275618374558,
"grad_norm": 94.44002256981354,
"learning_rate": 2.3474178403755868e-06,
"loss": 9.2227,
"step": 10
},
{
"epoch": 0.015547703180212015,
"grad_norm": 90.23004674068657,
"learning_rate": 2.582159624413146e-06,
"loss": 9.3227,
"step": 11
},
{
"epoch": 0.01696113074204947,
"grad_norm": 98.0914630446004,
"learning_rate": 2.8169014084507042e-06,
"loss": 8.8325,
"step": 12
},
{
"epoch": 0.018374558303886925,
"grad_norm": 90.18406194204498,
"learning_rate": 3.051643192488263e-06,
"loss": 4.5936,
"step": 13
},
{
"epoch": 0.019787985865724382,
"grad_norm": 62.07315673380062,
"learning_rate": 3.286384976525822e-06,
"loss": 3.4451,
"step": 14
},
{
"epoch": 0.02120141342756184,
"grad_norm": 52.705698763839266,
"learning_rate": 3.521126760563381e-06,
"loss": 3.1379,
"step": 15
},
{
"epoch": 0.022614840989399292,
"grad_norm": 36.62978608059841,
"learning_rate": 3.755868544600939e-06,
"loss": 2.501,
"step": 16
},
{
"epoch": 0.02402826855123675,
"grad_norm": 29.6086038596033,
"learning_rate": 3.990610328638498e-06,
"loss": 2.2712,
"step": 17
},
{
"epoch": 0.025441696113074206,
"grad_norm": 6.83114114273313,
"learning_rate": 4.225352112676056e-06,
"loss": 1.39,
"step": 18
},
{
"epoch": 0.02685512367491166,
"grad_norm": 16.847463504756654,
"learning_rate": 4.460093896713615e-06,
"loss": 1.2988,
"step": 19
},
{
"epoch": 0.028268551236749116,
"grad_norm": 4.462018271487505,
"learning_rate": 4.6948356807511736e-06,
"loss": 1.2288,
"step": 20
},
{
"epoch": 0.029681978798586573,
"grad_norm": 3.5660607444383903,
"learning_rate": 4.929577464788732e-06,
"loss": 1.1746,
"step": 21
},
{
"epoch": 0.03109540636042403,
"grad_norm": 2.7434734811027166,
"learning_rate": 5.164319248826292e-06,
"loss": 1.0755,
"step": 22
},
{
"epoch": 0.03250883392226148,
"grad_norm": 2.258171646473509,
"learning_rate": 5.3990610328638506e-06,
"loss": 1.0613,
"step": 23
},
{
"epoch": 0.03392226148409894,
"grad_norm": 1.6489883781583445,
"learning_rate": 5.6338028169014084e-06,
"loss": 0.9786,
"step": 24
},
{
"epoch": 0.0353356890459364,
"grad_norm": 1.3445046113432206,
"learning_rate": 5.868544600938967e-06,
"loss": 0.9328,
"step": 25
},
{
"epoch": 0.03674911660777385,
"grad_norm": 6.450379326238587,
"learning_rate": 6.103286384976526e-06,
"loss": 0.8547,
"step": 26
},
{
"epoch": 0.03816254416961131,
"grad_norm": 4.087839418208385,
"learning_rate": 6.338028169014085e-06,
"loss": 0.8688,
"step": 27
},
{
"epoch": 0.039575971731448764,
"grad_norm": 1.2783228531894089,
"learning_rate": 6.572769953051644e-06,
"loss": 0.8615,
"step": 28
},
{
"epoch": 0.04098939929328622,
"grad_norm": 0.936710269618676,
"learning_rate": 6.807511737089202e-06,
"loss": 0.8239,
"step": 29
},
{
"epoch": 0.04240282685512368,
"grad_norm": 0.8644125780651799,
"learning_rate": 7.042253521126762e-06,
"loss": 0.784,
"step": 30
},
{
"epoch": 0.04381625441696113,
"grad_norm": 0.7183601298903778,
"learning_rate": 7.2769953051643195e-06,
"loss": 0.7802,
"step": 31
},
{
"epoch": 0.045229681978798585,
"grad_norm": 0.7249843179424419,
"learning_rate": 7.511737089201878e-06,
"loss": 0.7634,
"step": 32
},
{
"epoch": 0.046643109540636045,
"grad_norm": 0.6573717921375788,
"learning_rate": 7.746478873239436e-06,
"loss": 0.7453,
"step": 33
},
{
"epoch": 0.0480565371024735,
"grad_norm": 0.6360345395028835,
"learning_rate": 7.981220657276996e-06,
"loss": 0.7336,
"step": 34
},
{
"epoch": 0.04946996466431095,
"grad_norm": 0.6822682214136042,
"learning_rate": 8.215962441314555e-06,
"loss": 0.7033,
"step": 35
},
{
"epoch": 0.05088339222614841,
"grad_norm": 0.6223475454740455,
"learning_rate": 8.450704225352112e-06,
"loss": 0.6726,
"step": 36
},
{
"epoch": 0.052296819787985865,
"grad_norm": 0.5921854852791705,
"learning_rate": 8.685446009389673e-06,
"loss": 0.685,
"step": 37
},
{
"epoch": 0.05371024734982332,
"grad_norm": 0.5544244772607408,
"learning_rate": 8.92018779342723e-06,
"loss": 0.6909,
"step": 38
},
{
"epoch": 0.05512367491166078,
"grad_norm": 0.44192546256108367,
"learning_rate": 9.15492957746479e-06,
"loss": 0.6524,
"step": 39
},
{
"epoch": 0.05653710247349823,
"grad_norm": 0.49523631630948994,
"learning_rate": 9.389671361502347e-06,
"loss": 0.6563,
"step": 40
},
{
"epoch": 0.057950530035335686,
"grad_norm": 0.48126028482865685,
"learning_rate": 9.624413145539906e-06,
"loss": 0.647,
"step": 41
},
{
"epoch": 0.059363957597173146,
"grad_norm": 0.5477543207247778,
"learning_rate": 9.859154929577465e-06,
"loss": 0.6538,
"step": 42
},
{
"epoch": 0.0607773851590106,
"grad_norm": 0.46443570767100384,
"learning_rate": 1.0093896713615023e-05,
"loss": 0.6321,
"step": 43
},
{
"epoch": 0.06219081272084806,
"grad_norm": 0.42178797107060345,
"learning_rate": 1.0328638497652584e-05,
"loss": 0.6326,
"step": 44
},
{
"epoch": 0.0636042402826855,
"grad_norm": 0.4183599036700563,
"learning_rate": 1.056338028169014e-05,
"loss": 0.6247,
"step": 45
},
{
"epoch": 0.06501766784452297,
"grad_norm": 0.4745520621134214,
"learning_rate": 1.0798122065727701e-05,
"loss": 0.615,
"step": 46
},
{
"epoch": 0.06643109540636043,
"grad_norm": 0.4545164876008835,
"learning_rate": 1.1032863849765258e-05,
"loss": 0.6225,
"step": 47
},
{
"epoch": 0.06784452296819787,
"grad_norm": 0.37471089672971875,
"learning_rate": 1.1267605633802817e-05,
"loss": 0.5918,
"step": 48
},
{
"epoch": 0.06925795053003533,
"grad_norm": 0.3594764620396568,
"learning_rate": 1.1502347417840376e-05,
"loss": 0.6321,
"step": 49
},
{
"epoch": 0.0706713780918728,
"grad_norm": 0.38711984123021864,
"learning_rate": 1.1737089201877934e-05,
"loss": 0.6012,
"step": 50
},
{
"epoch": 0.07208480565371024,
"grad_norm": 0.42569447627256096,
"learning_rate": 1.1971830985915493e-05,
"loss": 0.5976,
"step": 51
},
{
"epoch": 0.0734982332155477,
"grad_norm": 0.3882643860041322,
"learning_rate": 1.2206572769953052e-05,
"loss": 0.6072,
"step": 52
},
{
"epoch": 0.07491166077738516,
"grad_norm": 0.34525229034941224,
"learning_rate": 1.2441314553990612e-05,
"loss": 0.6207,
"step": 53
},
{
"epoch": 0.07632508833922262,
"grad_norm": 0.3196235917570463,
"learning_rate": 1.267605633802817e-05,
"loss": 0.5638,
"step": 54
},
{
"epoch": 0.07773851590106007,
"grad_norm": 0.42511337165515106,
"learning_rate": 1.2910798122065728e-05,
"loss": 0.5796,
"step": 55
},
{
"epoch": 0.07915194346289753,
"grad_norm": 0.35081751611492834,
"learning_rate": 1.3145539906103288e-05,
"loss": 0.5838,
"step": 56
},
{
"epoch": 0.08056537102473499,
"grad_norm": 0.330312920202481,
"learning_rate": 1.3380281690140845e-05,
"loss": 0.5718,
"step": 57
},
{
"epoch": 0.08197879858657244,
"grad_norm": 0.3622026069121041,
"learning_rate": 1.3615023474178404e-05,
"loss": 0.5809,
"step": 58
},
{
"epoch": 0.0833922261484099,
"grad_norm": 0.331102207204659,
"learning_rate": 1.3849765258215963e-05,
"loss": 0.5718,
"step": 59
},
{
"epoch": 0.08480565371024736,
"grad_norm": 0.36014791333060386,
"learning_rate": 1.4084507042253523e-05,
"loss": 0.5618,
"step": 60
},
{
"epoch": 0.0862190812720848,
"grad_norm": 0.3037872969063458,
"learning_rate": 1.4319248826291082e-05,
"loss": 0.5371,
"step": 61
},
{
"epoch": 0.08763250883392226,
"grad_norm": 0.30019378421281034,
"learning_rate": 1.4553990610328639e-05,
"loss": 0.5364,
"step": 62
},
{
"epoch": 0.08904593639575972,
"grad_norm": 0.32498207002424667,
"learning_rate": 1.4788732394366198e-05,
"loss": 0.5527,
"step": 63
},
{
"epoch": 0.09045936395759717,
"grad_norm": 0.33648120105975854,
"learning_rate": 1.5023474178403756e-05,
"loss": 0.5365,
"step": 64
},
{
"epoch": 0.09187279151943463,
"grad_norm": 0.3098438630173537,
"learning_rate": 1.5258215962441317e-05,
"loss": 0.5466,
"step": 65
},
{
"epoch": 0.09328621908127209,
"grad_norm": 0.2900036768621737,
"learning_rate": 1.5492957746478872e-05,
"loss": 0.5364,
"step": 66
},
{
"epoch": 0.09469964664310954,
"grad_norm": 0.3361217386245732,
"learning_rate": 1.5727699530516433e-05,
"loss": 0.5506,
"step": 67
},
{
"epoch": 0.096113074204947,
"grad_norm": 0.30785367100558875,
"learning_rate": 1.5962441314553993e-05,
"loss": 0.553,
"step": 68
},
{
"epoch": 0.09752650176678446,
"grad_norm": 0.31407397592189423,
"learning_rate": 1.619718309859155e-05,
"loss": 0.5527,
"step": 69
},
{
"epoch": 0.0989399293286219,
"grad_norm": 0.34888097378655036,
"learning_rate": 1.643192488262911e-05,
"loss": 0.5251,
"step": 70
},
{
"epoch": 0.10035335689045936,
"grad_norm": 0.3054813921820636,
"learning_rate": 1.6666666666666667e-05,
"loss": 0.5565,
"step": 71
},
{
"epoch": 0.10176678445229682,
"grad_norm": 0.33112437004259265,
"learning_rate": 1.6901408450704224e-05,
"loss": 0.537,
"step": 72
},
{
"epoch": 0.10318021201413427,
"grad_norm": 0.39180452325999704,
"learning_rate": 1.7136150234741785e-05,
"loss": 0.5247,
"step": 73
},
{
"epoch": 0.10459363957597173,
"grad_norm": 0.2612292303415971,
"learning_rate": 1.7370892018779345e-05,
"loss": 0.5255,
"step": 74
},
{
"epoch": 0.10600706713780919,
"grad_norm": 0.3430234877576929,
"learning_rate": 1.7605633802816902e-05,
"loss": 0.5395,
"step": 75
},
{
"epoch": 0.10742049469964664,
"grad_norm": 0.31496269692855305,
"learning_rate": 1.784037558685446e-05,
"loss": 0.5377,
"step": 76
},
{
"epoch": 0.1088339222614841,
"grad_norm": 0.26999813850701354,
"learning_rate": 1.807511737089202e-05,
"loss": 0.5368,
"step": 77
},
{
"epoch": 0.11024734982332156,
"grad_norm": 0.3006615419658019,
"learning_rate": 1.830985915492958e-05,
"loss": 0.5231,
"step": 78
},
{
"epoch": 0.111660777385159,
"grad_norm": 0.30752811013208,
"learning_rate": 1.8544600938967137e-05,
"loss": 0.5261,
"step": 79
},
{
"epoch": 0.11307420494699646,
"grad_norm": 0.2634973174362467,
"learning_rate": 1.8779342723004694e-05,
"loss": 0.5209,
"step": 80
},
{
"epoch": 0.11448763250883393,
"grad_norm": 0.3276519715124132,
"learning_rate": 1.9014084507042255e-05,
"loss": 0.5259,
"step": 81
},
{
"epoch": 0.11590106007067137,
"grad_norm": 0.34482768042373113,
"learning_rate": 1.9248826291079812e-05,
"loss": 0.5223,
"step": 82
},
{
"epoch": 0.11731448763250883,
"grad_norm": 0.3129361399473587,
"learning_rate": 1.9483568075117372e-05,
"loss": 0.5308,
"step": 83
},
{
"epoch": 0.11872791519434629,
"grad_norm": 0.3174012874930301,
"learning_rate": 1.971830985915493e-05,
"loss": 0.5241,
"step": 84
},
{
"epoch": 0.12014134275618374,
"grad_norm": 0.3351447025751815,
"learning_rate": 1.995305164319249e-05,
"loss": 0.5257,
"step": 85
},
{
"epoch": 0.1215547703180212,
"grad_norm": 0.37237052581633967,
"learning_rate": 2.0187793427230047e-05,
"loss": 0.5348,
"step": 86
},
{
"epoch": 0.12296819787985866,
"grad_norm": 0.2986903223380715,
"learning_rate": 2.0422535211267607e-05,
"loss": 0.5145,
"step": 87
},
{
"epoch": 0.12438162544169612,
"grad_norm": 0.344440104115397,
"learning_rate": 2.0657276995305167e-05,
"loss": 0.5033,
"step": 88
},
{
"epoch": 0.12579505300353358,
"grad_norm": 0.33907230268674626,
"learning_rate": 2.0892018779342724e-05,
"loss": 0.509,
"step": 89
},
{
"epoch": 0.127208480565371,
"grad_norm": 0.31199108817893306,
"learning_rate": 2.112676056338028e-05,
"loss": 0.5199,
"step": 90
},
{
"epoch": 0.12862190812720847,
"grad_norm": 0.3099318979291099,
"learning_rate": 2.1361502347417842e-05,
"loss": 0.493,
"step": 91
},
{
"epoch": 0.13003533568904593,
"grad_norm": 0.35815638707394737,
"learning_rate": 2.1596244131455402e-05,
"loss": 0.5215,
"step": 92
},
{
"epoch": 0.1314487632508834,
"grad_norm": 0.3084233138887893,
"learning_rate": 2.1830985915492956e-05,
"loss": 0.5025,
"step": 93
},
{
"epoch": 0.13286219081272085,
"grad_norm": 0.34644307213091813,
"learning_rate": 2.2065727699530516e-05,
"loss": 0.4971,
"step": 94
},
{
"epoch": 0.13427561837455831,
"grad_norm": 0.3265963866077138,
"learning_rate": 2.2300469483568077e-05,
"loss": 0.5376,
"step": 95
},
{
"epoch": 0.13568904593639575,
"grad_norm": 0.4039233292439116,
"learning_rate": 2.2535211267605634e-05,
"loss": 0.4818,
"step": 96
},
{
"epoch": 0.1371024734982332,
"grad_norm": 0.3299074291876188,
"learning_rate": 2.2769953051643194e-05,
"loss": 0.4917,
"step": 97
},
{
"epoch": 0.13851590106007067,
"grad_norm": 0.3477769346910349,
"learning_rate": 2.300469483568075e-05,
"loss": 0.4897,
"step": 98
},
{
"epoch": 0.13992932862190813,
"grad_norm": 0.36276717655466395,
"learning_rate": 2.323943661971831e-05,
"loss": 0.4938,
"step": 99
},
{
"epoch": 0.1413427561837456,
"grad_norm": 0.3704068861440918,
"learning_rate": 2.347417840375587e-05,
"loss": 0.5078,
"step": 100
},
{
"epoch": 0.14275618374558305,
"grad_norm": 0.37448899417178366,
"learning_rate": 2.370892018779343e-05,
"loss": 0.502,
"step": 101
},
{
"epoch": 0.14416961130742048,
"grad_norm": 0.3501915962981322,
"learning_rate": 2.3943661971830986e-05,
"loss": 0.5126,
"step": 102
},
{
"epoch": 0.14558303886925794,
"grad_norm": 0.35808878273684014,
"learning_rate": 2.4178403755868547e-05,
"loss": 0.5042,
"step": 103
},
{
"epoch": 0.1469964664310954,
"grad_norm": 0.3576552666759903,
"learning_rate": 2.4413145539906104e-05,
"loss": 0.5038,
"step": 104
},
{
"epoch": 0.14840989399293286,
"grad_norm": 0.3122284689858596,
"learning_rate": 2.4647887323943664e-05,
"loss": 0.5078,
"step": 105
},
{
"epoch": 0.14982332155477032,
"grad_norm": 0.36241334314587087,
"learning_rate": 2.4882629107981224e-05,
"loss": 0.4991,
"step": 106
},
{
"epoch": 0.15123674911660778,
"grad_norm": 0.3400783841883597,
"learning_rate": 2.511737089201878e-05,
"loss": 0.4952,
"step": 107
},
{
"epoch": 0.15265017667844524,
"grad_norm": 0.33372185814275035,
"learning_rate": 2.535211267605634e-05,
"loss": 0.4989,
"step": 108
},
{
"epoch": 0.15406360424028268,
"grad_norm": 0.3491622610074761,
"learning_rate": 2.5586854460093895e-05,
"loss": 0.479,
"step": 109
},
{
"epoch": 0.15547703180212014,
"grad_norm": 0.3821855965068597,
"learning_rate": 2.5821596244131456e-05,
"loss": 0.4949,
"step": 110
},
{
"epoch": 0.1568904593639576,
"grad_norm": 0.3960389042509953,
"learning_rate": 2.6056338028169013e-05,
"loss": 0.4812,
"step": 111
},
{
"epoch": 0.15830388692579506,
"grad_norm": 0.4050541984051512,
"learning_rate": 2.6291079812206577e-05,
"loss": 0.4904,
"step": 112
},
{
"epoch": 0.15971731448763252,
"grad_norm": 0.35903133269474774,
"learning_rate": 2.6525821596244134e-05,
"loss": 0.5236,
"step": 113
},
{
"epoch": 0.16113074204946998,
"grad_norm": 0.37451653703277293,
"learning_rate": 2.676056338028169e-05,
"loss": 0.5089,
"step": 114
},
{
"epoch": 0.1625441696113074,
"grad_norm": 0.3420689620426013,
"learning_rate": 2.699530516431925e-05,
"loss": 0.5027,
"step": 115
},
{
"epoch": 0.16395759717314487,
"grad_norm": 0.4061025389019252,
"learning_rate": 2.7230046948356808e-05,
"loss": 0.5038,
"step": 116
},
{
"epoch": 0.16537102473498233,
"grad_norm": 0.36026706272450626,
"learning_rate": 2.746478873239437e-05,
"loss": 0.4733,
"step": 117
},
{
"epoch": 0.1667844522968198,
"grad_norm": 0.3156524090594004,
"learning_rate": 2.7699530516431926e-05,
"loss": 0.478,
"step": 118
},
{
"epoch": 0.16819787985865725,
"grad_norm": 0.3703364359472409,
"learning_rate": 2.7934272300469483e-05,
"loss": 0.4835,
"step": 119
},
{
"epoch": 0.1696113074204947,
"grad_norm": 0.38306945212444454,
"learning_rate": 2.8169014084507046e-05,
"loss": 0.5055,
"step": 120
},
{
"epoch": 0.17102473498233214,
"grad_norm": 0.9705823882285375,
"learning_rate": 2.84037558685446e-05,
"loss": 0.4964,
"step": 121
},
{
"epoch": 0.1724381625441696,
"grad_norm": 0.38606428147195415,
"learning_rate": 2.8638497652582164e-05,
"loss": 0.4998,
"step": 122
},
{
"epoch": 0.17385159010600706,
"grad_norm": 0.3198726283662416,
"learning_rate": 2.887323943661972e-05,
"loss": 0.4721,
"step": 123
},
{
"epoch": 0.17526501766784452,
"grad_norm": 0.36863135142064374,
"learning_rate": 2.9107981220657278e-05,
"loss": 0.4976,
"step": 124
},
{
"epoch": 0.17667844522968199,
"grad_norm": 0.39457495929324077,
"learning_rate": 2.934272300469484e-05,
"loss": 0.4772,
"step": 125
},
{
"epoch": 0.17809187279151945,
"grad_norm": 0.385379754919714,
"learning_rate": 2.9577464788732395e-05,
"loss": 0.4704,
"step": 126
},
{
"epoch": 0.17950530035335688,
"grad_norm": 0.3552775480679993,
"learning_rate": 2.9812206572769952e-05,
"loss": 0.4577,
"step": 127
},
{
"epoch": 0.18091872791519434,
"grad_norm": 0.4894777096412263,
"learning_rate": 3.0046948356807513e-05,
"loss": 0.4698,
"step": 128
},
{
"epoch": 0.1823321554770318,
"grad_norm": 0.48301876222141044,
"learning_rate": 3.028169014084507e-05,
"loss": 0.487,
"step": 129
},
{
"epoch": 0.18374558303886926,
"grad_norm": 0.42882135739291644,
"learning_rate": 3.0516431924882634e-05,
"loss": 0.4687,
"step": 130
},
{
"epoch": 0.18515901060070672,
"grad_norm": 0.5630152851545808,
"learning_rate": 3.075117370892019e-05,
"loss": 0.4839,
"step": 131
},
{
"epoch": 0.18657243816254418,
"grad_norm": 0.6149022922504754,
"learning_rate": 3.0985915492957744e-05,
"loss": 0.4637,
"step": 132
},
{
"epoch": 0.1879858657243816,
"grad_norm": 0.46882867073024187,
"learning_rate": 3.1220657276995305e-05,
"loss": 0.4643,
"step": 133
},
{
"epoch": 0.18939929328621907,
"grad_norm": 0.6707941153125274,
"learning_rate": 3.1455399061032865e-05,
"loss": 0.5131,
"step": 134
},
{
"epoch": 0.19081272084805653,
"grad_norm": 0.7360329683354832,
"learning_rate": 3.1690140845070426e-05,
"loss": 0.4842,
"step": 135
},
{
"epoch": 0.192226148409894,
"grad_norm": 0.4517861186297854,
"learning_rate": 3.1924882629107986e-05,
"loss": 0.496,
"step": 136
},
{
"epoch": 0.19363957597173145,
"grad_norm": 0.8455719344880723,
"learning_rate": 3.215962441314554e-05,
"loss": 0.4934,
"step": 137
},
{
"epoch": 0.1950530035335689,
"grad_norm": 0.46870096488459007,
"learning_rate": 3.23943661971831e-05,
"loss": 0.4486,
"step": 138
},
{
"epoch": 0.19646643109540637,
"grad_norm": 0.5949880039494658,
"learning_rate": 3.262910798122066e-05,
"loss": 0.4894,
"step": 139
},
{
"epoch": 0.1978798586572438,
"grad_norm": 0.681109538134855,
"learning_rate": 3.286384976525822e-05,
"loss": 0.4683,
"step": 140
},
{
"epoch": 0.19929328621908127,
"grad_norm": 0.45221115793082844,
"learning_rate": 3.3098591549295775e-05,
"loss": 0.4886,
"step": 141
},
{
"epoch": 0.20070671378091873,
"grad_norm": 0.59648879478688,
"learning_rate": 3.3333333333333335e-05,
"loss": 0.5025,
"step": 142
},
{
"epoch": 0.2021201413427562,
"grad_norm": 0.5061834255789737,
"learning_rate": 3.3568075117370895e-05,
"loss": 0.4793,
"step": 143
},
{
"epoch": 0.20353356890459365,
"grad_norm": 0.6369285070350454,
"learning_rate": 3.380281690140845e-05,
"loss": 0.4801,
"step": 144
},
{
"epoch": 0.2049469964664311,
"grad_norm": 0.5212906809581923,
"learning_rate": 3.4037558685446016e-05,
"loss": 0.4894,
"step": 145
},
{
"epoch": 0.20636042402826854,
"grad_norm": 0.4332099559420833,
"learning_rate": 3.427230046948357e-05,
"loss": 0.4908,
"step": 146
},
{
"epoch": 0.207773851590106,
"grad_norm": 0.6363111129867618,
"learning_rate": 3.450704225352113e-05,
"loss": 0.4622,
"step": 147
},
{
"epoch": 0.20918727915194346,
"grad_norm": 0.35058837126019,
"learning_rate": 3.474178403755869e-05,
"loss": 0.4648,
"step": 148
},
{
"epoch": 0.21060070671378092,
"grad_norm": 0.6007514517212775,
"learning_rate": 3.4976525821596244e-05,
"loss": 0.4747,
"step": 149
},
{
"epoch": 0.21201413427561838,
"grad_norm": 0.3666636434239935,
"learning_rate": 3.5211267605633805e-05,
"loss": 0.4604,
"step": 150
},
{
"epoch": 0.21342756183745584,
"grad_norm": 0.5445693164570659,
"learning_rate": 3.5446009389671365e-05,
"loss": 0.488,
"step": 151
},
{
"epoch": 0.21484098939929328,
"grad_norm": 0.48412017961095644,
"learning_rate": 3.568075117370892e-05,
"loss": 0.4919,
"step": 152
},
{
"epoch": 0.21625441696113074,
"grad_norm": 0.618051583600738,
"learning_rate": 3.5915492957746486e-05,
"loss": 0.4731,
"step": 153
},
{
"epoch": 0.2176678445229682,
"grad_norm": 0.5607515812534696,
"learning_rate": 3.615023474178404e-05,
"loss": 0.4833,
"step": 154
},
{
"epoch": 0.21908127208480566,
"grad_norm": 0.3552970973652474,
"learning_rate": 3.638497652582159e-05,
"loss": 0.4761,
"step": 155
},
{
"epoch": 0.22049469964664312,
"grad_norm": 0.5499278242305976,
"learning_rate": 3.661971830985916e-05,
"loss": 0.4962,
"step": 156
},
{
"epoch": 0.22190812720848058,
"grad_norm": 0.4409006504338133,
"learning_rate": 3.6854460093896714e-05,
"loss": 0.472,
"step": 157
},
{
"epoch": 0.223321554770318,
"grad_norm": 0.42348313656950354,
"learning_rate": 3.7089201877934274e-05,
"loss": 0.4802,
"step": 158
},
{
"epoch": 0.22473498233215547,
"grad_norm": 0.4095139831910871,
"learning_rate": 3.7323943661971835e-05,
"loss": 0.4825,
"step": 159
},
{
"epoch": 0.22614840989399293,
"grad_norm": 0.3788938571591519,
"learning_rate": 3.755868544600939e-05,
"loss": 0.4718,
"step": 160
},
{
"epoch": 0.2275618374558304,
"grad_norm": 0.36154958424551403,
"learning_rate": 3.779342723004695e-05,
"loss": 0.4756,
"step": 161
},
{
"epoch": 0.22897526501766785,
"grad_norm": 0.3621822886560777,
"learning_rate": 3.802816901408451e-05,
"loss": 0.4801,
"step": 162
},
{
"epoch": 0.2303886925795053,
"grad_norm": 0.4115644210161354,
"learning_rate": 3.826291079812207e-05,
"loss": 0.4826,
"step": 163
},
{
"epoch": 0.23180212014134274,
"grad_norm": 0.3774022446145924,
"learning_rate": 3.8497652582159623e-05,
"loss": 0.479,
"step": 164
},
{
"epoch": 0.2332155477031802,
"grad_norm": 0.43836908576651007,
"learning_rate": 3.8732394366197184e-05,
"loss": 0.4901,
"step": 165
},
{
"epoch": 0.23462897526501766,
"grad_norm": 0.412820897453378,
"learning_rate": 3.8967136150234744e-05,
"loss": 0.4912,
"step": 166
},
{
"epoch": 0.23604240282685512,
"grad_norm": 0.38574669491243935,
"learning_rate": 3.9201877934272305e-05,
"loss": 0.4606,
"step": 167
},
{
"epoch": 0.23745583038869258,
"grad_norm": 0.514866110872125,
"learning_rate": 3.943661971830986e-05,
"loss": 0.492,
"step": 168
},
{
"epoch": 0.23886925795053005,
"grad_norm": 0.4030170283331362,
"learning_rate": 3.967136150234742e-05,
"loss": 0.4468,
"step": 169
},
{
"epoch": 0.24028268551236748,
"grad_norm": 0.4156199823373934,
"learning_rate": 3.990610328638498e-05,
"loss": 0.4558,
"step": 170
},
{
"epoch": 0.24169611307420494,
"grad_norm": 0.5383688975190611,
"learning_rate": 4.014084507042254e-05,
"loss": 0.4583,
"step": 171
},
{
"epoch": 0.2431095406360424,
"grad_norm": 0.3999294365406423,
"learning_rate": 4.037558685446009e-05,
"loss": 0.4486,
"step": 172
},
{
"epoch": 0.24452296819787986,
"grad_norm": 0.37942974681096403,
"learning_rate": 4.0610328638497654e-05,
"loss": 0.4446,
"step": 173
},
{
"epoch": 0.24593639575971732,
"grad_norm": 0.6251809154169679,
"learning_rate": 4.0845070422535214e-05,
"loss": 0.5048,
"step": 174
},
{
"epoch": 0.24734982332155478,
"grad_norm": 0.43609754667296535,
"learning_rate": 4.107981220657277e-05,
"loss": 0.475,
"step": 175
},
{
"epoch": 0.24876325088339224,
"grad_norm": 0.487456054967176,
"learning_rate": 4.1314553990610335e-05,
"loss": 0.4828,
"step": 176
},
{
"epoch": 0.25017667844522967,
"grad_norm": 0.49869544741243477,
"learning_rate": 4.154929577464789e-05,
"loss": 0.4579,
"step": 177
},
{
"epoch": 0.25159010600706716,
"grad_norm": 0.41646076709361013,
"learning_rate": 4.178403755868545e-05,
"loss": 0.4978,
"step": 178
},
{
"epoch": 0.2530035335689046,
"grad_norm": 0.3652745946478605,
"learning_rate": 4.201877934272301e-05,
"loss": 0.4618,
"step": 179
},
{
"epoch": 0.254416961130742,
"grad_norm": 0.44309837228209203,
"learning_rate": 4.225352112676056e-05,
"loss": 0.4708,
"step": 180
},
{
"epoch": 0.2558303886925795,
"grad_norm": 0.3703176758701102,
"learning_rate": 4.248826291079812e-05,
"loss": 0.4391,
"step": 181
},
{
"epoch": 0.25724381625441695,
"grad_norm": 0.41568301298842447,
"learning_rate": 4.2723004694835684e-05,
"loss": 0.4578,
"step": 182
},
{
"epoch": 0.25865724381625443,
"grad_norm": 0.452632790377793,
"learning_rate": 4.295774647887324e-05,
"loss": 0.4401,
"step": 183
},
{
"epoch": 0.26007067137809187,
"grad_norm": 0.3950374094717056,
"learning_rate": 4.3192488262910805e-05,
"loss": 0.4553,
"step": 184
},
{
"epoch": 0.26148409893992935,
"grad_norm": 0.44858954196342,
"learning_rate": 4.342723004694836e-05,
"loss": 0.4635,
"step": 185
},
{
"epoch": 0.2628975265017668,
"grad_norm": 0.4695900263655852,
"learning_rate": 4.366197183098591e-05,
"loss": 0.4725,
"step": 186
},
{
"epoch": 0.2643109540636042,
"grad_norm": 0.39872607011517575,
"learning_rate": 4.389671361502348e-05,
"loss": 0.4782,
"step": 187
},
{
"epoch": 0.2657243816254417,
"grad_norm": 0.44919702629639935,
"learning_rate": 4.413145539906103e-05,
"loss": 0.4425,
"step": 188
},
{
"epoch": 0.26713780918727914,
"grad_norm": 0.43445875851397536,
"learning_rate": 4.436619718309859e-05,
"loss": 0.4455,
"step": 189
},
{
"epoch": 0.26855123674911663,
"grad_norm": 0.4088827806418668,
"learning_rate": 4.4600938967136154e-05,
"loss": 0.4513,
"step": 190
},
{
"epoch": 0.26996466431095406,
"grad_norm": 0.47467327236338414,
"learning_rate": 4.483568075117371e-05,
"loss": 0.4707,
"step": 191
},
{
"epoch": 0.2713780918727915,
"grad_norm": 0.40422094399866,
"learning_rate": 4.507042253521127e-05,
"loss": 0.4711,
"step": 192
},
{
"epoch": 0.272791519434629,
"grad_norm": 0.4088052687639006,
"learning_rate": 4.530516431924883e-05,
"loss": 0.4597,
"step": 193
},
{
"epoch": 0.2742049469964664,
"grad_norm": 0.548423716631882,
"learning_rate": 4.553990610328639e-05,
"loss": 0.4767,
"step": 194
},
{
"epoch": 0.2756183745583039,
"grad_norm": 0.399996139171453,
"learning_rate": 4.577464788732395e-05,
"loss": 0.463,
"step": 195
},
{
"epoch": 0.27703180212014133,
"grad_norm": 0.559462446548959,
"learning_rate": 4.60093896713615e-05,
"loss": 0.4532,
"step": 196
},
{
"epoch": 0.2784452296819788,
"grad_norm": 0.3628835449685601,
"learning_rate": 4.624413145539906e-05,
"loss": 0.4392,
"step": 197
},
{
"epoch": 0.27985865724381626,
"grad_norm": 0.610668707838289,
"learning_rate": 4.647887323943662e-05,
"loss": 0.4755,
"step": 198
},
{
"epoch": 0.2812720848056537,
"grad_norm": 0.5590747798615124,
"learning_rate": 4.6713615023474184e-05,
"loss": 0.4739,
"step": 199
},
{
"epoch": 0.2826855123674912,
"grad_norm": 0.4332038314284377,
"learning_rate": 4.694835680751174e-05,
"loss": 0.4461,
"step": 200
},
{
"epoch": 0.2840989399293286,
"grad_norm": 0.6007570427630339,
"learning_rate": 4.71830985915493e-05,
"loss": 0.4337,
"step": 201
},
{
"epoch": 0.2855123674911661,
"grad_norm": 0.48637039606285376,
"learning_rate": 4.741784037558686e-05,
"loss": 0.4405,
"step": 202
},
{
"epoch": 0.28692579505300353,
"grad_norm": 0.445441672125132,
"learning_rate": 4.765258215962441e-05,
"loss": 0.4408,
"step": 203
},
{
"epoch": 0.28833922261484096,
"grad_norm": 0.4628483913663728,
"learning_rate": 4.788732394366197e-05,
"loss": 0.4565,
"step": 204
},
{
"epoch": 0.28975265017667845,
"grad_norm": 0.5469657549576609,
"learning_rate": 4.812206572769953e-05,
"loss": 0.4651,
"step": 205
},
{
"epoch": 0.2911660777385159,
"grad_norm": 0.5728637091509516,
"learning_rate": 4.835680751173709e-05,
"loss": 0.4403,
"step": 206
},
{
"epoch": 0.29257950530035337,
"grad_norm": 0.4228458880495792,
"learning_rate": 4.8591549295774653e-05,
"loss": 0.4644,
"step": 207
},
{
"epoch": 0.2939929328621908,
"grad_norm": 0.709605235639504,
"learning_rate": 4.882629107981221e-05,
"loss": 0.4595,
"step": 208
},
{
"epoch": 0.2954063604240283,
"grad_norm": 0.49973338111218885,
"learning_rate": 4.906103286384977e-05,
"loss": 0.435,
"step": 209
},
{
"epoch": 0.2968197879858657,
"grad_norm": 0.6075301924057134,
"learning_rate": 4.929577464788733e-05,
"loss": 0.4681,
"step": 210
},
{
"epoch": 0.29823321554770316,
"grad_norm": 0.7116801149318095,
"learning_rate": 4.953051643192488e-05,
"loss": 0.4578,
"step": 211
},
{
"epoch": 0.29964664310954064,
"grad_norm": 0.5930983536424208,
"learning_rate": 4.976525821596245e-05,
"loss": 0.4652,
"step": 212
},
{
"epoch": 0.3010600706713781,
"grad_norm": 0.5873710840354205,
"learning_rate": 5e-05,
"loss": 0.4402,
"step": 213
},
{
"epoch": 0.30247349823321557,
"grad_norm": 0.5427879697036859,
"learning_rate": 4.997379454926625e-05,
"loss": 0.4566,
"step": 214
},
{
"epoch": 0.303886925795053,
"grad_norm": 0.7006829520991325,
"learning_rate": 4.9947589098532496e-05,
"loss": 0.4872,
"step": 215
},
{
"epoch": 0.3053003533568905,
"grad_norm": 0.703138769473133,
"learning_rate": 4.992138364779874e-05,
"loss": 0.4453,
"step": 216
},
{
"epoch": 0.3067137809187279,
"grad_norm": 0.4418825358659035,
"learning_rate": 4.989517819706499e-05,
"loss": 0.4422,
"step": 217
},
{
"epoch": 0.30812720848056535,
"grad_norm": 0.6491270957166714,
"learning_rate": 4.9868972746331236e-05,
"loss": 0.463,
"step": 218
},
{
"epoch": 0.30954063604240284,
"grad_norm": 0.49176572004514846,
"learning_rate": 4.984276729559749e-05,
"loss": 0.4482,
"step": 219
},
{
"epoch": 0.31095406360424027,
"grad_norm": 0.4573271770674481,
"learning_rate": 4.9816561844863737e-05,
"loss": 0.4358,
"step": 220
},
{
"epoch": 0.31236749116607776,
"grad_norm": 0.5285083851133923,
"learning_rate": 4.979035639412998e-05,
"loss": 0.4586,
"step": 221
},
{
"epoch": 0.3137809187279152,
"grad_norm": 0.4039831012122995,
"learning_rate": 4.976415094339622e-05,
"loss": 0.4603,
"step": 222
},
{
"epoch": 0.3151943462897526,
"grad_norm": 0.584251360171906,
"learning_rate": 4.973794549266248e-05,
"loss": 0.4679,
"step": 223
},
{
"epoch": 0.3166077738515901,
"grad_norm": 0.4898863775176805,
"learning_rate": 4.9711740041928724e-05,
"loss": 0.4552,
"step": 224
},
{
"epoch": 0.31802120141342755,
"grad_norm": 0.4704350749123625,
"learning_rate": 4.968553459119497e-05,
"loss": 0.45,
"step": 225
},
{
"epoch": 0.31943462897526503,
"grad_norm": 0.5213331738536818,
"learning_rate": 4.965932914046122e-05,
"loss": 0.465,
"step": 226
},
{
"epoch": 0.32084805653710247,
"grad_norm": 0.4427556401631995,
"learning_rate": 4.963312368972747e-05,
"loss": 0.459,
"step": 227
},
{
"epoch": 0.32226148409893995,
"grad_norm": 0.396751942613299,
"learning_rate": 4.960691823899371e-05,
"loss": 0.4308,
"step": 228
},
{
"epoch": 0.3236749116607774,
"grad_norm": 0.45252393592155243,
"learning_rate": 4.958071278825996e-05,
"loss": 0.441,
"step": 229
},
{
"epoch": 0.3250883392226148,
"grad_norm": 0.45924090100781156,
"learning_rate": 4.9554507337526204e-05,
"loss": 0.4514,
"step": 230
},
{
"epoch": 0.3265017667844523,
"grad_norm": 0.5225114440032141,
"learning_rate": 4.952830188679246e-05,
"loss": 0.4472,
"step": 231
},
{
"epoch": 0.32791519434628974,
"grad_norm": 0.38908423126206887,
"learning_rate": 4.9502096436058705e-05,
"loss": 0.4472,
"step": 232
},
{
"epoch": 0.32932862190812723,
"grad_norm": 0.5091000067222715,
"learning_rate": 4.947589098532495e-05,
"loss": 0.4421,
"step": 233
},
{
"epoch": 0.33074204946996466,
"grad_norm": 0.3593537369177473,
"learning_rate": 4.944968553459119e-05,
"loss": 0.4342,
"step": 234
},
{
"epoch": 0.3321554770318021,
"grad_norm": 0.5676435968087244,
"learning_rate": 4.9423480083857445e-05,
"loss": 0.4357,
"step": 235
},
{
"epoch": 0.3335689045936396,
"grad_norm": 0.3360589650982914,
"learning_rate": 4.939727463312369e-05,
"loss": 0.4505,
"step": 236
},
{
"epoch": 0.334982332155477,
"grad_norm": 0.6440581372666085,
"learning_rate": 4.937106918238994e-05,
"loss": 0.4676,
"step": 237
},
{
"epoch": 0.3363957597173145,
"grad_norm": 0.4213626699526642,
"learning_rate": 4.9344863731656185e-05,
"loss": 0.4522,
"step": 238
},
{
"epoch": 0.33780918727915193,
"grad_norm": 0.7141068873660817,
"learning_rate": 4.931865828092244e-05,
"loss": 0.4502,
"step": 239
},
{
"epoch": 0.3392226148409894,
"grad_norm": 0.420777301820082,
"learning_rate": 4.929245283018868e-05,
"loss": 0.4427,
"step": 240
},
{
"epoch": 0.34063604240282686,
"grad_norm": 0.5669830728000341,
"learning_rate": 4.9266247379454926e-05,
"loss": 0.4378,
"step": 241
},
{
"epoch": 0.3420494699646643,
"grad_norm": 0.3932356533424693,
"learning_rate": 4.924004192872117e-05,
"loss": 0.4491,
"step": 242
},
{
"epoch": 0.3434628975265018,
"grad_norm": 0.4522099570449616,
"learning_rate": 4.9213836477987426e-05,
"loss": 0.4373,
"step": 243
},
{
"epoch": 0.3448763250883392,
"grad_norm": 0.3898910659437534,
"learning_rate": 4.918763102725367e-05,
"loss": 0.4497,
"step": 244
},
{
"epoch": 0.3462897526501767,
"grad_norm": 0.39037801278841205,
"learning_rate": 4.916142557651992e-05,
"loss": 0.4455,
"step": 245
},
{
"epoch": 0.34770318021201413,
"grad_norm": 0.3936931340328528,
"learning_rate": 4.9135220125786166e-05,
"loss": 0.4346,
"step": 246
},
{
"epoch": 0.3491166077738516,
"grad_norm": 0.3597789515568097,
"learning_rate": 4.910901467505241e-05,
"loss": 0.4498,
"step": 247
},
{
"epoch": 0.35053003533568905,
"grad_norm": 0.4372280409310295,
"learning_rate": 4.908280922431866e-05,
"loss": 0.455,
"step": 248
},
{
"epoch": 0.3519434628975265,
"grad_norm": 0.3538618896316091,
"learning_rate": 4.9056603773584906e-05,
"loss": 0.4631,
"step": 249
},
{
"epoch": 0.35335689045936397,
"grad_norm": 0.35265368655697876,
"learning_rate": 4.903039832285115e-05,
"loss": 0.4541,
"step": 250
},
{
"epoch": 0.3547703180212014,
"grad_norm": 0.3398319073766121,
"learning_rate": 4.900419287211741e-05,
"loss": 0.4357,
"step": 251
},
{
"epoch": 0.3561837455830389,
"grad_norm": 0.3219681721607528,
"learning_rate": 4.897798742138365e-05,
"loss": 0.4413,
"step": 252
},
{
"epoch": 0.3575971731448763,
"grad_norm": 0.3819614247738302,
"learning_rate": 4.8951781970649894e-05,
"loss": 0.4572,
"step": 253
},
{
"epoch": 0.35901060070671376,
"grad_norm": 0.4144732459849225,
"learning_rate": 4.892557651991615e-05,
"loss": 0.4425,
"step": 254
},
{
"epoch": 0.36042402826855124,
"grad_norm": 0.3841268049284795,
"learning_rate": 4.8899371069182394e-05,
"loss": 0.436,
"step": 255
},
{
"epoch": 0.3618374558303887,
"grad_norm": 0.4968795804092234,
"learning_rate": 4.887316561844864e-05,
"loss": 0.4525,
"step": 256
},
{
"epoch": 0.36325088339222616,
"grad_norm": 0.42960254459377345,
"learning_rate": 4.884696016771489e-05,
"loss": 0.4208,
"step": 257
},
{
"epoch": 0.3646643109540636,
"grad_norm": 0.47460528423407705,
"learning_rate": 4.8820754716981134e-05,
"loss": 0.4566,
"step": 258
},
{
"epoch": 0.3660777385159011,
"grad_norm": 0.39998134137819485,
"learning_rate": 4.879454926624738e-05,
"loss": 0.455,
"step": 259
},
{
"epoch": 0.3674911660777385,
"grad_norm": 0.5366601493626028,
"learning_rate": 4.876834381551363e-05,
"loss": 0.4284,
"step": 260
},
{
"epoch": 0.36890459363957595,
"grad_norm": 0.47172475103621286,
"learning_rate": 4.8742138364779875e-05,
"loss": 0.4578,
"step": 261
},
{
"epoch": 0.37031802120141344,
"grad_norm": 0.4889725763088375,
"learning_rate": 4.871593291404613e-05,
"loss": 0.4442,
"step": 262
},
{
"epoch": 0.37173144876325087,
"grad_norm": 0.45114380725407893,
"learning_rate": 4.8689727463312375e-05,
"loss": 0.439,
"step": 263
},
{
"epoch": 0.37314487632508836,
"grad_norm": 0.4087393763244045,
"learning_rate": 4.8663522012578615e-05,
"loss": 0.426,
"step": 264
},
{
"epoch": 0.3745583038869258,
"grad_norm": 0.45487679824454774,
"learning_rate": 4.863731656184486e-05,
"loss": 0.4416,
"step": 265
},
{
"epoch": 0.3759717314487632,
"grad_norm": 0.4035773243226527,
"learning_rate": 4.8611111111111115e-05,
"loss": 0.4365,
"step": 266
},
{
"epoch": 0.3773851590106007,
"grad_norm": 0.45261765912123003,
"learning_rate": 4.858490566037736e-05,
"loss": 0.4355,
"step": 267
},
{
"epoch": 0.37879858657243815,
"grad_norm": 0.41027372670133594,
"learning_rate": 4.855870020964361e-05,
"loss": 0.4398,
"step": 268
},
{
"epoch": 0.38021201413427563,
"grad_norm": 0.482769817859632,
"learning_rate": 4.8532494758909855e-05,
"loss": 0.4607,
"step": 269
},
{
"epoch": 0.38162544169611307,
"grad_norm": 0.5315955481077392,
"learning_rate": 4.85062893081761e-05,
"loss": 0.4473,
"step": 270
},
{
"epoch": 0.38303886925795055,
"grad_norm": 0.45423213844934934,
"learning_rate": 4.848008385744235e-05,
"loss": 0.4591,
"step": 271
},
{
"epoch": 0.384452296819788,
"grad_norm": 0.5396912112044189,
"learning_rate": 4.8453878406708596e-05,
"loss": 0.4277,
"step": 272
},
{
"epoch": 0.3858657243816254,
"grad_norm": 0.4661912511846789,
"learning_rate": 4.842767295597484e-05,
"loss": 0.4225,
"step": 273
},
{
"epoch": 0.3872791519434629,
"grad_norm": 0.5883227796485858,
"learning_rate": 4.8401467505241096e-05,
"loss": 0.4535,
"step": 274
},
{
"epoch": 0.38869257950530034,
"grad_norm": 0.5353545951385164,
"learning_rate": 4.837526205450734e-05,
"loss": 0.4347,
"step": 275
},
{
"epoch": 0.3901060070671378,
"grad_norm": 0.471363529311719,
"learning_rate": 4.834905660377358e-05,
"loss": 0.4517,
"step": 276
},
{
"epoch": 0.39151943462897526,
"grad_norm": 0.48697768437347017,
"learning_rate": 4.8322851153039836e-05,
"loss": 0.4512,
"step": 277
},
{
"epoch": 0.39293286219081275,
"grad_norm": 0.3912106608686061,
"learning_rate": 4.829664570230608e-05,
"loss": 0.4177,
"step": 278
},
{
"epoch": 0.3943462897526502,
"grad_norm": 0.4826282098289275,
"learning_rate": 4.827044025157233e-05,
"loss": 0.4619,
"step": 279
},
{
"epoch": 0.3957597173144876,
"grad_norm": 0.8000770942459711,
"learning_rate": 4.824423480083858e-05,
"loss": 0.4439,
"step": 280
},
{
"epoch": 0.3971731448763251,
"grad_norm": 0.4411667620141701,
"learning_rate": 4.8218029350104823e-05,
"loss": 0.4413,
"step": 281
},
{
"epoch": 0.39858657243816253,
"grad_norm": 0.4785956915104037,
"learning_rate": 4.819182389937107e-05,
"loss": 0.4359,
"step": 282
},
{
"epoch": 0.4,
"grad_norm": 0.4847206258381646,
"learning_rate": 4.816561844863732e-05,
"loss": 0.4451,
"step": 283
},
{
"epoch": 0.40141342756183745,
"grad_norm": 0.4823905521021751,
"learning_rate": 4.8139412997903564e-05,
"loss": 0.4372,
"step": 284
},
{
"epoch": 0.4028268551236749,
"grad_norm": 0.4077767828814804,
"learning_rate": 4.811320754716982e-05,
"loss": 0.462,
"step": 285
},
{
"epoch": 0.4042402826855124,
"grad_norm": 0.37324970993483486,
"learning_rate": 4.8087002096436064e-05,
"loss": 0.4234,
"step": 286
},
{
"epoch": 0.4056537102473498,
"grad_norm": 0.361003488158031,
"learning_rate": 4.806079664570231e-05,
"loss": 0.4436,
"step": 287
},
{
"epoch": 0.4070671378091873,
"grad_norm": 0.4020198015666143,
"learning_rate": 4.803459119496855e-05,
"loss": 0.4231,
"step": 288
},
{
"epoch": 0.40848056537102473,
"grad_norm": 0.4208496719898538,
"learning_rate": 4.8008385744234804e-05,
"loss": 0.4334,
"step": 289
},
{
"epoch": 0.4098939929328622,
"grad_norm": 0.3685996330166293,
"learning_rate": 4.798218029350105e-05,
"loss": 0.4463,
"step": 290
},
{
"epoch": 0.41130742049469965,
"grad_norm": 0.351080264625881,
"learning_rate": 4.79559748427673e-05,
"loss": 0.428,
"step": 291
},
{
"epoch": 0.4127208480565371,
"grad_norm": 0.38034182834888935,
"learning_rate": 4.7929769392033545e-05,
"loss": 0.4348,
"step": 292
},
{
"epoch": 0.41413427561837457,
"grad_norm": 0.4152641638935849,
"learning_rate": 4.79035639412998e-05,
"loss": 0.456,
"step": 293
},
{
"epoch": 0.415547703180212,
"grad_norm": 0.40898489212227657,
"learning_rate": 4.787735849056604e-05,
"loss": 0.4249,
"step": 294
},
{
"epoch": 0.4169611307420495,
"grad_norm": 0.4680037335540577,
"learning_rate": 4.7851153039832285e-05,
"loss": 0.4243,
"step": 295
},
{
"epoch": 0.4183745583038869,
"grad_norm": 0.48897600656779794,
"learning_rate": 4.782494758909853e-05,
"loss": 0.4346,
"step": 296
},
{
"epoch": 0.41978798586572436,
"grad_norm": 0.48998633268040054,
"learning_rate": 4.7798742138364785e-05,
"loss": 0.4373,
"step": 297
},
{
"epoch": 0.42120141342756184,
"grad_norm": 0.4829726717267748,
"learning_rate": 4.777253668763103e-05,
"loss": 0.4162,
"step": 298
},
{
"epoch": 0.4226148409893993,
"grad_norm": 0.46383675287225185,
"learning_rate": 4.774633123689728e-05,
"loss": 0.4371,
"step": 299
},
{
"epoch": 0.42402826855123676,
"grad_norm": 0.4108529802488147,
"learning_rate": 4.772012578616352e-05,
"loss": 0.4407,
"step": 300
},
{
"epoch": 0.4254416961130742,
"grad_norm": 0.41990687135658594,
"learning_rate": 4.769392033542977e-05,
"loss": 0.4376,
"step": 301
},
{
"epoch": 0.4268551236749117,
"grad_norm": 0.5045927022492632,
"learning_rate": 4.766771488469602e-05,
"loss": 0.4654,
"step": 302
},
{
"epoch": 0.4282685512367491,
"grad_norm": 0.46743287563120195,
"learning_rate": 4.7641509433962266e-05,
"loss": 0.4378,
"step": 303
},
{
"epoch": 0.42968197879858655,
"grad_norm": 0.44871221014882084,
"learning_rate": 4.761530398322851e-05,
"loss": 0.4474,
"step": 304
},
{
"epoch": 0.43109540636042404,
"grad_norm": 0.4522652525898622,
"learning_rate": 4.7589098532494766e-05,
"loss": 0.4427,
"step": 305
},
{
"epoch": 0.43250883392226147,
"grad_norm": 0.49699067654174134,
"learning_rate": 4.7562893081761006e-05,
"loss": 0.4411,
"step": 306
},
{
"epoch": 0.43392226148409896,
"grad_norm": 0.5239968047556487,
"learning_rate": 4.753668763102725e-05,
"loss": 0.447,
"step": 307
},
{
"epoch": 0.4353356890459364,
"grad_norm": 0.4237009663704529,
"learning_rate": 4.75104821802935e-05,
"loss": 0.4274,
"step": 308
},
{
"epoch": 0.4367491166077738,
"grad_norm": 0.4763049165325869,
"learning_rate": 4.7484276729559753e-05,
"loss": 0.4247,
"step": 309
},
{
"epoch": 0.4381625441696113,
"grad_norm": 0.4995839481340786,
"learning_rate": 4.7458071278826e-05,
"loss": 0.4522,
"step": 310
},
{
"epoch": 0.43957597173144874,
"grad_norm": 0.46978723325929295,
"learning_rate": 4.743186582809225e-05,
"loss": 0.4274,
"step": 311
},
{
"epoch": 0.44098939929328623,
"grad_norm": 0.4941208341172885,
"learning_rate": 4.7405660377358494e-05,
"loss": 0.4377,
"step": 312
},
{
"epoch": 0.44240282685512367,
"grad_norm": 0.4174244779435056,
"learning_rate": 4.737945492662474e-05,
"loss": 0.4455,
"step": 313
},
{
"epoch": 0.44381625441696115,
"grad_norm": 0.5362809071187373,
"learning_rate": 4.735324947589099e-05,
"loss": 0.4285,
"step": 314
},
{
"epoch": 0.4452296819787986,
"grad_norm": 0.372646984628038,
"learning_rate": 4.7327044025157234e-05,
"loss": 0.441,
"step": 315
},
{
"epoch": 0.446643109540636,
"grad_norm": 0.4899730372587719,
"learning_rate": 4.730083857442348e-05,
"loss": 0.4361,
"step": 316
},
{
"epoch": 0.4480565371024735,
"grad_norm": 0.39611623085272635,
"learning_rate": 4.7274633123689734e-05,
"loss": 0.4631,
"step": 317
},
{
"epoch": 0.44946996466431094,
"grad_norm": 0.5386858350849578,
"learning_rate": 4.7248427672955974e-05,
"loss": 0.4579,
"step": 318
},
{
"epoch": 0.4508833922261484,
"grad_norm": 0.38345404575037345,
"learning_rate": 4.722222222222222e-05,
"loss": 0.4429,
"step": 319
},
{
"epoch": 0.45229681978798586,
"grad_norm": 0.4227524875325521,
"learning_rate": 4.7196016771488475e-05,
"loss": 0.421,
"step": 320
},
{
"epoch": 0.45371024734982335,
"grad_norm": 0.42193697695184146,
"learning_rate": 4.716981132075472e-05,
"loss": 0.4349,
"step": 321
},
{
"epoch": 0.4551236749116608,
"grad_norm": 0.4499606969758806,
"learning_rate": 4.714360587002097e-05,
"loss": 0.4305,
"step": 322
},
{
"epoch": 0.4565371024734982,
"grad_norm": 0.3729283888646506,
"learning_rate": 4.7117400419287215e-05,
"loss": 0.4076,
"step": 323
},
{
"epoch": 0.4579505300353357,
"grad_norm": 0.41137165767835127,
"learning_rate": 4.709119496855346e-05,
"loss": 0.4345,
"step": 324
},
{
"epoch": 0.45936395759717313,
"grad_norm": 0.316403771978034,
"learning_rate": 4.706498951781971e-05,
"loss": 0.4264,
"step": 325
},
{
"epoch": 0.4607773851590106,
"grad_norm": 0.45411071559992994,
"learning_rate": 4.7038784067085955e-05,
"loss": 0.4438,
"step": 326
},
{
"epoch": 0.46219081272084805,
"grad_norm": 0.3235089666947301,
"learning_rate": 4.70125786163522e-05,
"loss": 0.433,
"step": 327
},
{
"epoch": 0.4636042402826855,
"grad_norm": 0.36849881242612104,
"learning_rate": 4.6986373165618456e-05,
"loss": 0.4375,
"step": 328
},
{
"epoch": 0.465017667844523,
"grad_norm": 0.35297099484038175,
"learning_rate": 4.69601677148847e-05,
"loss": 0.4463,
"step": 329
},
{
"epoch": 0.4664310954063604,
"grad_norm": 0.3812005412594215,
"learning_rate": 4.693396226415094e-05,
"loss": 0.449,
"step": 330
},
{
"epoch": 0.4678445229681979,
"grad_norm": 0.41845800276538303,
"learning_rate": 4.690775681341719e-05,
"loss": 0.4378,
"step": 331
},
{
"epoch": 0.46925795053003533,
"grad_norm": 0.3259797880355645,
"learning_rate": 4.688155136268344e-05,
"loss": 0.4639,
"step": 332
},
{
"epoch": 0.4706713780918728,
"grad_norm": 0.31948730908868445,
"learning_rate": 4.685534591194969e-05,
"loss": 0.4144,
"step": 333
},
{
"epoch": 0.47208480565371025,
"grad_norm": 0.3540553915064335,
"learning_rate": 4.6829140461215936e-05,
"loss": 0.4332,
"step": 334
},
{
"epoch": 0.4734982332155477,
"grad_norm": 0.35408865943969065,
"learning_rate": 4.680293501048218e-05,
"loss": 0.4436,
"step": 335
},
{
"epoch": 0.47491166077738517,
"grad_norm": 0.31955036327163666,
"learning_rate": 4.677672955974843e-05,
"loss": 0.4294,
"step": 336
},
{
"epoch": 0.4763250883392226,
"grad_norm": 0.37124169095618326,
"learning_rate": 4.6750524109014677e-05,
"loss": 0.4311,
"step": 337
},
{
"epoch": 0.4777385159010601,
"grad_norm": 0.32681257759774196,
"learning_rate": 4.672431865828092e-05,
"loss": 0.4146,
"step": 338
},
{
"epoch": 0.4791519434628975,
"grad_norm": 0.33323469445666193,
"learning_rate": 4.669811320754717e-05,
"loss": 0.4241,
"step": 339
},
{
"epoch": 0.48056537102473496,
"grad_norm": 0.38728274939763657,
"learning_rate": 4.6671907756813424e-05,
"loss": 0.4389,
"step": 340
},
{
"epoch": 0.48197879858657244,
"grad_norm": 0.3424007945011468,
"learning_rate": 4.664570230607967e-05,
"loss": 0.4234,
"step": 341
},
{
"epoch": 0.4833922261484099,
"grad_norm": 0.3688017296892159,
"learning_rate": 4.661949685534591e-05,
"loss": 0.429,
"step": 342
},
{
"epoch": 0.48480565371024736,
"grad_norm": 0.36721273377460834,
"learning_rate": 4.659329140461216e-05,
"loss": 0.4154,
"step": 343
},
{
"epoch": 0.4862190812720848,
"grad_norm": 0.3522488459929326,
"learning_rate": 4.656708595387841e-05,
"loss": 0.437,
"step": 344
},
{
"epoch": 0.4876325088339223,
"grad_norm": 0.42691948634176113,
"learning_rate": 4.654088050314466e-05,
"loss": 0.4386,
"step": 345
},
{
"epoch": 0.4890459363957597,
"grad_norm": 0.3564682754204902,
"learning_rate": 4.6514675052410904e-05,
"loss": 0.4236,
"step": 346
},
{
"epoch": 0.49045936395759715,
"grad_norm": 0.36656500632794414,
"learning_rate": 4.648846960167715e-05,
"loss": 0.4431,
"step": 347
},
{
"epoch": 0.49187279151943464,
"grad_norm": 0.41161174822968527,
"learning_rate": 4.64622641509434e-05,
"loss": 0.4453,
"step": 348
},
{
"epoch": 0.49328621908127207,
"grad_norm": 0.3896669930099669,
"learning_rate": 4.6436058700209645e-05,
"loss": 0.4277,
"step": 349
},
{
"epoch": 0.49469964664310956,
"grad_norm": 0.48244026968763604,
"learning_rate": 4.640985324947589e-05,
"loss": 0.4382,
"step": 350
},
{
"epoch": 0.496113074204947,
"grad_norm": 0.43534680363411676,
"learning_rate": 4.638364779874214e-05,
"loss": 0.4319,
"step": 351
},
{
"epoch": 0.4975265017667845,
"grad_norm": 0.42239925552139224,
"learning_rate": 4.635744234800839e-05,
"loss": 0.4323,
"step": 352
},
{
"epoch": 0.4989399293286219,
"grad_norm": 0.450231939115933,
"learning_rate": 4.633123689727464e-05,
"loss": 0.4373,
"step": 353
},
{
"epoch": 0.5003533568904593,
"grad_norm": 0.35651957084070757,
"learning_rate": 4.630503144654088e-05,
"loss": 0.4452,
"step": 354
},
{
"epoch": 0.5017667844522968,
"grad_norm": 0.4218037205199467,
"learning_rate": 4.627882599580713e-05,
"loss": 0.4269,
"step": 355
},
{
"epoch": 0.5031802120141343,
"grad_norm": 0.381886045414961,
"learning_rate": 4.625262054507338e-05,
"loss": 0.44,
"step": 356
},
{
"epoch": 0.5045936395759717,
"grad_norm": 0.41088654956137965,
"learning_rate": 4.6226415094339625e-05,
"loss": 0.4224,
"step": 357
},
{
"epoch": 0.5060070671378092,
"grad_norm": 0.4340076370333352,
"learning_rate": 4.620020964360587e-05,
"loss": 0.4401,
"step": 358
},
{
"epoch": 0.5074204946996467,
"grad_norm": 0.38341083782962065,
"learning_rate": 4.617400419287212e-05,
"loss": 0.4514,
"step": 359
},
{
"epoch": 0.508833922261484,
"grad_norm": 0.37921610925418375,
"learning_rate": 4.6147798742138366e-05,
"loss": 0.438,
"step": 360
},
{
"epoch": 0.5102473498233215,
"grad_norm": 0.3809653131950038,
"learning_rate": 4.612159329140461e-05,
"loss": 0.4368,
"step": 361
},
{
"epoch": 0.511660777385159,
"grad_norm": 0.3791131157961737,
"learning_rate": 4.609538784067086e-05,
"loss": 0.4057,
"step": 362
},
{
"epoch": 0.5130742049469965,
"grad_norm": 0.4146850209726091,
"learning_rate": 4.606918238993711e-05,
"loss": 0.4188,
"step": 363
},
{
"epoch": 0.5144876325088339,
"grad_norm": 0.41208492048366835,
"learning_rate": 4.604297693920336e-05,
"loss": 0.4289,
"step": 364
},
{
"epoch": 0.5159010600706714,
"grad_norm": 0.395074464723285,
"learning_rate": 4.6016771488469606e-05,
"loss": 0.4171,
"step": 365
},
{
"epoch": 0.5173144876325089,
"grad_norm": 0.39959817217545895,
"learning_rate": 4.5990566037735846e-05,
"loss": 0.4165,
"step": 366
},
{
"epoch": 0.5187279151943462,
"grad_norm": 0.42775748207211617,
"learning_rate": 4.59643605870021e-05,
"loss": 0.4169,
"step": 367
},
{
"epoch": 0.5201413427561837,
"grad_norm": 0.3520019289239908,
"learning_rate": 4.593815513626835e-05,
"loss": 0.4198,
"step": 368
},
{
"epoch": 0.5215547703180212,
"grad_norm": 0.3759726962518837,
"learning_rate": 4.5911949685534594e-05,
"loss": 0.4223,
"step": 369
},
{
"epoch": 0.5229681978798587,
"grad_norm": 0.34649518289975273,
"learning_rate": 4.588574423480084e-05,
"loss": 0.4149,
"step": 370
},
{
"epoch": 0.5243816254416961,
"grad_norm": 0.4199710394072954,
"learning_rate": 4.5859538784067094e-05,
"loss": 0.4247,
"step": 371
},
{
"epoch": 0.5257950530035336,
"grad_norm": 0.3829455920652968,
"learning_rate": 4.5833333333333334e-05,
"loss": 0.4309,
"step": 372
},
{
"epoch": 0.5272084805653711,
"grad_norm": 0.403514704819411,
"learning_rate": 4.580712788259958e-05,
"loss": 0.4232,
"step": 373
},
{
"epoch": 0.5286219081272084,
"grad_norm": 0.3708940078094395,
"learning_rate": 4.578092243186583e-05,
"loss": 0.4377,
"step": 374
},
{
"epoch": 0.5300353356890459,
"grad_norm": 0.40488240135088177,
"learning_rate": 4.575471698113208e-05,
"loss": 0.4184,
"step": 375
},
{
"epoch": 0.5314487632508834,
"grad_norm": 0.40392659945287374,
"learning_rate": 4.572851153039833e-05,
"loss": 0.4272,
"step": 376
},
{
"epoch": 0.5328621908127208,
"grad_norm": 0.425073968193134,
"learning_rate": 4.570230607966457e-05,
"loss": 0.4354,
"step": 377
},
{
"epoch": 0.5342756183745583,
"grad_norm": 0.3650187038181732,
"learning_rate": 4.567610062893082e-05,
"loss": 0.4184,
"step": 378
},
{
"epoch": 0.5356890459363958,
"grad_norm": 0.4289354408300635,
"learning_rate": 4.564989517819707e-05,
"loss": 0.4117,
"step": 379
},
{
"epoch": 0.5371024734982333,
"grad_norm": 0.4171591743490235,
"learning_rate": 4.5623689727463315e-05,
"loss": 0.4262,
"step": 380
},
{
"epoch": 0.5385159010600706,
"grad_norm": 0.3868702709271063,
"learning_rate": 4.559748427672956e-05,
"loss": 0.4417,
"step": 381
},
{
"epoch": 0.5399293286219081,
"grad_norm": 0.40401692207690887,
"learning_rate": 4.557127882599581e-05,
"loss": 0.4216,
"step": 382
},
{
"epoch": 0.5413427561837456,
"grad_norm": 0.3834926881972645,
"learning_rate": 4.5545073375262055e-05,
"loss": 0.4221,
"step": 383
},
{
"epoch": 0.542756183745583,
"grad_norm": 0.43394716342540895,
"learning_rate": 4.55188679245283e-05,
"loss": 0.4193,
"step": 384
},
{
"epoch": 0.5441696113074205,
"grad_norm": 0.35241250002076346,
"learning_rate": 4.549266247379455e-05,
"loss": 0.4063,
"step": 385
},
{
"epoch": 0.545583038869258,
"grad_norm": 0.49978252612987445,
"learning_rate": 4.54664570230608e-05,
"loss": 0.429,
"step": 386
},
{
"epoch": 0.5469964664310955,
"grad_norm": 0.3285578023384669,
"learning_rate": 4.544025157232705e-05,
"loss": 0.4296,
"step": 387
},
{
"epoch": 0.5484098939929328,
"grad_norm": 0.4723753068271501,
"learning_rate": 4.5414046121593296e-05,
"loss": 0.4104,
"step": 388
},
{
"epoch": 0.5498233215547703,
"grad_norm": 0.3161736906633244,
"learning_rate": 4.5387840670859536e-05,
"loss": 0.4368,
"step": 389
},
{
"epoch": 0.5512367491166078,
"grad_norm": 0.44050388196529466,
"learning_rate": 4.536163522012579e-05,
"loss": 0.4509,
"step": 390
},
{
"epoch": 0.5526501766784452,
"grad_norm": 0.4584389230762012,
"learning_rate": 4.5335429769392036e-05,
"loss": 0.4355,
"step": 391
},
{
"epoch": 0.5540636042402827,
"grad_norm": 0.33828830437843893,
"learning_rate": 4.530922431865828e-05,
"loss": 0.42,
"step": 392
},
{
"epoch": 0.5554770318021202,
"grad_norm": 0.39097193145461734,
"learning_rate": 4.528301886792453e-05,
"loss": 0.406,
"step": 393
},
{
"epoch": 0.5568904593639576,
"grad_norm": 0.34032967314347934,
"learning_rate": 4.525681341719078e-05,
"loss": 0.4148,
"step": 394
},
{
"epoch": 0.558303886925795,
"grad_norm": 0.47625412346532103,
"learning_rate": 4.523060796645702e-05,
"loss": 0.4399,
"step": 395
},
{
"epoch": 0.5597173144876325,
"grad_norm": 0.38910438044940343,
"learning_rate": 4.520440251572327e-05,
"loss": 0.4187,
"step": 396
},
{
"epoch": 0.56113074204947,
"grad_norm": 0.4025329553698859,
"learning_rate": 4.517819706498952e-05,
"loss": 0.4303,
"step": 397
},
{
"epoch": 0.5625441696113074,
"grad_norm": 0.3980346835088101,
"learning_rate": 4.515199161425577e-05,
"loss": 0.4356,
"step": 398
},
{
"epoch": 0.5639575971731449,
"grad_norm": 0.406313093933877,
"learning_rate": 4.512578616352202e-05,
"loss": 0.4228,
"step": 399
},
{
"epoch": 0.5653710247349824,
"grad_norm": 0.36710605517187095,
"learning_rate": 4.5099580712788264e-05,
"loss": 0.4356,
"step": 400
},
{
"epoch": 0.5667844522968198,
"grad_norm": 0.5226224020747389,
"learning_rate": 4.5073375262054504e-05,
"loss": 0.428,
"step": 401
},
{
"epoch": 0.5681978798586572,
"grad_norm": 0.39322099628639123,
"learning_rate": 4.504716981132076e-05,
"loss": 0.4318,
"step": 402
},
{
"epoch": 0.5696113074204947,
"grad_norm": 0.5330775815363618,
"learning_rate": 4.5020964360587004e-05,
"loss": 0.4309,
"step": 403
},
{
"epoch": 0.5710247349823322,
"grad_norm": 0.3388481139059996,
"learning_rate": 4.499475890985325e-05,
"loss": 0.432,
"step": 404
},
{
"epoch": 0.5724381625441696,
"grad_norm": 0.48611818996944667,
"learning_rate": 4.49685534591195e-05,
"loss": 0.4185,
"step": 405
},
{
"epoch": 0.5738515901060071,
"grad_norm": 0.4164819849451003,
"learning_rate": 4.494234800838575e-05,
"loss": 0.4455,
"step": 406
},
{
"epoch": 0.5752650176678445,
"grad_norm": 0.3602720713616969,
"learning_rate": 4.491614255765199e-05,
"loss": 0.4321,
"step": 407
},
{
"epoch": 0.5766784452296819,
"grad_norm": 0.42628102739867024,
"learning_rate": 4.488993710691824e-05,
"loss": 0.4367,
"step": 408
},
{
"epoch": 0.5780918727915194,
"grad_norm": 0.427517500734996,
"learning_rate": 4.4863731656184485e-05,
"loss": 0.4449,
"step": 409
},
{
"epoch": 0.5795053003533569,
"grad_norm": 0.40670980138969226,
"learning_rate": 4.483752620545074e-05,
"loss": 0.4184,
"step": 410
},
{
"epoch": 0.5809187279151944,
"grad_norm": 0.3804046711248002,
"learning_rate": 4.4811320754716985e-05,
"loss": 0.4186,
"step": 411
},
{
"epoch": 0.5823321554770318,
"grad_norm": 0.4578621850057103,
"learning_rate": 4.478511530398323e-05,
"loss": 0.4418,
"step": 412
},
{
"epoch": 0.5837455830388693,
"grad_norm": 0.3398163708348313,
"learning_rate": 4.475890985324948e-05,
"loss": 0.4108,
"step": 413
},
{
"epoch": 0.5851590106007067,
"grad_norm": 0.4232696444151693,
"learning_rate": 4.4732704402515725e-05,
"loss": 0.4116,
"step": 414
},
{
"epoch": 0.5865724381625441,
"grad_norm": 0.3577321989387612,
"learning_rate": 4.470649895178197e-05,
"loss": 0.4349,
"step": 415
},
{
"epoch": 0.5879858657243816,
"grad_norm": 0.426376112185207,
"learning_rate": 4.468029350104822e-05,
"loss": 0.4302,
"step": 416
},
{
"epoch": 0.5893992932862191,
"grad_norm": 0.3029929991111419,
"learning_rate": 4.4654088050314466e-05,
"loss": 0.4268,
"step": 417
},
{
"epoch": 0.5908127208480566,
"grad_norm": 0.33419770645698604,
"learning_rate": 4.462788259958072e-05,
"loss": 0.4316,
"step": 418
},
{
"epoch": 0.592226148409894,
"grad_norm": 0.38946782305604805,
"learning_rate": 4.460167714884696e-05,
"loss": 0.4327,
"step": 419
},
{
"epoch": 0.5936395759717314,
"grad_norm": 0.35623574096100596,
"learning_rate": 4.4575471698113206e-05,
"loss": 0.4293,
"step": 420
},
{
"epoch": 0.5950530035335689,
"grad_norm": 0.46760307152389574,
"learning_rate": 4.454926624737946e-05,
"loss": 0.4231,
"step": 421
},
{
"epoch": 0.5964664310954063,
"grad_norm": 0.38232298356540123,
"learning_rate": 4.4523060796645706e-05,
"loss": 0.445,
"step": 422
},
{
"epoch": 0.5978798586572438,
"grad_norm": 0.5788547632226275,
"learning_rate": 4.449685534591195e-05,
"loss": 0.4233,
"step": 423
},
{
"epoch": 0.5992932862190813,
"grad_norm": 0.3545561590556285,
"learning_rate": 4.44706498951782e-05,
"loss": 0.427,
"step": 424
},
{
"epoch": 0.6007067137809188,
"grad_norm": 0.4740963513811581,
"learning_rate": 4.4444444444444447e-05,
"loss": 0.4191,
"step": 425
},
{
"epoch": 0.6021201413427562,
"grad_norm": 0.33966236234660835,
"learning_rate": 4.441823899371069e-05,
"loss": 0.4388,
"step": 426
},
{
"epoch": 0.6035335689045936,
"grad_norm": 0.471917947585291,
"learning_rate": 4.439203354297694e-05,
"loss": 0.4173,
"step": 427
},
{
"epoch": 0.6049469964664311,
"grad_norm": 0.42033939052603003,
"learning_rate": 4.436582809224319e-05,
"loss": 0.4122,
"step": 428
},
{
"epoch": 0.6063604240282685,
"grad_norm": 0.45751930608809094,
"learning_rate": 4.433962264150944e-05,
"loss": 0.438,
"step": 429
},
{
"epoch": 0.607773851590106,
"grad_norm": 0.43815067801656293,
"learning_rate": 4.431341719077569e-05,
"loss": 0.4266,
"step": 430
},
{
"epoch": 0.6091872791519435,
"grad_norm": 0.43747627956547547,
"learning_rate": 4.428721174004193e-05,
"loss": 0.4207,
"step": 431
},
{
"epoch": 0.610600706713781,
"grad_norm": 0.47722247995722705,
"learning_rate": 4.4261006289308174e-05,
"loss": 0.4293,
"step": 432
},
{
"epoch": 0.6120141342756183,
"grad_norm": 0.3855729208748066,
"learning_rate": 4.423480083857443e-05,
"loss": 0.4108,
"step": 433
},
{
"epoch": 0.6134275618374558,
"grad_norm": 0.4828090226247105,
"learning_rate": 4.4208595387840674e-05,
"loss": 0.4151,
"step": 434
},
{
"epoch": 0.6148409893992933,
"grad_norm": 0.3825873198800965,
"learning_rate": 4.418238993710692e-05,
"loss": 0.4287,
"step": 435
},
{
"epoch": 0.6162544169611307,
"grad_norm": 0.4390351095918237,
"learning_rate": 4.415618448637317e-05,
"loss": 0.4254,
"step": 436
},
{
"epoch": 0.6176678445229682,
"grad_norm": 0.3721681545334005,
"learning_rate": 4.4129979035639415e-05,
"loss": 0.4084,
"step": 437
},
{
"epoch": 0.6190812720848057,
"grad_norm": 0.373540306725536,
"learning_rate": 4.410377358490566e-05,
"loss": 0.4227,
"step": 438
},
{
"epoch": 0.620494699646643,
"grad_norm": 0.4384674930923621,
"learning_rate": 4.407756813417191e-05,
"loss": 0.4326,
"step": 439
},
{
"epoch": 0.6219081272084805,
"grad_norm": 0.4803538481936198,
"learning_rate": 4.4051362683438155e-05,
"loss": 0.4026,
"step": 440
},
{
"epoch": 0.623321554770318,
"grad_norm": 0.35580222227693725,
"learning_rate": 4.402515723270441e-05,
"loss": 0.3941,
"step": 441
},
{
"epoch": 0.6247349823321555,
"grad_norm": 0.404844341555805,
"learning_rate": 4.3998951781970655e-05,
"loss": 0.4174,
"step": 442
},
{
"epoch": 0.6261484098939929,
"grad_norm": 0.4930117366949244,
"learning_rate": 4.3972746331236895e-05,
"loss": 0.4355,
"step": 443
},
{
"epoch": 0.6275618374558304,
"grad_norm": 0.34581549297704767,
"learning_rate": 4.394654088050314e-05,
"loss": 0.4258,
"step": 444
},
{
"epoch": 0.6289752650176679,
"grad_norm": 0.40168189041675345,
"learning_rate": 4.3920335429769396e-05,
"loss": 0.4428,
"step": 445
},
{
"epoch": 0.6303886925795052,
"grad_norm": 0.37392062856482566,
"learning_rate": 4.389412997903564e-05,
"loss": 0.4263,
"step": 446
},
{
"epoch": 0.6318021201413427,
"grad_norm": 0.4106471188734287,
"learning_rate": 4.386792452830189e-05,
"loss": 0.4315,
"step": 447
},
{
"epoch": 0.6332155477031802,
"grad_norm": 0.4179646786570411,
"learning_rate": 4.3841719077568136e-05,
"loss": 0.4423,
"step": 448
},
{
"epoch": 0.6346289752650177,
"grad_norm": 0.37499965100090304,
"learning_rate": 4.381551362683438e-05,
"loss": 0.4105,
"step": 449
},
{
"epoch": 0.6360424028268551,
"grad_norm": 0.4342505688045509,
"learning_rate": 4.378930817610063e-05,
"loss": 0.4175,
"step": 450
},
{
"epoch": 0.6374558303886926,
"grad_norm": 0.3687454786345885,
"learning_rate": 4.3763102725366876e-05,
"loss": 0.4413,
"step": 451
},
{
"epoch": 0.6388692579505301,
"grad_norm": 0.46162391468291125,
"learning_rate": 4.373689727463312e-05,
"loss": 0.4425,
"step": 452
},
{
"epoch": 0.6402826855123674,
"grad_norm": 0.3592740322234895,
"learning_rate": 4.3710691823899376e-05,
"loss": 0.4271,
"step": 453
},
{
"epoch": 0.6416961130742049,
"grad_norm": 0.44686169294353845,
"learning_rate": 4.368448637316562e-05,
"loss": 0.4615,
"step": 454
},
{
"epoch": 0.6431095406360424,
"grad_norm": 0.40296347345399197,
"learning_rate": 4.365828092243186e-05,
"loss": 0.42,
"step": 455
},
{
"epoch": 0.6445229681978799,
"grad_norm": 0.38437414737240655,
"learning_rate": 4.363207547169812e-05,
"loss": 0.4229,
"step": 456
},
{
"epoch": 0.6459363957597173,
"grad_norm": 0.38048376060947947,
"learning_rate": 4.3605870020964364e-05,
"loss": 0.3974,
"step": 457
},
{
"epoch": 0.6473498233215548,
"grad_norm": 0.3369442139939333,
"learning_rate": 4.357966457023061e-05,
"loss": 0.4158,
"step": 458
},
{
"epoch": 0.6487632508833923,
"grad_norm": 0.3789909179108359,
"learning_rate": 4.355345911949686e-05,
"loss": 0.4199,
"step": 459
},
{
"epoch": 0.6501766784452296,
"grad_norm": 0.404366195951107,
"learning_rate": 4.3527253668763104e-05,
"loss": 0.4414,
"step": 460
},
{
"epoch": 0.6515901060070671,
"grad_norm": 0.3272333978811858,
"learning_rate": 4.350104821802935e-05,
"loss": 0.3985,
"step": 461
},
{
"epoch": 0.6530035335689046,
"grad_norm": 0.4780505959058204,
"learning_rate": 4.34748427672956e-05,
"loss": 0.4353,
"step": 462
},
{
"epoch": 0.6544169611307421,
"grad_norm": 0.42295446033516176,
"learning_rate": 4.3448637316561844e-05,
"loss": 0.4351,
"step": 463
},
{
"epoch": 0.6558303886925795,
"grad_norm": 0.41991361612608247,
"learning_rate": 4.34224318658281e-05,
"loss": 0.4306,
"step": 464
},
{
"epoch": 0.657243816254417,
"grad_norm": 0.34111998101439006,
"learning_rate": 4.3396226415094345e-05,
"loss": 0.3989,
"step": 465
},
{
"epoch": 0.6586572438162545,
"grad_norm": 0.4322586280682675,
"learning_rate": 4.337002096436059e-05,
"loss": 0.4155,
"step": 466
},
{
"epoch": 0.6600706713780918,
"grad_norm": 0.3249595379592676,
"learning_rate": 4.334381551362683e-05,
"loss": 0.4135,
"step": 467
},
{
"epoch": 0.6614840989399293,
"grad_norm": 0.35847458234510754,
"learning_rate": 4.3317610062893085e-05,
"loss": 0.4243,
"step": 468
},
{
"epoch": 0.6628975265017668,
"grad_norm": 0.44726558821401896,
"learning_rate": 4.329140461215933e-05,
"loss": 0.4195,
"step": 469
},
{
"epoch": 0.6643109540636042,
"grad_norm": 0.3757543780052803,
"learning_rate": 4.326519916142558e-05,
"loss": 0.4258,
"step": 470
},
{
"epoch": 0.6657243816254417,
"grad_norm": 0.5088747489591788,
"learning_rate": 4.3238993710691825e-05,
"loss": 0.4307,
"step": 471
},
{
"epoch": 0.6671378091872792,
"grad_norm": 0.4883741145951537,
"learning_rate": 4.321278825995808e-05,
"loss": 0.4302,
"step": 472
},
{
"epoch": 0.6685512367491167,
"grad_norm": 0.4254574278241842,
"learning_rate": 4.318658280922432e-05,
"loss": 0.4085,
"step": 473
},
{
"epoch": 0.669964664310954,
"grad_norm": 0.4059135648931803,
"learning_rate": 4.3160377358490565e-05,
"loss": 0.4182,
"step": 474
},
{
"epoch": 0.6713780918727915,
"grad_norm": 0.41501849569403865,
"learning_rate": 4.313417190775681e-05,
"loss": 0.425,
"step": 475
},
{
"epoch": 0.672791519434629,
"grad_norm": 0.4820726777875722,
"learning_rate": 4.3107966457023066e-05,
"loss": 0.4389,
"step": 476
},
{
"epoch": 0.6742049469964664,
"grad_norm": 0.4063520559751687,
"learning_rate": 4.308176100628931e-05,
"loss": 0.4294,
"step": 477
},
{
"epoch": 0.6756183745583039,
"grad_norm": 0.36422808305190024,
"learning_rate": 4.305555555555556e-05,
"loss": 0.4135,
"step": 478
},
{
"epoch": 0.6770318021201414,
"grad_norm": 0.4048172059055986,
"learning_rate": 4.3029350104821806e-05,
"loss": 0.4052,
"step": 479
},
{
"epoch": 0.6784452296819788,
"grad_norm": 0.30739309562023953,
"learning_rate": 4.300314465408805e-05,
"loss": 0.4033,
"step": 480
},
{
"epoch": 0.6798586572438162,
"grad_norm": 0.3459131890099211,
"learning_rate": 4.29769392033543e-05,
"loss": 0.4149,
"step": 481
},
{
"epoch": 0.6812720848056537,
"grad_norm": 0.40568561480555476,
"learning_rate": 4.2950733752620546e-05,
"loss": 0.4076,
"step": 482
},
{
"epoch": 0.6826855123674912,
"grad_norm": 0.31376743019306536,
"learning_rate": 4.292452830188679e-05,
"loss": 0.4197,
"step": 483
},
{
"epoch": 0.6840989399293286,
"grad_norm": 0.3751879100302488,
"learning_rate": 4.289832285115305e-05,
"loss": 0.4246,
"step": 484
},
{
"epoch": 0.6855123674911661,
"grad_norm": 0.35474057243436047,
"learning_rate": 4.287211740041929e-05,
"loss": 0.4117,
"step": 485
},
{
"epoch": 0.6869257950530036,
"grad_norm": 0.3165197240284382,
"learning_rate": 4.2845911949685533e-05,
"loss": 0.4317,
"step": 486
},
{
"epoch": 0.688339222614841,
"grad_norm": 0.4290303121537088,
"learning_rate": 4.281970649895179e-05,
"loss": 0.4235,
"step": 487
},
{
"epoch": 0.6897526501766784,
"grad_norm": 0.39577622491898085,
"learning_rate": 4.2793501048218034e-05,
"loss": 0.4182,
"step": 488
},
{
"epoch": 0.6911660777385159,
"grad_norm": 0.34145919988242746,
"learning_rate": 4.276729559748428e-05,
"loss": 0.4042,
"step": 489
},
{
"epoch": 0.6925795053003534,
"grad_norm": 0.40567929698145455,
"learning_rate": 4.274109014675053e-05,
"loss": 0.4213,
"step": 490
},
{
"epoch": 0.6939929328621908,
"grad_norm": 0.38788123514875444,
"learning_rate": 4.2714884696016774e-05,
"loss": 0.4117,
"step": 491
},
{
"epoch": 0.6954063604240283,
"grad_norm": 0.34532755253612135,
"learning_rate": 4.268867924528302e-05,
"loss": 0.4025,
"step": 492
},
{
"epoch": 0.6968197879858657,
"grad_norm": 0.43020021574988876,
"learning_rate": 4.266247379454927e-05,
"loss": 0.4401,
"step": 493
},
{
"epoch": 0.6982332155477032,
"grad_norm": 0.35385267617030464,
"learning_rate": 4.2636268343815514e-05,
"loss": 0.4252,
"step": 494
},
{
"epoch": 0.6996466431095406,
"grad_norm": 0.45094813435571446,
"learning_rate": 4.261006289308177e-05,
"loss": 0.4152,
"step": 495
},
{
"epoch": 0.7010600706713781,
"grad_norm": 0.3150603439008357,
"learning_rate": 4.2583857442348015e-05,
"loss": 0.4158,
"step": 496
},
{
"epoch": 0.7024734982332156,
"grad_norm": 0.36726752678098434,
"learning_rate": 4.2557651991614255e-05,
"loss": 0.4231,
"step": 497
},
{
"epoch": 0.703886925795053,
"grad_norm": 0.42950254528385723,
"learning_rate": 4.25314465408805e-05,
"loss": 0.4109,
"step": 498
},
{
"epoch": 0.7053003533568905,
"grad_norm": 0.3253703095697292,
"learning_rate": 4.2505241090146755e-05,
"loss": 0.4455,
"step": 499
},
{
"epoch": 0.7067137809187279,
"grad_norm": 0.32720358130764693,
"learning_rate": 4.2479035639413e-05,
"loss": 0.4268,
"step": 500
},
{
"epoch": 0.7081272084805653,
"grad_norm": 0.3947837720415545,
"learning_rate": 4.245283018867925e-05,
"loss": 0.4376,
"step": 501
},
{
"epoch": 0.7095406360424028,
"grad_norm": 0.3009193967963457,
"learning_rate": 4.2426624737945495e-05,
"loss": 0.4268,
"step": 502
},
{
"epoch": 0.7109540636042403,
"grad_norm": 0.4064754586095115,
"learning_rate": 4.240041928721174e-05,
"loss": 0.4185,
"step": 503
},
{
"epoch": 0.7123674911660778,
"grad_norm": 0.3119875934087222,
"learning_rate": 4.237421383647799e-05,
"loss": 0.4228,
"step": 504
},
{
"epoch": 0.7137809187279152,
"grad_norm": 0.32783888154461077,
"learning_rate": 4.2348008385744236e-05,
"loss": 0.4161,
"step": 505
},
{
"epoch": 0.7151943462897526,
"grad_norm": 0.37264759180440965,
"learning_rate": 4.232180293501048e-05,
"loss": 0.4008,
"step": 506
},
{
"epoch": 0.7166077738515901,
"grad_norm": 0.34665358982230904,
"learning_rate": 4.2295597484276736e-05,
"loss": 0.4281,
"step": 507
},
{
"epoch": 0.7180212014134275,
"grad_norm": 0.3623284428091778,
"learning_rate": 4.226939203354298e-05,
"loss": 0.4189,
"step": 508
},
{
"epoch": 0.719434628975265,
"grad_norm": 0.3477934503764715,
"learning_rate": 4.224318658280922e-05,
"loss": 0.4248,
"step": 509
},
{
"epoch": 0.7208480565371025,
"grad_norm": 0.39458079825265907,
"learning_rate": 4.221698113207547e-05,
"loss": 0.4003,
"step": 510
},
{
"epoch": 0.72226148409894,
"grad_norm": 0.33710934796159253,
"learning_rate": 4.219077568134172e-05,
"loss": 0.4045,
"step": 511
},
{
"epoch": 0.7236749116607774,
"grad_norm": 0.41828965300184534,
"learning_rate": 4.216457023060797e-05,
"loss": 0.4209,
"step": 512
},
{
"epoch": 0.7250883392226148,
"grad_norm": 0.33950894569947887,
"learning_rate": 4.213836477987422e-05,
"loss": 0.4292,
"step": 513
},
{
"epoch": 0.7265017667844523,
"grad_norm": 0.34044065910837623,
"learning_rate": 4.2112159329140463e-05,
"loss": 0.4325,
"step": 514
},
{
"epoch": 0.7279151943462897,
"grad_norm": 0.3161037253395095,
"learning_rate": 4.208595387840671e-05,
"loss": 0.4023,
"step": 515
},
{
"epoch": 0.7293286219081272,
"grad_norm": 0.3860142288912167,
"learning_rate": 4.205974842767296e-05,
"loss": 0.4151,
"step": 516
},
{
"epoch": 0.7307420494699647,
"grad_norm": 0.4053364053394855,
"learning_rate": 4.2033542976939204e-05,
"loss": 0.4152,
"step": 517
},
{
"epoch": 0.7321554770318022,
"grad_norm": 0.39702454207629423,
"learning_rate": 4.200733752620545e-05,
"loss": 0.4122,
"step": 518
},
{
"epoch": 0.7335689045936395,
"grad_norm": 0.3458803215279829,
"learning_rate": 4.1981132075471704e-05,
"loss": 0.41,
"step": 519
},
{
"epoch": 0.734982332155477,
"grad_norm": 0.4125733889434357,
"learning_rate": 4.195492662473795e-05,
"loss": 0.4012,
"step": 520
},
{
"epoch": 0.7363957597173145,
"grad_norm": 0.3167005734741213,
"learning_rate": 4.192872117400419e-05,
"loss": 0.4229,
"step": 521
},
{
"epoch": 0.7378091872791519,
"grad_norm": 0.38392987253345234,
"learning_rate": 4.1902515723270444e-05,
"loss": 0.4185,
"step": 522
},
{
"epoch": 0.7392226148409894,
"grad_norm": 0.36795882647064565,
"learning_rate": 4.187631027253669e-05,
"loss": 0.4291,
"step": 523
},
{
"epoch": 0.7406360424028269,
"grad_norm": 0.41252439319601,
"learning_rate": 4.185010482180294e-05,
"loss": 0.4207,
"step": 524
},
{
"epoch": 0.7420494699646644,
"grad_norm": 0.39504855235333675,
"learning_rate": 4.1823899371069185e-05,
"loss": 0.4096,
"step": 525
},
{
"epoch": 0.7434628975265017,
"grad_norm": 0.3850848665653565,
"learning_rate": 4.179769392033543e-05,
"loss": 0.4243,
"step": 526
},
{
"epoch": 0.7448763250883392,
"grad_norm": 0.47910284351834487,
"learning_rate": 4.177148846960168e-05,
"loss": 0.4163,
"step": 527
},
{
"epoch": 0.7462897526501767,
"grad_norm": 0.40353251602608775,
"learning_rate": 4.1745283018867925e-05,
"loss": 0.4417,
"step": 528
},
{
"epoch": 0.7477031802120141,
"grad_norm": 0.3727676586174066,
"learning_rate": 4.171907756813417e-05,
"loss": 0.4195,
"step": 529
},
{
"epoch": 0.7491166077738516,
"grad_norm": 0.46311694781388524,
"learning_rate": 4.1692872117400425e-05,
"loss": 0.4137,
"step": 530
},
{
"epoch": 0.7505300353356891,
"grad_norm": 0.3114811189209522,
"learning_rate": 4.166666666666667e-05,
"loss": 0.4071,
"step": 531
},
{
"epoch": 0.7519434628975264,
"grad_norm": 0.41569836492929024,
"learning_rate": 4.164046121593291e-05,
"loss": 0.3945,
"step": 532
},
{
"epoch": 0.7533568904593639,
"grad_norm": 0.36048533574162495,
"learning_rate": 4.161425576519916e-05,
"loss": 0.4182,
"step": 533
},
{
"epoch": 0.7547703180212014,
"grad_norm": 0.46080018865139516,
"learning_rate": 4.158805031446541e-05,
"loss": 0.4143,
"step": 534
},
{
"epoch": 0.7561837455830389,
"grad_norm": 0.393351894652798,
"learning_rate": 4.156184486373166e-05,
"loss": 0.4367,
"step": 535
},
{
"epoch": 0.7575971731448763,
"grad_norm": 0.3732212632301122,
"learning_rate": 4.1535639412997906e-05,
"loss": 0.4071,
"step": 536
},
{
"epoch": 0.7590106007067138,
"grad_norm": 0.4069320678393075,
"learning_rate": 4.150943396226415e-05,
"loss": 0.405,
"step": 537
},
{
"epoch": 0.7604240282685513,
"grad_norm": 0.34603936432737153,
"learning_rate": 4.14832285115304e-05,
"loss": 0.4145,
"step": 538
},
{
"epoch": 0.7618374558303886,
"grad_norm": 0.4559793389807064,
"learning_rate": 4.1457023060796646e-05,
"loss": 0.4116,
"step": 539
},
{
"epoch": 0.7632508833922261,
"grad_norm": 0.30758451247016905,
"learning_rate": 4.143081761006289e-05,
"loss": 0.415,
"step": 540
},
{
"epoch": 0.7646643109540636,
"grad_norm": 0.42386277945476647,
"learning_rate": 4.140461215932914e-05,
"loss": 0.4045,
"step": 541
},
{
"epoch": 0.7660777385159011,
"grad_norm": 0.35773893861252776,
"learning_rate": 4.137840670859539e-05,
"loss": 0.4161,
"step": 542
},
{
"epoch": 0.7674911660777385,
"grad_norm": 0.3969970376450972,
"learning_rate": 4.135220125786164e-05,
"loss": 0.428,
"step": 543
},
{
"epoch": 0.768904593639576,
"grad_norm": 0.3386171504783467,
"learning_rate": 4.132599580712788e-05,
"loss": 0.3994,
"step": 544
},
{
"epoch": 0.7703180212014135,
"grad_norm": 0.3582470506093284,
"learning_rate": 4.129979035639413e-05,
"loss": 0.4289,
"step": 545
},
{
"epoch": 0.7717314487632508,
"grad_norm": 0.4070523915061161,
"learning_rate": 4.127358490566038e-05,
"loss": 0.4207,
"step": 546
},
{
"epoch": 0.7731448763250883,
"grad_norm": 0.34740836943494224,
"learning_rate": 4.124737945492663e-05,
"loss": 0.4144,
"step": 547
},
{
"epoch": 0.7745583038869258,
"grad_norm": 0.44930789351088574,
"learning_rate": 4.1221174004192874e-05,
"loss": 0.4148,
"step": 548
},
{
"epoch": 0.7759717314487633,
"grad_norm": 0.36461690609749436,
"learning_rate": 4.119496855345912e-05,
"loss": 0.4414,
"step": 549
},
{
"epoch": 0.7773851590106007,
"grad_norm": 0.4128112654459074,
"learning_rate": 4.116876310272537e-05,
"loss": 0.4049,
"step": 550
},
{
"epoch": 0.7787985865724382,
"grad_norm": 0.4654481888910615,
"learning_rate": 4.1142557651991614e-05,
"loss": 0.41,
"step": 551
},
{
"epoch": 0.7802120141342757,
"grad_norm": 0.34642741509757274,
"learning_rate": 4.111635220125786e-05,
"loss": 0.4302,
"step": 552
},
{
"epoch": 0.781625441696113,
"grad_norm": 2.568380880457562,
"learning_rate": 4.109014675052411e-05,
"loss": 0.4394,
"step": 553
},
{
"epoch": 0.7830388692579505,
"grad_norm": 0.5953016705134792,
"learning_rate": 4.106394129979036e-05,
"loss": 0.419,
"step": 554
},
{
"epoch": 0.784452296819788,
"grad_norm": 0.4244462824256507,
"learning_rate": 4.103773584905661e-05,
"loss": 0.4259,
"step": 555
},
{
"epoch": 0.7858657243816255,
"grad_norm": 0.43555483943269824,
"learning_rate": 4.101153039832285e-05,
"loss": 0.3993,
"step": 556
},
{
"epoch": 0.7872791519434629,
"grad_norm": 0.4699205473291656,
"learning_rate": 4.09853249475891e-05,
"loss": 0.3897,
"step": 557
},
{
"epoch": 0.7886925795053004,
"grad_norm": 0.457793802751825,
"learning_rate": 4.095911949685535e-05,
"loss": 0.4079,
"step": 558
},
{
"epoch": 0.7901060070671378,
"grad_norm": 0.5659374648550031,
"learning_rate": 4.0932914046121595e-05,
"loss": 0.435,
"step": 559
},
{
"epoch": 0.7915194346289752,
"grad_norm": 0.4365995446012308,
"learning_rate": 4.090670859538784e-05,
"loss": 0.4243,
"step": 560
},
{
"epoch": 0.7929328621908127,
"grad_norm": 0.4908472068927948,
"learning_rate": 4.088050314465409e-05,
"loss": 0.4237,
"step": 561
},
{
"epoch": 0.7943462897526502,
"grad_norm": 0.45264251625280894,
"learning_rate": 4.0854297693920336e-05,
"loss": 0.4119,
"step": 562
},
{
"epoch": 0.7957597173144876,
"grad_norm": 0.4948639379669275,
"learning_rate": 4.082809224318658e-05,
"loss": 0.3981,
"step": 563
},
{
"epoch": 0.7971731448763251,
"grad_norm": 0.3837780363330271,
"learning_rate": 4.080188679245283e-05,
"loss": 0.408,
"step": 564
},
{
"epoch": 0.7985865724381626,
"grad_norm": 0.6113486391799774,
"learning_rate": 4.077568134171908e-05,
"loss": 0.4395,
"step": 565
},
{
"epoch": 0.8,
"grad_norm": 0.4013636803556065,
"learning_rate": 4.074947589098533e-05,
"loss": 0.3992,
"step": 566
},
{
"epoch": 0.8014134275618374,
"grad_norm": 0.42886316797919255,
"learning_rate": 4.0723270440251576e-05,
"loss": 0.4039,
"step": 567
},
{
"epoch": 0.8028268551236749,
"grad_norm": 0.4689941810214451,
"learning_rate": 4.0697064989517816e-05,
"loss": 0.4199,
"step": 568
},
{
"epoch": 0.8042402826855124,
"grad_norm": 0.394206968776456,
"learning_rate": 4.067085953878407e-05,
"loss": 0.4251,
"step": 569
},
{
"epoch": 0.8056537102473498,
"grad_norm": 0.4595806467826367,
"learning_rate": 4.0644654088050316e-05,
"loss": 0.4039,
"step": 570
},
{
"epoch": 0.8070671378091873,
"grad_norm": 0.38341170586662304,
"learning_rate": 4.061844863731656e-05,
"loss": 0.4196,
"step": 571
},
{
"epoch": 0.8084805653710248,
"grad_norm": 0.40436023838694746,
"learning_rate": 4.059224318658281e-05,
"loss": 0.4393,
"step": 572
},
{
"epoch": 0.8098939929328622,
"grad_norm": 0.40718574610614616,
"learning_rate": 4.0566037735849064e-05,
"loss": 0.3905,
"step": 573
},
{
"epoch": 0.8113074204946996,
"grad_norm": 0.38367740446928805,
"learning_rate": 4.0539832285115304e-05,
"loss": 0.391,
"step": 574
},
{
"epoch": 0.8127208480565371,
"grad_norm": 0.3793619473170191,
"learning_rate": 4.051362683438155e-05,
"loss": 0.4016,
"step": 575
},
{
"epoch": 0.8141342756183746,
"grad_norm": 0.3811921030479541,
"learning_rate": 4.04874213836478e-05,
"loss": 0.426,
"step": 576
},
{
"epoch": 0.815547703180212,
"grad_norm": 0.45321378181170174,
"learning_rate": 4.046121593291405e-05,
"loss": 0.4243,
"step": 577
},
{
"epoch": 0.8169611307420495,
"grad_norm": 0.3008321361684754,
"learning_rate": 4.04350104821803e-05,
"loss": 0.3959,
"step": 578
},
{
"epoch": 0.818374558303887,
"grad_norm": 0.42869709159556013,
"learning_rate": 4.0408805031446544e-05,
"loss": 0.4026,
"step": 579
},
{
"epoch": 0.8197879858657244,
"grad_norm": 0.33992602762622615,
"learning_rate": 4.038259958071279e-05,
"loss": 0.4167,
"step": 580
},
{
"epoch": 0.8212014134275618,
"grad_norm": 0.354828754911048,
"learning_rate": 4.035639412997904e-05,
"loss": 0.4298,
"step": 581
},
{
"epoch": 0.8226148409893993,
"grad_norm": 0.4411710153881503,
"learning_rate": 4.0330188679245284e-05,
"loss": 0.4079,
"step": 582
},
{
"epoch": 0.8240282685512368,
"grad_norm": 0.36062357079683866,
"learning_rate": 4.030398322851153e-05,
"loss": 0.4224,
"step": 583
},
{
"epoch": 0.8254416961130742,
"grad_norm": 0.4058608315800711,
"learning_rate": 4.027777777777778e-05,
"loss": 0.4161,
"step": 584
},
{
"epoch": 0.8268551236749117,
"grad_norm": 0.37453552593742895,
"learning_rate": 4.025157232704403e-05,
"loss": 0.3834,
"step": 585
},
{
"epoch": 0.8282685512367491,
"grad_norm": 0.4015569545801815,
"learning_rate": 4.022536687631027e-05,
"loss": 0.4061,
"step": 586
},
{
"epoch": 0.8296819787985866,
"grad_norm": 0.3729800940226261,
"learning_rate": 4.019916142557652e-05,
"loss": 0.4108,
"step": 587
},
{
"epoch": 0.831095406360424,
"grad_norm": 0.3987475770260318,
"learning_rate": 4.017295597484277e-05,
"loss": 0.4037,
"step": 588
},
{
"epoch": 0.8325088339222615,
"grad_norm": 0.38440806057472476,
"learning_rate": 4.014675052410902e-05,
"loss": 0.433,
"step": 589
},
{
"epoch": 0.833922261484099,
"grad_norm": 0.4150611273667124,
"learning_rate": 4.0120545073375265e-05,
"loss": 0.4103,
"step": 590
},
{
"epoch": 0.8353356890459364,
"grad_norm": 0.34594143758812523,
"learning_rate": 4.009433962264151e-05,
"loss": 0.4173,
"step": 591
},
{
"epoch": 0.8367491166077738,
"grad_norm": 0.4334237429207024,
"learning_rate": 4.006813417190776e-05,
"loss": 0.4077,
"step": 592
},
{
"epoch": 0.8381625441696113,
"grad_norm": 0.3734023724895848,
"learning_rate": 4.0041928721174006e-05,
"loss": 0.4152,
"step": 593
},
{
"epoch": 0.8395759717314487,
"grad_norm": 0.3893716600550784,
"learning_rate": 4.001572327044025e-05,
"loss": 0.3891,
"step": 594
},
{
"epoch": 0.8409893992932862,
"grad_norm": 0.3694945907799801,
"learning_rate": 3.99895178197065e-05,
"loss": 0.4229,
"step": 595
},
{
"epoch": 0.8424028268551237,
"grad_norm": 0.36071295237195355,
"learning_rate": 3.996331236897275e-05,
"loss": 0.416,
"step": 596
},
{
"epoch": 0.8438162544169612,
"grad_norm": 0.36844906303603353,
"learning_rate": 3.9937106918239e-05,
"loss": 0.4011,
"step": 597
},
{
"epoch": 0.8452296819787986,
"grad_norm": 0.7579764509123078,
"learning_rate": 3.991090146750524e-05,
"loss": 0.4415,
"step": 598
},
{
"epoch": 0.846643109540636,
"grad_norm": 0.42834280069786246,
"learning_rate": 3.9884696016771486e-05,
"loss": 0.4067,
"step": 599
},
{
"epoch": 0.8480565371024735,
"grad_norm": 0.44020195023471725,
"learning_rate": 3.985849056603774e-05,
"loss": 0.4059,
"step": 600
},
{
"epoch": 0.8494699646643109,
"grad_norm": 0.4896107927280786,
"learning_rate": 3.983228511530399e-05,
"loss": 0.4287,
"step": 601
},
{
"epoch": 0.8508833922261484,
"grad_norm": 0.3861194246244646,
"learning_rate": 3.9806079664570233e-05,
"loss": 0.3966,
"step": 602
},
{
"epoch": 0.8522968197879859,
"grad_norm": 0.49084199525721084,
"learning_rate": 3.977987421383648e-05,
"loss": 0.4357,
"step": 603
},
{
"epoch": 0.8537102473498234,
"grad_norm": 0.3977208691168237,
"learning_rate": 3.975366876310273e-05,
"loss": 0.3998,
"step": 604
},
{
"epoch": 0.8551236749116607,
"grad_norm": 0.47058695707312703,
"learning_rate": 3.9727463312368974e-05,
"loss": 0.4329,
"step": 605
},
{
"epoch": 0.8565371024734982,
"grad_norm": 0.35618879269008863,
"learning_rate": 3.970125786163522e-05,
"loss": 0.4034,
"step": 606
},
{
"epoch": 0.8579505300353357,
"grad_norm": 0.5170591415895558,
"learning_rate": 3.967505241090147e-05,
"loss": 0.4208,
"step": 607
},
{
"epoch": 0.8593639575971731,
"grad_norm": 0.4341980923731296,
"learning_rate": 3.964884696016772e-05,
"loss": 0.4149,
"step": 608
},
{
"epoch": 0.8607773851590106,
"grad_norm": 0.5016217750587869,
"learning_rate": 3.962264150943397e-05,
"loss": 0.4168,
"step": 609
},
{
"epoch": 0.8621908127208481,
"grad_norm": 0.5169170858258177,
"learning_rate": 3.959643605870021e-05,
"loss": 0.4438,
"step": 610
},
{
"epoch": 0.8636042402826856,
"grad_norm": 0.4266159686745794,
"learning_rate": 3.9570230607966454e-05,
"loss": 0.4148,
"step": 611
},
{
"epoch": 0.8650176678445229,
"grad_norm": 0.45123254507631055,
"learning_rate": 3.954402515723271e-05,
"loss": 0.416,
"step": 612
},
{
"epoch": 0.8664310954063604,
"grad_norm": 0.443594266511716,
"learning_rate": 3.9517819706498955e-05,
"loss": 0.4025,
"step": 613
},
{
"epoch": 0.8678445229681979,
"grad_norm": 0.412990512121991,
"learning_rate": 3.94916142557652e-05,
"loss": 0.4019,
"step": 614
},
{
"epoch": 0.8692579505300353,
"grad_norm": 0.4684211072925879,
"learning_rate": 3.946540880503145e-05,
"loss": 0.4072,
"step": 615
},
{
"epoch": 0.8706713780918728,
"grad_norm": 0.3143399571016764,
"learning_rate": 3.9439203354297695e-05,
"loss": 0.431,
"step": 616
},
{
"epoch": 0.8720848056537103,
"grad_norm": 0.4661888274257073,
"learning_rate": 3.941299790356394e-05,
"loss": 0.4071,
"step": 617
},
{
"epoch": 0.8734982332155476,
"grad_norm": 0.3735979857863716,
"learning_rate": 3.938679245283019e-05,
"loss": 0.4178,
"step": 618
},
{
"epoch": 0.8749116607773851,
"grad_norm": 0.4729391398132935,
"learning_rate": 3.9360587002096435e-05,
"loss": 0.4142,
"step": 619
},
{
"epoch": 0.8763250883392226,
"grad_norm": 0.35298562266028416,
"learning_rate": 3.933438155136269e-05,
"loss": 0.4013,
"step": 620
},
{
"epoch": 0.8777385159010601,
"grad_norm": 0.3700138635635356,
"learning_rate": 3.9308176100628936e-05,
"loss": 0.4122,
"step": 621
},
{
"epoch": 0.8791519434628975,
"grad_norm": 0.3526283150490633,
"learning_rate": 3.9281970649895176e-05,
"loss": 0.403,
"step": 622
},
{
"epoch": 0.880565371024735,
"grad_norm": 0.3928367824504502,
"learning_rate": 3.925576519916143e-05,
"loss": 0.4018,
"step": 623
},
{
"epoch": 0.8819787985865725,
"grad_norm": 0.33901798148292844,
"learning_rate": 3.9229559748427676e-05,
"loss": 0.392,
"step": 624
},
{
"epoch": 0.8833922261484098,
"grad_norm": 0.4156948882645667,
"learning_rate": 3.920335429769392e-05,
"loss": 0.3871,
"step": 625
},
{
"epoch": 0.8848056537102473,
"grad_norm": 0.447620403348896,
"learning_rate": 3.917714884696017e-05,
"loss": 0.433,
"step": 626
},
{
"epoch": 0.8862190812720848,
"grad_norm": 0.4620470953445995,
"learning_rate": 3.9150943396226416e-05,
"loss": 0.4084,
"step": 627
},
{
"epoch": 0.8876325088339223,
"grad_norm": 0.36158476392826727,
"learning_rate": 3.912473794549266e-05,
"loss": 0.4097,
"step": 628
},
{
"epoch": 0.8890459363957597,
"grad_norm": 0.35901137785573645,
"learning_rate": 3.909853249475891e-05,
"loss": 0.41,
"step": 629
},
{
"epoch": 0.8904593639575972,
"grad_norm": 0.3232326125126936,
"learning_rate": 3.9072327044025157e-05,
"loss": 0.4045,
"step": 630
},
{
"epoch": 0.8918727915194347,
"grad_norm": 0.3214466848622873,
"learning_rate": 3.904612159329141e-05,
"loss": 0.4009,
"step": 631
},
{
"epoch": 0.893286219081272,
"grad_norm": 0.4468872551793425,
"learning_rate": 3.901991614255766e-05,
"loss": 0.4262,
"step": 632
},
{
"epoch": 0.8946996466431095,
"grad_norm": 0.3664611099699118,
"learning_rate": 3.8993710691823904e-05,
"loss": 0.4384,
"step": 633
},
{
"epoch": 0.896113074204947,
"grad_norm": 0.3245803851135017,
"learning_rate": 3.8967505241090144e-05,
"loss": 0.3932,
"step": 634
},
{
"epoch": 0.8975265017667845,
"grad_norm": 0.3190681346929863,
"learning_rate": 3.89412997903564e-05,
"loss": 0.4303,
"step": 635
},
{
"epoch": 0.8989399293286219,
"grad_norm": 0.33984746694187773,
"learning_rate": 3.8915094339622644e-05,
"loss": 0.4219,
"step": 636
},
{
"epoch": 0.9003533568904594,
"grad_norm": 0.3880550975266584,
"learning_rate": 3.888888888888889e-05,
"loss": 0.4062,
"step": 637
},
{
"epoch": 0.9017667844522969,
"grad_norm": 0.3114990069652242,
"learning_rate": 3.886268343815514e-05,
"loss": 0.4323,
"step": 638
},
{
"epoch": 0.9031802120141342,
"grad_norm": 0.4077813374563541,
"learning_rate": 3.883647798742139e-05,
"loss": 0.4337,
"step": 639
},
{
"epoch": 0.9045936395759717,
"grad_norm": 0.327666565263863,
"learning_rate": 3.881027253668763e-05,
"loss": 0.3992,
"step": 640
},
{
"epoch": 0.9060070671378092,
"grad_norm": 0.341732005140741,
"learning_rate": 3.878406708595388e-05,
"loss": 0.4217,
"step": 641
},
{
"epoch": 0.9074204946996467,
"grad_norm": 0.32190627165496305,
"learning_rate": 3.8757861635220125e-05,
"loss": 0.4113,
"step": 642
},
{
"epoch": 0.9088339222614841,
"grad_norm": 0.3609152959353754,
"learning_rate": 3.873165618448638e-05,
"loss": 0.4122,
"step": 643
},
{
"epoch": 0.9102473498233216,
"grad_norm": 0.30840855725044847,
"learning_rate": 3.8705450733752625e-05,
"loss": 0.3954,
"step": 644
},
{
"epoch": 0.911660777385159,
"grad_norm": 0.38819036676065466,
"learning_rate": 3.867924528301887e-05,
"loss": 0.4124,
"step": 645
},
{
"epoch": 0.9130742049469964,
"grad_norm": 0.3313822594384359,
"learning_rate": 3.865303983228511e-05,
"loss": 0.4073,
"step": 646
},
{
"epoch": 0.9144876325088339,
"grad_norm": 0.37879837106001957,
"learning_rate": 3.8626834381551365e-05,
"loss": 0.4128,
"step": 647
},
{
"epoch": 0.9159010600706714,
"grad_norm": 0.36398707449125006,
"learning_rate": 3.860062893081761e-05,
"loss": 0.3711,
"step": 648
},
{
"epoch": 0.9173144876325088,
"grad_norm": 0.3417837158668181,
"learning_rate": 3.857442348008386e-05,
"loss": 0.4135,
"step": 649
},
{
"epoch": 0.9187279151943463,
"grad_norm": 0.37914320910390675,
"learning_rate": 3.8548218029350106e-05,
"loss": 0.411,
"step": 650
},
{
"epoch": 0.9201413427561838,
"grad_norm": 0.4189635123586755,
"learning_rate": 3.852201257861636e-05,
"loss": 0.4031,
"step": 651
},
{
"epoch": 0.9215547703180212,
"grad_norm": 0.33973767222720036,
"learning_rate": 3.84958071278826e-05,
"loss": 0.4051,
"step": 652
},
{
"epoch": 0.9229681978798586,
"grad_norm": 0.43793314486562585,
"learning_rate": 3.8469601677148846e-05,
"loss": 0.4145,
"step": 653
},
{
"epoch": 0.9243816254416961,
"grad_norm": 0.34562987701491,
"learning_rate": 3.844339622641509e-05,
"loss": 0.4218,
"step": 654
},
{
"epoch": 0.9257950530035336,
"grad_norm": 0.4009105602367646,
"learning_rate": 3.8417190775681346e-05,
"loss": 0.4231,
"step": 655
},
{
"epoch": 0.927208480565371,
"grad_norm": 0.34418637236689303,
"learning_rate": 3.839098532494759e-05,
"loss": 0.4057,
"step": 656
},
{
"epoch": 0.9286219081272085,
"grad_norm": 0.3898603060080436,
"learning_rate": 3.836477987421384e-05,
"loss": 0.4082,
"step": 657
},
{
"epoch": 0.930035335689046,
"grad_norm": 0.3633611885604749,
"learning_rate": 3.8338574423480086e-05,
"loss": 0.4063,
"step": 658
},
{
"epoch": 0.9314487632508834,
"grad_norm": 0.32323982049963773,
"learning_rate": 3.831236897274633e-05,
"loss": 0.3692,
"step": 659
},
{
"epoch": 0.9328621908127208,
"grad_norm": 0.4122820383664903,
"learning_rate": 3.828616352201258e-05,
"loss": 0.4402,
"step": 660
},
{
"epoch": 0.9342756183745583,
"grad_norm": 0.381905316263321,
"learning_rate": 3.825995807127883e-05,
"loss": 0.4238,
"step": 661
},
{
"epoch": 0.9356890459363958,
"grad_norm": 0.3530229290539169,
"learning_rate": 3.8233752620545074e-05,
"loss": 0.4116,
"step": 662
},
{
"epoch": 0.9371024734982332,
"grad_norm": 0.3823820358611645,
"learning_rate": 3.820754716981133e-05,
"loss": 0.3963,
"step": 663
},
{
"epoch": 0.9385159010600707,
"grad_norm": 0.34105968805236425,
"learning_rate": 3.818134171907757e-05,
"loss": 0.3988,
"step": 664
},
{
"epoch": 0.9399293286219081,
"grad_norm": 0.32654965847315154,
"learning_rate": 3.8155136268343814e-05,
"loss": 0.4168,
"step": 665
},
{
"epoch": 0.9413427561837456,
"grad_norm": 0.347384947129823,
"learning_rate": 3.812893081761007e-05,
"loss": 0.4273,
"step": 666
},
{
"epoch": 0.942756183745583,
"grad_norm": 0.42437539365107974,
"learning_rate": 3.8102725366876314e-05,
"loss": 0.4083,
"step": 667
},
{
"epoch": 0.9441696113074205,
"grad_norm": 0.32158112128064104,
"learning_rate": 3.807651991614256e-05,
"loss": 0.4204,
"step": 668
},
{
"epoch": 0.945583038869258,
"grad_norm": 0.49287208081757977,
"learning_rate": 3.805031446540881e-05,
"loss": 0.4099,
"step": 669
},
{
"epoch": 0.9469964664310954,
"grad_norm": 0.3232981118624515,
"learning_rate": 3.8024109014675055e-05,
"loss": 0.4014,
"step": 670
},
{
"epoch": 0.9484098939929329,
"grad_norm": 0.42135962915219716,
"learning_rate": 3.79979035639413e-05,
"loss": 0.4154,
"step": 671
},
{
"epoch": 0.9498233215547703,
"grad_norm": 0.3665728354664516,
"learning_rate": 3.797169811320755e-05,
"loss": 0.408,
"step": 672
},
{
"epoch": 0.9512367491166078,
"grad_norm": 0.2779128872485725,
"learning_rate": 3.7945492662473795e-05,
"loss": 0.4001,
"step": 673
},
{
"epoch": 0.9526501766784452,
"grad_norm": 0.33736200896809143,
"learning_rate": 3.791928721174005e-05,
"loss": 0.3935,
"step": 674
},
{
"epoch": 0.9540636042402827,
"grad_norm": 0.35968159156453683,
"learning_rate": 3.7893081761006295e-05,
"loss": 0.4153,
"step": 675
},
{
"epoch": 0.9554770318021202,
"grad_norm": 0.33206851865404124,
"learning_rate": 3.7866876310272535e-05,
"loss": 0.3901,
"step": 676
},
{
"epoch": 0.9568904593639576,
"grad_norm": 0.3321249591321321,
"learning_rate": 3.784067085953878e-05,
"loss": 0.4145,
"step": 677
},
{
"epoch": 0.958303886925795,
"grad_norm": 0.3079689697513417,
"learning_rate": 3.7814465408805035e-05,
"loss": 0.3967,
"step": 678
},
{
"epoch": 0.9597173144876325,
"grad_norm": 0.3491366265167499,
"learning_rate": 3.778825995807128e-05,
"loss": 0.4125,
"step": 679
},
{
"epoch": 0.9611307420494699,
"grad_norm": 0.3445282193518051,
"learning_rate": 3.776205450733753e-05,
"loss": 0.4318,
"step": 680
},
{
"epoch": 0.9625441696113074,
"grad_norm": 0.38470455275045545,
"learning_rate": 3.7735849056603776e-05,
"loss": 0.4155,
"step": 681
},
{
"epoch": 0.9639575971731449,
"grad_norm": 0.31031891000605816,
"learning_rate": 3.770964360587002e-05,
"loss": 0.4089,
"step": 682
},
{
"epoch": 0.9653710247349824,
"grad_norm": 0.3603669916013144,
"learning_rate": 3.768343815513627e-05,
"loss": 0.4076,
"step": 683
},
{
"epoch": 0.9667844522968198,
"grad_norm": 0.3963655989636739,
"learning_rate": 3.7657232704402516e-05,
"loss": 0.4413,
"step": 684
},
{
"epoch": 0.9681978798586572,
"grad_norm": 0.34939728007841586,
"learning_rate": 3.763102725366876e-05,
"loss": 0.3911,
"step": 685
},
{
"epoch": 0.9696113074204947,
"grad_norm": 0.39294839119901787,
"learning_rate": 3.7604821802935016e-05,
"loss": 0.409,
"step": 686
},
{
"epoch": 0.9710247349823321,
"grad_norm": 0.3717207349643657,
"learning_rate": 3.757861635220126e-05,
"loss": 0.3763,
"step": 687
},
{
"epoch": 0.9724381625441696,
"grad_norm": 0.3725565074963433,
"learning_rate": 3.75524109014675e-05,
"loss": 0.4293,
"step": 688
},
{
"epoch": 0.9738515901060071,
"grad_norm": 0.3910539830936109,
"learning_rate": 3.752620545073376e-05,
"loss": 0.4078,
"step": 689
},
{
"epoch": 0.9752650176678446,
"grad_norm": 0.3191968336145843,
"learning_rate": 3.7500000000000003e-05,
"loss": 0.3956,
"step": 690
},
{
"epoch": 0.976678445229682,
"grad_norm": 0.3476552196967715,
"learning_rate": 3.747379454926625e-05,
"loss": 0.4159,
"step": 691
},
{
"epoch": 0.9780918727915194,
"grad_norm": 0.38777675519846533,
"learning_rate": 3.74475890985325e-05,
"loss": 0.4207,
"step": 692
},
{
"epoch": 0.9795053003533569,
"grad_norm": 0.36124102246465356,
"learning_rate": 3.7421383647798744e-05,
"loss": 0.4075,
"step": 693
},
{
"epoch": 0.9809187279151943,
"grad_norm": 0.3519628560906105,
"learning_rate": 3.739517819706499e-05,
"loss": 0.4075,
"step": 694
},
{
"epoch": 0.9823321554770318,
"grad_norm": 0.4045055278470149,
"learning_rate": 3.736897274633124e-05,
"loss": 0.41,
"step": 695
},
{
"epoch": 0.9837455830388693,
"grad_norm": 0.3816351053482838,
"learning_rate": 3.7342767295597484e-05,
"loss": 0.4154,
"step": 696
},
{
"epoch": 0.9851590106007068,
"grad_norm": 0.3315119547016144,
"learning_rate": 3.731656184486374e-05,
"loss": 0.4099,
"step": 697
},
{
"epoch": 0.9865724381625441,
"grad_norm": 0.4005658653011404,
"learning_rate": 3.7290356394129984e-05,
"loss": 0.4257,
"step": 698
},
{
"epoch": 0.9879858657243816,
"grad_norm": 0.3106957420304495,
"learning_rate": 3.7264150943396224e-05,
"loss": 0.4024,
"step": 699
},
{
"epoch": 0.9893992932862191,
"grad_norm": 0.38798771675838967,
"learning_rate": 3.723794549266247e-05,
"loss": 0.4169,
"step": 700
},
{
"epoch": 0.9908127208480565,
"grad_norm": 0.3440212304035,
"learning_rate": 3.7211740041928725e-05,
"loss": 0.4159,
"step": 701
},
{
"epoch": 0.992226148409894,
"grad_norm": 0.3589247180712301,
"learning_rate": 3.718553459119497e-05,
"loss": 0.3911,
"step": 702
},
{
"epoch": 0.9936395759717315,
"grad_norm": 0.364717534041688,
"learning_rate": 3.715932914046122e-05,
"loss": 0.4243,
"step": 703
},
{
"epoch": 0.995053003533569,
"grad_norm": 0.35734181461705755,
"learning_rate": 3.7133123689727465e-05,
"loss": 0.4241,
"step": 704
},
{
"epoch": 0.9964664310954063,
"grad_norm": 0.44423905548943465,
"learning_rate": 3.710691823899371e-05,
"loss": 0.4037,
"step": 705
},
{
"epoch": 0.9978798586572438,
"grad_norm": 0.34829126844074265,
"learning_rate": 3.708071278825996e-05,
"loss": 0.3774,
"step": 706
},
{
"epoch": 0.9992932862190813,
"grad_norm": 0.4705672020385433,
"learning_rate": 3.7054507337526205e-05,
"loss": 0.4285,
"step": 707
},
{
"epoch": 1.0,
"grad_norm": 0.4705672020385433,
"learning_rate": 3.702830188679245e-05,
"loss": 0.3888,
"step": 708
},
{
"epoch": 1.0014134275618374,
"grad_norm": 0.5903330192319796,
"learning_rate": 3.7002096436058706e-05,
"loss": 0.3194,
"step": 709
},
{
"epoch": 1.002826855123675,
"grad_norm": 0.41448463788303513,
"learning_rate": 3.697589098532495e-05,
"loss": 0.3489,
"step": 710
},
{
"epoch": 1.0042402826855124,
"grad_norm": 0.45189174166182,
"learning_rate": 3.694968553459119e-05,
"loss": 0.3483,
"step": 711
},
{
"epoch": 1.0056537102473497,
"grad_norm": 0.3411164677467743,
"learning_rate": 3.692348008385744e-05,
"loss": 0.3537,
"step": 712
},
{
"epoch": 1.0070671378091873,
"grad_norm": 0.40758934828414095,
"learning_rate": 3.689727463312369e-05,
"loss": 0.3762,
"step": 713
},
{
"epoch": 1.0084805653710247,
"grad_norm": 0.45782678512817626,
"learning_rate": 3.687106918238994e-05,
"loss": 0.3441,
"step": 714
},
{
"epoch": 1.009893992932862,
"grad_norm": 0.388577847582502,
"learning_rate": 3.6844863731656186e-05,
"loss": 0.3286,
"step": 715
},
{
"epoch": 1.0113074204946997,
"grad_norm": 0.4193186035211449,
"learning_rate": 3.681865828092243e-05,
"loss": 0.3435,
"step": 716
},
{
"epoch": 1.012720848056537,
"grad_norm": 0.4375481959600438,
"learning_rate": 3.679245283018868e-05,
"loss": 0.3492,
"step": 717
},
{
"epoch": 1.0141342756183747,
"grad_norm": 0.3769654290568487,
"learning_rate": 3.676624737945493e-05,
"loss": 0.3379,
"step": 718
},
{
"epoch": 1.015547703180212,
"grad_norm": 0.40041162314142165,
"learning_rate": 3.6740041928721173e-05,
"loss": 0.3408,
"step": 719
},
{
"epoch": 1.0169611307420494,
"grad_norm": 0.357639336216928,
"learning_rate": 3.671383647798742e-05,
"loss": 0.3533,
"step": 720
},
{
"epoch": 1.018374558303887,
"grad_norm": 0.41914261138760034,
"learning_rate": 3.6687631027253674e-05,
"loss": 0.3439,
"step": 721
},
{
"epoch": 1.0197879858657244,
"grad_norm": 0.41510104190682934,
"learning_rate": 3.666142557651992e-05,
"loss": 0.3478,
"step": 722
},
{
"epoch": 1.0212014134275618,
"grad_norm": 0.2916326951809022,
"learning_rate": 3.663522012578616e-05,
"loss": 0.3434,
"step": 723
},
{
"epoch": 1.0226148409893994,
"grad_norm": 0.4219472546080783,
"learning_rate": 3.6609014675052414e-05,
"loss": 0.3409,
"step": 724
},
{
"epoch": 1.0240282685512367,
"grad_norm": 0.3777567583012385,
"learning_rate": 3.658280922431866e-05,
"loss": 0.3526,
"step": 725
},
{
"epoch": 1.0254416961130741,
"grad_norm": 0.38395893224828936,
"learning_rate": 3.655660377358491e-05,
"loss": 0.3551,
"step": 726
},
{
"epoch": 1.0268551236749117,
"grad_norm": 0.38465912640478855,
"learning_rate": 3.6530398322851154e-05,
"loss": 0.3352,
"step": 727
},
{
"epoch": 1.028268551236749,
"grad_norm": 0.30689294629298425,
"learning_rate": 3.65041928721174e-05,
"loss": 0.3472,
"step": 728
},
{
"epoch": 1.0296819787985865,
"grad_norm": 0.36250695543035444,
"learning_rate": 3.647798742138365e-05,
"loss": 0.3439,
"step": 729
},
{
"epoch": 1.031095406360424,
"grad_norm": 0.3201343810755919,
"learning_rate": 3.6451781970649895e-05,
"loss": 0.331,
"step": 730
},
{
"epoch": 1.0325088339222614,
"grad_norm": 0.31275335440587837,
"learning_rate": 3.642557651991614e-05,
"loss": 0.3344,
"step": 731
},
{
"epoch": 1.0339222614840988,
"grad_norm": 0.34385990538987316,
"learning_rate": 3.6399371069182395e-05,
"loss": 0.3293,
"step": 732
},
{
"epoch": 1.0353356890459364,
"grad_norm": 0.2863362642302072,
"learning_rate": 3.637316561844864e-05,
"loss": 0.3485,
"step": 733
},
{
"epoch": 1.0367491166077738,
"grad_norm": 0.31189653345424395,
"learning_rate": 3.634696016771489e-05,
"loss": 0.3349,
"step": 734
},
{
"epoch": 1.0381625441696114,
"grad_norm": 0.31590612587819455,
"learning_rate": 3.632075471698113e-05,
"loss": 0.3301,
"step": 735
},
{
"epoch": 1.0395759717314488,
"grad_norm": 0.30338294335935406,
"learning_rate": 3.629454926624738e-05,
"loss": 0.3446,
"step": 736
},
{
"epoch": 1.0409893992932862,
"grad_norm": 0.29921205931030087,
"learning_rate": 3.626834381551363e-05,
"loss": 0.3417,
"step": 737
},
{
"epoch": 1.0424028268551238,
"grad_norm": 0.3459514299364315,
"learning_rate": 3.6242138364779876e-05,
"loss": 0.3451,
"step": 738
},
{
"epoch": 1.0438162544169611,
"grad_norm": 0.3374251663010735,
"learning_rate": 3.621593291404612e-05,
"loss": 0.3265,
"step": 739
},
{
"epoch": 1.0452296819787985,
"grad_norm": 0.3298180625136512,
"learning_rate": 3.6189727463312376e-05,
"loss": 0.3405,
"step": 740
},
{
"epoch": 1.046643109540636,
"grad_norm": 0.3024930112071315,
"learning_rate": 3.6163522012578616e-05,
"loss": 0.3583,
"step": 741
},
{
"epoch": 1.0480565371024735,
"grad_norm": 0.3451530627017385,
"learning_rate": 3.613731656184486e-05,
"loss": 0.3334,
"step": 742
},
{
"epoch": 1.0494699646643109,
"grad_norm": 0.30837097855795564,
"learning_rate": 3.611111111111111e-05,
"loss": 0.343,
"step": 743
},
{
"epoch": 1.0508833922261485,
"grad_norm": 0.3009402868321942,
"learning_rate": 3.608490566037736e-05,
"loss": 0.3436,
"step": 744
},
{
"epoch": 1.0522968197879858,
"grad_norm": 0.34273196702025216,
"learning_rate": 3.605870020964361e-05,
"loss": 0.333,
"step": 745
},
{
"epoch": 1.0537102473498232,
"grad_norm": 0.34405009998782105,
"learning_rate": 3.6032494758909857e-05,
"loss": 0.3649,
"step": 746
},
{
"epoch": 1.0551236749116608,
"grad_norm": 0.3162128511809767,
"learning_rate": 3.6006289308176097e-05,
"loss": 0.3409,
"step": 747
},
{
"epoch": 1.0565371024734982,
"grad_norm": 0.3267489552835148,
"learning_rate": 3.598008385744235e-05,
"loss": 0.3344,
"step": 748
},
{
"epoch": 1.0579505300353358,
"grad_norm": 0.3566231434375217,
"learning_rate": 3.59538784067086e-05,
"loss": 0.3463,
"step": 749
},
{
"epoch": 1.0593639575971732,
"grad_norm": 0.30732351082339,
"learning_rate": 3.5927672955974844e-05,
"loss": 0.3473,
"step": 750
},
{
"epoch": 1.0607773851590105,
"grad_norm": 0.32937983469551874,
"learning_rate": 3.590146750524109e-05,
"loss": 0.3356,
"step": 751
},
{
"epoch": 1.0621908127208481,
"grad_norm": 0.3609912309947035,
"learning_rate": 3.5875262054507344e-05,
"loss": 0.3559,
"step": 752
},
{
"epoch": 1.0636042402826855,
"grad_norm": 0.31682048757690323,
"learning_rate": 3.5849056603773584e-05,
"loss": 0.3346,
"step": 753
},
{
"epoch": 1.065017667844523,
"grad_norm": 0.29333756055588717,
"learning_rate": 3.582285115303983e-05,
"loss": 0.3237,
"step": 754
},
{
"epoch": 1.0664310954063605,
"grad_norm": 0.34277768952510773,
"learning_rate": 3.579664570230608e-05,
"loss": 0.3612,
"step": 755
},
{
"epoch": 1.0678445229681979,
"grad_norm": 0.2937903814111069,
"learning_rate": 3.577044025157233e-05,
"loss": 0.3351,
"step": 756
},
{
"epoch": 1.0692579505300353,
"grad_norm": 0.34799713306810826,
"learning_rate": 3.574423480083858e-05,
"loss": 0.3473,
"step": 757
},
{
"epoch": 1.0706713780918728,
"grad_norm": 0.395124088353914,
"learning_rate": 3.5718029350104825e-05,
"loss": 0.3444,
"step": 758
},
{
"epoch": 1.0720848056537102,
"grad_norm": 0.3424852392843125,
"learning_rate": 3.569182389937107e-05,
"loss": 0.3602,
"step": 759
},
{
"epoch": 1.0734982332155476,
"grad_norm": 0.3850594901250311,
"learning_rate": 3.566561844863732e-05,
"loss": 0.336,
"step": 760
},
{
"epoch": 1.0749116607773852,
"grad_norm": 0.3153355265194284,
"learning_rate": 3.5639412997903565e-05,
"loss": 0.337,
"step": 761
},
{
"epoch": 1.0763250883392226,
"grad_norm": 0.39705971108405086,
"learning_rate": 3.561320754716981e-05,
"loss": 0.3467,
"step": 762
},
{
"epoch": 1.0777385159010602,
"grad_norm": 0.3261102354286737,
"learning_rate": 3.558700209643606e-05,
"loss": 0.3645,
"step": 763
},
{
"epoch": 1.0791519434628976,
"grad_norm": 0.3083147548862041,
"learning_rate": 3.556079664570231e-05,
"loss": 0.3349,
"step": 764
},
{
"epoch": 1.080565371024735,
"grad_norm": 0.3128054519470578,
"learning_rate": 3.553459119496855e-05,
"loss": 0.3346,
"step": 765
},
{
"epoch": 1.0819787985865725,
"grad_norm": 0.31029549077365864,
"learning_rate": 3.55083857442348e-05,
"loss": 0.3468,
"step": 766
},
{
"epoch": 1.08339222614841,
"grad_norm": 0.28918672051304645,
"learning_rate": 3.548218029350105e-05,
"loss": 0.3559,
"step": 767
},
{
"epoch": 1.0848056537102473,
"grad_norm": 0.3077934377501235,
"learning_rate": 3.54559748427673e-05,
"loss": 0.3311,
"step": 768
},
{
"epoch": 1.0862190812720849,
"grad_norm": 0.30547598317369473,
"learning_rate": 3.5429769392033546e-05,
"loss": 0.3382,
"step": 769
},
{
"epoch": 1.0876325088339223,
"grad_norm": 0.3043112227796262,
"learning_rate": 3.540356394129979e-05,
"loss": 0.335,
"step": 770
},
{
"epoch": 1.0890459363957596,
"grad_norm": 0.2788917956034447,
"learning_rate": 3.537735849056604e-05,
"loss": 0.335,
"step": 771
},
{
"epoch": 1.0904593639575972,
"grad_norm": 0.3109442123242191,
"learning_rate": 3.5351153039832286e-05,
"loss": 0.3519,
"step": 772
},
{
"epoch": 1.0918727915194346,
"grad_norm": 0.29668611607148526,
"learning_rate": 3.532494758909853e-05,
"loss": 0.3409,
"step": 773
},
{
"epoch": 1.093286219081272,
"grad_norm": 0.30181149658101886,
"learning_rate": 3.529874213836478e-05,
"loss": 0.3272,
"step": 774
},
{
"epoch": 1.0946996466431096,
"grad_norm": 0.32608875321042924,
"learning_rate": 3.527253668763103e-05,
"loss": 0.3548,
"step": 775
},
{
"epoch": 1.096113074204947,
"grad_norm": 0.3364692753172606,
"learning_rate": 3.524633123689728e-05,
"loss": 0.3309,
"step": 776
},
{
"epoch": 1.0975265017667843,
"grad_norm": 0.31573283771716787,
"learning_rate": 3.522012578616352e-05,
"loss": 0.3453,
"step": 777
},
{
"epoch": 1.098939929328622,
"grad_norm": 0.3572301942196435,
"learning_rate": 3.519392033542977e-05,
"loss": 0.3432,
"step": 778
},
{
"epoch": 1.1003533568904593,
"grad_norm": 0.26904681671653635,
"learning_rate": 3.516771488469602e-05,
"loss": 0.3235,
"step": 779
},
{
"epoch": 1.101766784452297,
"grad_norm": 0.4284450153912112,
"learning_rate": 3.514150943396227e-05,
"loss": 0.3405,
"step": 780
},
{
"epoch": 1.1031802120141343,
"grad_norm": 0.32464646378144624,
"learning_rate": 3.5115303983228514e-05,
"loss": 0.3576,
"step": 781
},
{
"epoch": 1.1045936395759717,
"grad_norm": 0.44385013923288386,
"learning_rate": 3.508909853249476e-05,
"loss": 0.3443,
"step": 782
},
{
"epoch": 1.1060070671378093,
"grad_norm": 0.2699178425865669,
"learning_rate": 3.506289308176101e-05,
"loss": 0.3364,
"step": 783
},
{
"epoch": 1.1074204946996467,
"grad_norm": 0.3172979655003759,
"learning_rate": 3.5036687631027254e-05,
"loss": 0.3193,
"step": 784
},
{
"epoch": 1.108833922261484,
"grad_norm": 0.29098309165573527,
"learning_rate": 3.50104821802935e-05,
"loss": 0.3225,
"step": 785
},
{
"epoch": 1.1102473498233216,
"grad_norm": 0.34284078521144634,
"learning_rate": 3.498427672955975e-05,
"loss": 0.3459,
"step": 786
},
{
"epoch": 1.111660777385159,
"grad_norm": 0.2760602529202556,
"learning_rate": 3.4958071278826e-05,
"loss": 0.3276,
"step": 787
},
{
"epoch": 1.1130742049469964,
"grad_norm": 0.35391611545355234,
"learning_rate": 3.493186582809225e-05,
"loss": 0.3441,
"step": 788
},
{
"epoch": 1.114487632508834,
"grad_norm": 0.31524867719436767,
"learning_rate": 3.490566037735849e-05,
"loss": 0.3406,
"step": 789
},
{
"epoch": 1.1159010600706714,
"grad_norm": 0.38290949037321304,
"learning_rate": 3.487945492662474e-05,
"loss": 0.3494,
"step": 790
},
{
"epoch": 1.1173144876325087,
"grad_norm": 0.3341855178450593,
"learning_rate": 3.485324947589099e-05,
"loss": 0.3216,
"step": 791
},
{
"epoch": 1.1187279151943463,
"grad_norm": 0.32872761182359755,
"learning_rate": 3.4827044025157235e-05,
"loss": 0.333,
"step": 792
},
{
"epoch": 1.1201413427561837,
"grad_norm": 0.3809848087395745,
"learning_rate": 3.480083857442348e-05,
"loss": 0.3567,
"step": 793
},
{
"epoch": 1.121554770318021,
"grad_norm": 0.3189574197880521,
"learning_rate": 3.477463312368973e-05,
"loss": 0.3591,
"step": 794
},
{
"epoch": 1.1229681978798587,
"grad_norm": 0.36368705014953745,
"learning_rate": 3.4748427672955975e-05,
"loss": 0.3484,
"step": 795
},
{
"epoch": 1.124381625441696,
"grad_norm": 0.3293951005375751,
"learning_rate": 3.472222222222222e-05,
"loss": 0.3426,
"step": 796
},
{
"epoch": 1.1257950530035337,
"grad_norm": 0.3630320557648167,
"learning_rate": 3.469601677148847e-05,
"loss": 0.339,
"step": 797
},
{
"epoch": 1.127208480565371,
"grad_norm": 0.3772811957677582,
"learning_rate": 3.466981132075472e-05,
"loss": 0.3704,
"step": 798
},
{
"epoch": 1.1286219081272084,
"grad_norm": 0.35645745489271935,
"learning_rate": 3.464360587002097e-05,
"loss": 0.3557,
"step": 799
},
{
"epoch": 1.130035335689046,
"grad_norm": 0.401081529271296,
"learning_rate": 3.4617400419287216e-05,
"loss": 0.334,
"step": 800
},
{
"epoch": 1.1314487632508834,
"grad_norm": 0.31989490480633215,
"learning_rate": 3.4591194968553456e-05,
"loss": 0.327,
"step": 801
},
{
"epoch": 1.1328621908127208,
"grad_norm": 0.35643433063791113,
"learning_rate": 3.456498951781971e-05,
"loss": 0.3339,
"step": 802
},
{
"epoch": 1.1342756183745584,
"grad_norm": 0.3705516295061189,
"learning_rate": 3.4538784067085956e-05,
"loss": 0.3497,
"step": 803
},
{
"epoch": 1.1356890459363957,
"grad_norm": 0.37539680939405173,
"learning_rate": 3.45125786163522e-05,
"loss": 0.3516,
"step": 804
},
{
"epoch": 1.1371024734982331,
"grad_norm": 0.33667891352317664,
"learning_rate": 3.448637316561845e-05,
"loss": 0.321,
"step": 805
},
{
"epoch": 1.1385159010600707,
"grad_norm": 0.3755659875611759,
"learning_rate": 3.4460167714884703e-05,
"loss": 0.3386,
"step": 806
},
{
"epoch": 1.139929328621908,
"grad_norm": 0.3698995064252585,
"learning_rate": 3.4433962264150943e-05,
"loss": 0.3509,
"step": 807
},
{
"epoch": 1.1413427561837457,
"grad_norm": 0.3738576634874047,
"learning_rate": 3.440775681341719e-05,
"loss": 0.3512,
"step": 808
},
{
"epoch": 1.142756183745583,
"grad_norm": 0.34864933179855984,
"learning_rate": 3.438155136268344e-05,
"loss": 0.3391,
"step": 809
},
{
"epoch": 1.1441696113074205,
"grad_norm": 0.4203032829558303,
"learning_rate": 3.435534591194969e-05,
"loss": 0.3283,
"step": 810
},
{
"epoch": 1.1455830388692578,
"grad_norm": 0.3488553335995196,
"learning_rate": 3.432914046121594e-05,
"loss": 0.3558,
"step": 811
},
{
"epoch": 1.1469964664310954,
"grad_norm": 0.36130189723373207,
"learning_rate": 3.4302935010482184e-05,
"loss": 0.3306,
"step": 812
},
{
"epoch": 1.1484098939929328,
"grad_norm": 0.34956184152561837,
"learning_rate": 3.4276729559748424e-05,
"loss": 0.3458,
"step": 813
},
{
"epoch": 1.1498233215547704,
"grad_norm": 0.38550746904378774,
"learning_rate": 3.425052410901468e-05,
"loss": 0.336,
"step": 814
},
{
"epoch": 1.1512367491166078,
"grad_norm": 0.2783995476635999,
"learning_rate": 3.4224318658280924e-05,
"loss": 0.3345,
"step": 815
},
{
"epoch": 1.1526501766784452,
"grad_norm": 0.39210074561598196,
"learning_rate": 3.419811320754717e-05,
"loss": 0.3309,
"step": 816
},
{
"epoch": 1.1540636042402828,
"grad_norm": 0.2983414927935504,
"learning_rate": 3.417190775681342e-05,
"loss": 0.3423,
"step": 817
},
{
"epoch": 1.1554770318021201,
"grad_norm": 0.35552225958905276,
"learning_rate": 3.414570230607967e-05,
"loss": 0.3461,
"step": 818
},
{
"epoch": 1.1568904593639575,
"grad_norm": 0.31397123089218903,
"learning_rate": 3.411949685534591e-05,
"loss": 0.342,
"step": 819
},
{
"epoch": 1.1583038869257951,
"grad_norm": 0.3701215005570763,
"learning_rate": 3.409329140461216e-05,
"loss": 0.3483,
"step": 820
},
{
"epoch": 1.1597173144876325,
"grad_norm": 0.32629498215915137,
"learning_rate": 3.4067085953878405e-05,
"loss": 0.3511,
"step": 821
},
{
"epoch": 1.1611307420494699,
"grad_norm": 0.32580323344433504,
"learning_rate": 3.404088050314466e-05,
"loss": 0.3323,
"step": 822
},
{
"epoch": 1.1625441696113075,
"grad_norm": 0.372623925594248,
"learning_rate": 3.4014675052410905e-05,
"loss": 0.3361,
"step": 823
},
{
"epoch": 1.1639575971731448,
"grad_norm": 0.298929756477785,
"learning_rate": 3.398846960167715e-05,
"loss": 0.3339,
"step": 824
},
{
"epoch": 1.1653710247349824,
"grad_norm": 0.3446082849208321,
"learning_rate": 3.39622641509434e-05,
"loss": 0.3387,
"step": 825
},
{
"epoch": 1.1667844522968198,
"grad_norm": 0.3510655917641569,
"learning_rate": 3.3936058700209646e-05,
"loss": 0.3494,
"step": 826
},
{
"epoch": 1.1681978798586572,
"grad_norm": 0.2925396019706847,
"learning_rate": 3.390985324947589e-05,
"loss": 0.3458,
"step": 827
},
{
"epoch": 1.1696113074204948,
"grad_norm": 0.33479619154762763,
"learning_rate": 3.388364779874214e-05,
"loss": 0.3394,
"step": 828
},
{
"epoch": 1.1710247349823322,
"grad_norm": 0.3561490838444327,
"learning_rate": 3.3857442348008386e-05,
"loss": 0.3638,
"step": 829
},
{
"epoch": 1.1724381625441695,
"grad_norm": 0.3427191886854321,
"learning_rate": 3.383123689727464e-05,
"loss": 0.3397,
"step": 830
},
{
"epoch": 1.1738515901060071,
"grad_norm": 0.3248573214138937,
"learning_rate": 3.380503144654088e-05,
"loss": 0.325,
"step": 831
},
{
"epoch": 1.1752650176678445,
"grad_norm": 0.3733586553897855,
"learning_rate": 3.3778825995807126e-05,
"loss": 0.3359,
"step": 832
},
{
"epoch": 1.176678445229682,
"grad_norm": 0.30488078199377217,
"learning_rate": 3.375262054507338e-05,
"loss": 0.3448,
"step": 833
},
{
"epoch": 1.1780918727915195,
"grad_norm": 0.34413687319443415,
"learning_rate": 3.3726415094339627e-05,
"loss": 0.3243,
"step": 834
},
{
"epoch": 1.1795053003533569,
"grad_norm": 0.3423168923347738,
"learning_rate": 3.370020964360587e-05,
"loss": 0.3443,
"step": 835
},
{
"epoch": 1.1809187279151943,
"grad_norm": 0.36383573539850994,
"learning_rate": 3.367400419287212e-05,
"loss": 0.3502,
"step": 836
},
{
"epoch": 1.1823321554770319,
"grad_norm": 0.3134424871738008,
"learning_rate": 3.364779874213837e-05,
"loss": 0.3437,
"step": 837
},
{
"epoch": 1.1837455830388692,
"grad_norm": 0.36021878672412416,
"learning_rate": 3.3621593291404614e-05,
"loss": 0.3382,
"step": 838
},
{
"epoch": 1.1851590106007066,
"grad_norm": 0.28915419933065434,
"learning_rate": 3.359538784067086e-05,
"loss": 0.3551,
"step": 839
},
{
"epoch": 1.1865724381625442,
"grad_norm": 0.39095931370358283,
"learning_rate": 3.356918238993711e-05,
"loss": 0.3298,
"step": 840
},
{
"epoch": 1.1879858657243816,
"grad_norm": 0.38921310834093126,
"learning_rate": 3.354297693920336e-05,
"loss": 0.3362,
"step": 841
},
{
"epoch": 1.1893992932862192,
"grad_norm": 0.3222005124044404,
"learning_rate": 3.351677148846961e-05,
"loss": 0.3589,
"step": 842
},
{
"epoch": 1.1908127208480566,
"grad_norm": 0.3379229455143538,
"learning_rate": 3.349056603773585e-05,
"loss": 0.3425,
"step": 843
},
{
"epoch": 1.192226148409894,
"grad_norm": 0.38229334792000824,
"learning_rate": 3.3464360587002094e-05,
"loss": 0.3777,
"step": 844
},
{
"epoch": 1.1936395759717315,
"grad_norm": 0.34502445225330275,
"learning_rate": 3.343815513626835e-05,
"loss": 0.3522,
"step": 845
},
{
"epoch": 1.195053003533569,
"grad_norm": 0.3620627333767539,
"learning_rate": 3.3411949685534595e-05,
"loss": 0.3465,
"step": 846
},
{
"epoch": 1.1964664310954063,
"grad_norm": 0.3405836437448439,
"learning_rate": 3.338574423480084e-05,
"loss": 0.3529,
"step": 847
},
{
"epoch": 1.197879858657244,
"grad_norm": 0.3098641477436777,
"learning_rate": 3.335953878406709e-05,
"loss": 0.3359,
"step": 848
},
{
"epoch": 1.1992932862190813,
"grad_norm": 0.3329965061801027,
"learning_rate": 3.3333333333333335e-05,
"loss": 0.3464,
"step": 849
},
{
"epoch": 1.2007067137809186,
"grad_norm": 0.3528410458562368,
"learning_rate": 3.330712788259958e-05,
"loss": 0.3263,
"step": 850
},
{
"epoch": 1.2021201413427562,
"grad_norm": 0.2938827685827857,
"learning_rate": 3.328092243186583e-05,
"loss": 0.323,
"step": 851
},
{
"epoch": 1.2035335689045936,
"grad_norm": 0.25101784573590114,
"learning_rate": 3.3254716981132075e-05,
"loss": 0.3347,
"step": 852
},
{
"epoch": 1.2049469964664312,
"grad_norm": 0.40209107020921026,
"learning_rate": 3.322851153039833e-05,
"loss": 0.3358,
"step": 853
},
{
"epoch": 1.2063604240282686,
"grad_norm": 0.2927854901104637,
"learning_rate": 3.320230607966457e-05,
"loss": 0.3472,
"step": 854
},
{
"epoch": 1.207773851590106,
"grad_norm": 0.3581091686668252,
"learning_rate": 3.3176100628930816e-05,
"loss": 0.3296,
"step": 855
},
{
"epoch": 1.2091872791519434,
"grad_norm": 0.2928721006809454,
"learning_rate": 3.314989517819706e-05,
"loss": 0.3405,
"step": 856
},
{
"epoch": 1.210600706713781,
"grad_norm": 0.3142322002172193,
"learning_rate": 3.3123689727463316e-05,
"loss": 0.3248,
"step": 857
},
{
"epoch": 1.2120141342756183,
"grad_norm": 0.3825097137176879,
"learning_rate": 3.309748427672956e-05,
"loss": 0.334,
"step": 858
},
{
"epoch": 1.213427561837456,
"grad_norm": 0.36284014207601395,
"learning_rate": 3.307127882599581e-05,
"loss": 0.3793,
"step": 859
},
{
"epoch": 1.2148409893992933,
"grad_norm": 0.2780637709002201,
"learning_rate": 3.3045073375262056e-05,
"loss": 0.3512,
"step": 860
},
{
"epoch": 1.2162544169611307,
"grad_norm": 0.4049339824959475,
"learning_rate": 3.30188679245283e-05,
"loss": 0.3559,
"step": 861
},
{
"epoch": 1.2176678445229683,
"grad_norm": 0.3020093899569334,
"learning_rate": 3.299266247379455e-05,
"loss": 0.3398,
"step": 862
},
{
"epoch": 1.2190812720848057,
"grad_norm": 0.2789302464942545,
"learning_rate": 3.2966457023060796e-05,
"loss": 0.3391,
"step": 863
},
{
"epoch": 1.220494699646643,
"grad_norm": 0.37291784901929326,
"learning_rate": 3.294025157232704e-05,
"loss": 0.3353,
"step": 864
},
{
"epoch": 1.2219081272084806,
"grad_norm": 0.257091947489492,
"learning_rate": 3.29140461215933e-05,
"loss": 0.3483,
"step": 865
},
{
"epoch": 1.223321554770318,
"grad_norm": 0.2988403014665894,
"learning_rate": 3.288784067085954e-05,
"loss": 0.3554,
"step": 866
},
{
"epoch": 1.2247349823321554,
"grad_norm": 0.3014573829255683,
"learning_rate": 3.2861635220125784e-05,
"loss": 0.3427,
"step": 867
},
{
"epoch": 1.226148409893993,
"grad_norm": 0.30879457687723194,
"learning_rate": 3.283542976939204e-05,
"loss": 0.3567,
"step": 868
},
{
"epoch": 1.2275618374558304,
"grad_norm": 0.2674020429114154,
"learning_rate": 3.2809224318658284e-05,
"loss": 0.3391,
"step": 869
},
{
"epoch": 1.228975265017668,
"grad_norm": 0.2960825948286288,
"learning_rate": 3.278301886792453e-05,
"loss": 0.3639,
"step": 870
},
{
"epoch": 1.2303886925795053,
"grad_norm": 0.318301982624263,
"learning_rate": 3.275681341719078e-05,
"loss": 0.3539,
"step": 871
},
{
"epoch": 1.2318021201413427,
"grad_norm": 0.29057071918846933,
"learning_rate": 3.2730607966457024e-05,
"loss": 0.335,
"step": 872
},
{
"epoch": 1.23321554770318,
"grad_norm": 0.30438415255306084,
"learning_rate": 3.270440251572327e-05,
"loss": 0.3401,
"step": 873
},
{
"epoch": 1.2346289752650177,
"grad_norm": 0.3227864278853179,
"learning_rate": 3.267819706498952e-05,
"loss": 0.3441,
"step": 874
},
{
"epoch": 1.236042402826855,
"grad_norm": 0.2893231761247737,
"learning_rate": 3.2651991614255765e-05,
"loss": 0.3531,
"step": 875
},
{
"epoch": 1.2374558303886927,
"grad_norm": 0.3880093064516775,
"learning_rate": 3.262578616352202e-05,
"loss": 0.3455,
"step": 876
},
{
"epoch": 1.23886925795053,
"grad_norm": 0.32308441528958987,
"learning_rate": 3.2599580712788265e-05,
"loss": 0.3497,
"step": 877
},
{
"epoch": 1.2402826855123674,
"grad_norm": 0.30571510565887045,
"learning_rate": 3.2573375262054505e-05,
"loss": 0.3419,
"step": 878
},
{
"epoch": 1.241696113074205,
"grad_norm": 0.35727418613583667,
"learning_rate": 3.254716981132075e-05,
"loss": 0.3323,
"step": 879
},
{
"epoch": 1.2431095406360424,
"grad_norm": 0.34608646060490333,
"learning_rate": 3.2520964360587005e-05,
"loss": 0.3458,
"step": 880
},
{
"epoch": 1.2445229681978798,
"grad_norm": 0.23237925043977584,
"learning_rate": 3.249475890985325e-05,
"loss": 0.3536,
"step": 881
},
{
"epoch": 1.2459363957597174,
"grad_norm": 0.4272525897142211,
"learning_rate": 3.24685534591195e-05,
"loss": 0.3604,
"step": 882
},
{
"epoch": 1.2473498233215548,
"grad_norm": 0.37056426855406654,
"learning_rate": 3.2442348008385745e-05,
"loss": 0.3532,
"step": 883
},
{
"epoch": 1.2487632508833921,
"grad_norm": 0.3088852039133549,
"learning_rate": 3.241614255765199e-05,
"loss": 0.332,
"step": 884
},
{
"epoch": 1.2501766784452297,
"grad_norm": 0.3517315993703657,
"learning_rate": 3.238993710691824e-05,
"loss": 0.3346,
"step": 885
},
{
"epoch": 1.251590106007067,
"grad_norm": 0.39353888440097556,
"learning_rate": 3.2363731656184486e-05,
"loss": 0.3271,
"step": 886
},
{
"epoch": 1.2530035335689047,
"grad_norm": 0.2831940146931687,
"learning_rate": 3.233752620545073e-05,
"loss": 0.3565,
"step": 887
},
{
"epoch": 1.254416961130742,
"grad_norm": 0.4726645708885642,
"learning_rate": 3.2311320754716986e-05,
"loss": 0.3513,
"step": 888
},
{
"epoch": 1.2558303886925795,
"grad_norm": 0.28501888628264627,
"learning_rate": 3.228511530398323e-05,
"loss": 0.3273,
"step": 889
},
{
"epoch": 1.2572438162544168,
"grad_norm": 0.33196467339854485,
"learning_rate": 3.225890985324947e-05,
"loss": 0.3376,
"step": 890
},
{
"epoch": 1.2586572438162544,
"grad_norm": 0.4020336946448539,
"learning_rate": 3.2232704402515726e-05,
"loss": 0.3235,
"step": 891
},
{
"epoch": 1.2600706713780918,
"grad_norm": 0.298284120326621,
"learning_rate": 3.220649895178197e-05,
"loss": 0.3328,
"step": 892
},
{
"epoch": 1.2614840989399294,
"grad_norm": 0.40768872339064965,
"learning_rate": 3.218029350104822e-05,
"loss": 0.3413,
"step": 893
},
{
"epoch": 1.2628975265017668,
"grad_norm": 0.310378165561836,
"learning_rate": 3.215408805031447e-05,
"loss": 0.3658,
"step": 894
},
{
"epoch": 1.2643109540636042,
"grad_norm": 0.34353424929439297,
"learning_rate": 3.2127882599580713e-05,
"loss": 0.3204,
"step": 895
},
{
"epoch": 1.2657243816254418,
"grad_norm": 0.31525844955635635,
"learning_rate": 3.210167714884696e-05,
"loss": 0.344,
"step": 896
},
{
"epoch": 1.2671378091872791,
"grad_norm": 0.30630363014507334,
"learning_rate": 3.207547169811321e-05,
"loss": 0.3377,
"step": 897
},
{
"epoch": 1.2685512367491167,
"grad_norm": 0.31971530826427,
"learning_rate": 3.2049266247379454e-05,
"loss": 0.3445,
"step": 898
},
{
"epoch": 1.2699646643109541,
"grad_norm": 0.3107811168847068,
"learning_rate": 3.202306079664571e-05,
"loss": 0.3529,
"step": 899
},
{
"epoch": 1.2713780918727915,
"grad_norm": 0.2924342125625325,
"learning_rate": 3.1996855345911954e-05,
"loss": 0.333,
"step": 900
},
{
"epoch": 1.2727915194346289,
"grad_norm": 0.3082214075116378,
"learning_rate": 3.19706498951782e-05,
"loss": 0.343,
"step": 901
},
{
"epoch": 1.2742049469964665,
"grad_norm": 0.26946917776669554,
"learning_rate": 3.194444444444444e-05,
"loss": 0.3515,
"step": 902
},
{
"epoch": 1.2756183745583038,
"grad_norm": 0.29927432270448473,
"learning_rate": 3.1918238993710694e-05,
"loss": 0.3317,
"step": 903
},
{
"epoch": 1.2770318021201414,
"grad_norm": 0.2696483798129391,
"learning_rate": 3.189203354297694e-05,
"loss": 0.3499,
"step": 904
},
{
"epoch": 1.2784452296819788,
"grad_norm": 0.29914390687732134,
"learning_rate": 3.186582809224319e-05,
"loss": 0.3554,
"step": 905
},
{
"epoch": 1.2798586572438162,
"grad_norm": 0.345881981351746,
"learning_rate": 3.1839622641509435e-05,
"loss": 0.3477,
"step": 906
},
{
"epoch": 1.2812720848056536,
"grad_norm": 0.29197736387142664,
"learning_rate": 3.181341719077569e-05,
"loss": 0.3451,
"step": 907
},
{
"epoch": 1.2826855123674912,
"grad_norm": 0.32668481738568206,
"learning_rate": 3.178721174004193e-05,
"loss": 0.3699,
"step": 908
},
{
"epoch": 1.2840989399293286,
"grad_norm": 0.27820225179654345,
"learning_rate": 3.1761006289308175e-05,
"loss": 0.351,
"step": 909
},
{
"epoch": 1.2855123674911662,
"grad_norm": 0.31158331895514724,
"learning_rate": 3.173480083857442e-05,
"loss": 0.3488,
"step": 910
},
{
"epoch": 1.2869257950530035,
"grad_norm": 0.3193538994074403,
"learning_rate": 3.1708595387840675e-05,
"loss": 0.3324,
"step": 911
},
{
"epoch": 1.288339222614841,
"grad_norm": 0.23242370112624888,
"learning_rate": 3.168238993710692e-05,
"loss": 0.352,
"step": 912
},
{
"epoch": 1.2897526501766785,
"grad_norm": 0.29276680331548366,
"learning_rate": 3.165618448637317e-05,
"loss": 0.3292,
"step": 913
},
{
"epoch": 1.2911660777385159,
"grad_norm": 0.28990684909251085,
"learning_rate": 3.162997903563941e-05,
"loss": 0.3523,
"step": 914
},
{
"epoch": 1.2925795053003535,
"grad_norm": 0.2576290053164335,
"learning_rate": 3.160377358490566e-05,
"loss": 0.34,
"step": 915
},
{
"epoch": 1.2939929328621909,
"grad_norm": 0.3146149983898465,
"learning_rate": 3.157756813417191e-05,
"loss": 0.3442,
"step": 916
},
{
"epoch": 1.2954063604240282,
"grad_norm": 0.28016391648733474,
"learning_rate": 3.1551362683438156e-05,
"loss": 0.3468,
"step": 917
},
{
"epoch": 1.2968197879858656,
"grad_norm": 0.30634808817894194,
"learning_rate": 3.15251572327044e-05,
"loss": 0.3347,
"step": 918
},
{
"epoch": 1.2982332155477032,
"grad_norm": 0.29268024121730857,
"learning_rate": 3.1498951781970656e-05,
"loss": 0.3571,
"step": 919
},
{
"epoch": 1.2996466431095406,
"grad_norm": 0.32922148889329417,
"learning_rate": 3.1472746331236896e-05,
"loss": 0.3301,
"step": 920
},
{
"epoch": 1.3010600706713782,
"grad_norm": 0.29807141050867336,
"learning_rate": 3.144654088050314e-05,
"loss": 0.3511,
"step": 921
},
{
"epoch": 1.3024734982332156,
"grad_norm": 0.26186501335955376,
"learning_rate": 3.142033542976939e-05,
"loss": 0.3255,
"step": 922
},
{
"epoch": 1.303886925795053,
"grad_norm": 0.34228584280484925,
"learning_rate": 3.1394129979035643e-05,
"loss": 0.3389,
"step": 923
},
{
"epoch": 1.3053003533568905,
"grad_norm": 0.2754720846148229,
"learning_rate": 3.136792452830189e-05,
"loss": 0.3375,
"step": 924
},
{
"epoch": 1.306713780918728,
"grad_norm": 0.2844457469194688,
"learning_rate": 3.134171907756814e-05,
"loss": 0.3449,
"step": 925
},
{
"epoch": 1.3081272084805653,
"grad_norm": 0.32237023314794594,
"learning_rate": 3.1315513626834384e-05,
"loss": 0.3514,
"step": 926
},
{
"epoch": 1.309540636042403,
"grad_norm": 0.3014762575173883,
"learning_rate": 3.128930817610063e-05,
"loss": 0.3291,
"step": 927
},
{
"epoch": 1.3109540636042403,
"grad_norm": 0.3123681176265126,
"learning_rate": 3.126310272536688e-05,
"loss": 0.3342,
"step": 928
},
{
"epoch": 1.3123674911660776,
"grad_norm": 0.31185588516743484,
"learning_rate": 3.1236897274633124e-05,
"loss": 0.3346,
"step": 929
},
{
"epoch": 1.3137809187279152,
"grad_norm": 0.3478400767331844,
"learning_rate": 3.121069182389937e-05,
"loss": 0.3601,
"step": 930
},
{
"epoch": 1.3151943462897526,
"grad_norm": 0.38625591476920607,
"learning_rate": 3.1184486373165624e-05,
"loss": 0.3511,
"step": 931
},
{
"epoch": 1.3166077738515902,
"grad_norm": 0.3110449326777183,
"learning_rate": 3.1158280922431864e-05,
"loss": 0.3494,
"step": 932
},
{
"epoch": 1.3180212014134276,
"grad_norm": 0.3000650497363699,
"learning_rate": 3.113207547169811e-05,
"loss": 0.3577,
"step": 933
},
{
"epoch": 1.319434628975265,
"grad_norm": 0.3009306420209947,
"learning_rate": 3.1105870020964365e-05,
"loss": 0.337,
"step": 934
},
{
"epoch": 1.3208480565371024,
"grad_norm": 0.3177344443328069,
"learning_rate": 3.107966457023061e-05,
"loss": 0.3551,
"step": 935
},
{
"epoch": 1.32226148409894,
"grad_norm": 0.2929617176198596,
"learning_rate": 3.105345911949686e-05,
"loss": 0.3445,
"step": 936
},
{
"epoch": 1.3236749116607773,
"grad_norm": 0.3094903613213098,
"learning_rate": 3.1027253668763105e-05,
"loss": 0.3491,
"step": 937
},
{
"epoch": 1.325088339222615,
"grad_norm": 0.303146679067632,
"learning_rate": 3.100104821802935e-05,
"loss": 0.3482,
"step": 938
},
{
"epoch": 1.3265017667844523,
"grad_norm": 0.29043350557209824,
"learning_rate": 3.09748427672956e-05,
"loss": 0.3249,
"step": 939
},
{
"epoch": 1.3279151943462897,
"grad_norm": 0.32437108210309734,
"learning_rate": 3.0948637316561845e-05,
"loss": 0.3358,
"step": 940
},
{
"epoch": 1.3293286219081273,
"grad_norm": 0.3102306925172366,
"learning_rate": 3.092243186582809e-05,
"loss": 0.3454,
"step": 941
},
{
"epoch": 1.3307420494699647,
"grad_norm": 0.31776235551299337,
"learning_rate": 3.0896226415094346e-05,
"loss": 0.3428,
"step": 942
},
{
"epoch": 1.332155477031802,
"grad_norm": 0.2964113367368599,
"learning_rate": 3.087002096436059e-05,
"loss": 0.3464,
"step": 943
},
{
"epoch": 1.3335689045936396,
"grad_norm": 0.29113909936289906,
"learning_rate": 3.084381551362683e-05,
"loss": 0.3304,
"step": 944
},
{
"epoch": 1.334982332155477,
"grad_norm": 0.30630955970591905,
"learning_rate": 3.081761006289308e-05,
"loss": 0.3405,
"step": 945
},
{
"epoch": 1.3363957597173144,
"grad_norm": 0.32752753083495867,
"learning_rate": 3.079140461215933e-05,
"loss": 0.3357,
"step": 946
},
{
"epoch": 1.337809187279152,
"grad_norm": 0.29146750324183673,
"learning_rate": 3.076519916142558e-05,
"loss": 0.3435,
"step": 947
},
{
"epoch": 1.3392226148409894,
"grad_norm": 0.4009742568475919,
"learning_rate": 3.0738993710691826e-05,
"loss": 0.3694,
"step": 948
},
{
"epoch": 1.340636042402827,
"grad_norm": 0.3438370490794015,
"learning_rate": 3.071278825995807e-05,
"loss": 0.3572,
"step": 949
},
{
"epoch": 1.3420494699646643,
"grad_norm": 0.36438160668178715,
"learning_rate": 3.068658280922432e-05,
"loss": 0.3406,
"step": 950
},
{
"epoch": 1.3434628975265017,
"grad_norm": 0.34991918467119265,
"learning_rate": 3.0660377358490567e-05,
"loss": 0.3405,
"step": 951
},
{
"epoch": 1.344876325088339,
"grad_norm": 0.29558754951173094,
"learning_rate": 3.063417190775681e-05,
"loss": 0.3308,
"step": 952
},
{
"epoch": 1.3462897526501767,
"grad_norm": 0.3074338684941389,
"learning_rate": 3.060796645702306e-05,
"loss": 0.3448,
"step": 953
},
{
"epoch": 1.347703180212014,
"grad_norm": 1.0810159203324765,
"learning_rate": 3.0581761006289314e-05,
"loss": 0.3369,
"step": 954
},
{
"epoch": 1.3491166077738517,
"grad_norm": 0.30203165966444323,
"learning_rate": 3.055555555555556e-05,
"loss": 0.3755,
"step": 955
},
{
"epoch": 1.350530035335689,
"grad_norm": 0.3254260301789604,
"learning_rate": 3.05293501048218e-05,
"loss": 0.3509,
"step": 956
},
{
"epoch": 1.3519434628975264,
"grad_norm": 0.3167427043162942,
"learning_rate": 3.050314465408805e-05,
"loss": 0.3528,
"step": 957
},
{
"epoch": 1.353356890459364,
"grad_norm": 0.2776204848773146,
"learning_rate": 3.0476939203354297e-05,
"loss": 0.3346,
"step": 958
},
{
"epoch": 1.3547703180212014,
"grad_norm": 0.3116696453125232,
"learning_rate": 3.0450733752620547e-05,
"loss": 0.354,
"step": 959
},
{
"epoch": 1.356183745583039,
"grad_norm": 0.26983295889890424,
"learning_rate": 3.0424528301886794e-05,
"loss": 0.3514,
"step": 960
},
{
"epoch": 1.3575971731448764,
"grad_norm": 0.3355402345061969,
"learning_rate": 3.0398322851153044e-05,
"loss": 0.3336,
"step": 961
},
{
"epoch": 1.3590106007067138,
"grad_norm": 0.304731288899329,
"learning_rate": 3.0372117400419288e-05,
"loss": 0.3743,
"step": 962
},
{
"epoch": 1.3604240282685511,
"grad_norm": 0.2809029511107724,
"learning_rate": 3.0345911949685535e-05,
"loss": 0.3417,
"step": 963
},
{
"epoch": 1.3618374558303887,
"grad_norm": 0.36155147279278943,
"learning_rate": 3.0319706498951785e-05,
"loss": 0.3508,
"step": 964
},
{
"epoch": 1.363250883392226,
"grad_norm": 0.4161098305303148,
"learning_rate": 3.029350104821803e-05,
"loss": 0.332,
"step": 965
},
{
"epoch": 1.3646643109540637,
"grad_norm": 0.3113605401986755,
"learning_rate": 3.0267295597484278e-05,
"loss": 0.3437,
"step": 966
},
{
"epoch": 1.366077738515901,
"grad_norm": 0.40454307069011347,
"learning_rate": 3.024109014675053e-05,
"loss": 0.3513,
"step": 967
},
{
"epoch": 1.3674911660777385,
"grad_norm": 0.3347349746738412,
"learning_rate": 3.0214884696016772e-05,
"loss": 0.3264,
"step": 968
},
{
"epoch": 1.3689045936395758,
"grad_norm": 0.29762677658911135,
"learning_rate": 3.018867924528302e-05,
"loss": 0.3391,
"step": 969
},
{
"epoch": 1.3703180212014134,
"grad_norm": 0.3253172958497518,
"learning_rate": 3.016247379454927e-05,
"loss": 0.3385,
"step": 970
},
{
"epoch": 1.3717314487632508,
"grad_norm": 0.32742297151658095,
"learning_rate": 3.0136268343815516e-05,
"loss": 0.347,
"step": 971
},
{
"epoch": 1.3731448763250884,
"grad_norm": 0.3160323097717696,
"learning_rate": 3.0110062893081766e-05,
"loss": 0.329,
"step": 972
},
{
"epoch": 1.3745583038869258,
"grad_norm": 0.3528196773233047,
"learning_rate": 3.0083857442348012e-05,
"loss": 0.3505,
"step": 973
},
{
"epoch": 1.3759717314487632,
"grad_norm": 0.308212171614256,
"learning_rate": 3.0057651991614256e-05,
"loss": 0.3436,
"step": 974
},
{
"epoch": 1.3773851590106008,
"grad_norm": 0.30284019935192724,
"learning_rate": 3.0031446540880503e-05,
"loss": 0.3533,
"step": 975
},
{
"epoch": 1.3787985865724381,
"grad_norm": 0.29157067929668934,
"learning_rate": 3.0005241090146753e-05,
"loss": 0.3562,
"step": 976
},
{
"epoch": 1.3802120141342757,
"grad_norm": 0.2977823272793427,
"learning_rate": 2.9979035639413e-05,
"loss": 0.3449,
"step": 977
},
{
"epoch": 1.3816254416961131,
"grad_norm": 0.31233797449774253,
"learning_rate": 2.995283018867925e-05,
"loss": 0.3531,
"step": 978
},
{
"epoch": 1.3830388692579505,
"grad_norm": 0.2812232417413985,
"learning_rate": 2.9926624737945496e-05,
"loss": 0.3268,
"step": 979
},
{
"epoch": 1.3844522968197879,
"grad_norm": 0.3130780215218055,
"learning_rate": 2.990041928721174e-05,
"loss": 0.3689,
"step": 980
},
{
"epoch": 1.3858657243816255,
"grad_norm": 0.289409267489802,
"learning_rate": 2.9874213836477987e-05,
"loss": 0.3331,
"step": 981
},
{
"epoch": 1.3872791519434629,
"grad_norm": 0.28772728959561455,
"learning_rate": 2.9848008385744237e-05,
"loss": 0.345,
"step": 982
},
{
"epoch": 1.3886925795053005,
"grad_norm": 0.33673412687643556,
"learning_rate": 2.9821802935010484e-05,
"loss": 0.3589,
"step": 983
},
{
"epoch": 1.3901060070671378,
"grad_norm": 0.3681444305628273,
"learning_rate": 2.9795597484276734e-05,
"loss": 0.338,
"step": 984
},
{
"epoch": 1.3915194346289752,
"grad_norm": 0.2908866124308732,
"learning_rate": 2.976939203354298e-05,
"loss": 0.3363,
"step": 985
},
{
"epoch": 1.3929328621908128,
"grad_norm": 0.3499372728309795,
"learning_rate": 2.9743186582809224e-05,
"loss": 0.3281,
"step": 986
},
{
"epoch": 1.3943462897526502,
"grad_norm": 0.3376539192372045,
"learning_rate": 2.971698113207547e-05,
"loss": 0.3582,
"step": 987
},
{
"epoch": 1.3957597173144876,
"grad_norm": 0.37240585565726714,
"learning_rate": 2.969077568134172e-05,
"loss": 0.3402,
"step": 988
},
{
"epoch": 1.3971731448763252,
"grad_norm": 0.3070582864572544,
"learning_rate": 2.9664570230607968e-05,
"loss": 0.3433,
"step": 989
},
{
"epoch": 1.3985865724381625,
"grad_norm": 0.3478842225175788,
"learning_rate": 2.9638364779874218e-05,
"loss": 0.3296,
"step": 990
},
{
"epoch": 1.4,
"grad_norm": 0.33089603883155594,
"learning_rate": 2.9612159329140464e-05,
"loss": 0.3354,
"step": 991
},
{
"epoch": 1.4014134275618375,
"grad_norm": 0.2919630510941769,
"learning_rate": 2.9585953878406708e-05,
"loss": 0.3494,
"step": 992
},
{
"epoch": 1.4028268551236749,
"grad_norm": 0.3337357816488823,
"learning_rate": 2.9559748427672958e-05,
"loss": 0.3383,
"step": 993
},
{
"epoch": 1.4042402826855125,
"grad_norm": 0.27413742554143183,
"learning_rate": 2.9533542976939205e-05,
"loss": 0.3317,
"step": 994
},
{
"epoch": 1.4056537102473499,
"grad_norm": 0.30883230758342234,
"learning_rate": 2.950733752620545e-05,
"loss": 0.3317,
"step": 995
},
{
"epoch": 1.4070671378091872,
"grad_norm": 0.273414098064158,
"learning_rate": 2.9481132075471702e-05,
"loss": 0.3543,
"step": 996
},
{
"epoch": 1.4084805653710246,
"grad_norm": 0.3403583774774878,
"learning_rate": 2.945492662473795e-05,
"loss": 0.3438,
"step": 997
},
{
"epoch": 1.4098939929328622,
"grad_norm": 0.2922535507396669,
"learning_rate": 2.9428721174004192e-05,
"loss": 0.3145,
"step": 998
},
{
"epoch": 1.4113074204946996,
"grad_norm": 0.27764710153612404,
"learning_rate": 2.9402515723270442e-05,
"loss": 0.3588,
"step": 999
},
{
"epoch": 1.4127208480565372,
"grad_norm": 0.3305418332760064,
"learning_rate": 2.937631027253669e-05,
"loss": 0.35,
"step": 1000
},
{
"epoch": 1.4141342756183746,
"grad_norm": 0.3975562233401403,
"learning_rate": 2.935010482180294e-05,
"loss": 0.3542,
"step": 1001
},
{
"epoch": 1.415547703180212,
"grad_norm": 0.26403609641086206,
"learning_rate": 2.9323899371069186e-05,
"loss": 0.3509,
"step": 1002
},
{
"epoch": 1.4169611307420495,
"grad_norm": 0.3801140654990358,
"learning_rate": 2.9297693920335433e-05,
"loss": 0.3608,
"step": 1003
},
{
"epoch": 1.418374558303887,
"grad_norm": 0.3144434731625939,
"learning_rate": 2.9271488469601676e-05,
"loss": 0.329,
"step": 1004
},
{
"epoch": 1.4197879858657243,
"grad_norm": 0.34435315291330854,
"learning_rate": 2.9245283018867926e-05,
"loss": 0.3387,
"step": 1005
},
{
"epoch": 1.421201413427562,
"grad_norm": 0.3414343504928365,
"learning_rate": 2.9219077568134173e-05,
"loss": 0.3549,
"step": 1006
},
{
"epoch": 1.4226148409893993,
"grad_norm": 0.3404496063465621,
"learning_rate": 2.9192872117400423e-05,
"loss": 0.3701,
"step": 1007
},
{
"epoch": 1.4240282685512367,
"grad_norm": 0.320096420317356,
"learning_rate": 2.916666666666667e-05,
"loss": 0.3232,
"step": 1008
},
{
"epoch": 1.4254416961130743,
"grad_norm": 0.2765564062765251,
"learning_rate": 2.9140461215932913e-05,
"loss": 0.3452,
"step": 1009
},
{
"epoch": 1.4268551236749116,
"grad_norm": 0.3296676044639395,
"learning_rate": 2.911425576519916e-05,
"loss": 0.3528,
"step": 1010
},
{
"epoch": 1.4282685512367492,
"grad_norm": 0.29493091866644805,
"learning_rate": 2.908805031446541e-05,
"loss": 0.3332,
"step": 1011
},
{
"epoch": 1.4296819787985866,
"grad_norm": 0.2725439874147952,
"learning_rate": 2.9061844863731657e-05,
"loss": 0.3254,
"step": 1012
},
{
"epoch": 1.431095406360424,
"grad_norm": 0.26570464842231023,
"learning_rate": 2.9035639412997907e-05,
"loss": 0.3387,
"step": 1013
},
{
"epoch": 1.4325088339222614,
"grad_norm": 0.30355319171082273,
"learning_rate": 2.9009433962264154e-05,
"loss": 0.3234,
"step": 1014
},
{
"epoch": 1.433922261484099,
"grad_norm": 0.27737555889934623,
"learning_rate": 2.8983228511530397e-05,
"loss": 0.356,
"step": 1015
},
{
"epoch": 1.4353356890459363,
"grad_norm": 0.3141059273692918,
"learning_rate": 2.8957023060796644e-05,
"loss": 0.3464,
"step": 1016
},
{
"epoch": 1.436749116607774,
"grad_norm": 0.3399670473110708,
"learning_rate": 2.8930817610062894e-05,
"loss": 0.3588,
"step": 1017
},
{
"epoch": 1.4381625441696113,
"grad_norm": 0.27579097439188677,
"learning_rate": 2.890461215932914e-05,
"loss": 0.3353,
"step": 1018
},
{
"epoch": 1.4395759717314487,
"grad_norm": 0.30143214879840796,
"learning_rate": 2.887840670859539e-05,
"loss": 0.3407,
"step": 1019
},
{
"epoch": 1.4409893992932863,
"grad_norm": 0.3590705294433039,
"learning_rate": 2.8852201257861638e-05,
"loss": 0.3312,
"step": 1020
},
{
"epoch": 1.4424028268551237,
"grad_norm": 0.3058896269752058,
"learning_rate": 2.882599580712788e-05,
"loss": 0.3203,
"step": 1021
},
{
"epoch": 1.4438162544169613,
"grad_norm": 0.3160955828467629,
"learning_rate": 2.8799790356394128e-05,
"loss": 0.3412,
"step": 1022
},
{
"epoch": 1.4452296819787986,
"grad_norm": 0.3593290792898773,
"learning_rate": 2.8773584905660378e-05,
"loss": 0.3579,
"step": 1023
},
{
"epoch": 1.446643109540636,
"grad_norm": 0.29211068770249016,
"learning_rate": 2.8747379454926625e-05,
"loss": 0.3235,
"step": 1024
},
{
"epoch": 1.4480565371024734,
"grad_norm": 0.3356614867945652,
"learning_rate": 2.8721174004192875e-05,
"loss": 0.3523,
"step": 1025
},
{
"epoch": 1.449469964664311,
"grad_norm": 0.2878812811141153,
"learning_rate": 2.8694968553459122e-05,
"loss": 0.3404,
"step": 1026
},
{
"epoch": 1.4508833922261484,
"grad_norm": 0.31974211440714906,
"learning_rate": 2.8668763102725365e-05,
"loss": 0.3526,
"step": 1027
},
{
"epoch": 1.452296819787986,
"grad_norm": 0.3190843140707295,
"learning_rate": 2.8642557651991615e-05,
"loss": 0.3413,
"step": 1028
},
{
"epoch": 1.4537102473498233,
"grad_norm": 0.2926802057740561,
"learning_rate": 2.8616352201257862e-05,
"loss": 0.3491,
"step": 1029
},
{
"epoch": 1.4551236749116607,
"grad_norm": 0.34591823241149644,
"learning_rate": 2.859014675052411e-05,
"loss": 0.3379,
"step": 1030
},
{
"epoch": 1.456537102473498,
"grad_norm": 0.29642062401579394,
"learning_rate": 2.856394129979036e-05,
"loss": 0.3541,
"step": 1031
},
{
"epoch": 1.4579505300353357,
"grad_norm": 0.288904086690414,
"learning_rate": 2.8537735849056606e-05,
"loss": 0.3546,
"step": 1032
},
{
"epoch": 1.459363957597173,
"grad_norm": 0.2816299000436851,
"learning_rate": 2.851153039832285e-05,
"loss": 0.3571,
"step": 1033
},
{
"epoch": 1.4607773851590107,
"grad_norm": 0.3627533205510595,
"learning_rate": 2.84853249475891e-05,
"loss": 0.3439,
"step": 1034
},
{
"epoch": 1.462190812720848,
"grad_norm": 0.3007554910495077,
"learning_rate": 2.8459119496855346e-05,
"loss": 0.3466,
"step": 1035
},
{
"epoch": 1.4636042402826854,
"grad_norm": 0.2665439160901115,
"learning_rate": 2.8432914046121596e-05,
"loss": 0.3215,
"step": 1036
},
{
"epoch": 1.465017667844523,
"grad_norm": 0.34961756498351276,
"learning_rate": 2.8406708595387843e-05,
"loss": 0.3612,
"step": 1037
},
{
"epoch": 1.4664310954063604,
"grad_norm": 0.290761301523157,
"learning_rate": 2.838050314465409e-05,
"loss": 0.3498,
"step": 1038
},
{
"epoch": 1.467844522968198,
"grad_norm": 0.3418989125415649,
"learning_rate": 2.8354297693920333e-05,
"loss": 0.3262,
"step": 1039
},
{
"epoch": 1.4692579505300354,
"grad_norm": 0.3107168829570856,
"learning_rate": 2.8328092243186583e-05,
"loss": 0.3613,
"step": 1040
},
{
"epoch": 1.4706713780918728,
"grad_norm": 0.30934370895982805,
"learning_rate": 2.830188679245283e-05,
"loss": 0.3576,
"step": 1041
},
{
"epoch": 1.4720848056537101,
"grad_norm": 0.29247554635821005,
"learning_rate": 2.827568134171908e-05,
"loss": 0.3477,
"step": 1042
},
{
"epoch": 1.4734982332155477,
"grad_norm": 0.29198013033750886,
"learning_rate": 2.8249475890985327e-05,
"loss": 0.3396,
"step": 1043
},
{
"epoch": 1.4749116607773851,
"grad_norm": 0.2702770159406501,
"learning_rate": 2.8223270440251577e-05,
"loss": 0.3443,
"step": 1044
},
{
"epoch": 1.4763250883392227,
"grad_norm": 0.30091945014757565,
"learning_rate": 2.8197064989517817e-05,
"loss": 0.3437,
"step": 1045
},
{
"epoch": 1.47773851590106,
"grad_norm": 0.25011662531321016,
"learning_rate": 2.8170859538784067e-05,
"loss": 0.3213,
"step": 1046
},
{
"epoch": 1.4791519434628975,
"grad_norm": 0.2947833247781145,
"learning_rate": 2.8144654088050314e-05,
"loss": 0.3398,
"step": 1047
},
{
"epoch": 1.4805653710247348,
"grad_norm": 0.30606384163203026,
"learning_rate": 2.8118448637316564e-05,
"loss": 0.329,
"step": 1048
},
{
"epoch": 1.4819787985865724,
"grad_norm": 0.27404564021674016,
"learning_rate": 2.809224318658281e-05,
"loss": 0.3397,
"step": 1049
},
{
"epoch": 1.4833922261484098,
"grad_norm": 0.28969677911269615,
"learning_rate": 2.806603773584906e-05,
"loss": 0.3319,
"step": 1050
},
{
"epoch": 1.4848056537102474,
"grad_norm": 0.28466621551954957,
"learning_rate": 2.80398322851153e-05,
"loss": 0.3675,
"step": 1051
},
{
"epoch": 1.4862190812720848,
"grad_norm": 0.30575110652617954,
"learning_rate": 2.801362683438155e-05,
"loss": 0.3237,
"step": 1052
},
{
"epoch": 1.4876325088339222,
"grad_norm": 0.2866350967579496,
"learning_rate": 2.7987421383647798e-05,
"loss": 0.3449,
"step": 1053
},
{
"epoch": 1.4890459363957598,
"grad_norm": 0.3096740276242457,
"learning_rate": 2.796121593291405e-05,
"loss": 0.342,
"step": 1054
},
{
"epoch": 1.4904593639575971,
"grad_norm": 0.334229524001031,
"learning_rate": 2.7935010482180295e-05,
"loss": 0.3585,
"step": 1055
},
{
"epoch": 1.4918727915194347,
"grad_norm": 0.3174009536083424,
"learning_rate": 2.7908805031446545e-05,
"loss": 0.3356,
"step": 1056
},
{
"epoch": 1.4932862190812721,
"grad_norm": 0.32647929485923316,
"learning_rate": 2.788259958071279e-05,
"loss": 0.3588,
"step": 1057
},
{
"epoch": 1.4946996466431095,
"grad_norm": 0.29929133520717854,
"learning_rate": 2.7856394129979035e-05,
"loss": 0.3423,
"step": 1058
},
{
"epoch": 1.4961130742049469,
"grad_norm": 0.2722027102455403,
"learning_rate": 2.7830188679245282e-05,
"loss": 0.3576,
"step": 1059
},
{
"epoch": 1.4975265017667845,
"grad_norm": 0.30846594744786987,
"learning_rate": 2.7803983228511532e-05,
"loss": 0.3567,
"step": 1060
},
{
"epoch": 1.4989399293286219,
"grad_norm": 0.3264278103630487,
"learning_rate": 2.777777777777778e-05,
"loss": 0.3467,
"step": 1061
},
{
"epoch": 1.5003533568904595,
"grad_norm": 0.27122843175821015,
"learning_rate": 2.775157232704403e-05,
"loss": 0.3382,
"step": 1062
},
{
"epoch": 1.5017667844522968,
"grad_norm": 0.33533852516340085,
"learning_rate": 2.7725366876310273e-05,
"loss": 0.3477,
"step": 1063
},
{
"epoch": 1.5031802120141342,
"grad_norm": 0.33883956983111974,
"learning_rate": 2.769916142557652e-05,
"loss": 0.3676,
"step": 1064
},
{
"epoch": 1.5045936395759716,
"grad_norm": 0.3366014222377103,
"learning_rate": 2.767295597484277e-05,
"loss": 0.3399,
"step": 1065
},
{
"epoch": 1.5060070671378092,
"grad_norm": 0.32635024216651276,
"learning_rate": 2.7646750524109016e-05,
"loss": 0.3311,
"step": 1066
},
{
"epoch": 1.5074204946996468,
"grad_norm": 0.33000675889614134,
"learning_rate": 2.7620545073375263e-05,
"loss": 0.3431,
"step": 1067
},
{
"epoch": 1.5088339222614842,
"grad_norm": 0.2799424605636112,
"learning_rate": 2.7594339622641513e-05,
"loss": 0.3405,
"step": 1068
},
{
"epoch": 1.5102473498233215,
"grad_norm": 0.3786617514249437,
"learning_rate": 2.7568134171907757e-05,
"loss": 0.3752,
"step": 1069
},
{
"epoch": 1.511660777385159,
"grad_norm": 0.30704229588847565,
"learning_rate": 2.7541928721174003e-05,
"loss": 0.3481,
"step": 1070
},
{
"epoch": 1.5130742049469965,
"grad_norm": 0.48382315235512613,
"learning_rate": 2.7515723270440254e-05,
"loss": 0.3494,
"step": 1071
},
{
"epoch": 1.514487632508834,
"grad_norm": 0.3310787584419615,
"learning_rate": 2.74895178197065e-05,
"loss": 0.343,
"step": 1072
},
{
"epoch": 1.5159010600706715,
"grad_norm": 0.3107875744970524,
"learning_rate": 2.746331236897275e-05,
"loss": 0.3344,
"step": 1073
},
{
"epoch": 1.5173144876325089,
"grad_norm": 0.32332150613744803,
"learning_rate": 2.7437106918238997e-05,
"loss": 0.3404,
"step": 1074
},
{
"epoch": 1.5187279151943462,
"grad_norm": 0.30487106876840153,
"learning_rate": 2.741090146750524e-05,
"loss": 0.351,
"step": 1075
},
{
"epoch": 1.5201413427561836,
"grad_norm": 0.28833749395453273,
"learning_rate": 2.7384696016771487e-05,
"loss": 0.3382,
"step": 1076
},
{
"epoch": 1.5215547703180212,
"grad_norm": 0.33420481013953296,
"learning_rate": 2.7358490566037738e-05,
"loss": 0.3506,
"step": 1077
},
{
"epoch": 1.5229681978798588,
"grad_norm": 0.28593971608409285,
"learning_rate": 2.7332285115303984e-05,
"loss": 0.3347,
"step": 1078
},
{
"epoch": 1.5243816254416962,
"grad_norm": 0.31884930045240883,
"learning_rate": 2.7306079664570235e-05,
"loss": 0.3377,
"step": 1079
},
{
"epoch": 1.5257950530035336,
"grad_norm": 0.3179139059547283,
"learning_rate": 2.727987421383648e-05,
"loss": 0.3428,
"step": 1080
},
{
"epoch": 1.527208480565371,
"grad_norm": 0.30097399617528825,
"learning_rate": 2.7253668763102725e-05,
"loss": 0.3249,
"step": 1081
},
{
"epoch": 1.5286219081272083,
"grad_norm": 0.2991511457911611,
"learning_rate": 2.722746331236897e-05,
"loss": 0.3478,
"step": 1082
},
{
"epoch": 1.530035335689046,
"grad_norm": 0.3145114454194086,
"learning_rate": 2.720125786163522e-05,
"loss": 0.3524,
"step": 1083
},
{
"epoch": 1.5314487632508835,
"grad_norm": 0.2597169113699356,
"learning_rate": 2.717505241090147e-05,
"loss": 0.3266,
"step": 1084
},
{
"epoch": 1.532862190812721,
"grad_norm": 0.3508034739269135,
"learning_rate": 2.714884696016772e-05,
"loss": 0.3241,
"step": 1085
},
{
"epoch": 1.5342756183745583,
"grad_norm": 0.27072461369878986,
"learning_rate": 2.7122641509433965e-05,
"loss": 0.3436,
"step": 1086
},
{
"epoch": 1.5356890459363957,
"grad_norm": 0.31223409907363103,
"learning_rate": 2.709643605870021e-05,
"loss": 0.3339,
"step": 1087
},
{
"epoch": 1.5371024734982333,
"grad_norm": 0.30003861824422157,
"learning_rate": 2.7070230607966455e-05,
"loss": 0.3407,
"step": 1088
},
{
"epoch": 1.5385159010600706,
"grad_norm": 0.3163343792656869,
"learning_rate": 2.7044025157232706e-05,
"loss": 0.3637,
"step": 1089
},
{
"epoch": 1.5399293286219082,
"grad_norm": 0.2699608901290185,
"learning_rate": 2.7017819706498952e-05,
"loss": 0.3514,
"step": 1090
},
{
"epoch": 1.5413427561837456,
"grad_norm": 0.2881126734192235,
"learning_rate": 2.6991614255765203e-05,
"loss": 0.3693,
"step": 1091
},
{
"epoch": 1.542756183745583,
"grad_norm": 0.3192680502579931,
"learning_rate": 2.696540880503145e-05,
"loss": 0.3508,
"step": 1092
},
{
"epoch": 1.5441696113074204,
"grad_norm": 0.3246958978303329,
"learning_rate": 2.6939203354297693e-05,
"loss": 0.3604,
"step": 1093
},
{
"epoch": 1.545583038869258,
"grad_norm": 0.29010802189315216,
"learning_rate": 2.6912997903563943e-05,
"loss": 0.3598,
"step": 1094
},
{
"epoch": 1.5469964664310956,
"grad_norm": 0.27329311496275605,
"learning_rate": 2.688679245283019e-05,
"loss": 0.3495,
"step": 1095
},
{
"epoch": 1.548409893992933,
"grad_norm": 0.33269596150419656,
"learning_rate": 2.6860587002096436e-05,
"loss": 0.3362,
"step": 1096
},
{
"epoch": 1.5498233215547703,
"grad_norm": 0.33736959586297993,
"learning_rate": 2.6834381551362687e-05,
"loss": 0.3398,
"step": 1097
},
{
"epoch": 1.5512367491166077,
"grad_norm": 0.25533888460632953,
"learning_rate": 2.6808176100628933e-05,
"loss": 0.3339,
"step": 1098
},
{
"epoch": 1.552650176678445,
"grad_norm": 0.3564326626523608,
"learning_rate": 2.6781970649895177e-05,
"loss": 0.3483,
"step": 1099
},
{
"epoch": 1.5540636042402827,
"grad_norm": 0.2927613947346261,
"learning_rate": 2.6755765199161427e-05,
"loss": 0.3382,
"step": 1100
},
{
"epoch": 1.5554770318021203,
"grad_norm": 0.26622642712053535,
"learning_rate": 2.6729559748427674e-05,
"loss": 0.3487,
"step": 1101
},
{
"epoch": 1.5568904593639576,
"grad_norm": 0.28987434308482385,
"learning_rate": 2.6703354297693924e-05,
"loss": 0.3421,
"step": 1102
},
{
"epoch": 1.558303886925795,
"grad_norm": 0.2674335691769351,
"learning_rate": 2.667714884696017e-05,
"loss": 0.3481,
"step": 1103
},
{
"epoch": 1.5597173144876324,
"grad_norm": 0.3163003167124997,
"learning_rate": 2.6650943396226417e-05,
"loss": 0.3504,
"step": 1104
},
{
"epoch": 1.56113074204947,
"grad_norm": 0.29406077754351867,
"learning_rate": 2.662473794549266e-05,
"loss": 0.3448,
"step": 1105
},
{
"epoch": 1.5625441696113074,
"grad_norm": 0.31364572773702964,
"learning_rate": 2.659853249475891e-05,
"loss": 0.3273,
"step": 1106
},
{
"epoch": 1.563957597173145,
"grad_norm": 0.28122918642094147,
"learning_rate": 2.6572327044025158e-05,
"loss": 0.3746,
"step": 1107
},
{
"epoch": 1.5653710247349824,
"grad_norm": 0.2965911249175889,
"learning_rate": 2.6546121593291408e-05,
"loss": 0.339,
"step": 1108
},
{
"epoch": 1.5667844522968197,
"grad_norm": 0.26787918085184786,
"learning_rate": 2.6519916142557655e-05,
"loss": 0.3386,
"step": 1109
},
{
"epoch": 1.568197879858657,
"grad_norm": 0.26257049010347755,
"learning_rate": 2.6493710691823905e-05,
"loss": 0.3613,
"step": 1110
},
{
"epoch": 1.5696113074204947,
"grad_norm": 0.2457428733861934,
"learning_rate": 2.6467505241090145e-05,
"loss": 0.3592,
"step": 1111
},
{
"epoch": 1.5710247349823323,
"grad_norm": 0.30942246219460157,
"learning_rate": 2.6441299790356395e-05,
"loss": 0.3297,
"step": 1112
},
{
"epoch": 1.5724381625441697,
"grad_norm": 0.30329228535668107,
"learning_rate": 2.641509433962264e-05,
"loss": 0.3426,
"step": 1113
},
{
"epoch": 1.573851590106007,
"grad_norm": 0.2560863244349175,
"learning_rate": 2.6388888888888892e-05,
"loss": 0.335,
"step": 1114
},
{
"epoch": 1.5752650176678444,
"grad_norm": 0.3099463835275014,
"learning_rate": 2.636268343815514e-05,
"loss": 0.365,
"step": 1115
},
{
"epoch": 1.5766784452296818,
"grad_norm": 0.32711392514779974,
"learning_rate": 2.633647798742139e-05,
"loss": 0.3396,
"step": 1116
},
{
"epoch": 1.5780918727915194,
"grad_norm": 0.2588202565843469,
"learning_rate": 2.631027253668763e-05,
"loss": 0.3387,
"step": 1117
},
{
"epoch": 1.579505300353357,
"grad_norm": 0.3272267697283742,
"learning_rate": 2.628406708595388e-05,
"loss": 0.3294,
"step": 1118
},
{
"epoch": 1.5809187279151944,
"grad_norm": 0.2929676109122929,
"learning_rate": 2.6257861635220126e-05,
"loss": 0.36,
"step": 1119
},
{
"epoch": 1.5823321554770318,
"grad_norm": 0.27481129883025895,
"learning_rate": 2.6231656184486376e-05,
"loss": 0.3396,
"step": 1120
},
{
"epoch": 1.5837455830388691,
"grad_norm": 0.26808596288343023,
"learning_rate": 2.6205450733752623e-05,
"loss": 0.3305,
"step": 1121
},
{
"epoch": 1.5851590106007067,
"grad_norm": 0.2600229427605683,
"learning_rate": 2.6179245283018873e-05,
"loss": 0.3739,
"step": 1122
},
{
"epoch": 1.5865724381625441,
"grad_norm": 0.30858883599926334,
"learning_rate": 2.6153039832285113e-05,
"loss": 0.3309,
"step": 1123
},
{
"epoch": 1.5879858657243817,
"grad_norm": 0.24471018760086208,
"learning_rate": 2.6126834381551363e-05,
"loss": 0.317,
"step": 1124
},
{
"epoch": 1.589399293286219,
"grad_norm": 0.3643991217847995,
"learning_rate": 2.610062893081761e-05,
"loss": 0.3469,
"step": 1125
},
{
"epoch": 1.5908127208480565,
"grad_norm": 0.27783370612221836,
"learning_rate": 2.607442348008386e-05,
"loss": 0.3417,
"step": 1126
},
{
"epoch": 1.5922261484098938,
"grad_norm": 0.27356896108247414,
"learning_rate": 2.6048218029350107e-05,
"loss": 0.3426,
"step": 1127
},
{
"epoch": 1.5936395759717314,
"grad_norm": 0.28983670238733594,
"learning_rate": 2.6022012578616357e-05,
"loss": 0.328,
"step": 1128
},
{
"epoch": 1.595053003533569,
"grad_norm": 0.2620547746986468,
"learning_rate": 2.59958071278826e-05,
"loss": 0.3515,
"step": 1129
},
{
"epoch": 1.5964664310954064,
"grad_norm": 0.3171274219886582,
"learning_rate": 2.5969601677148847e-05,
"loss": 0.3569,
"step": 1130
},
{
"epoch": 1.5978798586572438,
"grad_norm": 0.2638203950880888,
"learning_rate": 2.5943396226415094e-05,
"loss": 0.3299,
"step": 1131
},
{
"epoch": 1.5992932862190812,
"grad_norm": 0.27217260365045864,
"learning_rate": 2.5917190775681344e-05,
"loss": 0.3358,
"step": 1132
},
{
"epoch": 1.6007067137809188,
"grad_norm": 0.2575876490881197,
"learning_rate": 2.589098532494759e-05,
"loss": 0.3364,
"step": 1133
},
{
"epoch": 1.6021201413427562,
"grad_norm": 0.30877518441688395,
"learning_rate": 2.586477987421384e-05,
"loss": 0.3271,
"step": 1134
},
{
"epoch": 1.6035335689045938,
"grad_norm": 0.26202533043088616,
"learning_rate": 2.5838574423480084e-05,
"loss": 0.3507,
"step": 1135
},
{
"epoch": 1.6049469964664311,
"grad_norm": 0.29026478652122645,
"learning_rate": 2.581236897274633e-05,
"loss": 0.3281,
"step": 1136
},
{
"epoch": 1.6063604240282685,
"grad_norm": 0.2659507510079641,
"learning_rate": 2.578616352201258e-05,
"loss": 0.3411,
"step": 1137
},
{
"epoch": 1.6077738515901059,
"grad_norm": 0.275077465710653,
"learning_rate": 2.5759958071278828e-05,
"loss": 0.3407,
"step": 1138
},
{
"epoch": 1.6091872791519435,
"grad_norm": 0.31597839945605927,
"learning_rate": 2.5733752620545075e-05,
"loss": 0.344,
"step": 1139
},
{
"epoch": 1.610600706713781,
"grad_norm": 0.24913059491752368,
"learning_rate": 2.5707547169811325e-05,
"loss": 0.3468,
"step": 1140
},
{
"epoch": 1.6120141342756185,
"grad_norm": 0.2857575129105775,
"learning_rate": 2.5681341719077568e-05,
"loss": 0.3451,
"step": 1141
},
{
"epoch": 1.6134275618374558,
"grad_norm": 0.27138181599602973,
"learning_rate": 2.5655136268343815e-05,
"loss": 0.36,
"step": 1142
},
{
"epoch": 1.6148409893992932,
"grad_norm": 0.28927941367654625,
"learning_rate": 2.5628930817610065e-05,
"loss": 0.3536,
"step": 1143
},
{
"epoch": 1.6162544169611306,
"grad_norm": 0.2834267147853496,
"learning_rate": 2.5602725366876312e-05,
"loss": 0.3201,
"step": 1144
},
{
"epoch": 1.6176678445229682,
"grad_norm": 0.29127113746437655,
"learning_rate": 2.5576519916142562e-05,
"loss": 0.3326,
"step": 1145
},
{
"epoch": 1.6190812720848058,
"grad_norm": 0.3163818038939525,
"learning_rate": 2.555031446540881e-05,
"loss": 0.3523,
"step": 1146
},
{
"epoch": 1.6204946996466432,
"grad_norm": 0.28616216988568327,
"learning_rate": 2.5524109014675052e-05,
"loss": 0.3734,
"step": 1147
},
{
"epoch": 1.6219081272084805,
"grad_norm": 0.41979351593211023,
"learning_rate": 2.54979035639413e-05,
"loss": 0.3228,
"step": 1148
},
{
"epoch": 1.623321554770318,
"grad_norm": 0.28910789163297373,
"learning_rate": 2.547169811320755e-05,
"loss": 0.3452,
"step": 1149
},
{
"epoch": 1.6247349823321555,
"grad_norm": 0.3826685702471921,
"learning_rate": 2.5445492662473796e-05,
"loss": 0.341,
"step": 1150
},
{
"epoch": 1.626148409893993,
"grad_norm": 0.2833494849336923,
"learning_rate": 2.5419287211740046e-05,
"loss": 0.3433,
"step": 1151
},
{
"epoch": 1.6275618374558305,
"grad_norm": 0.31323888277153594,
"learning_rate": 2.5393081761006293e-05,
"loss": 0.3248,
"step": 1152
},
{
"epoch": 1.6289752650176679,
"grad_norm": 0.3187340850820309,
"learning_rate": 2.5366876310272536e-05,
"loss": 0.3422,
"step": 1153
},
{
"epoch": 1.6303886925795052,
"grad_norm": 0.41564184649859875,
"learning_rate": 2.5340670859538783e-05,
"loss": 0.3543,
"step": 1154
},
{
"epoch": 1.6318021201413426,
"grad_norm": 0.27857116334789866,
"learning_rate": 2.5314465408805033e-05,
"loss": 0.3291,
"step": 1155
},
{
"epoch": 1.6332155477031802,
"grad_norm": 0.2750702559201003,
"learning_rate": 2.528825995807128e-05,
"loss": 0.3275,
"step": 1156
},
{
"epoch": 1.6346289752650178,
"grad_norm": 0.3849138693692559,
"learning_rate": 2.526205450733753e-05,
"loss": 0.3405,
"step": 1157
},
{
"epoch": 1.6360424028268552,
"grad_norm": 0.2971853589416029,
"learning_rate": 2.5235849056603777e-05,
"loss": 0.3329,
"step": 1158
},
{
"epoch": 1.6374558303886926,
"grad_norm": 0.3495707200933462,
"learning_rate": 2.520964360587002e-05,
"loss": 0.3534,
"step": 1159
},
{
"epoch": 1.63886925795053,
"grad_norm": 0.2886100677200243,
"learning_rate": 2.5183438155136267e-05,
"loss": 0.3346,
"step": 1160
},
{
"epoch": 1.6402826855123673,
"grad_norm": 0.2983350671564064,
"learning_rate": 2.5157232704402517e-05,
"loss": 0.3381,
"step": 1161
},
{
"epoch": 1.641696113074205,
"grad_norm": 0.3473593881923298,
"learning_rate": 2.5131027253668764e-05,
"loss": 0.3452,
"step": 1162
},
{
"epoch": 1.6431095406360425,
"grad_norm": 0.2728023170277083,
"learning_rate": 2.5104821802935014e-05,
"loss": 0.3428,
"step": 1163
},
{
"epoch": 1.64452296819788,
"grad_norm": 0.35631472869448855,
"learning_rate": 2.507861635220126e-05,
"loss": 0.3543,
"step": 1164
},
{
"epoch": 1.6459363957597173,
"grad_norm": 0.339422548503301,
"learning_rate": 2.5052410901467504e-05,
"loss": 0.3325,
"step": 1165
},
{
"epoch": 1.6473498233215547,
"grad_norm": 0.24622620245924046,
"learning_rate": 2.5026205450733754e-05,
"loss": 0.3383,
"step": 1166
},
{
"epoch": 1.6487632508833923,
"grad_norm": 0.3025819322464276,
"learning_rate": 2.5e-05,
"loss": 0.3292,
"step": 1167
},
{
"epoch": 1.6501766784452296,
"grad_norm": 0.3282965059548612,
"learning_rate": 2.4973794549266248e-05,
"loss": 0.3558,
"step": 1168
},
{
"epoch": 1.6515901060070672,
"grad_norm": 0.3267746174096148,
"learning_rate": 2.4947589098532495e-05,
"loss": 0.3513,
"step": 1169
},
{
"epoch": 1.6530035335689046,
"grad_norm": 0.3091167619540997,
"learning_rate": 2.4921383647798745e-05,
"loss": 0.3356,
"step": 1170
},
{
"epoch": 1.654416961130742,
"grad_norm": 0.32085297612166397,
"learning_rate": 2.489517819706499e-05,
"loss": 0.3383,
"step": 1171
},
{
"epoch": 1.6558303886925794,
"grad_norm": 0.30864062802941145,
"learning_rate": 2.486897274633124e-05,
"loss": 0.3389,
"step": 1172
},
{
"epoch": 1.657243816254417,
"grad_norm": 0.3325030215874712,
"learning_rate": 2.4842767295597485e-05,
"loss": 0.3482,
"step": 1173
},
{
"epoch": 1.6586572438162546,
"grad_norm": 0.27879346966802393,
"learning_rate": 2.4816561844863735e-05,
"loss": 0.3668,
"step": 1174
},
{
"epoch": 1.660070671378092,
"grad_norm": 0.2934237149734627,
"learning_rate": 2.479035639412998e-05,
"loss": 0.3375,
"step": 1175
},
{
"epoch": 1.6614840989399293,
"grad_norm": 0.2982519312091363,
"learning_rate": 2.476415094339623e-05,
"loss": 0.3578,
"step": 1176
},
{
"epoch": 1.6628975265017667,
"grad_norm": 0.2548697781313368,
"learning_rate": 2.4737945492662476e-05,
"loss": 0.3364,
"step": 1177
},
{
"epoch": 1.664310954063604,
"grad_norm": 0.32663534900693203,
"learning_rate": 2.4711740041928722e-05,
"loss": 0.3235,
"step": 1178
},
{
"epoch": 1.6657243816254417,
"grad_norm": 0.2642937541810234,
"learning_rate": 2.468553459119497e-05,
"loss": 0.3178,
"step": 1179
},
{
"epoch": 1.6671378091872793,
"grad_norm": 0.3086345848406079,
"learning_rate": 2.465932914046122e-05,
"loss": 0.3546,
"step": 1180
},
{
"epoch": 1.6685512367491167,
"grad_norm": 0.2528770805498533,
"learning_rate": 2.4633123689727463e-05,
"loss": 0.3358,
"step": 1181
},
{
"epoch": 1.669964664310954,
"grad_norm": 0.2574895589122439,
"learning_rate": 2.4606918238993713e-05,
"loss": 0.3337,
"step": 1182
},
{
"epoch": 1.6713780918727914,
"grad_norm": 0.26450144249790586,
"learning_rate": 2.458071278825996e-05,
"loss": 0.3232,
"step": 1183
},
{
"epoch": 1.672791519434629,
"grad_norm": 0.24357444703199635,
"learning_rate": 2.4554507337526206e-05,
"loss": 0.3321,
"step": 1184
},
{
"epoch": 1.6742049469964664,
"grad_norm": 0.2834731698049858,
"learning_rate": 2.4528301886792453e-05,
"loss": 0.3295,
"step": 1185
},
{
"epoch": 1.675618374558304,
"grad_norm": 0.2669222994048844,
"learning_rate": 2.4502096436058703e-05,
"loss": 0.3318,
"step": 1186
},
{
"epoch": 1.6770318021201414,
"grad_norm": 0.2752251492926739,
"learning_rate": 2.4475890985324947e-05,
"loss": 0.3399,
"step": 1187
},
{
"epoch": 1.6784452296819787,
"grad_norm": 0.34083570972557564,
"learning_rate": 2.4449685534591197e-05,
"loss": 0.3435,
"step": 1188
},
{
"epoch": 1.6798586572438161,
"grad_norm": 0.24679053418481997,
"learning_rate": 2.4423480083857444e-05,
"loss": 0.3278,
"step": 1189
},
{
"epoch": 1.6812720848056537,
"grad_norm": 0.3100989637988737,
"learning_rate": 2.439727463312369e-05,
"loss": 0.3328,
"step": 1190
},
{
"epoch": 1.6826855123674913,
"grad_norm": 0.3000381441486398,
"learning_rate": 2.4371069182389937e-05,
"loss": 0.3719,
"step": 1191
},
{
"epoch": 1.6840989399293287,
"grad_norm": 0.27185107629174166,
"learning_rate": 2.4344863731656187e-05,
"loss": 0.351,
"step": 1192
},
{
"epoch": 1.685512367491166,
"grad_norm": 0.2890081933160051,
"learning_rate": 2.431865828092243e-05,
"loss": 0.3195,
"step": 1193
},
{
"epoch": 1.6869257950530034,
"grad_norm": 0.2513928101346897,
"learning_rate": 2.429245283018868e-05,
"loss": 0.3392,
"step": 1194
},
{
"epoch": 1.688339222614841,
"grad_norm": 0.30141138569473047,
"learning_rate": 2.4266247379454928e-05,
"loss": 0.3602,
"step": 1195
},
{
"epoch": 1.6897526501766784,
"grad_norm": 0.2835503490480039,
"learning_rate": 2.4240041928721174e-05,
"loss": 0.3386,
"step": 1196
},
{
"epoch": 1.691166077738516,
"grad_norm": 0.31299582106252904,
"learning_rate": 2.421383647798742e-05,
"loss": 0.3227,
"step": 1197
},
{
"epoch": 1.6925795053003534,
"grad_norm": 0.3154382071488017,
"learning_rate": 2.418763102725367e-05,
"loss": 0.3365,
"step": 1198
},
{
"epoch": 1.6939929328621908,
"grad_norm": 0.28339138218974613,
"learning_rate": 2.4161425576519918e-05,
"loss": 0.3485,
"step": 1199
},
{
"epoch": 1.6954063604240281,
"grad_norm": 0.29471282669994664,
"learning_rate": 2.4135220125786165e-05,
"loss": 0.3286,
"step": 1200
},
{
"epoch": 1.6968197879858657,
"grad_norm": 0.2718546688460231,
"learning_rate": 2.4109014675052412e-05,
"loss": 0.3374,
"step": 1201
},
{
"epoch": 1.6982332155477033,
"grad_norm": 0.3084697955741263,
"learning_rate": 2.408280922431866e-05,
"loss": 0.3277,
"step": 1202
},
{
"epoch": 1.6996466431095407,
"grad_norm": 0.27345499244618976,
"learning_rate": 2.405660377358491e-05,
"loss": 0.3512,
"step": 1203
},
{
"epoch": 1.701060070671378,
"grad_norm": 0.3668315630135521,
"learning_rate": 2.4030398322851155e-05,
"loss": 0.3427,
"step": 1204
},
{
"epoch": 1.7024734982332155,
"grad_norm": 0.3200517864620076,
"learning_rate": 2.4004192872117402e-05,
"loss": 0.3326,
"step": 1205
},
{
"epoch": 1.7038869257950529,
"grad_norm": 0.27372337211769604,
"learning_rate": 2.397798742138365e-05,
"loss": 0.3263,
"step": 1206
},
{
"epoch": 1.7053003533568905,
"grad_norm": 0.31146194637546876,
"learning_rate": 2.39517819706499e-05,
"loss": 0.353,
"step": 1207
},
{
"epoch": 1.706713780918728,
"grad_norm": 0.28957688252142394,
"learning_rate": 2.3925576519916143e-05,
"loss": 0.3464,
"step": 1208
},
{
"epoch": 1.7081272084805654,
"grad_norm": 0.30980507955684955,
"learning_rate": 2.3899371069182393e-05,
"loss": 0.3505,
"step": 1209
},
{
"epoch": 1.7095406360424028,
"grad_norm": 0.30605958791228793,
"learning_rate": 2.387316561844864e-05,
"loss": 0.3501,
"step": 1210
},
{
"epoch": 1.7109540636042402,
"grad_norm": 0.26475382087934834,
"learning_rate": 2.3846960167714886e-05,
"loss": 0.3358,
"step": 1211
},
{
"epoch": 1.7123674911660778,
"grad_norm": 0.2827165824529976,
"learning_rate": 2.3820754716981133e-05,
"loss": 0.3354,
"step": 1212
},
{
"epoch": 1.7137809187279152,
"grad_norm": 0.3165682600054934,
"learning_rate": 2.3794549266247383e-05,
"loss": 0.3442,
"step": 1213
},
{
"epoch": 1.7151943462897528,
"grad_norm": 0.26093972500431195,
"learning_rate": 2.3768343815513627e-05,
"loss": 0.3409,
"step": 1214
},
{
"epoch": 1.7166077738515901,
"grad_norm": 0.278038767995744,
"learning_rate": 2.3742138364779877e-05,
"loss": 0.3606,
"step": 1215
},
{
"epoch": 1.7180212014134275,
"grad_norm": 0.30422091739516843,
"learning_rate": 2.3715932914046123e-05,
"loss": 0.3582,
"step": 1216
},
{
"epoch": 1.719434628975265,
"grad_norm": 0.29899063371787454,
"learning_rate": 2.368972746331237e-05,
"loss": 0.3197,
"step": 1217
},
{
"epoch": 1.7208480565371025,
"grad_norm": 0.2678733203096524,
"learning_rate": 2.3663522012578617e-05,
"loss": 0.3461,
"step": 1218
},
{
"epoch": 1.72226148409894,
"grad_norm": 0.3086655039140425,
"learning_rate": 2.3637316561844867e-05,
"loss": 0.3293,
"step": 1219
},
{
"epoch": 1.7236749116607775,
"grad_norm": 0.2996994236734227,
"learning_rate": 2.361111111111111e-05,
"loss": 0.3401,
"step": 1220
},
{
"epoch": 1.7250883392226148,
"grad_norm": 0.33424130724125745,
"learning_rate": 2.358490566037736e-05,
"loss": 0.3391,
"step": 1221
},
{
"epoch": 1.7265017667844522,
"grad_norm": 0.29347333927740527,
"learning_rate": 2.3558700209643607e-05,
"loss": 0.3516,
"step": 1222
},
{
"epoch": 1.7279151943462896,
"grad_norm": 0.2865894204677293,
"learning_rate": 2.3532494758909854e-05,
"loss": 0.3262,
"step": 1223
},
{
"epoch": 1.7293286219081272,
"grad_norm": 0.29746219965159953,
"learning_rate": 2.35062893081761e-05,
"loss": 0.3421,
"step": 1224
},
{
"epoch": 1.7307420494699648,
"grad_norm": 0.2711034651732495,
"learning_rate": 2.348008385744235e-05,
"loss": 0.3573,
"step": 1225
},
{
"epoch": 1.7321554770318022,
"grad_norm": 0.2983748555661459,
"learning_rate": 2.3453878406708595e-05,
"loss": 0.3427,
"step": 1226
},
{
"epoch": 1.7335689045936395,
"grad_norm": 0.2541088945439432,
"learning_rate": 2.3427672955974845e-05,
"loss": 0.3211,
"step": 1227
},
{
"epoch": 1.734982332155477,
"grad_norm": 0.2934057796772585,
"learning_rate": 2.340146750524109e-05,
"loss": 0.373,
"step": 1228
},
{
"epoch": 1.7363957597173145,
"grad_norm": 0.2886268367013585,
"learning_rate": 2.3375262054507338e-05,
"loss": 0.3476,
"step": 1229
},
{
"epoch": 1.737809187279152,
"grad_norm": 0.27931693122278906,
"learning_rate": 2.3349056603773585e-05,
"loss": 0.3442,
"step": 1230
},
{
"epoch": 1.7392226148409895,
"grad_norm": 0.2947567877773335,
"learning_rate": 2.3322851153039835e-05,
"loss": 0.3182,
"step": 1231
},
{
"epoch": 1.7406360424028269,
"grad_norm": 0.28506107131423847,
"learning_rate": 2.329664570230608e-05,
"loss": 0.3389,
"step": 1232
},
{
"epoch": 1.7420494699646643,
"grad_norm": 0.2623924922017898,
"learning_rate": 2.327044025157233e-05,
"loss": 0.3314,
"step": 1233
},
{
"epoch": 1.7434628975265016,
"grad_norm": 0.3241097140656216,
"learning_rate": 2.3244234800838576e-05,
"loss": 0.3233,
"step": 1234
},
{
"epoch": 1.7448763250883392,
"grad_norm": 0.2779776974059651,
"learning_rate": 2.3218029350104822e-05,
"loss": 0.3315,
"step": 1235
},
{
"epoch": 1.7462897526501768,
"grad_norm": 0.27156427576086545,
"learning_rate": 2.319182389937107e-05,
"loss": 0.3323,
"step": 1236
},
{
"epoch": 1.7477031802120142,
"grad_norm": 0.29615368898161565,
"learning_rate": 2.316561844863732e-05,
"loss": 0.3411,
"step": 1237
},
{
"epoch": 1.7491166077738516,
"grad_norm": 0.2915750791916398,
"learning_rate": 2.3139412997903566e-05,
"loss": 0.3312,
"step": 1238
},
{
"epoch": 1.750530035335689,
"grad_norm": 0.25600520919029474,
"learning_rate": 2.3113207547169813e-05,
"loss": 0.3385,
"step": 1239
},
{
"epoch": 1.7519434628975263,
"grad_norm": 0.32159375603039364,
"learning_rate": 2.308700209643606e-05,
"loss": 0.3442,
"step": 1240
},
{
"epoch": 1.753356890459364,
"grad_norm": 0.31564555033466796,
"learning_rate": 2.3060796645702306e-05,
"loss": 0.3292,
"step": 1241
},
{
"epoch": 1.7547703180212015,
"grad_norm": 0.281720612820609,
"learning_rate": 2.3034591194968556e-05,
"loss": 0.3372,
"step": 1242
},
{
"epoch": 1.756183745583039,
"grad_norm": 0.2742563457537321,
"learning_rate": 2.3008385744234803e-05,
"loss": 0.341,
"step": 1243
},
{
"epoch": 1.7575971731448763,
"grad_norm": 0.28923557839016006,
"learning_rate": 2.298218029350105e-05,
"loss": 0.3423,
"step": 1244
},
{
"epoch": 1.7590106007067137,
"grad_norm": 0.29588676351801696,
"learning_rate": 2.2955974842767297e-05,
"loss": 0.3469,
"step": 1245
},
{
"epoch": 1.7604240282685513,
"grad_norm": 0.2889890484182882,
"learning_rate": 2.2929769392033547e-05,
"loss": 0.3364,
"step": 1246
},
{
"epoch": 1.7618374558303886,
"grad_norm": 0.28688788092554596,
"learning_rate": 2.290356394129979e-05,
"loss": 0.3582,
"step": 1247
},
{
"epoch": 1.7632508833922262,
"grad_norm": 0.27852372300577155,
"learning_rate": 2.287735849056604e-05,
"loss": 0.3497,
"step": 1248
},
{
"epoch": 1.7646643109540636,
"grad_norm": 0.4245847728654154,
"learning_rate": 2.2851153039832284e-05,
"loss": 0.3562,
"step": 1249
},
{
"epoch": 1.766077738515901,
"grad_norm": 0.25094646758800265,
"learning_rate": 2.2824947589098534e-05,
"loss": 0.3226,
"step": 1250
},
{
"epoch": 1.7674911660777384,
"grad_norm": 0.3050504923151135,
"learning_rate": 2.279874213836478e-05,
"loss": 0.3316,
"step": 1251
},
{
"epoch": 1.768904593639576,
"grad_norm": 0.2775743118147715,
"learning_rate": 2.2772536687631028e-05,
"loss": 0.3392,
"step": 1252
},
{
"epoch": 1.7703180212014136,
"grad_norm": 0.25770718195762177,
"learning_rate": 2.2746331236897274e-05,
"loss": 0.3265,
"step": 1253
},
{
"epoch": 1.771731448763251,
"grad_norm": 0.2875670685747609,
"learning_rate": 2.2720125786163524e-05,
"loss": 0.3495,
"step": 1254
},
{
"epoch": 1.7731448763250883,
"grad_norm": 0.30436920978538057,
"learning_rate": 2.2693920335429768e-05,
"loss": 0.3546,
"step": 1255
},
{
"epoch": 1.7745583038869257,
"grad_norm": 0.28715536803706454,
"learning_rate": 2.2667714884696018e-05,
"loss": 0.3444,
"step": 1256
},
{
"epoch": 1.7759717314487633,
"grad_norm": 0.28235090254093226,
"learning_rate": 2.2641509433962265e-05,
"loss": 0.3525,
"step": 1257
},
{
"epoch": 1.7773851590106007,
"grad_norm": 0.2989526109082147,
"learning_rate": 2.261530398322851e-05,
"loss": 0.3393,
"step": 1258
},
{
"epoch": 1.7787985865724383,
"grad_norm": 0.2725807851794358,
"learning_rate": 2.258909853249476e-05,
"loss": 0.3405,
"step": 1259
},
{
"epoch": 1.7802120141342757,
"grad_norm": 0.29757002110946107,
"learning_rate": 2.256289308176101e-05,
"loss": 0.3526,
"step": 1260
},
{
"epoch": 1.781625441696113,
"grad_norm": 0.3018244283464577,
"learning_rate": 2.2536687631027252e-05,
"loss": 0.3421,
"step": 1261
},
{
"epoch": 1.7830388692579504,
"grad_norm": 0.24510859834743487,
"learning_rate": 2.2510482180293502e-05,
"loss": 0.3538,
"step": 1262
},
{
"epoch": 1.784452296819788,
"grad_norm": 0.35473091518704697,
"learning_rate": 2.248427672955975e-05,
"loss": 0.3387,
"step": 1263
},
{
"epoch": 1.7858657243816256,
"grad_norm": 0.25883958453070033,
"learning_rate": 2.2458071278825996e-05,
"loss": 0.3303,
"step": 1264
},
{
"epoch": 1.787279151943463,
"grad_norm": 0.24929972225079142,
"learning_rate": 2.2431865828092242e-05,
"loss": 0.3304,
"step": 1265
},
{
"epoch": 1.7886925795053004,
"grad_norm": 0.32128530830621493,
"learning_rate": 2.2405660377358493e-05,
"loss": 0.344,
"step": 1266
},
{
"epoch": 1.7901060070671377,
"grad_norm": 0.2522722051068213,
"learning_rate": 2.237945492662474e-05,
"loss": 0.3362,
"step": 1267
},
{
"epoch": 1.7915194346289751,
"grad_norm": 0.321932330344027,
"learning_rate": 2.2353249475890986e-05,
"loss": 0.3512,
"step": 1268
},
{
"epoch": 1.7929328621908127,
"grad_norm": 0.2867525387095273,
"learning_rate": 2.2327044025157233e-05,
"loss": 0.3259,
"step": 1269
},
{
"epoch": 1.7943462897526503,
"grad_norm": 0.27342669358055044,
"learning_rate": 2.230083857442348e-05,
"loss": 0.3369,
"step": 1270
},
{
"epoch": 1.7957597173144877,
"grad_norm": 0.32291277889358544,
"learning_rate": 2.227463312368973e-05,
"loss": 0.3454,
"step": 1271
},
{
"epoch": 1.797173144876325,
"grad_norm": 0.26808271922764454,
"learning_rate": 2.2248427672955977e-05,
"loss": 0.334,
"step": 1272
},
{
"epoch": 1.7985865724381624,
"grad_norm": 0.31135952756840257,
"learning_rate": 2.2222222222222223e-05,
"loss": 0.355,
"step": 1273
},
{
"epoch": 1.8,
"grad_norm": 0.33739025036081594,
"learning_rate": 2.219601677148847e-05,
"loss": 0.3338,
"step": 1274
},
{
"epoch": 1.8014134275618374,
"grad_norm": 0.29747234528572913,
"learning_rate": 2.216981132075472e-05,
"loss": 0.3427,
"step": 1275
},
{
"epoch": 1.802826855123675,
"grad_norm": 0.2738006462107312,
"learning_rate": 2.2143605870020964e-05,
"loss": 0.3382,
"step": 1276
},
{
"epoch": 1.8042402826855124,
"grad_norm": 0.3201718807280449,
"learning_rate": 2.2117400419287214e-05,
"loss": 0.326,
"step": 1277
},
{
"epoch": 1.8056537102473498,
"grad_norm": 0.27308455193934006,
"learning_rate": 2.209119496855346e-05,
"loss": 0.3574,
"step": 1278
},
{
"epoch": 1.8070671378091872,
"grad_norm": 0.2546885035632462,
"learning_rate": 2.2064989517819707e-05,
"loss": 0.3465,
"step": 1279
},
{
"epoch": 1.8084805653710248,
"grad_norm": 0.3039028924913469,
"learning_rate": 2.2038784067085954e-05,
"loss": 0.3384,
"step": 1280
},
{
"epoch": 1.8098939929328623,
"grad_norm": 0.3080156940503247,
"learning_rate": 2.2012578616352204e-05,
"loss": 0.3718,
"step": 1281
},
{
"epoch": 1.8113074204946997,
"grad_norm": 0.2801383635751534,
"learning_rate": 2.1986373165618448e-05,
"loss": 0.3376,
"step": 1282
},
{
"epoch": 1.812720848056537,
"grad_norm": 0.27250755111012204,
"learning_rate": 2.1960167714884698e-05,
"loss": 0.3326,
"step": 1283
},
{
"epoch": 1.8141342756183745,
"grad_norm": 0.3042050747598181,
"learning_rate": 2.1933962264150945e-05,
"loss": 0.3536,
"step": 1284
},
{
"epoch": 1.8155477031802119,
"grad_norm": 0.3019538950210667,
"learning_rate": 2.190775681341719e-05,
"loss": 0.3255,
"step": 1285
},
{
"epoch": 1.8169611307420495,
"grad_norm": 0.2960054557759917,
"learning_rate": 2.1881551362683438e-05,
"loss": 0.3263,
"step": 1286
},
{
"epoch": 1.818374558303887,
"grad_norm": 0.28689967741091826,
"learning_rate": 2.1855345911949688e-05,
"loss": 0.3354,
"step": 1287
},
{
"epoch": 1.8197879858657244,
"grad_norm": 0.30449031969595985,
"learning_rate": 2.182914046121593e-05,
"loss": 0.3307,
"step": 1288
},
{
"epoch": 1.8212014134275618,
"grad_norm": 0.28051849904880405,
"learning_rate": 2.1802935010482182e-05,
"loss": 0.3399,
"step": 1289
},
{
"epoch": 1.8226148409893992,
"grad_norm": 0.2982026595901629,
"learning_rate": 2.177672955974843e-05,
"loss": 0.3429,
"step": 1290
},
{
"epoch": 1.8240282685512368,
"grad_norm": 0.3537772908275509,
"learning_rate": 2.1750524109014675e-05,
"loss": 0.3464,
"step": 1291
},
{
"epoch": 1.8254416961130742,
"grad_norm": 0.2688300097545258,
"learning_rate": 2.1724318658280922e-05,
"loss": 0.3435,
"step": 1292
},
{
"epoch": 1.8268551236749118,
"grad_norm": 0.29877539171193335,
"learning_rate": 2.1698113207547172e-05,
"loss": 0.3211,
"step": 1293
},
{
"epoch": 1.8282685512367491,
"grad_norm": 0.2732630422742897,
"learning_rate": 2.1671907756813416e-05,
"loss": 0.3535,
"step": 1294
},
{
"epoch": 1.8296819787985865,
"grad_norm": 0.26750654641717586,
"learning_rate": 2.1645702306079666e-05,
"loss": 0.3479,
"step": 1295
},
{
"epoch": 1.831095406360424,
"grad_norm": 0.3078044938284179,
"learning_rate": 2.1619496855345913e-05,
"loss": 0.3478,
"step": 1296
},
{
"epoch": 1.8325088339222615,
"grad_norm": 0.28393708854563326,
"learning_rate": 2.159329140461216e-05,
"loss": 0.3219,
"step": 1297
},
{
"epoch": 1.833922261484099,
"grad_norm": 0.25320634503475853,
"learning_rate": 2.1567085953878406e-05,
"loss": 0.3307,
"step": 1298
},
{
"epoch": 1.8353356890459365,
"grad_norm": 0.30633722195674584,
"learning_rate": 2.1540880503144656e-05,
"loss": 0.3699,
"step": 1299
},
{
"epoch": 1.8367491166077738,
"grad_norm": 0.2707219606302514,
"learning_rate": 2.1514675052410903e-05,
"loss": 0.3095,
"step": 1300
},
{
"epoch": 1.8381625441696112,
"grad_norm": 0.26635484011696825,
"learning_rate": 2.148846960167715e-05,
"loss": 0.3171,
"step": 1301
},
{
"epoch": 1.8395759717314486,
"grad_norm": 0.3050337198880779,
"learning_rate": 2.1462264150943397e-05,
"loss": 0.3648,
"step": 1302
},
{
"epoch": 1.8409893992932862,
"grad_norm": 0.2925289368001291,
"learning_rate": 2.1436058700209643e-05,
"loss": 0.3481,
"step": 1303
},
{
"epoch": 1.8424028268551238,
"grad_norm": 0.2684151219047965,
"learning_rate": 2.1409853249475894e-05,
"loss": 0.3396,
"step": 1304
},
{
"epoch": 1.8438162544169612,
"grad_norm": 0.3621813876046159,
"learning_rate": 2.138364779874214e-05,
"loss": 0.3501,
"step": 1305
},
{
"epoch": 1.8452296819787986,
"grad_norm": 0.30861364496158267,
"learning_rate": 2.1357442348008387e-05,
"loss": 0.3345,
"step": 1306
},
{
"epoch": 1.846643109540636,
"grad_norm": 0.3397167811319001,
"learning_rate": 2.1331236897274634e-05,
"loss": 0.323,
"step": 1307
},
{
"epoch": 1.8480565371024735,
"grad_norm": 0.2923448853626527,
"learning_rate": 2.1305031446540884e-05,
"loss": 0.3298,
"step": 1308
},
{
"epoch": 1.849469964664311,
"grad_norm": 0.3594971971601335,
"learning_rate": 2.1278825995807127e-05,
"loss": 0.3401,
"step": 1309
},
{
"epoch": 1.8508833922261485,
"grad_norm": 0.30566354898731907,
"learning_rate": 2.1252620545073378e-05,
"loss": 0.3383,
"step": 1310
},
{
"epoch": 1.8522968197879859,
"grad_norm": 0.36085354953444815,
"learning_rate": 2.1226415094339624e-05,
"loss": 0.3371,
"step": 1311
},
{
"epoch": 1.8537102473498233,
"grad_norm": 0.3589924298684753,
"learning_rate": 2.120020964360587e-05,
"loss": 0.3386,
"step": 1312
},
{
"epoch": 1.8551236749116606,
"grad_norm": 0.267104072416573,
"learning_rate": 2.1174004192872118e-05,
"loss": 0.3444,
"step": 1313
},
{
"epoch": 1.8565371024734982,
"grad_norm": 0.31746477217597324,
"learning_rate": 2.1147798742138368e-05,
"loss": 0.3114,
"step": 1314
},
{
"epoch": 1.8579505300353358,
"grad_norm": 0.29761813917823465,
"learning_rate": 2.112159329140461e-05,
"loss": 0.3375,
"step": 1315
},
{
"epoch": 1.8593639575971732,
"grad_norm": 0.27276833465911504,
"learning_rate": 2.109538784067086e-05,
"loss": 0.342,
"step": 1316
},
{
"epoch": 1.8607773851590106,
"grad_norm": 0.2728610027877017,
"learning_rate": 2.106918238993711e-05,
"loss": 0.3425,
"step": 1317
},
{
"epoch": 1.862190812720848,
"grad_norm": 0.3112116492464612,
"learning_rate": 2.1042976939203355e-05,
"loss": 0.3295,
"step": 1318
},
{
"epoch": 1.8636042402826856,
"grad_norm": 0.2624011882043068,
"learning_rate": 2.1016771488469602e-05,
"loss": 0.347,
"step": 1319
},
{
"epoch": 1.865017667844523,
"grad_norm": 0.30799218491945024,
"learning_rate": 2.0990566037735852e-05,
"loss": 0.3523,
"step": 1320
},
{
"epoch": 1.8664310954063605,
"grad_norm": 0.2742437623401547,
"learning_rate": 2.0964360587002095e-05,
"loss": 0.3314,
"step": 1321
},
{
"epoch": 1.867844522968198,
"grad_norm": 0.2808161844680405,
"learning_rate": 2.0938155136268346e-05,
"loss": 0.3499,
"step": 1322
},
{
"epoch": 1.8692579505300353,
"grad_norm": 0.2771996708989456,
"learning_rate": 2.0911949685534592e-05,
"loss": 0.3379,
"step": 1323
},
{
"epoch": 1.8706713780918727,
"grad_norm": 0.24519006259036635,
"learning_rate": 2.088574423480084e-05,
"loss": 0.3196,
"step": 1324
},
{
"epoch": 1.8720848056537103,
"grad_norm": 0.26827064134313067,
"learning_rate": 2.0859538784067086e-05,
"loss": 0.3507,
"step": 1325
},
{
"epoch": 1.8734982332155476,
"grad_norm": 0.2690606376095561,
"learning_rate": 2.0833333333333336e-05,
"loss": 0.3357,
"step": 1326
},
{
"epoch": 1.8749116607773852,
"grad_norm": 0.2529676703934084,
"learning_rate": 2.080712788259958e-05,
"loss": 0.3586,
"step": 1327
},
{
"epoch": 1.8763250883392226,
"grad_norm": 0.2761519125252781,
"learning_rate": 2.078092243186583e-05,
"loss": 0.3339,
"step": 1328
},
{
"epoch": 1.87773851590106,
"grad_norm": 0.2719667970227588,
"learning_rate": 2.0754716981132076e-05,
"loss": 0.3336,
"step": 1329
},
{
"epoch": 1.8791519434628974,
"grad_norm": 0.2988714341496327,
"learning_rate": 2.0728511530398323e-05,
"loss": 0.353,
"step": 1330
},
{
"epoch": 1.880565371024735,
"grad_norm": 0.2789051886235331,
"learning_rate": 2.070230607966457e-05,
"loss": 0.3295,
"step": 1331
},
{
"epoch": 1.8819787985865726,
"grad_norm": 0.24091079961785297,
"learning_rate": 2.067610062893082e-05,
"loss": 0.3412,
"step": 1332
},
{
"epoch": 1.88339222614841,
"grad_norm": 0.24460765690099212,
"learning_rate": 2.0649895178197063e-05,
"loss": 0.3431,
"step": 1333
},
{
"epoch": 1.8848056537102473,
"grad_norm": 0.2630396312705882,
"learning_rate": 2.0623689727463314e-05,
"loss": 0.3248,
"step": 1334
},
{
"epoch": 1.8862190812720847,
"grad_norm": 0.25511152821938765,
"learning_rate": 2.059748427672956e-05,
"loss": 0.3478,
"step": 1335
},
{
"epoch": 1.8876325088339223,
"grad_norm": 0.26627701080163624,
"learning_rate": 2.0571278825995807e-05,
"loss": 0.3448,
"step": 1336
},
{
"epoch": 1.8890459363957597,
"grad_norm": 0.2441561659184287,
"learning_rate": 2.0545073375262054e-05,
"loss": 0.3459,
"step": 1337
},
{
"epoch": 1.8904593639575973,
"grad_norm": 0.2601519400629045,
"learning_rate": 2.0518867924528304e-05,
"loss": 0.3439,
"step": 1338
},
{
"epoch": 1.8918727915194347,
"grad_norm": 0.24861913448955664,
"learning_rate": 2.049266247379455e-05,
"loss": 0.3454,
"step": 1339
},
{
"epoch": 1.893286219081272,
"grad_norm": 0.2513215329036349,
"learning_rate": 2.0466457023060798e-05,
"loss": 0.3538,
"step": 1340
},
{
"epoch": 1.8946996466431094,
"grad_norm": 0.2617515176873383,
"learning_rate": 2.0440251572327044e-05,
"loss": 0.3327,
"step": 1341
},
{
"epoch": 1.896113074204947,
"grad_norm": 0.24555804695957803,
"learning_rate": 2.041404612159329e-05,
"loss": 0.3252,
"step": 1342
},
{
"epoch": 1.8975265017667846,
"grad_norm": 0.2276645411670321,
"learning_rate": 2.038784067085954e-05,
"loss": 0.3394,
"step": 1343
},
{
"epoch": 1.898939929328622,
"grad_norm": 0.26961044775529774,
"learning_rate": 2.0361635220125788e-05,
"loss": 0.3516,
"step": 1344
},
{
"epoch": 1.9003533568904594,
"grad_norm": 0.2677820570880079,
"learning_rate": 2.0335429769392035e-05,
"loss": 0.3429,
"step": 1345
},
{
"epoch": 1.9017667844522967,
"grad_norm": 0.23493388499566126,
"learning_rate": 2.030922431865828e-05,
"loss": 0.3585,
"step": 1346
},
{
"epoch": 1.9031802120141341,
"grad_norm": 0.29220310892867357,
"learning_rate": 2.0283018867924532e-05,
"loss": 0.3453,
"step": 1347
},
{
"epoch": 1.9045936395759717,
"grad_norm": 0.25422034208386085,
"learning_rate": 2.0256813417190775e-05,
"loss": 0.3187,
"step": 1348
},
{
"epoch": 1.9060070671378093,
"grad_norm": 0.27724887572053236,
"learning_rate": 2.0230607966457025e-05,
"loss": 0.3524,
"step": 1349
},
{
"epoch": 1.9074204946996467,
"grad_norm": 0.26454990566281505,
"learning_rate": 2.0204402515723272e-05,
"loss": 0.3421,
"step": 1350
},
{
"epoch": 1.908833922261484,
"grad_norm": 0.27722300434594666,
"learning_rate": 2.017819706498952e-05,
"loss": 0.3666,
"step": 1351
},
{
"epoch": 1.9102473498233214,
"grad_norm": 0.2943199406563023,
"learning_rate": 2.0151991614255766e-05,
"loss": 0.3399,
"step": 1352
},
{
"epoch": 1.911660777385159,
"grad_norm": 0.266917836463146,
"learning_rate": 2.0125786163522016e-05,
"loss": 0.3487,
"step": 1353
},
{
"epoch": 1.9130742049469964,
"grad_norm": 0.2733602674836366,
"learning_rate": 2.009958071278826e-05,
"loss": 0.3446,
"step": 1354
},
{
"epoch": 1.914487632508834,
"grad_norm": 0.28970407261780934,
"learning_rate": 2.007337526205451e-05,
"loss": 0.3475,
"step": 1355
},
{
"epoch": 1.9159010600706714,
"grad_norm": 0.2665735345217564,
"learning_rate": 2.0047169811320756e-05,
"loss": 0.3447,
"step": 1356
},
{
"epoch": 1.9173144876325088,
"grad_norm": 0.27510813977714343,
"learning_rate": 2.0020964360587003e-05,
"loss": 0.3554,
"step": 1357
},
{
"epoch": 1.9187279151943462,
"grad_norm": 0.28319352464190384,
"learning_rate": 1.999475890985325e-05,
"loss": 0.327,
"step": 1358
},
{
"epoch": 1.9201413427561838,
"grad_norm": 0.2519973930017835,
"learning_rate": 1.99685534591195e-05,
"loss": 0.3559,
"step": 1359
},
{
"epoch": 1.9215547703180214,
"grad_norm": 0.2941757379282443,
"learning_rate": 1.9942348008385743e-05,
"loss": 0.3261,
"step": 1360
},
{
"epoch": 1.9229681978798587,
"grad_norm": 0.2477036814138157,
"learning_rate": 1.9916142557651993e-05,
"loss": 0.3351,
"step": 1361
},
{
"epoch": 1.924381625441696,
"grad_norm": 0.24804294609175076,
"learning_rate": 1.988993710691824e-05,
"loss": 0.3095,
"step": 1362
},
{
"epoch": 1.9257950530035335,
"grad_norm": 0.27649105068498375,
"learning_rate": 1.9863731656184487e-05,
"loss": 0.3453,
"step": 1363
},
{
"epoch": 1.9272084805653709,
"grad_norm": 0.24649914789198402,
"learning_rate": 1.9837526205450734e-05,
"loss": 0.331,
"step": 1364
},
{
"epoch": 1.9286219081272085,
"grad_norm": 0.24568642745091362,
"learning_rate": 1.9811320754716984e-05,
"loss": 0.3468,
"step": 1365
},
{
"epoch": 1.930035335689046,
"grad_norm": 0.2891966452728904,
"learning_rate": 1.9785115303983227e-05,
"loss": 0.3235,
"step": 1366
},
{
"epoch": 1.9314487632508834,
"grad_norm": 0.23954882020189067,
"learning_rate": 1.9758909853249477e-05,
"loss": 0.3426,
"step": 1367
},
{
"epoch": 1.9328621908127208,
"grad_norm": 0.2464974047366223,
"learning_rate": 1.9732704402515724e-05,
"loss": 0.3384,
"step": 1368
},
{
"epoch": 1.9342756183745582,
"grad_norm": 0.2699821732169366,
"learning_rate": 1.970649895178197e-05,
"loss": 0.3381,
"step": 1369
},
{
"epoch": 1.9356890459363958,
"grad_norm": 0.2612229973202514,
"learning_rate": 1.9680293501048218e-05,
"loss": 0.3242,
"step": 1370
},
{
"epoch": 1.9371024734982332,
"grad_norm": 0.24744748911694578,
"learning_rate": 1.9654088050314468e-05,
"loss": 0.3396,
"step": 1371
},
{
"epoch": 1.9385159010600708,
"grad_norm": 0.2633400293779395,
"learning_rate": 1.9627882599580715e-05,
"loss": 0.3347,
"step": 1372
},
{
"epoch": 1.9399293286219081,
"grad_norm": 0.2740610643340273,
"learning_rate": 1.960167714884696e-05,
"loss": 0.3334,
"step": 1373
},
{
"epoch": 1.9413427561837455,
"grad_norm": 0.24575362552934935,
"learning_rate": 1.9575471698113208e-05,
"loss": 0.3326,
"step": 1374
},
{
"epoch": 1.942756183745583,
"grad_norm": 0.2703406969520792,
"learning_rate": 1.9549266247379455e-05,
"loss": 0.335,
"step": 1375
},
{
"epoch": 1.9441696113074205,
"grad_norm": 0.2887020402824004,
"learning_rate": 1.9523060796645705e-05,
"loss": 0.3538,
"step": 1376
},
{
"epoch": 1.945583038869258,
"grad_norm": 0.2777649459848222,
"learning_rate": 1.9496855345911952e-05,
"loss": 0.3376,
"step": 1377
},
{
"epoch": 1.9469964664310955,
"grad_norm": 0.23996465818270984,
"learning_rate": 1.94706498951782e-05,
"loss": 0.3472,
"step": 1378
},
{
"epoch": 1.9484098939929329,
"grad_norm": 0.28368902752442227,
"learning_rate": 1.9444444444444445e-05,
"loss": 0.3368,
"step": 1379
},
{
"epoch": 1.9498233215547702,
"grad_norm": 0.3002368104228538,
"learning_rate": 1.9418238993710696e-05,
"loss": 0.3144,
"step": 1380
},
{
"epoch": 1.9512367491166078,
"grad_norm": 0.23076244573292265,
"learning_rate": 1.939203354297694e-05,
"loss": 0.3252,
"step": 1381
},
{
"epoch": 1.9526501766784452,
"grad_norm": 0.2797759287943641,
"learning_rate": 1.936582809224319e-05,
"loss": 0.3509,
"step": 1382
},
{
"epoch": 1.9540636042402828,
"grad_norm": 0.30441101410415045,
"learning_rate": 1.9339622641509436e-05,
"loss": 0.3335,
"step": 1383
},
{
"epoch": 1.9554770318021202,
"grad_norm": 0.261120607021978,
"learning_rate": 1.9313417190775683e-05,
"loss": 0.33,
"step": 1384
},
{
"epoch": 1.9568904593639576,
"grad_norm": 0.2578751126290832,
"learning_rate": 1.928721174004193e-05,
"loss": 0.3584,
"step": 1385
},
{
"epoch": 1.958303886925795,
"grad_norm": 0.29341650527607166,
"learning_rate": 1.926100628930818e-05,
"loss": 0.3569,
"step": 1386
},
{
"epoch": 1.9597173144876325,
"grad_norm": 0.28532014425314356,
"learning_rate": 1.9234800838574423e-05,
"loss": 0.3431,
"step": 1387
},
{
"epoch": 1.96113074204947,
"grad_norm": 0.2439296185811766,
"learning_rate": 1.9208595387840673e-05,
"loss": 0.3374,
"step": 1388
},
{
"epoch": 1.9625441696113075,
"grad_norm": 0.26951247272496226,
"learning_rate": 1.918238993710692e-05,
"loss": 0.3471,
"step": 1389
},
{
"epoch": 1.9639575971731449,
"grad_norm": 0.2755414011615463,
"learning_rate": 1.9156184486373167e-05,
"loss": 0.3355,
"step": 1390
},
{
"epoch": 1.9653710247349823,
"grad_norm": 0.25766731761611017,
"learning_rate": 1.9129979035639413e-05,
"loss": 0.345,
"step": 1391
},
{
"epoch": 1.9667844522968196,
"grad_norm": 0.27971953298754065,
"learning_rate": 1.9103773584905664e-05,
"loss": 0.3452,
"step": 1392
},
{
"epoch": 1.9681978798586572,
"grad_norm": 0.32831401927574855,
"learning_rate": 1.9077568134171907e-05,
"loss": 0.3485,
"step": 1393
},
{
"epoch": 1.9696113074204948,
"grad_norm": 0.25851688285536817,
"learning_rate": 1.9051362683438157e-05,
"loss": 0.3412,
"step": 1394
},
{
"epoch": 1.9710247349823322,
"grad_norm": 0.32804817899999567,
"learning_rate": 1.9025157232704404e-05,
"loss": 0.359,
"step": 1395
},
{
"epoch": 1.9724381625441696,
"grad_norm": 0.30571041182901765,
"learning_rate": 1.899895178197065e-05,
"loss": 0.3356,
"step": 1396
},
{
"epoch": 1.973851590106007,
"grad_norm": 0.3366010092854754,
"learning_rate": 1.8972746331236897e-05,
"loss": 0.3594,
"step": 1397
},
{
"epoch": 1.9752650176678446,
"grad_norm": 0.29801314989926564,
"learning_rate": 1.8946540880503148e-05,
"loss": 0.3511,
"step": 1398
},
{
"epoch": 1.976678445229682,
"grad_norm": 0.26924002439104294,
"learning_rate": 1.892033542976939e-05,
"loss": 0.3418,
"step": 1399
},
{
"epoch": 1.9780918727915195,
"grad_norm": 0.2907412990749467,
"learning_rate": 1.889412997903564e-05,
"loss": 0.3511,
"step": 1400
},
{
"epoch": 1.979505300353357,
"grad_norm": 0.27949040107922085,
"learning_rate": 1.8867924528301888e-05,
"loss": 0.36,
"step": 1401
},
{
"epoch": 1.9809187279151943,
"grad_norm": 0.2961154971173404,
"learning_rate": 1.8841719077568135e-05,
"loss": 0.3396,
"step": 1402
},
{
"epoch": 1.9823321554770317,
"grad_norm": 0.6266038101728312,
"learning_rate": 1.881551362683438e-05,
"loss": 0.3358,
"step": 1403
},
{
"epoch": 1.9837455830388693,
"grad_norm": 0.2773782053840182,
"learning_rate": 1.878930817610063e-05,
"loss": 0.3515,
"step": 1404
},
{
"epoch": 1.9851590106007069,
"grad_norm": 0.3106860591475735,
"learning_rate": 1.876310272536688e-05,
"loss": 0.3471,
"step": 1405
},
{
"epoch": 1.9865724381625443,
"grad_norm": 0.2949141435203227,
"learning_rate": 1.8736897274633125e-05,
"loss": 0.3615,
"step": 1406
},
{
"epoch": 1.9879858657243816,
"grad_norm": 0.31766313129497614,
"learning_rate": 1.8710691823899372e-05,
"loss": 0.3274,
"step": 1407
},
{
"epoch": 1.989399293286219,
"grad_norm": 0.3309263471228632,
"learning_rate": 1.868448637316562e-05,
"loss": 0.333,
"step": 1408
},
{
"epoch": 1.9908127208480564,
"grad_norm": 0.27610409759659155,
"learning_rate": 1.865828092243187e-05,
"loss": 0.3303,
"step": 1409
},
{
"epoch": 1.992226148409894,
"grad_norm": 0.28297640158162235,
"learning_rate": 1.8632075471698112e-05,
"loss": 0.3427,
"step": 1410
},
{
"epoch": 1.9936395759717316,
"grad_norm": 0.32531198404783057,
"learning_rate": 1.8605870020964362e-05,
"loss": 0.3476,
"step": 1411
},
{
"epoch": 1.995053003533569,
"grad_norm": 0.3161057116208467,
"learning_rate": 1.857966457023061e-05,
"loss": 0.3368,
"step": 1412
},
{
"epoch": 1.9964664310954063,
"grad_norm": 0.2862168035668186,
"learning_rate": 1.8553459119496856e-05,
"loss": 0.3366,
"step": 1413
},
{
"epoch": 1.9978798586572437,
"grad_norm": 0.3266588803910509,
"learning_rate": 1.8527253668763103e-05,
"loss": 0.3379,
"step": 1414
},
{
"epoch": 1.9992932862190813,
"grad_norm": 0.3118228381093671,
"learning_rate": 1.8501048218029353e-05,
"loss": 0.3518,
"step": 1415
},
{
"epoch": 2.0,
"grad_norm": 0.4335233273647243,
"learning_rate": 1.8474842767295596e-05,
"loss": 0.3455,
"step": 1416
},
{
"epoch": 2.0014134275618374,
"grad_norm": 0.35403206735834764,
"learning_rate": 1.8448637316561846e-05,
"loss": 0.2712,
"step": 1417
},
{
"epoch": 2.0028268551236748,
"grad_norm": 0.3211266157936176,
"learning_rate": 1.8422431865828093e-05,
"loss": 0.2685,
"step": 1418
},
{
"epoch": 2.0042402826855126,
"grad_norm": 0.3097366973448163,
"learning_rate": 1.839622641509434e-05,
"loss": 0.2578,
"step": 1419
},
{
"epoch": 2.00565371024735,
"grad_norm": 0.41987040858405794,
"learning_rate": 1.8370020964360587e-05,
"loss": 0.2761,
"step": 1420
},
{
"epoch": 2.0070671378091873,
"grad_norm": 0.29688402188498497,
"learning_rate": 1.8343815513626837e-05,
"loss": 0.2763,
"step": 1421
},
{
"epoch": 2.0084805653710247,
"grad_norm": 0.297219458331268,
"learning_rate": 1.831761006289308e-05,
"loss": 0.2594,
"step": 1422
},
{
"epoch": 2.009893992932862,
"grad_norm": 0.4324236690410425,
"learning_rate": 1.829140461215933e-05,
"loss": 0.2713,
"step": 1423
},
{
"epoch": 2.0113074204946995,
"grad_norm": 0.29021075315566064,
"learning_rate": 1.8265199161425577e-05,
"loss": 0.2476,
"step": 1424
},
{
"epoch": 2.0127208480565373,
"grad_norm": 0.30276006968860303,
"learning_rate": 1.8238993710691824e-05,
"loss": 0.2561,
"step": 1425
},
{
"epoch": 2.0141342756183747,
"grad_norm": 0.37117850884141185,
"learning_rate": 1.821278825995807e-05,
"loss": 0.2676,
"step": 1426
},
{
"epoch": 2.015547703180212,
"grad_norm": 0.2856005533556887,
"learning_rate": 1.818658280922432e-05,
"loss": 0.2719,
"step": 1427
},
{
"epoch": 2.0169611307420494,
"grad_norm": 0.27352647314233186,
"learning_rate": 1.8160377358490564e-05,
"loss": 0.2617,
"step": 1428
},
{
"epoch": 2.018374558303887,
"grad_norm": 0.2794954968460129,
"learning_rate": 1.8134171907756814e-05,
"loss": 0.2793,
"step": 1429
},
{
"epoch": 2.019787985865724,
"grad_norm": 0.2618067380713214,
"learning_rate": 1.810796645702306e-05,
"loss": 0.2674,
"step": 1430
},
{
"epoch": 2.021201413427562,
"grad_norm": 0.2787259339740558,
"learning_rate": 1.8081761006289308e-05,
"loss": 0.2796,
"step": 1431
},
{
"epoch": 2.0226148409893994,
"grad_norm": 0.2416530431834768,
"learning_rate": 1.8055555555555555e-05,
"loss": 0.2651,
"step": 1432
},
{
"epoch": 2.0240282685512367,
"grad_norm": 0.2427455800021535,
"learning_rate": 1.8029350104821805e-05,
"loss": 0.2606,
"step": 1433
},
{
"epoch": 2.025441696113074,
"grad_norm": 0.264771286907353,
"learning_rate": 1.8003144654088048e-05,
"loss": 0.2657,
"step": 1434
},
{
"epoch": 2.0268551236749115,
"grad_norm": 0.2642944132665096,
"learning_rate": 1.79769392033543e-05,
"loss": 0.2656,
"step": 1435
},
{
"epoch": 2.0282685512367493,
"grad_norm": 0.27994050039032714,
"learning_rate": 1.7950733752620545e-05,
"loss": 0.2674,
"step": 1436
},
{
"epoch": 2.0296819787985867,
"grad_norm": 0.24410822144872132,
"learning_rate": 1.7924528301886792e-05,
"loss": 0.2626,
"step": 1437
},
{
"epoch": 2.031095406360424,
"grad_norm": 0.23202476707577518,
"learning_rate": 1.789832285115304e-05,
"loss": 0.2667,
"step": 1438
},
{
"epoch": 2.0325088339222614,
"grad_norm": 0.24763160009540283,
"learning_rate": 1.787211740041929e-05,
"loss": 0.2581,
"step": 1439
},
{
"epoch": 2.033922261484099,
"grad_norm": 0.2970637174841249,
"learning_rate": 1.7845911949685536e-05,
"loss": 0.2713,
"step": 1440
},
{
"epoch": 2.035335689045936,
"grad_norm": 0.24937900384764836,
"learning_rate": 1.7819706498951782e-05,
"loss": 0.2674,
"step": 1441
},
{
"epoch": 2.036749116607774,
"grad_norm": 0.2816108593566526,
"learning_rate": 1.779350104821803e-05,
"loss": 0.278,
"step": 1442
},
{
"epoch": 2.0381625441696114,
"grad_norm": 0.2804063122569137,
"learning_rate": 1.7767295597484276e-05,
"loss": 0.2705,
"step": 1443
},
{
"epoch": 2.039575971731449,
"grad_norm": 0.33147820145797435,
"learning_rate": 1.7741090146750526e-05,
"loss": 0.2628,
"step": 1444
},
{
"epoch": 2.040989399293286,
"grad_norm": 0.2577585114514762,
"learning_rate": 1.7714884696016773e-05,
"loss": 0.2713,
"step": 1445
},
{
"epoch": 2.0424028268551235,
"grad_norm": 0.28613793250702724,
"learning_rate": 1.768867924528302e-05,
"loss": 0.2548,
"step": 1446
},
{
"epoch": 2.043816254416961,
"grad_norm": 0.25221865119502557,
"learning_rate": 1.7662473794549266e-05,
"loss": 0.2713,
"step": 1447
},
{
"epoch": 2.0452296819787987,
"grad_norm": 0.2501584823870619,
"learning_rate": 1.7636268343815517e-05,
"loss": 0.2588,
"step": 1448
},
{
"epoch": 2.046643109540636,
"grad_norm": 0.27695705097468043,
"learning_rate": 1.761006289308176e-05,
"loss": 0.2575,
"step": 1449
},
{
"epoch": 2.0480565371024735,
"grad_norm": 0.26334598661827097,
"learning_rate": 1.758385744234801e-05,
"loss": 0.2561,
"step": 1450
},
{
"epoch": 2.049469964664311,
"grad_norm": 0.2397412460149911,
"learning_rate": 1.7557651991614257e-05,
"loss": 0.2825,
"step": 1451
},
{
"epoch": 2.0508833922261482,
"grad_norm": 0.26395851343738785,
"learning_rate": 1.7531446540880504e-05,
"loss": 0.2433,
"step": 1452
},
{
"epoch": 2.052296819787986,
"grad_norm": 0.27466904893438066,
"learning_rate": 1.750524109014675e-05,
"loss": 0.2755,
"step": 1453
},
{
"epoch": 2.0537102473498234,
"grad_norm": 0.2515325456987595,
"learning_rate": 1.7479035639413e-05,
"loss": 0.2833,
"step": 1454
},
{
"epoch": 2.055123674911661,
"grad_norm": 0.2514285530952254,
"learning_rate": 1.7452830188679244e-05,
"loss": 0.2632,
"step": 1455
},
{
"epoch": 2.056537102473498,
"grad_norm": 0.23396335151589076,
"learning_rate": 1.7426624737945494e-05,
"loss": 0.2474,
"step": 1456
},
{
"epoch": 2.0579505300353356,
"grad_norm": 0.23566644723851013,
"learning_rate": 1.740041928721174e-05,
"loss": 0.2687,
"step": 1457
},
{
"epoch": 2.059363957597173,
"grad_norm": 0.23470068923902548,
"learning_rate": 1.7374213836477988e-05,
"loss": 0.2553,
"step": 1458
},
{
"epoch": 2.0607773851590108,
"grad_norm": 0.25400344966104477,
"learning_rate": 1.7348008385744234e-05,
"loss": 0.2661,
"step": 1459
},
{
"epoch": 2.062190812720848,
"grad_norm": 0.23545263655072862,
"learning_rate": 1.7321802935010485e-05,
"loss": 0.2685,
"step": 1460
},
{
"epoch": 2.0636042402826855,
"grad_norm": 0.21724780263757043,
"learning_rate": 1.7295597484276728e-05,
"loss": 0.2447,
"step": 1461
},
{
"epoch": 2.065017667844523,
"grad_norm": 0.27440455590033974,
"learning_rate": 1.7269392033542978e-05,
"loss": 0.2717,
"step": 1462
},
{
"epoch": 2.0664310954063603,
"grad_norm": 0.22445921296637483,
"learning_rate": 1.7243186582809225e-05,
"loss": 0.2776,
"step": 1463
},
{
"epoch": 2.0678445229681977,
"grad_norm": 0.2232672488779404,
"learning_rate": 1.7216981132075472e-05,
"loss": 0.2485,
"step": 1464
},
{
"epoch": 2.0692579505300355,
"grad_norm": 0.25953077928474383,
"learning_rate": 1.719077568134172e-05,
"loss": 0.2677,
"step": 1465
},
{
"epoch": 2.070671378091873,
"grad_norm": 0.23547348769727375,
"learning_rate": 1.716457023060797e-05,
"loss": 0.2773,
"step": 1466
},
{
"epoch": 2.0720848056537102,
"grad_norm": 0.22881545069302148,
"learning_rate": 1.7138364779874212e-05,
"loss": 0.2632,
"step": 1467
},
{
"epoch": 2.0734982332155476,
"grad_norm": 0.292202133048355,
"learning_rate": 1.7112159329140462e-05,
"loss": 0.2748,
"step": 1468
},
{
"epoch": 2.074911660777385,
"grad_norm": 0.3215274579793553,
"learning_rate": 1.708595387840671e-05,
"loss": 0.2704,
"step": 1469
},
{
"epoch": 2.076325088339223,
"grad_norm": 0.249040207798343,
"learning_rate": 1.7059748427672956e-05,
"loss": 0.2606,
"step": 1470
},
{
"epoch": 2.07773851590106,
"grad_norm": 0.284866342345612,
"learning_rate": 1.7033542976939203e-05,
"loss": 0.2619,
"step": 1471
},
{
"epoch": 2.0791519434628976,
"grad_norm": 0.24382900003424937,
"learning_rate": 1.7007337526205453e-05,
"loss": 0.2787,
"step": 1472
},
{
"epoch": 2.080565371024735,
"grad_norm": 0.23188629778187936,
"learning_rate": 1.69811320754717e-05,
"loss": 0.2527,
"step": 1473
},
{
"epoch": 2.0819787985865723,
"grad_norm": 0.2677853734660904,
"learning_rate": 1.6954926624737946e-05,
"loss": 0.2704,
"step": 1474
},
{
"epoch": 2.0833922261484097,
"grad_norm": 0.2267054243436212,
"learning_rate": 1.6928721174004193e-05,
"loss": 0.2709,
"step": 1475
},
{
"epoch": 2.0848056537102475,
"grad_norm": 0.2510873489789455,
"learning_rate": 1.690251572327044e-05,
"loss": 0.2618,
"step": 1476
},
{
"epoch": 2.086219081272085,
"grad_norm": 0.24542626270184298,
"learning_rate": 1.687631027253669e-05,
"loss": 0.2695,
"step": 1477
},
{
"epoch": 2.0876325088339223,
"grad_norm": 0.20879273104279764,
"learning_rate": 1.6850104821802937e-05,
"loss": 0.2551,
"step": 1478
},
{
"epoch": 2.0890459363957596,
"grad_norm": 0.26343626544275783,
"learning_rate": 1.6823899371069183e-05,
"loss": 0.28,
"step": 1479
},
{
"epoch": 2.090459363957597,
"grad_norm": 0.2665475313440541,
"learning_rate": 1.679769392033543e-05,
"loss": 0.2677,
"step": 1480
},
{
"epoch": 2.091872791519435,
"grad_norm": 0.22749390609472736,
"learning_rate": 1.677148846960168e-05,
"loss": 0.2609,
"step": 1481
},
{
"epoch": 2.093286219081272,
"grad_norm": 0.22703967953877865,
"learning_rate": 1.6745283018867924e-05,
"loss": 0.263,
"step": 1482
},
{
"epoch": 2.0946996466431096,
"grad_norm": 0.23410057446548124,
"learning_rate": 1.6719077568134174e-05,
"loss": 0.2594,
"step": 1483
},
{
"epoch": 2.096113074204947,
"grad_norm": 0.23866324012672924,
"learning_rate": 1.669287211740042e-05,
"loss": 0.2552,
"step": 1484
},
{
"epoch": 2.0975265017667843,
"grad_norm": 0.2270684675646396,
"learning_rate": 1.6666666666666667e-05,
"loss": 0.2746,
"step": 1485
},
{
"epoch": 2.0989399293286217,
"grad_norm": 0.24079956745773862,
"learning_rate": 1.6640461215932914e-05,
"loss": 0.285,
"step": 1486
},
{
"epoch": 2.1003533568904595,
"grad_norm": 0.23589149417064958,
"learning_rate": 1.6614255765199164e-05,
"loss": 0.2661,
"step": 1487
},
{
"epoch": 2.101766784452297,
"grad_norm": 0.23004530674395424,
"learning_rate": 1.6588050314465408e-05,
"loss": 0.2728,
"step": 1488
},
{
"epoch": 2.1031802120141343,
"grad_norm": 0.24680840208888893,
"learning_rate": 1.6561844863731658e-05,
"loss": 0.2622,
"step": 1489
},
{
"epoch": 2.1045936395759717,
"grad_norm": 0.23849392812380515,
"learning_rate": 1.6535639412997905e-05,
"loss": 0.267,
"step": 1490
},
{
"epoch": 2.106007067137809,
"grad_norm": 0.23670392851054678,
"learning_rate": 1.650943396226415e-05,
"loss": 0.2582,
"step": 1491
},
{
"epoch": 2.1074204946996464,
"grad_norm": 0.2509262879145169,
"learning_rate": 1.6483228511530398e-05,
"loss": 0.2685,
"step": 1492
},
{
"epoch": 2.1088339222614843,
"grad_norm": 0.23667410997302119,
"learning_rate": 1.645702306079665e-05,
"loss": 0.2613,
"step": 1493
},
{
"epoch": 2.1102473498233216,
"grad_norm": 0.23118439471601826,
"learning_rate": 1.6430817610062892e-05,
"loss": 0.2937,
"step": 1494
},
{
"epoch": 2.111660777385159,
"grad_norm": 0.2560010684558792,
"learning_rate": 1.6404612159329142e-05,
"loss": 0.2743,
"step": 1495
},
{
"epoch": 2.1130742049469964,
"grad_norm": 0.22571715466961964,
"learning_rate": 1.637840670859539e-05,
"loss": 0.262,
"step": 1496
},
{
"epoch": 2.1144876325088338,
"grad_norm": 0.23789897426516335,
"learning_rate": 1.6352201257861635e-05,
"loss": 0.2709,
"step": 1497
},
{
"epoch": 2.1159010600706716,
"grad_norm": 0.27528631993151575,
"learning_rate": 1.6325995807127882e-05,
"loss": 0.261,
"step": 1498
},
{
"epoch": 2.117314487632509,
"grad_norm": 0.24492806067816503,
"learning_rate": 1.6299790356394132e-05,
"loss": 0.2627,
"step": 1499
},
{
"epoch": 2.1187279151943463,
"grad_norm": 0.22968850215682476,
"learning_rate": 1.6273584905660376e-05,
"loss": 0.2684,
"step": 1500
},
{
"epoch": 2.1201413427561837,
"grad_norm": 0.24510977693089941,
"learning_rate": 1.6247379454926626e-05,
"loss": 0.2579,
"step": 1501
},
{
"epoch": 2.121554770318021,
"grad_norm": 0.27220300868344405,
"learning_rate": 1.6221174004192873e-05,
"loss": 0.2637,
"step": 1502
},
{
"epoch": 2.1229681978798585,
"grad_norm": 0.23963585346815983,
"learning_rate": 1.619496855345912e-05,
"loss": 0.2637,
"step": 1503
},
{
"epoch": 2.1243816254416963,
"grad_norm": 0.22549052137704395,
"learning_rate": 1.6168763102725366e-05,
"loss": 0.2569,
"step": 1504
},
{
"epoch": 2.1257950530035337,
"grad_norm": 0.28075094223491676,
"learning_rate": 1.6142557651991616e-05,
"loss": 0.2712,
"step": 1505
},
{
"epoch": 2.127208480565371,
"grad_norm": 0.22198852952313736,
"learning_rate": 1.6116352201257863e-05,
"loss": 0.2547,
"step": 1506
},
{
"epoch": 2.1286219081272084,
"grad_norm": 0.24084612727476723,
"learning_rate": 1.609014675052411e-05,
"loss": 0.2776,
"step": 1507
},
{
"epoch": 2.130035335689046,
"grad_norm": 0.3008046814262076,
"learning_rate": 1.6063941299790357e-05,
"loss": 0.2805,
"step": 1508
},
{
"epoch": 2.1314487632508836,
"grad_norm": 0.22148153059694778,
"learning_rate": 1.6037735849056604e-05,
"loss": 0.2572,
"step": 1509
},
{
"epoch": 2.132862190812721,
"grad_norm": 0.21669866832024834,
"learning_rate": 1.6011530398322854e-05,
"loss": 0.2754,
"step": 1510
},
{
"epoch": 2.1342756183745584,
"grad_norm": 0.24258833241939304,
"learning_rate": 1.59853249475891e-05,
"loss": 0.2582,
"step": 1511
},
{
"epoch": 2.1356890459363957,
"grad_norm": 0.2537654616561281,
"learning_rate": 1.5959119496855347e-05,
"loss": 0.2648,
"step": 1512
},
{
"epoch": 2.137102473498233,
"grad_norm": 0.23461228316940397,
"learning_rate": 1.5932914046121594e-05,
"loss": 0.2658,
"step": 1513
},
{
"epoch": 2.1385159010600705,
"grad_norm": 0.2527876336382696,
"learning_rate": 1.5906708595387844e-05,
"loss": 0.2645,
"step": 1514
},
{
"epoch": 2.1399293286219083,
"grad_norm": 0.23727046925398404,
"learning_rate": 1.5880503144654088e-05,
"loss": 0.2787,
"step": 1515
},
{
"epoch": 2.1413427561837457,
"grad_norm": 0.22503550473136746,
"learning_rate": 1.5854297693920338e-05,
"loss": 0.2526,
"step": 1516
},
{
"epoch": 2.142756183745583,
"grad_norm": 0.23942718288630324,
"learning_rate": 1.5828092243186584e-05,
"loss": 0.2781,
"step": 1517
},
{
"epoch": 2.1441696113074205,
"grad_norm": 0.21218271084439042,
"learning_rate": 1.580188679245283e-05,
"loss": 0.2652,
"step": 1518
},
{
"epoch": 2.145583038869258,
"grad_norm": 0.23741934537461637,
"learning_rate": 1.5775681341719078e-05,
"loss": 0.2713,
"step": 1519
},
{
"epoch": 2.146996466431095,
"grad_norm": 0.20702030221655157,
"learning_rate": 1.5749475890985328e-05,
"loss": 0.2671,
"step": 1520
},
{
"epoch": 2.148409893992933,
"grad_norm": 0.24730015439404526,
"learning_rate": 1.572327044025157e-05,
"loss": 0.2908,
"step": 1521
},
{
"epoch": 2.1498233215547704,
"grad_norm": 0.24390762136510735,
"learning_rate": 1.5697064989517822e-05,
"loss": 0.277,
"step": 1522
},
{
"epoch": 2.151236749116608,
"grad_norm": 0.2069684158460973,
"learning_rate": 1.567085953878407e-05,
"loss": 0.2643,
"step": 1523
},
{
"epoch": 2.152650176678445,
"grad_norm": 0.20623782074794828,
"learning_rate": 1.5644654088050315e-05,
"loss": 0.2648,
"step": 1524
},
{
"epoch": 2.1540636042402825,
"grad_norm": 0.24620195949131105,
"learning_rate": 1.5618448637316562e-05,
"loss": 0.2834,
"step": 1525
},
{
"epoch": 2.1554770318021204,
"grad_norm": 0.2134325056203705,
"learning_rate": 1.5592243186582812e-05,
"loss": 0.2759,
"step": 1526
},
{
"epoch": 2.1568904593639577,
"grad_norm": 0.22151499659220392,
"learning_rate": 1.5566037735849056e-05,
"loss": 0.2568,
"step": 1527
},
{
"epoch": 2.158303886925795,
"grad_norm": 0.2252580161586547,
"learning_rate": 1.5539832285115306e-05,
"loss": 0.268,
"step": 1528
},
{
"epoch": 2.1597173144876325,
"grad_norm": 0.22327743964788657,
"learning_rate": 1.5513626834381552e-05,
"loss": 0.2712,
"step": 1529
},
{
"epoch": 2.16113074204947,
"grad_norm": 0.23392305058368595,
"learning_rate": 1.54874213836478e-05,
"loss": 0.2723,
"step": 1530
},
{
"epoch": 2.1625441696113072,
"grad_norm": 0.24314556538388707,
"learning_rate": 1.5461215932914046e-05,
"loss": 0.2852,
"step": 1531
},
{
"epoch": 2.163957597173145,
"grad_norm": 0.22272244917060213,
"learning_rate": 1.5435010482180296e-05,
"loss": 0.268,
"step": 1532
},
{
"epoch": 2.1653710247349824,
"grad_norm": 0.21601308214481943,
"learning_rate": 1.540880503144654e-05,
"loss": 0.2669,
"step": 1533
},
{
"epoch": 2.16678445229682,
"grad_norm": 0.23266251078321876,
"learning_rate": 1.538259958071279e-05,
"loss": 0.2691,
"step": 1534
},
{
"epoch": 2.168197879858657,
"grad_norm": 0.22428851817698137,
"learning_rate": 1.5356394129979037e-05,
"loss": 0.2475,
"step": 1535
},
{
"epoch": 2.1696113074204946,
"grad_norm": 0.213393884907496,
"learning_rate": 1.5330188679245283e-05,
"loss": 0.2555,
"step": 1536
},
{
"epoch": 2.171024734982332,
"grad_norm": 0.23009321412456962,
"learning_rate": 1.530398322851153e-05,
"loss": 0.2714,
"step": 1537
},
{
"epoch": 2.1724381625441698,
"grad_norm": 0.2587074050241764,
"learning_rate": 1.527777777777778e-05,
"loss": 0.2722,
"step": 1538
},
{
"epoch": 2.173851590106007,
"grad_norm": 0.2308258016383529,
"learning_rate": 1.5251572327044025e-05,
"loss": 0.2453,
"step": 1539
},
{
"epoch": 2.1752650176678445,
"grad_norm": 0.24882239198906997,
"learning_rate": 1.5225366876310274e-05,
"loss": 0.2769,
"step": 1540
},
{
"epoch": 2.176678445229682,
"grad_norm": 0.2549156055033187,
"learning_rate": 1.5199161425576522e-05,
"loss": 0.2509,
"step": 1541
},
{
"epoch": 2.1780918727915193,
"grad_norm": 0.2352550589762924,
"learning_rate": 1.5172955974842767e-05,
"loss": 0.2555,
"step": 1542
},
{
"epoch": 2.179505300353357,
"grad_norm": 0.2400175094211747,
"learning_rate": 1.5146750524109016e-05,
"loss": 0.2668,
"step": 1543
},
{
"epoch": 2.1809187279151945,
"grad_norm": 0.24570657808789098,
"learning_rate": 1.5120545073375264e-05,
"loss": 0.2757,
"step": 1544
},
{
"epoch": 2.182332155477032,
"grad_norm": 0.22197802629509106,
"learning_rate": 1.509433962264151e-05,
"loss": 0.2488,
"step": 1545
},
{
"epoch": 2.1837455830388692,
"grad_norm": 0.2565370884313056,
"learning_rate": 1.5068134171907758e-05,
"loss": 0.2771,
"step": 1546
},
{
"epoch": 2.1851590106007066,
"grad_norm": 0.2706863138019699,
"learning_rate": 1.5041928721174006e-05,
"loss": 0.2715,
"step": 1547
},
{
"epoch": 2.186572438162544,
"grad_norm": 0.21910122207984709,
"learning_rate": 1.5015723270440251e-05,
"loss": 0.2582,
"step": 1548
},
{
"epoch": 2.187985865724382,
"grad_norm": 0.23068745601797644,
"learning_rate": 1.49895178197065e-05,
"loss": 0.2778,
"step": 1549
},
{
"epoch": 2.189399293286219,
"grad_norm": 0.2667513925677984,
"learning_rate": 1.4963312368972748e-05,
"loss": 0.2584,
"step": 1550
},
{
"epoch": 2.1908127208480566,
"grad_norm": 0.27051255172508376,
"learning_rate": 1.4937106918238993e-05,
"loss": 0.2661,
"step": 1551
},
{
"epoch": 2.192226148409894,
"grad_norm": 0.2641732872756639,
"learning_rate": 1.4910901467505242e-05,
"loss": 0.2814,
"step": 1552
},
{
"epoch": 2.1936395759717313,
"grad_norm": 0.24493430501153174,
"learning_rate": 1.488469601677149e-05,
"loss": 0.2534,
"step": 1553
},
{
"epoch": 2.1950530035335687,
"grad_norm": 0.2625508226313674,
"learning_rate": 1.4858490566037735e-05,
"loss": 0.2544,
"step": 1554
},
{
"epoch": 2.1964664310954065,
"grad_norm": 0.2321343757224512,
"learning_rate": 1.4832285115303984e-05,
"loss": 0.2768,
"step": 1555
},
{
"epoch": 2.197879858657244,
"grad_norm": 0.24350174408814293,
"learning_rate": 1.4806079664570232e-05,
"loss": 0.267,
"step": 1556
},
{
"epoch": 2.1992932862190813,
"grad_norm": 0.22674531902740033,
"learning_rate": 1.4779874213836479e-05,
"loss": 0.2626,
"step": 1557
},
{
"epoch": 2.2007067137809186,
"grad_norm": 0.24040444896387456,
"learning_rate": 1.4753668763102726e-05,
"loss": 0.2684,
"step": 1558
},
{
"epoch": 2.202120141342756,
"grad_norm": 0.23524345502833513,
"learning_rate": 1.4727463312368974e-05,
"loss": 0.267,
"step": 1559
},
{
"epoch": 2.203533568904594,
"grad_norm": 0.22864443492456502,
"learning_rate": 1.4701257861635221e-05,
"loss": 0.2595,
"step": 1560
},
{
"epoch": 2.204946996466431,
"grad_norm": 0.24238554407025614,
"learning_rate": 1.467505241090147e-05,
"loss": 0.2708,
"step": 1561
},
{
"epoch": 2.2063604240282686,
"grad_norm": 0.23485460133805863,
"learning_rate": 1.4648846960167716e-05,
"loss": 0.2684,
"step": 1562
},
{
"epoch": 2.207773851590106,
"grad_norm": 0.23646108314933598,
"learning_rate": 1.4622641509433963e-05,
"loss": 0.2783,
"step": 1563
},
{
"epoch": 2.2091872791519434,
"grad_norm": 0.2227695747468476,
"learning_rate": 1.4596436058700211e-05,
"loss": 0.2735,
"step": 1564
},
{
"epoch": 2.2106007067137807,
"grad_norm": 0.2244830015932064,
"learning_rate": 1.4570230607966457e-05,
"loss": 0.2657,
"step": 1565
},
{
"epoch": 2.2120141342756185,
"grad_norm": 0.26758226160012494,
"learning_rate": 1.4544025157232705e-05,
"loss": 0.2739,
"step": 1566
},
{
"epoch": 2.213427561837456,
"grad_norm": 0.2118479840410938,
"learning_rate": 1.4517819706498954e-05,
"loss": 0.2731,
"step": 1567
},
{
"epoch": 2.2148409893992933,
"grad_norm": 0.23579092222065717,
"learning_rate": 1.4491614255765199e-05,
"loss": 0.2596,
"step": 1568
},
{
"epoch": 2.2162544169611307,
"grad_norm": 0.2773634143958361,
"learning_rate": 1.4465408805031447e-05,
"loss": 0.2776,
"step": 1569
},
{
"epoch": 2.217667844522968,
"grad_norm": 0.23076946472820128,
"learning_rate": 1.4439203354297696e-05,
"loss": 0.2609,
"step": 1570
},
{
"epoch": 2.2190812720848054,
"grad_norm": 0.2618372333308278,
"learning_rate": 1.441299790356394e-05,
"loss": 0.2679,
"step": 1571
},
{
"epoch": 2.2204946996466433,
"grad_norm": 0.25941063639727346,
"learning_rate": 1.4386792452830189e-05,
"loss": 0.2816,
"step": 1572
},
{
"epoch": 2.2219081272084806,
"grad_norm": 0.22691127451457804,
"learning_rate": 1.4360587002096438e-05,
"loss": 0.2618,
"step": 1573
},
{
"epoch": 2.223321554770318,
"grad_norm": 0.25478218364111027,
"learning_rate": 1.4334381551362683e-05,
"loss": 0.2736,
"step": 1574
},
{
"epoch": 2.2247349823321554,
"grad_norm": 0.2300653159084738,
"learning_rate": 1.4308176100628931e-05,
"loss": 0.2798,
"step": 1575
},
{
"epoch": 2.2261484098939928,
"grad_norm": 0.226796023121057,
"learning_rate": 1.428197064989518e-05,
"loss": 0.2549,
"step": 1576
},
{
"epoch": 2.2275618374558306,
"grad_norm": 0.23934500955855265,
"learning_rate": 1.4255765199161425e-05,
"loss": 0.272,
"step": 1577
},
{
"epoch": 2.228975265017668,
"grad_norm": 0.21239765890325496,
"learning_rate": 1.4229559748427673e-05,
"loss": 0.2774,
"step": 1578
},
{
"epoch": 2.2303886925795053,
"grad_norm": 0.22687416228788465,
"learning_rate": 1.4203354297693922e-05,
"loss": 0.2485,
"step": 1579
},
{
"epoch": 2.2318021201413427,
"grad_norm": 0.21762879167169033,
"learning_rate": 1.4177148846960167e-05,
"loss": 0.2687,
"step": 1580
},
{
"epoch": 2.23321554770318,
"grad_norm": 0.21979870758030692,
"learning_rate": 1.4150943396226415e-05,
"loss": 0.2763,
"step": 1581
},
{
"epoch": 2.2346289752650175,
"grad_norm": 0.22135239936524478,
"learning_rate": 1.4124737945492664e-05,
"loss": 0.2652,
"step": 1582
},
{
"epoch": 2.2360424028268553,
"grad_norm": 0.2527670598132533,
"learning_rate": 1.4098532494758909e-05,
"loss": 0.2654,
"step": 1583
},
{
"epoch": 2.2374558303886927,
"grad_norm": 0.2379558190924844,
"learning_rate": 1.4072327044025157e-05,
"loss": 0.2752,
"step": 1584
},
{
"epoch": 2.23886925795053,
"grad_norm": 0.2191212423832416,
"learning_rate": 1.4046121593291406e-05,
"loss": 0.2508,
"step": 1585
},
{
"epoch": 2.2402826855123674,
"grad_norm": 0.23290476277719893,
"learning_rate": 1.401991614255765e-05,
"loss": 0.2664,
"step": 1586
},
{
"epoch": 2.241696113074205,
"grad_norm": 0.23058756525119078,
"learning_rate": 1.3993710691823899e-05,
"loss": 0.2792,
"step": 1587
},
{
"epoch": 2.243109540636042,
"grad_norm": 0.2198947688972119,
"learning_rate": 1.3967505241090148e-05,
"loss": 0.2523,
"step": 1588
},
{
"epoch": 2.24452296819788,
"grad_norm": 0.2300759130810082,
"learning_rate": 1.3941299790356394e-05,
"loss": 0.2559,
"step": 1589
},
{
"epoch": 2.2459363957597174,
"grad_norm": 0.2157504043790022,
"learning_rate": 1.3915094339622641e-05,
"loss": 0.2571,
"step": 1590
},
{
"epoch": 2.2473498233215548,
"grad_norm": 0.23129284016229562,
"learning_rate": 1.388888888888889e-05,
"loss": 0.2467,
"step": 1591
},
{
"epoch": 2.248763250883392,
"grad_norm": 0.22119924933805812,
"learning_rate": 1.3862683438155136e-05,
"loss": 0.2747,
"step": 1592
},
{
"epoch": 2.2501766784452295,
"grad_norm": 0.22307658176594453,
"learning_rate": 1.3836477987421385e-05,
"loss": 0.2506,
"step": 1593
},
{
"epoch": 2.2515901060070673,
"grad_norm": 0.22771699771909104,
"learning_rate": 1.3810272536687632e-05,
"loss": 0.2728,
"step": 1594
},
{
"epoch": 2.2530035335689047,
"grad_norm": 0.22780575620146742,
"learning_rate": 1.3784067085953878e-05,
"loss": 0.2654,
"step": 1595
},
{
"epoch": 2.254416961130742,
"grad_norm": 0.21902088596035363,
"learning_rate": 1.3757861635220127e-05,
"loss": 0.2729,
"step": 1596
},
{
"epoch": 2.2558303886925795,
"grad_norm": 0.2485941965855047,
"learning_rate": 1.3731656184486375e-05,
"loss": 0.2673,
"step": 1597
},
{
"epoch": 2.257243816254417,
"grad_norm": 0.23393473063181308,
"learning_rate": 1.370545073375262e-05,
"loss": 0.2806,
"step": 1598
},
{
"epoch": 2.2586572438162547,
"grad_norm": 0.20818134991067308,
"learning_rate": 1.3679245283018869e-05,
"loss": 0.2696,
"step": 1599
},
{
"epoch": 2.260070671378092,
"grad_norm": 0.258171502070671,
"learning_rate": 1.3653039832285117e-05,
"loss": 0.2721,
"step": 1600
},
{
"epoch": 2.2614840989399294,
"grad_norm": 0.2271538562064669,
"learning_rate": 1.3626834381551362e-05,
"loss": 0.2795,
"step": 1601
},
{
"epoch": 2.262897526501767,
"grad_norm": 0.2215486080232993,
"learning_rate": 1.360062893081761e-05,
"loss": 0.2723,
"step": 1602
},
{
"epoch": 2.264310954063604,
"grad_norm": 0.24006773700829792,
"learning_rate": 1.357442348008386e-05,
"loss": 0.2592,
"step": 1603
},
{
"epoch": 2.2657243816254415,
"grad_norm": 0.27332093502925525,
"learning_rate": 1.3548218029350104e-05,
"loss": 0.2888,
"step": 1604
},
{
"epoch": 2.267137809187279,
"grad_norm": 0.22320286965794567,
"learning_rate": 1.3522012578616353e-05,
"loss": 0.2675,
"step": 1605
},
{
"epoch": 2.2685512367491167,
"grad_norm": 0.2417438385119587,
"learning_rate": 1.3495807127882601e-05,
"loss": 0.2641,
"step": 1606
},
{
"epoch": 2.269964664310954,
"grad_norm": 0.247070267297521,
"learning_rate": 1.3469601677148846e-05,
"loss": 0.2755,
"step": 1607
},
{
"epoch": 2.2713780918727915,
"grad_norm": 0.22379149408244225,
"learning_rate": 1.3443396226415095e-05,
"loss": 0.2539,
"step": 1608
},
{
"epoch": 2.272791519434629,
"grad_norm": 0.22915456223089736,
"learning_rate": 1.3417190775681343e-05,
"loss": 0.2675,
"step": 1609
},
{
"epoch": 2.2742049469964662,
"grad_norm": 0.25455487442063124,
"learning_rate": 1.3390985324947588e-05,
"loss": 0.2612,
"step": 1610
},
{
"epoch": 2.275618374558304,
"grad_norm": 0.22553640113551945,
"learning_rate": 1.3364779874213837e-05,
"loss": 0.2581,
"step": 1611
},
{
"epoch": 2.2770318021201414,
"grad_norm": 0.22178854460767228,
"learning_rate": 1.3338574423480085e-05,
"loss": 0.2679,
"step": 1612
},
{
"epoch": 2.278445229681979,
"grad_norm": 0.2641204609846426,
"learning_rate": 1.331236897274633e-05,
"loss": 0.2758,
"step": 1613
},
{
"epoch": 2.279858657243816,
"grad_norm": 0.24496408330785086,
"learning_rate": 1.3286163522012579e-05,
"loss": 0.2527,
"step": 1614
},
{
"epoch": 2.2812720848056536,
"grad_norm": 0.23490001411549347,
"learning_rate": 1.3259958071278827e-05,
"loss": 0.2712,
"step": 1615
},
{
"epoch": 2.2826855123674914,
"grad_norm": 0.20990850132465172,
"learning_rate": 1.3233752620545072e-05,
"loss": 0.2478,
"step": 1616
},
{
"epoch": 2.2840989399293288,
"grad_norm": 0.25671540854753494,
"learning_rate": 1.320754716981132e-05,
"loss": 0.2919,
"step": 1617
},
{
"epoch": 2.285512367491166,
"grad_norm": 0.21492678161698445,
"learning_rate": 1.318134171907757e-05,
"loss": 0.2545,
"step": 1618
},
{
"epoch": 2.2869257950530035,
"grad_norm": 0.21756121560175376,
"learning_rate": 1.3155136268343814e-05,
"loss": 0.2641,
"step": 1619
},
{
"epoch": 2.288339222614841,
"grad_norm": 0.2183311222820037,
"learning_rate": 1.3128930817610063e-05,
"loss": 0.2744,
"step": 1620
},
{
"epoch": 2.2897526501766783,
"grad_norm": 0.2243257147324064,
"learning_rate": 1.3102725366876311e-05,
"loss": 0.258,
"step": 1621
},
{
"epoch": 2.2911660777385157,
"grad_norm": 0.22626583059015312,
"learning_rate": 1.3076519916142556e-05,
"loss": 0.2765,
"step": 1622
},
{
"epoch": 2.2925795053003535,
"grad_norm": 0.21792045298568563,
"learning_rate": 1.3050314465408805e-05,
"loss": 0.272,
"step": 1623
},
{
"epoch": 2.293992932862191,
"grad_norm": 0.22949863581581786,
"learning_rate": 1.3024109014675053e-05,
"loss": 0.2696,
"step": 1624
},
{
"epoch": 2.2954063604240282,
"grad_norm": 0.2146333264428167,
"learning_rate": 1.29979035639413e-05,
"loss": 0.2637,
"step": 1625
},
{
"epoch": 2.2968197879858656,
"grad_norm": 0.24451932299982515,
"learning_rate": 1.2971698113207547e-05,
"loss": 0.2754,
"step": 1626
},
{
"epoch": 2.298233215547703,
"grad_norm": 0.23010156497180856,
"learning_rate": 1.2945492662473795e-05,
"loss": 0.2607,
"step": 1627
},
{
"epoch": 2.299646643109541,
"grad_norm": 0.2078602632068109,
"learning_rate": 1.2919287211740042e-05,
"loss": 0.2669,
"step": 1628
},
{
"epoch": 2.301060070671378,
"grad_norm": 0.24119687384979743,
"learning_rate": 1.289308176100629e-05,
"loss": 0.2719,
"step": 1629
},
{
"epoch": 2.3024734982332156,
"grad_norm": 0.22211211026981562,
"learning_rate": 1.2866876310272537e-05,
"loss": 0.263,
"step": 1630
},
{
"epoch": 2.303886925795053,
"grad_norm": 0.24082511144929436,
"learning_rate": 1.2840670859538784e-05,
"loss": 0.2632,
"step": 1631
},
{
"epoch": 2.3053003533568903,
"grad_norm": 0.2356278412390576,
"learning_rate": 1.2814465408805033e-05,
"loss": 0.2796,
"step": 1632
},
{
"epoch": 2.306713780918728,
"grad_norm": 0.232806007858459,
"learning_rate": 1.2788259958071281e-05,
"loss": 0.2743,
"step": 1633
},
{
"epoch": 2.3081272084805655,
"grad_norm": 0.21933746253279304,
"learning_rate": 1.2762054507337526e-05,
"loss": 0.2687,
"step": 1634
},
{
"epoch": 2.309540636042403,
"grad_norm": 0.238140549936608,
"learning_rate": 1.2735849056603775e-05,
"loss": 0.2746,
"step": 1635
},
{
"epoch": 2.3109540636042403,
"grad_norm": 0.23606738996836554,
"learning_rate": 1.2709643605870023e-05,
"loss": 0.2672,
"step": 1636
},
{
"epoch": 2.3123674911660776,
"grad_norm": 0.2508673666276745,
"learning_rate": 1.2683438155136268e-05,
"loss": 0.2665,
"step": 1637
},
{
"epoch": 2.313780918727915,
"grad_norm": 0.21822314354650496,
"learning_rate": 1.2657232704402517e-05,
"loss": 0.2662,
"step": 1638
},
{
"epoch": 2.3151943462897524,
"grad_norm": 0.25044123885512026,
"learning_rate": 1.2631027253668765e-05,
"loss": 0.2741,
"step": 1639
},
{
"epoch": 2.3166077738515902,
"grad_norm": 0.24746171683154466,
"learning_rate": 1.260482180293501e-05,
"loss": 0.2636,
"step": 1640
},
{
"epoch": 2.3180212014134276,
"grad_norm": 0.24212271410997105,
"learning_rate": 1.2578616352201259e-05,
"loss": 0.2629,
"step": 1641
},
{
"epoch": 2.319434628975265,
"grad_norm": 0.23914863590918295,
"learning_rate": 1.2552410901467507e-05,
"loss": 0.27,
"step": 1642
},
{
"epoch": 2.3208480565371024,
"grad_norm": 0.2398758357005626,
"learning_rate": 1.2526205450733752e-05,
"loss": 0.2629,
"step": 1643
},
{
"epoch": 2.3222614840989397,
"grad_norm": 0.2351279016326098,
"learning_rate": 1.25e-05,
"loss": 0.2722,
"step": 1644
},
{
"epoch": 2.3236749116607776,
"grad_norm": 0.2583133168152784,
"learning_rate": 1.2473794549266247e-05,
"loss": 0.2715,
"step": 1645
},
{
"epoch": 2.325088339222615,
"grad_norm": 0.2572922546813558,
"learning_rate": 1.2447589098532496e-05,
"loss": 0.2649,
"step": 1646
},
{
"epoch": 2.3265017667844523,
"grad_norm": 0.22122996521582272,
"learning_rate": 1.2421383647798743e-05,
"loss": 0.2596,
"step": 1647
},
{
"epoch": 2.3279151943462897,
"grad_norm": 0.24810016714801278,
"learning_rate": 1.239517819706499e-05,
"loss": 0.2522,
"step": 1648
},
{
"epoch": 2.329328621908127,
"grad_norm": 0.2506925727668531,
"learning_rate": 1.2368972746331238e-05,
"loss": 0.2594,
"step": 1649
},
{
"epoch": 2.330742049469965,
"grad_norm": 0.22831639639891502,
"learning_rate": 1.2342767295597485e-05,
"loss": 0.2631,
"step": 1650
},
{
"epoch": 2.3321554770318023,
"grad_norm": 0.2511228454416184,
"learning_rate": 1.2316561844863731e-05,
"loss": 0.2721,
"step": 1651
},
{
"epoch": 2.3335689045936396,
"grad_norm": 0.24207138224190264,
"learning_rate": 1.229035639412998e-05,
"loss": 0.2579,
"step": 1652
},
{
"epoch": 2.334982332155477,
"grad_norm": 0.2038582564822866,
"learning_rate": 1.2264150943396227e-05,
"loss": 0.2573,
"step": 1653
},
{
"epoch": 2.3363957597173144,
"grad_norm": 0.23298905603535858,
"learning_rate": 1.2237945492662473e-05,
"loss": 0.2776,
"step": 1654
},
{
"epoch": 2.3378091872791518,
"grad_norm": 0.260060097465976,
"learning_rate": 1.2211740041928722e-05,
"loss": 0.2685,
"step": 1655
},
{
"epoch": 2.3392226148409896,
"grad_norm": 0.22452817393203417,
"learning_rate": 1.2185534591194969e-05,
"loss": 0.2772,
"step": 1656
},
{
"epoch": 2.340636042402827,
"grad_norm": 0.23274085388639684,
"learning_rate": 1.2159329140461215e-05,
"loss": 0.2628,
"step": 1657
},
{
"epoch": 2.3420494699646643,
"grad_norm": 0.22835562525972522,
"learning_rate": 1.2133123689727464e-05,
"loss": 0.2713,
"step": 1658
},
{
"epoch": 2.3434628975265017,
"grad_norm": 0.20839549940183769,
"learning_rate": 1.210691823899371e-05,
"loss": 0.265,
"step": 1659
},
{
"epoch": 2.344876325088339,
"grad_norm": 0.21010641262250007,
"learning_rate": 1.2080712788259959e-05,
"loss": 0.2673,
"step": 1660
},
{
"epoch": 2.3462897526501765,
"grad_norm": 0.20901632530961425,
"learning_rate": 1.2054507337526206e-05,
"loss": 0.2564,
"step": 1661
},
{
"epoch": 2.3477031802120143,
"grad_norm": 0.23571515842336646,
"learning_rate": 1.2028301886792454e-05,
"loss": 0.2651,
"step": 1662
},
{
"epoch": 2.3491166077738517,
"grad_norm": 0.23034125737457478,
"learning_rate": 1.2002096436058701e-05,
"loss": 0.2759,
"step": 1663
},
{
"epoch": 2.350530035335689,
"grad_norm": 0.23666550980246828,
"learning_rate": 1.197589098532495e-05,
"loss": 0.2716,
"step": 1664
},
{
"epoch": 2.3519434628975264,
"grad_norm": 0.23859213090888923,
"learning_rate": 1.1949685534591196e-05,
"loss": 0.2664,
"step": 1665
},
{
"epoch": 2.353356890459364,
"grad_norm": 0.22806768651445264,
"learning_rate": 1.1923480083857443e-05,
"loss": 0.2738,
"step": 1666
},
{
"epoch": 2.3547703180212016,
"grad_norm": 0.24536175457480014,
"learning_rate": 1.1897274633123692e-05,
"loss": 0.2779,
"step": 1667
},
{
"epoch": 2.356183745583039,
"grad_norm": 0.2375459417849049,
"learning_rate": 1.1871069182389938e-05,
"loss": 0.2639,
"step": 1668
},
{
"epoch": 2.3575971731448764,
"grad_norm": 0.23057335927295924,
"learning_rate": 1.1844863731656185e-05,
"loss": 0.275,
"step": 1669
},
{
"epoch": 2.3590106007067138,
"grad_norm": 0.24666638224290255,
"learning_rate": 1.1818658280922434e-05,
"loss": 0.2782,
"step": 1670
},
{
"epoch": 2.360424028268551,
"grad_norm": 0.22278254236022305,
"learning_rate": 1.179245283018868e-05,
"loss": 0.2581,
"step": 1671
},
{
"epoch": 2.3618374558303885,
"grad_norm": 0.22403651829531246,
"learning_rate": 1.1766247379454927e-05,
"loss": 0.265,
"step": 1672
},
{
"epoch": 2.3632508833922263,
"grad_norm": 0.23912274035146625,
"learning_rate": 1.1740041928721176e-05,
"loss": 0.2512,
"step": 1673
},
{
"epoch": 2.3646643109540637,
"grad_norm": 0.2331046167972634,
"learning_rate": 1.1713836477987422e-05,
"loss": 0.2601,
"step": 1674
},
{
"epoch": 2.366077738515901,
"grad_norm": 0.22923465528843084,
"learning_rate": 1.1687631027253669e-05,
"loss": 0.2703,
"step": 1675
},
{
"epoch": 2.3674911660777385,
"grad_norm": 0.22572227313921225,
"learning_rate": 1.1661425576519918e-05,
"loss": 0.2638,
"step": 1676
},
{
"epoch": 2.368904593639576,
"grad_norm": 0.21867488899601809,
"learning_rate": 1.1635220125786164e-05,
"loss": 0.2538,
"step": 1677
},
{
"epoch": 2.370318021201413,
"grad_norm": 0.23281286479668084,
"learning_rate": 1.1609014675052411e-05,
"loss": 0.2703,
"step": 1678
},
{
"epoch": 2.371731448763251,
"grad_norm": 0.22064426490750158,
"learning_rate": 1.158280922431866e-05,
"loss": 0.2678,
"step": 1679
},
{
"epoch": 2.3731448763250884,
"grad_norm": 0.23620431448453355,
"learning_rate": 1.1556603773584906e-05,
"loss": 0.2732,
"step": 1680
},
{
"epoch": 2.374558303886926,
"grad_norm": 0.22903587838831707,
"learning_rate": 1.1530398322851153e-05,
"loss": 0.2628,
"step": 1681
},
{
"epoch": 2.375971731448763,
"grad_norm": 0.21846258026004597,
"learning_rate": 1.1504192872117402e-05,
"loss": 0.2691,
"step": 1682
},
{
"epoch": 2.3773851590106005,
"grad_norm": 0.20575656354186342,
"learning_rate": 1.1477987421383648e-05,
"loss": 0.2613,
"step": 1683
},
{
"epoch": 2.3787985865724384,
"grad_norm": 0.23209255292202438,
"learning_rate": 1.1451781970649895e-05,
"loss": 0.2838,
"step": 1684
},
{
"epoch": 2.3802120141342757,
"grad_norm": 0.25728641954961096,
"learning_rate": 1.1425576519916142e-05,
"loss": 0.2574,
"step": 1685
},
{
"epoch": 2.381625441696113,
"grad_norm": 0.21426023080150222,
"learning_rate": 1.139937106918239e-05,
"loss": 0.2608,
"step": 1686
},
{
"epoch": 2.3830388692579505,
"grad_norm": 0.21272310036325567,
"learning_rate": 1.1373165618448637e-05,
"loss": 0.2597,
"step": 1687
},
{
"epoch": 2.384452296819788,
"grad_norm": 0.23321976771422645,
"learning_rate": 1.1346960167714884e-05,
"loss": 0.2728,
"step": 1688
},
{
"epoch": 2.3858657243816253,
"grad_norm": 0.236890540555355,
"learning_rate": 1.1320754716981132e-05,
"loss": 0.265,
"step": 1689
},
{
"epoch": 2.387279151943463,
"grad_norm": 0.22223166769039798,
"learning_rate": 1.129454926624738e-05,
"loss": 0.2724,
"step": 1690
},
{
"epoch": 2.3886925795053005,
"grad_norm": 0.21388541399726127,
"learning_rate": 1.1268343815513626e-05,
"loss": 0.267,
"step": 1691
},
{
"epoch": 2.390106007067138,
"grad_norm": 0.21477422783906008,
"learning_rate": 1.1242138364779874e-05,
"loss": 0.2804,
"step": 1692
},
{
"epoch": 2.391519434628975,
"grad_norm": 0.23000936673014483,
"learning_rate": 1.1215932914046121e-05,
"loss": 0.2663,
"step": 1693
},
{
"epoch": 2.3929328621908126,
"grad_norm": 0.2394296534648666,
"learning_rate": 1.118972746331237e-05,
"loss": 0.2607,
"step": 1694
},
{
"epoch": 2.39434628975265,
"grad_norm": 0.2177745764572813,
"learning_rate": 1.1163522012578616e-05,
"loss": 0.2709,
"step": 1695
},
{
"epoch": 2.395759717314488,
"grad_norm": 0.21578501257202828,
"learning_rate": 1.1137316561844865e-05,
"loss": 0.275,
"step": 1696
},
{
"epoch": 2.397173144876325,
"grad_norm": 0.2558377198619558,
"learning_rate": 1.1111111111111112e-05,
"loss": 0.2558,
"step": 1697
},
{
"epoch": 2.3985865724381625,
"grad_norm": 0.24598097009615633,
"learning_rate": 1.108490566037736e-05,
"loss": 0.2696,
"step": 1698
},
{
"epoch": 2.4,
"grad_norm": 0.22111136200661738,
"learning_rate": 1.1058700209643607e-05,
"loss": 0.2658,
"step": 1699
},
{
"epoch": 2.4014134275618373,
"grad_norm": 0.21499943004616512,
"learning_rate": 1.1032494758909854e-05,
"loss": 0.2711,
"step": 1700
},
{
"epoch": 2.402826855123675,
"grad_norm": 0.2721375322575388,
"learning_rate": 1.1006289308176102e-05,
"loss": 0.2707,
"step": 1701
},
{
"epoch": 2.4042402826855125,
"grad_norm": 0.23976326845635612,
"learning_rate": 1.0980083857442349e-05,
"loss": 0.2637,
"step": 1702
},
{
"epoch": 2.40565371024735,
"grad_norm": 0.21443484104662702,
"learning_rate": 1.0953878406708596e-05,
"loss": 0.26,
"step": 1703
},
{
"epoch": 2.4070671378091872,
"grad_norm": 0.21007666888075263,
"learning_rate": 1.0927672955974844e-05,
"loss": 0.2563,
"step": 1704
},
{
"epoch": 2.4084805653710246,
"grad_norm": 0.2205515238439305,
"learning_rate": 1.0901467505241091e-05,
"loss": 0.2705,
"step": 1705
},
{
"epoch": 2.4098939929328624,
"grad_norm": 0.23107274766728914,
"learning_rate": 1.0875262054507338e-05,
"loss": 0.27,
"step": 1706
},
{
"epoch": 2.4113074204947,
"grad_norm": 0.20885494102050847,
"learning_rate": 1.0849056603773586e-05,
"loss": 0.2529,
"step": 1707
},
{
"epoch": 2.412720848056537,
"grad_norm": 0.2156778712856926,
"learning_rate": 1.0822851153039833e-05,
"loss": 0.2713,
"step": 1708
},
{
"epoch": 2.4141342756183746,
"grad_norm": 0.21726202441505,
"learning_rate": 1.079664570230608e-05,
"loss": 0.2606,
"step": 1709
},
{
"epoch": 2.415547703180212,
"grad_norm": 0.24937982548204907,
"learning_rate": 1.0770440251572328e-05,
"loss": 0.2757,
"step": 1710
},
{
"epoch": 2.4169611307420493,
"grad_norm": 0.2095091216085255,
"learning_rate": 1.0744234800838575e-05,
"loss": 0.2604,
"step": 1711
},
{
"epoch": 2.4183745583038867,
"grad_norm": 0.24400489131481568,
"learning_rate": 1.0718029350104822e-05,
"loss": 0.2886,
"step": 1712
},
{
"epoch": 2.4197879858657245,
"grad_norm": 0.22120437836032722,
"learning_rate": 1.069182389937107e-05,
"loss": 0.264,
"step": 1713
},
{
"epoch": 2.421201413427562,
"grad_norm": 0.23607490149597604,
"learning_rate": 1.0665618448637317e-05,
"loss": 0.2832,
"step": 1714
},
{
"epoch": 2.4226148409893993,
"grad_norm": 0.22186170864322313,
"learning_rate": 1.0639412997903564e-05,
"loss": 0.2684,
"step": 1715
},
{
"epoch": 2.4240282685512367,
"grad_norm": 0.1946529598247961,
"learning_rate": 1.0613207547169812e-05,
"loss": 0.2571,
"step": 1716
},
{
"epoch": 2.425441696113074,
"grad_norm": 0.21241779272232295,
"learning_rate": 1.0587002096436059e-05,
"loss": 0.2673,
"step": 1717
},
{
"epoch": 2.426855123674912,
"grad_norm": 0.21677725406274506,
"learning_rate": 1.0560796645702306e-05,
"loss": 0.2636,
"step": 1718
},
{
"epoch": 2.4282685512367492,
"grad_norm": 0.22880668055602282,
"learning_rate": 1.0534591194968554e-05,
"loss": 0.2796,
"step": 1719
},
{
"epoch": 2.4296819787985866,
"grad_norm": 0.20179965473614403,
"learning_rate": 1.0508385744234801e-05,
"loss": 0.2651,
"step": 1720
},
{
"epoch": 2.431095406360424,
"grad_norm": 0.20952542690034162,
"learning_rate": 1.0482180293501048e-05,
"loss": 0.2555,
"step": 1721
},
{
"epoch": 2.4325088339222614,
"grad_norm": 0.21784994159493387,
"learning_rate": 1.0455974842767296e-05,
"loss": 0.2697,
"step": 1722
},
{
"epoch": 2.433922261484099,
"grad_norm": 0.22002617994371085,
"learning_rate": 1.0429769392033543e-05,
"loss": 0.2603,
"step": 1723
},
{
"epoch": 2.4353356890459366,
"grad_norm": 0.20526590326516672,
"learning_rate": 1.040356394129979e-05,
"loss": 0.2745,
"step": 1724
},
{
"epoch": 2.436749116607774,
"grad_norm": 0.20443178743143417,
"learning_rate": 1.0377358490566038e-05,
"loss": 0.2662,
"step": 1725
},
{
"epoch": 2.4381625441696113,
"grad_norm": 0.22664568233477747,
"learning_rate": 1.0351153039832285e-05,
"loss": 0.259,
"step": 1726
},
{
"epoch": 2.4395759717314487,
"grad_norm": 0.2216629419476058,
"learning_rate": 1.0324947589098532e-05,
"loss": 0.2714,
"step": 1727
},
{
"epoch": 2.440989399293286,
"grad_norm": 0.2233489219093178,
"learning_rate": 1.029874213836478e-05,
"loss": 0.2717,
"step": 1728
},
{
"epoch": 2.4424028268551234,
"grad_norm": 0.22825587522378768,
"learning_rate": 1.0272536687631027e-05,
"loss": 0.2615,
"step": 1729
},
{
"epoch": 2.4438162544169613,
"grad_norm": 0.21983286628854004,
"learning_rate": 1.0246331236897275e-05,
"loss": 0.2698,
"step": 1730
},
{
"epoch": 2.4452296819787986,
"grad_norm": 0.21462396633131342,
"learning_rate": 1.0220125786163522e-05,
"loss": 0.266,
"step": 1731
},
{
"epoch": 2.446643109540636,
"grad_norm": 0.200143338057521,
"learning_rate": 1.019392033542977e-05,
"loss": 0.2519,
"step": 1732
},
{
"epoch": 2.4480565371024734,
"grad_norm": 0.21091922043186515,
"learning_rate": 1.0167714884696017e-05,
"loss": 0.2685,
"step": 1733
},
{
"epoch": 2.4494699646643108,
"grad_norm": 0.24480581552526975,
"learning_rate": 1.0141509433962266e-05,
"loss": 0.2854,
"step": 1734
},
{
"epoch": 2.4508833922261486,
"grad_norm": 0.21305583173290846,
"learning_rate": 1.0115303983228513e-05,
"loss": 0.2735,
"step": 1735
},
{
"epoch": 2.452296819787986,
"grad_norm": 0.20665209820461783,
"learning_rate": 1.008909853249476e-05,
"loss": 0.2726,
"step": 1736
},
{
"epoch": 2.4537102473498233,
"grad_norm": 0.2165737009978498,
"learning_rate": 1.0062893081761008e-05,
"loss": 0.2655,
"step": 1737
},
{
"epoch": 2.4551236749116607,
"grad_norm": 0.20716841677219086,
"learning_rate": 1.0036687631027255e-05,
"loss": 0.2719,
"step": 1738
},
{
"epoch": 2.456537102473498,
"grad_norm": 0.21544577280403304,
"learning_rate": 1.0010482180293501e-05,
"loss": 0.2809,
"step": 1739
},
{
"epoch": 2.457950530035336,
"grad_norm": 0.19980625951698952,
"learning_rate": 9.98427672955975e-06,
"loss": 0.2735,
"step": 1740
},
{
"epoch": 2.4593639575971733,
"grad_norm": 0.2054297205122213,
"learning_rate": 9.958071278825997e-06,
"loss": 0.2517,
"step": 1741
},
{
"epoch": 2.4607773851590107,
"grad_norm": 0.22193505437937164,
"learning_rate": 9.931865828092243e-06,
"loss": 0.2671,
"step": 1742
},
{
"epoch": 2.462190812720848,
"grad_norm": 0.2099148578103325,
"learning_rate": 9.905660377358492e-06,
"loss": 0.2753,
"step": 1743
},
{
"epoch": 2.4636042402826854,
"grad_norm": 0.20800956395860518,
"learning_rate": 9.879454926624739e-06,
"loss": 0.2676,
"step": 1744
},
{
"epoch": 2.465017667844523,
"grad_norm": 0.2136840613700657,
"learning_rate": 9.853249475890985e-06,
"loss": 0.2665,
"step": 1745
},
{
"epoch": 2.46643109540636,
"grad_norm": 0.21089234009647853,
"learning_rate": 9.827044025157234e-06,
"loss": 0.2721,
"step": 1746
},
{
"epoch": 2.467844522968198,
"grad_norm": 0.23020962079164198,
"learning_rate": 9.80083857442348e-06,
"loss": 0.2742,
"step": 1747
},
{
"epoch": 2.4692579505300354,
"grad_norm": 0.21393017396144964,
"learning_rate": 9.774633123689727e-06,
"loss": 0.2671,
"step": 1748
},
{
"epoch": 2.4706713780918728,
"grad_norm": 0.38827781746884343,
"learning_rate": 9.748427672955976e-06,
"loss": 0.2727,
"step": 1749
},
{
"epoch": 2.47208480565371,
"grad_norm": 0.2236921661727747,
"learning_rate": 9.722222222222223e-06,
"loss": 0.2602,
"step": 1750
},
{
"epoch": 2.4734982332155475,
"grad_norm": 0.2464022741010232,
"learning_rate": 9.69601677148847e-06,
"loss": 0.2826,
"step": 1751
},
{
"epoch": 2.4749116607773853,
"grad_norm": 0.19817717991020808,
"learning_rate": 9.669811320754718e-06,
"loss": 0.2666,
"step": 1752
},
{
"epoch": 2.4763250883392227,
"grad_norm": 0.21077812183859168,
"learning_rate": 9.643605870020965e-06,
"loss": 0.2507,
"step": 1753
},
{
"epoch": 2.47773851590106,
"grad_norm": 0.2300539577507736,
"learning_rate": 9.617400419287211e-06,
"loss": 0.2671,
"step": 1754
},
{
"epoch": 2.4791519434628975,
"grad_norm": 0.2296862139382608,
"learning_rate": 9.59119496855346e-06,
"loss": 0.2738,
"step": 1755
},
{
"epoch": 2.480565371024735,
"grad_norm": 0.2037100488520109,
"learning_rate": 9.564989517819707e-06,
"loss": 0.2691,
"step": 1756
},
{
"epoch": 2.4819787985865727,
"grad_norm": 0.21787792814801407,
"learning_rate": 9.538784067085953e-06,
"loss": 0.2577,
"step": 1757
},
{
"epoch": 2.48339222614841,
"grad_norm": 0.2182173539542276,
"learning_rate": 9.512578616352202e-06,
"loss": 0.2699,
"step": 1758
},
{
"epoch": 2.4848056537102474,
"grad_norm": 0.20036275666817438,
"learning_rate": 9.486373165618449e-06,
"loss": 0.2478,
"step": 1759
},
{
"epoch": 2.486219081272085,
"grad_norm": 0.2218908339001936,
"learning_rate": 9.460167714884695e-06,
"loss": 0.2577,
"step": 1760
},
{
"epoch": 2.487632508833922,
"grad_norm": 0.22714078841557467,
"learning_rate": 9.433962264150944e-06,
"loss": 0.278,
"step": 1761
},
{
"epoch": 2.4890459363957596,
"grad_norm": 0.21612929713119913,
"learning_rate": 9.40775681341719e-06,
"loss": 0.2655,
"step": 1762
},
{
"epoch": 2.490459363957597,
"grad_norm": 0.21673872393881016,
"learning_rate": 9.38155136268344e-06,
"loss": 0.2737,
"step": 1763
},
{
"epoch": 2.4918727915194347,
"grad_norm": 0.20437976951355732,
"learning_rate": 9.355345911949686e-06,
"loss": 0.2458,
"step": 1764
},
{
"epoch": 2.493286219081272,
"grad_norm": 0.2317796242536716,
"learning_rate": 9.329140461215934e-06,
"loss": 0.2697,
"step": 1765
},
{
"epoch": 2.4946996466431095,
"grad_norm": 0.21851620824225462,
"learning_rate": 9.302935010482181e-06,
"loss": 0.2519,
"step": 1766
},
{
"epoch": 2.496113074204947,
"grad_norm": 0.2149470073898198,
"learning_rate": 9.276729559748428e-06,
"loss": 0.2587,
"step": 1767
},
{
"epoch": 2.4975265017667843,
"grad_norm": 0.20561613927841232,
"learning_rate": 9.250524109014676e-06,
"loss": 0.2585,
"step": 1768
},
{
"epoch": 2.498939929328622,
"grad_norm": 0.21483536945843965,
"learning_rate": 9.224318658280923e-06,
"loss": 0.2671,
"step": 1769
},
{
"epoch": 2.5003533568904595,
"grad_norm": 0.22757547477497836,
"learning_rate": 9.19811320754717e-06,
"loss": 0.2787,
"step": 1770
},
{
"epoch": 2.501766784452297,
"grad_norm": 0.21986090804597658,
"learning_rate": 9.171907756813418e-06,
"loss": 0.2609,
"step": 1771
},
{
"epoch": 2.503180212014134,
"grad_norm": 0.22990485422632295,
"learning_rate": 9.145702306079665e-06,
"loss": 0.2818,
"step": 1772
},
{
"epoch": 2.5045936395759716,
"grad_norm": 0.22022907018421908,
"learning_rate": 9.119496855345912e-06,
"loss": 0.2654,
"step": 1773
},
{
"epoch": 2.5060070671378094,
"grad_norm": 0.2344353672469287,
"learning_rate": 9.09329140461216e-06,
"loss": 0.2795,
"step": 1774
},
{
"epoch": 2.507420494699647,
"grad_norm": 0.23588167151414127,
"learning_rate": 9.067085953878407e-06,
"loss": 0.2556,
"step": 1775
},
{
"epoch": 2.508833922261484,
"grad_norm": 0.2277574279711283,
"learning_rate": 9.040880503144654e-06,
"loss": 0.2612,
"step": 1776
},
{
"epoch": 2.5102473498233215,
"grad_norm": 0.22657116385257703,
"learning_rate": 9.014675052410902e-06,
"loss": 0.2913,
"step": 1777
},
{
"epoch": 2.511660777385159,
"grad_norm": 0.21277577889083146,
"learning_rate": 8.98846960167715e-06,
"loss": 0.2633,
"step": 1778
},
{
"epoch": 2.5130742049469967,
"grad_norm": 0.21039230449086796,
"learning_rate": 8.962264150943396e-06,
"loss": 0.2699,
"step": 1779
},
{
"epoch": 2.5144876325088337,
"grad_norm": 0.22593009268629313,
"learning_rate": 8.936058700209644e-06,
"loss": 0.2713,
"step": 1780
},
{
"epoch": 2.5159010600706715,
"grad_norm": 0.2356794520586271,
"learning_rate": 8.909853249475891e-06,
"loss": 0.2596,
"step": 1781
},
{
"epoch": 2.517314487632509,
"grad_norm": 0.22060921655240312,
"learning_rate": 8.883647798742138e-06,
"loss": 0.2608,
"step": 1782
},
{
"epoch": 2.5187279151943462,
"grad_norm": 0.23789918238320912,
"learning_rate": 8.857442348008386e-06,
"loss": 0.2902,
"step": 1783
},
{
"epoch": 2.5201413427561836,
"grad_norm": 0.2149434376358611,
"learning_rate": 8.831236897274633e-06,
"loss": 0.2566,
"step": 1784
},
{
"epoch": 2.521554770318021,
"grad_norm": 0.24534292909778133,
"learning_rate": 8.80503144654088e-06,
"loss": 0.2758,
"step": 1785
},
{
"epoch": 2.522968197879859,
"grad_norm": 0.24225682521751427,
"learning_rate": 8.778825995807128e-06,
"loss": 0.2755,
"step": 1786
},
{
"epoch": 2.524381625441696,
"grad_norm": 0.2358131704787096,
"learning_rate": 8.752620545073375e-06,
"loss": 0.2619,
"step": 1787
},
{
"epoch": 2.5257950530035336,
"grad_norm": 0.2633229331238676,
"learning_rate": 8.726415094339622e-06,
"loss": 0.2681,
"step": 1788
},
{
"epoch": 2.527208480565371,
"grad_norm": 0.23179193705979026,
"learning_rate": 8.70020964360587e-06,
"loss": 0.2695,
"step": 1789
},
{
"epoch": 2.5286219081272083,
"grad_norm": 0.2213412168151524,
"learning_rate": 8.674004192872117e-06,
"loss": 0.2524,
"step": 1790
},
{
"epoch": 2.530035335689046,
"grad_norm": 0.2179589160901368,
"learning_rate": 8.647798742138364e-06,
"loss": 0.2668,
"step": 1791
},
{
"epoch": 2.5314487632508835,
"grad_norm": 0.24435300916695687,
"learning_rate": 8.621593291404612e-06,
"loss": 0.2485,
"step": 1792
},
{
"epoch": 2.532862190812721,
"grad_norm": 0.2379382682074806,
"learning_rate": 8.59538784067086e-06,
"loss": 0.2698,
"step": 1793
},
{
"epoch": 2.5342756183745583,
"grad_norm": 0.20740414870140808,
"learning_rate": 8.569182389937106e-06,
"loss": 0.2613,
"step": 1794
},
{
"epoch": 2.5356890459363957,
"grad_norm": 0.22746599997419034,
"learning_rate": 8.542976939203354e-06,
"loss": 0.298,
"step": 1795
},
{
"epoch": 2.5371024734982335,
"grad_norm": 0.2240952599219384,
"learning_rate": 8.516771488469601e-06,
"loss": 0.2583,
"step": 1796
},
{
"epoch": 2.5385159010600704,
"grad_norm": 0.24228914328327195,
"learning_rate": 8.49056603773585e-06,
"loss": 0.2741,
"step": 1797
},
{
"epoch": 2.5399293286219082,
"grad_norm": 0.24402699178333428,
"learning_rate": 8.464360587002096e-06,
"loss": 0.2635,
"step": 1798
},
{
"epoch": 2.5413427561837456,
"grad_norm": 0.20565587615322473,
"learning_rate": 8.438155136268345e-06,
"loss": 0.2585,
"step": 1799
},
{
"epoch": 2.542756183745583,
"grad_norm": 0.22031484815985722,
"learning_rate": 8.411949685534592e-06,
"loss": 0.2782,
"step": 1800
},
{
"epoch": 2.5441696113074204,
"grad_norm": 0.21044577562907993,
"learning_rate": 8.38574423480084e-06,
"loss": 0.2584,
"step": 1801
},
{
"epoch": 2.5455830388692577,
"grad_norm": 0.22755142283179605,
"learning_rate": 8.359538784067087e-06,
"loss": 0.2732,
"step": 1802
},
{
"epoch": 2.5469964664310956,
"grad_norm": 0.22700566300932096,
"learning_rate": 8.333333333333334e-06,
"loss": 0.2786,
"step": 1803
},
{
"epoch": 2.548409893992933,
"grad_norm": 0.18978814912771402,
"learning_rate": 8.307127882599582e-06,
"loss": 0.245,
"step": 1804
},
{
"epoch": 2.5498233215547703,
"grad_norm": 0.21430942980154732,
"learning_rate": 8.280922431865829e-06,
"loss": 0.2612,
"step": 1805
},
{
"epoch": 2.5512367491166077,
"grad_norm": 0.22577650179084377,
"learning_rate": 8.254716981132076e-06,
"loss": 0.2578,
"step": 1806
},
{
"epoch": 2.552650176678445,
"grad_norm": 0.21140898795246693,
"learning_rate": 8.228511530398324e-06,
"loss": 0.2582,
"step": 1807
},
{
"epoch": 2.554063604240283,
"grad_norm": 0.19788365263156718,
"learning_rate": 8.202306079664571e-06,
"loss": 0.2712,
"step": 1808
},
{
"epoch": 2.5554770318021203,
"grad_norm": 0.1992555377321615,
"learning_rate": 8.176100628930818e-06,
"loss": 0.2549,
"step": 1809
},
{
"epoch": 2.5568904593639576,
"grad_norm": 0.2126210896633446,
"learning_rate": 8.149895178197066e-06,
"loss": 0.2748,
"step": 1810
},
{
"epoch": 2.558303886925795,
"grad_norm": 0.2064665682259444,
"learning_rate": 8.123689727463313e-06,
"loss": 0.2722,
"step": 1811
},
{
"epoch": 2.5597173144876324,
"grad_norm": 0.2081461497360593,
"learning_rate": 8.09748427672956e-06,
"loss": 0.2697,
"step": 1812
},
{
"epoch": 2.56113074204947,
"grad_norm": 0.22280487435979748,
"learning_rate": 8.071278825995808e-06,
"loss": 0.2786,
"step": 1813
},
{
"epoch": 2.562544169611307,
"grad_norm": 0.19586485360629918,
"learning_rate": 8.045073375262055e-06,
"loss": 0.2586,
"step": 1814
},
{
"epoch": 2.563957597173145,
"grad_norm": 0.20968732205106036,
"learning_rate": 8.018867924528302e-06,
"loss": 0.2647,
"step": 1815
},
{
"epoch": 2.5653710247349824,
"grad_norm": 0.1978775069226037,
"learning_rate": 7.99266247379455e-06,
"loss": 0.2559,
"step": 1816
},
{
"epoch": 2.5667844522968197,
"grad_norm": 0.20389492108394006,
"learning_rate": 7.966457023060797e-06,
"loss": 0.268,
"step": 1817
},
{
"epoch": 2.568197879858657,
"grad_norm": 0.19599195247499532,
"learning_rate": 7.940251572327044e-06,
"loss": 0.274,
"step": 1818
},
{
"epoch": 2.5696113074204945,
"grad_norm": 0.22009193827922027,
"learning_rate": 7.914046121593292e-06,
"loss": 0.2705,
"step": 1819
},
{
"epoch": 2.5710247349823323,
"grad_norm": 0.20151418945619484,
"learning_rate": 7.887840670859539e-06,
"loss": 0.2739,
"step": 1820
},
{
"epoch": 2.5724381625441697,
"grad_norm": 0.2106113674126261,
"learning_rate": 7.861635220125786e-06,
"loss": 0.2548,
"step": 1821
},
{
"epoch": 2.573851590106007,
"grad_norm": 0.21132749307062396,
"learning_rate": 7.835429769392034e-06,
"loss": 0.2575,
"step": 1822
},
{
"epoch": 2.5752650176678444,
"grad_norm": 0.20648996106372422,
"learning_rate": 7.809224318658281e-06,
"loss": 0.2589,
"step": 1823
},
{
"epoch": 2.576678445229682,
"grad_norm": 0.19425214691086712,
"learning_rate": 7.783018867924528e-06,
"loss": 0.2674,
"step": 1824
},
{
"epoch": 2.5780918727915196,
"grad_norm": 0.20482886281004895,
"learning_rate": 7.756813417190776e-06,
"loss": 0.2561,
"step": 1825
},
{
"epoch": 2.579505300353357,
"grad_norm": 0.19525440024670962,
"learning_rate": 7.730607966457023e-06,
"loss": 0.2632,
"step": 1826
},
{
"epoch": 2.5809187279151944,
"grad_norm": 0.2147153303587213,
"learning_rate": 7.70440251572327e-06,
"loss": 0.2684,
"step": 1827
},
{
"epoch": 2.5823321554770318,
"grad_norm": 0.20835434663220231,
"learning_rate": 7.678197064989518e-06,
"loss": 0.2625,
"step": 1828
},
{
"epoch": 2.583745583038869,
"grad_norm": 0.1959009950689501,
"learning_rate": 7.651991614255765e-06,
"loss": 0.2722,
"step": 1829
},
{
"epoch": 2.585159010600707,
"grad_norm": 0.19686212143163162,
"learning_rate": 7.625786163522013e-06,
"loss": 0.2519,
"step": 1830
},
{
"epoch": 2.586572438162544,
"grad_norm": 0.1915225723253146,
"learning_rate": 7.599580712788261e-06,
"loss": 0.2528,
"step": 1831
},
{
"epoch": 2.5879858657243817,
"grad_norm": 0.20565989519980726,
"learning_rate": 7.573375262054508e-06,
"loss": 0.2605,
"step": 1832
},
{
"epoch": 2.589399293286219,
"grad_norm": 0.21754553955647166,
"learning_rate": 7.547169811320755e-06,
"loss": 0.2714,
"step": 1833
},
{
"epoch": 2.5908127208480565,
"grad_norm": 0.21773739253591498,
"learning_rate": 7.520964360587003e-06,
"loss": 0.2697,
"step": 1834
},
{
"epoch": 2.592226148409894,
"grad_norm": 0.20123763461046912,
"learning_rate": 7.49475890985325e-06,
"loss": 0.2711,
"step": 1835
},
{
"epoch": 2.5936395759717312,
"grad_norm": 0.19777113107871222,
"learning_rate": 7.468553459119497e-06,
"loss": 0.2626,
"step": 1836
},
{
"epoch": 2.595053003533569,
"grad_norm": 0.2205681460768372,
"learning_rate": 7.442348008385745e-06,
"loss": 0.2656,
"step": 1837
},
{
"epoch": 2.5964664310954064,
"grad_norm": 0.20030879091733,
"learning_rate": 7.416142557651992e-06,
"loss": 0.2671,
"step": 1838
},
{
"epoch": 2.597879858657244,
"grad_norm": 0.1938129448137822,
"learning_rate": 7.3899371069182395e-06,
"loss": 0.2573,
"step": 1839
},
{
"epoch": 2.599293286219081,
"grad_norm": 0.22512846963720023,
"learning_rate": 7.363731656184487e-06,
"loss": 0.254,
"step": 1840
},
{
"epoch": 2.6007067137809186,
"grad_norm": 0.2166930368291344,
"learning_rate": 7.337526205450735e-06,
"loss": 0.2685,
"step": 1841
},
{
"epoch": 2.6021201413427564,
"grad_norm": 0.20285400855961547,
"learning_rate": 7.3113207547169815e-06,
"loss": 0.2718,
"step": 1842
},
{
"epoch": 2.6035335689045938,
"grad_norm": 0.1972879208234158,
"learning_rate": 7.285115303983228e-06,
"loss": 0.2501,
"step": 1843
},
{
"epoch": 2.604946996466431,
"grad_norm": 0.20245655010460528,
"learning_rate": 7.258909853249477e-06,
"loss": 0.2488,
"step": 1844
},
{
"epoch": 2.6063604240282685,
"grad_norm": 0.21185584568491958,
"learning_rate": 7.2327044025157235e-06,
"loss": 0.2657,
"step": 1845
},
{
"epoch": 2.607773851590106,
"grad_norm": 0.20316331266857923,
"learning_rate": 7.20649895178197e-06,
"loss": 0.268,
"step": 1846
},
{
"epoch": 2.6091872791519437,
"grad_norm": 0.19054430834745423,
"learning_rate": 7.180293501048219e-06,
"loss": 0.2668,
"step": 1847
},
{
"epoch": 2.610600706713781,
"grad_norm": 0.2050948125385338,
"learning_rate": 7.1540880503144655e-06,
"loss": 0.2641,
"step": 1848
},
{
"epoch": 2.6120141342756185,
"grad_norm": 0.20448945551665926,
"learning_rate": 7.127882599580712e-06,
"loss": 0.2693,
"step": 1849
},
{
"epoch": 2.613427561837456,
"grad_norm": 0.2118130321077605,
"learning_rate": 7.101677148846961e-06,
"loss": 0.2739,
"step": 1850
},
{
"epoch": 2.614840989399293,
"grad_norm": 0.2185158546757277,
"learning_rate": 7.0754716981132075e-06,
"loss": 0.2703,
"step": 1851
},
{
"epoch": 2.6162544169611306,
"grad_norm": 0.2042228402908851,
"learning_rate": 7.049266247379454e-06,
"loss": 0.2599,
"step": 1852
},
{
"epoch": 2.617667844522968,
"grad_norm": 0.19186169278927617,
"learning_rate": 7.023060796645703e-06,
"loss": 0.2581,
"step": 1853
},
{
"epoch": 2.619081272084806,
"grad_norm": 0.23479495376225484,
"learning_rate": 6.9968553459119495e-06,
"loss": 0.2692,
"step": 1854
},
{
"epoch": 2.620494699646643,
"grad_norm": 0.20449950938804232,
"learning_rate": 6.970649895178197e-06,
"loss": 0.2656,
"step": 1855
},
{
"epoch": 2.6219081272084805,
"grad_norm": 0.1980960298319723,
"learning_rate": 6.944444444444445e-06,
"loss": 0.2554,
"step": 1856
},
{
"epoch": 2.623321554770318,
"grad_norm": 0.2085579324617458,
"learning_rate": 6.918238993710692e-06,
"loss": 0.2709,
"step": 1857
},
{
"epoch": 2.6247349823321553,
"grad_norm": 0.2086243859885124,
"learning_rate": 6.892033542976939e-06,
"loss": 0.2828,
"step": 1858
},
{
"epoch": 2.626148409893993,
"grad_norm": 0.21042015597629446,
"learning_rate": 6.865828092243188e-06,
"loss": 0.2785,
"step": 1859
},
{
"epoch": 2.6275618374558305,
"grad_norm": 0.21201991872125664,
"learning_rate": 6.839622641509434e-06,
"loss": 0.2674,
"step": 1860
},
{
"epoch": 2.628975265017668,
"grad_norm": 0.19756520443061665,
"learning_rate": 6.813417190775681e-06,
"loss": 0.2532,
"step": 1861
},
{
"epoch": 2.6303886925795052,
"grad_norm": 0.20966171463683306,
"learning_rate": 6.78721174004193e-06,
"loss": 0.2815,
"step": 1862
},
{
"epoch": 2.6318021201413426,
"grad_norm": 0.19978232161248297,
"learning_rate": 6.761006289308176e-06,
"loss": 0.2553,
"step": 1863
},
{
"epoch": 2.6332155477031804,
"grad_norm": 0.2070655211235095,
"learning_rate": 6.734800838574423e-06,
"loss": 0.2611,
"step": 1864
},
{
"epoch": 2.634628975265018,
"grad_norm": 0.20716884921827003,
"learning_rate": 6.708595387840672e-06,
"loss": 0.2538,
"step": 1865
},
{
"epoch": 2.636042402826855,
"grad_norm": 0.1896625359034186,
"learning_rate": 6.682389937106918e-06,
"loss": 0.246,
"step": 1866
},
{
"epoch": 2.6374558303886926,
"grad_norm": 0.225326785736261,
"learning_rate": 6.656184486373165e-06,
"loss": 0.2775,
"step": 1867
},
{
"epoch": 2.63886925795053,
"grad_norm": 0.2016266819036545,
"learning_rate": 6.629979035639414e-06,
"loss": 0.2651,
"step": 1868
},
{
"epoch": 2.6402826855123673,
"grad_norm": 0.1888785108609737,
"learning_rate": 6.60377358490566e-06,
"loss": 0.2641,
"step": 1869
},
{
"epoch": 2.6416961130742047,
"grad_norm": 0.19724826845034657,
"learning_rate": 6.577568134171907e-06,
"loss": 0.2829,
"step": 1870
},
{
"epoch": 2.6431095406360425,
"grad_norm": 0.21712890407985966,
"learning_rate": 6.551362683438156e-06,
"loss": 0.2594,
"step": 1871
},
{
"epoch": 2.64452296819788,
"grad_norm": 0.20267881947023542,
"learning_rate": 6.5251572327044024e-06,
"loss": 0.2625,
"step": 1872
},
{
"epoch": 2.6459363957597173,
"grad_norm": 0.2902256833253672,
"learning_rate": 6.49895178197065e-06,
"loss": 0.2834,
"step": 1873
},
{
"epoch": 2.6473498233215547,
"grad_norm": 0.2080101267413167,
"learning_rate": 6.472746331236898e-06,
"loss": 0.2675,
"step": 1874
},
{
"epoch": 2.648763250883392,
"grad_norm": 0.1965414806644538,
"learning_rate": 6.446540880503145e-06,
"loss": 0.2391,
"step": 1875
},
{
"epoch": 2.65017667844523,
"grad_norm": 0.2171007960819311,
"learning_rate": 6.420335429769392e-06,
"loss": 0.2622,
"step": 1876
},
{
"epoch": 2.6515901060070672,
"grad_norm": 0.21212680603638742,
"learning_rate": 6.3941299790356405e-06,
"loss": 0.2745,
"step": 1877
},
{
"epoch": 2.6530035335689046,
"grad_norm": 0.20604551549453892,
"learning_rate": 6.367924528301887e-06,
"loss": 0.2666,
"step": 1878
},
{
"epoch": 2.654416961130742,
"grad_norm": 0.20923101680683912,
"learning_rate": 6.341719077568134e-06,
"loss": 0.2685,
"step": 1879
},
{
"epoch": 2.6558303886925794,
"grad_norm": 0.1993289474395926,
"learning_rate": 6.3155136268343825e-06,
"loss": 0.2703,
"step": 1880
},
{
"epoch": 2.657243816254417,
"grad_norm": 0.2152068442476094,
"learning_rate": 6.289308176100629e-06,
"loss": 0.2511,
"step": 1881
},
{
"epoch": 2.6586572438162546,
"grad_norm": 0.20584799840460835,
"learning_rate": 6.263102725366876e-06,
"loss": 0.2578,
"step": 1882
},
{
"epoch": 2.660070671378092,
"grad_norm": 0.1926939020776196,
"learning_rate": 6.236897274633124e-06,
"loss": 0.2578,
"step": 1883
},
{
"epoch": 2.6614840989399293,
"grad_norm": 0.19585747815915988,
"learning_rate": 6.210691823899371e-06,
"loss": 0.2472,
"step": 1884
},
{
"epoch": 2.6628975265017667,
"grad_norm": 0.21063296298953596,
"learning_rate": 6.184486373165619e-06,
"loss": 0.276,
"step": 1885
},
{
"epoch": 2.664310954063604,
"grad_norm": 0.1965084465968782,
"learning_rate": 6.158280922431866e-06,
"loss": 0.2542,
"step": 1886
},
{
"epoch": 2.6657243816254415,
"grad_norm": 0.1913442967836528,
"learning_rate": 6.132075471698113e-06,
"loss": 0.2666,
"step": 1887
},
{
"epoch": 2.6671378091872793,
"grad_norm": 0.21350970127058508,
"learning_rate": 6.105870020964361e-06,
"loss": 0.286,
"step": 1888
},
{
"epoch": 2.6685512367491167,
"grad_norm": 0.1959841914151124,
"learning_rate": 6.079664570230608e-06,
"loss": 0.2636,
"step": 1889
},
{
"epoch": 2.669964664310954,
"grad_norm": 0.1886168669314577,
"learning_rate": 6.053459119496855e-06,
"loss": 0.2539,
"step": 1890
},
{
"epoch": 2.6713780918727914,
"grad_norm": 0.1938457634540867,
"learning_rate": 6.027253668763103e-06,
"loss": 0.2629,
"step": 1891
},
{
"epoch": 2.672791519434629,
"grad_norm": 0.19064526816578825,
"learning_rate": 6.0010482180293506e-06,
"loss": 0.2608,
"step": 1892
},
{
"epoch": 2.6742049469964666,
"grad_norm": 0.1913977377832736,
"learning_rate": 5.974842767295598e-06,
"loss": 0.2739,
"step": 1893
},
{
"epoch": 2.675618374558304,
"grad_norm": 0.1913060097668888,
"learning_rate": 5.948637316561846e-06,
"loss": 0.2627,
"step": 1894
},
{
"epoch": 2.6770318021201414,
"grad_norm": 0.19959558525097545,
"learning_rate": 5.9224318658280926e-06,
"loss": 0.2649,
"step": 1895
},
{
"epoch": 2.6784452296819787,
"grad_norm": 0.21225443301286062,
"learning_rate": 5.89622641509434e-06,
"loss": 0.2843,
"step": 1896
},
{
"epoch": 2.679858657243816,
"grad_norm": 0.2052750432720131,
"learning_rate": 5.870020964360588e-06,
"loss": 0.2674,
"step": 1897
},
{
"epoch": 2.681272084805654,
"grad_norm": 0.18915385290536685,
"learning_rate": 5.8438155136268346e-06,
"loss": 0.2568,
"step": 1898
},
{
"epoch": 2.6826855123674913,
"grad_norm": 0.20344412619826882,
"learning_rate": 5.817610062893082e-06,
"loss": 0.2691,
"step": 1899
},
{
"epoch": 2.6840989399293287,
"grad_norm": 0.20022764185553849,
"learning_rate": 5.79140461215933e-06,
"loss": 0.2674,
"step": 1900
},
{
"epoch": 2.685512367491166,
"grad_norm": 0.2064279252781221,
"learning_rate": 5.7651991614255766e-06,
"loss": 0.2559,
"step": 1901
},
{
"epoch": 2.6869257950530034,
"grad_norm": 0.21220373016491145,
"learning_rate": 5.738993710691824e-06,
"loss": 0.266,
"step": 1902
},
{
"epoch": 2.6883392226148413,
"grad_norm": 0.21319699245406062,
"learning_rate": 5.712788259958071e-06,
"loss": 0.2708,
"step": 1903
},
{
"epoch": 2.689752650176678,
"grad_norm": 0.22338454155283038,
"learning_rate": 5.686582809224319e-06,
"loss": 0.2689,
"step": 1904
},
{
"epoch": 2.691166077738516,
"grad_norm": 0.1962175940983197,
"learning_rate": 5.660377358490566e-06,
"loss": 0.2571,
"step": 1905
},
{
"epoch": 2.6925795053003534,
"grad_norm": 0.22255571257866014,
"learning_rate": 5.634171907756813e-06,
"loss": 0.2693,
"step": 1906
},
{
"epoch": 2.6939929328621908,
"grad_norm": 0.22871295058874744,
"learning_rate": 5.607966457023061e-06,
"loss": 0.2734,
"step": 1907
},
{
"epoch": 2.695406360424028,
"grad_norm": 0.20031307950386681,
"learning_rate": 5.581761006289308e-06,
"loss": 0.2585,
"step": 1908
},
{
"epoch": 2.6968197879858655,
"grad_norm": 0.23051372397619532,
"learning_rate": 5.555555555555556e-06,
"loss": 0.2701,
"step": 1909
},
{
"epoch": 2.6982332155477033,
"grad_norm": 0.20238582673271824,
"learning_rate": 5.5293501048218034e-06,
"loss": 0.2613,
"step": 1910
},
{
"epoch": 2.6996466431095407,
"grad_norm": 0.20179379679291706,
"learning_rate": 5.503144654088051e-06,
"loss": 0.2783,
"step": 1911
},
{
"epoch": 2.701060070671378,
"grad_norm": 0.21018615600649335,
"learning_rate": 5.476939203354298e-06,
"loss": 0.2606,
"step": 1912
},
{
"epoch": 2.7024734982332155,
"grad_norm": 0.217763395306205,
"learning_rate": 5.4507337526205454e-06,
"loss": 0.2853,
"step": 1913
},
{
"epoch": 2.703886925795053,
"grad_norm": 0.21167399357279656,
"learning_rate": 5.424528301886793e-06,
"loss": 0.2582,
"step": 1914
},
{
"epoch": 2.7053003533568907,
"grad_norm": 0.21474394784700176,
"learning_rate": 5.39832285115304e-06,
"loss": 0.2532,
"step": 1915
},
{
"epoch": 2.706713780918728,
"grad_norm": 0.19907029731035414,
"learning_rate": 5.3721174004192875e-06,
"loss": 0.2711,
"step": 1916
},
{
"epoch": 2.7081272084805654,
"grad_norm": 0.20061920243237963,
"learning_rate": 5.345911949685535e-06,
"loss": 0.2661,
"step": 1917
},
{
"epoch": 2.709540636042403,
"grad_norm": 0.20622697301155962,
"learning_rate": 5.319706498951782e-06,
"loss": 0.2712,
"step": 1918
},
{
"epoch": 2.71095406360424,
"grad_norm": 0.1959469863405829,
"learning_rate": 5.2935010482180295e-06,
"loss": 0.2608,
"step": 1919
},
{
"epoch": 2.712367491166078,
"grad_norm": 0.19852239377622755,
"learning_rate": 5.267295597484277e-06,
"loss": 0.2481,
"step": 1920
},
{
"epoch": 2.713780918727915,
"grad_norm": 0.20921522786702557,
"learning_rate": 5.241090146750524e-06,
"loss": 0.269,
"step": 1921
},
{
"epoch": 2.7151943462897528,
"grad_norm": 0.19829093891149097,
"learning_rate": 5.2148846960167715e-06,
"loss": 0.2651,
"step": 1922
},
{
"epoch": 2.71660777385159,
"grad_norm": 0.20394610893144713,
"learning_rate": 5.188679245283019e-06,
"loss": 0.2632,
"step": 1923
},
{
"epoch": 2.7180212014134275,
"grad_norm": 0.2119456104960267,
"learning_rate": 5.162473794549266e-06,
"loss": 0.2782,
"step": 1924
},
{
"epoch": 2.719434628975265,
"grad_norm": 0.19569749650979637,
"learning_rate": 5.1362683438155135e-06,
"loss": 0.2763,
"step": 1925
},
{
"epoch": 2.7208480565371023,
"grad_norm": 0.20253336727134952,
"learning_rate": 5.110062893081761e-06,
"loss": 0.2683,
"step": 1926
},
{
"epoch": 2.72226148409894,
"grad_norm": 0.19915784908808343,
"learning_rate": 5.083857442348009e-06,
"loss": 0.2718,
"step": 1927
},
{
"epoch": 2.7236749116607775,
"grad_norm": 0.19948078360374177,
"learning_rate": 5.057651991614256e-06,
"loss": 0.2535,
"step": 1928
},
{
"epoch": 2.725088339222615,
"grad_norm": 0.21552830350137284,
"learning_rate": 5.031446540880504e-06,
"loss": 0.2623,
"step": 1929
},
{
"epoch": 2.726501766784452,
"grad_norm": 0.20368657816479266,
"learning_rate": 5.005241090146751e-06,
"loss": 0.2661,
"step": 1930
},
{
"epoch": 2.7279151943462896,
"grad_norm": 0.20478192141320492,
"learning_rate": 4.979035639412998e-06,
"loss": 0.2624,
"step": 1931
},
{
"epoch": 2.7293286219081274,
"grad_norm": 0.20253837486186402,
"learning_rate": 4.952830188679246e-06,
"loss": 0.2591,
"step": 1932
},
{
"epoch": 2.730742049469965,
"grad_norm": 0.18634802439249204,
"learning_rate": 4.926624737945493e-06,
"loss": 0.2582,
"step": 1933
},
{
"epoch": 2.732155477031802,
"grad_norm": 0.20988647108751646,
"learning_rate": 4.90041928721174e-06,
"loss": 0.2549,
"step": 1934
},
{
"epoch": 2.7335689045936395,
"grad_norm": 0.23628926779817028,
"learning_rate": 4.874213836477988e-06,
"loss": 0.2744,
"step": 1935
},
{
"epoch": 2.734982332155477,
"grad_norm": 0.2115267850163653,
"learning_rate": 4.848008385744235e-06,
"loss": 0.2661,
"step": 1936
},
{
"epoch": 2.7363957597173147,
"grad_norm": 0.2022987516598711,
"learning_rate": 4.821802935010482e-06,
"loss": 0.2864,
"step": 1937
},
{
"epoch": 2.7378091872791517,
"grad_norm": 0.20777147408507227,
"learning_rate": 4.79559748427673e-06,
"loss": 0.2697,
"step": 1938
},
{
"epoch": 2.7392226148409895,
"grad_norm": 0.19026041806793298,
"learning_rate": 4.769392033542977e-06,
"loss": 0.2654,
"step": 1939
},
{
"epoch": 2.740636042402827,
"grad_norm": 0.2020291385224266,
"learning_rate": 4.743186582809224e-06,
"loss": 0.2554,
"step": 1940
},
{
"epoch": 2.7420494699646643,
"grad_norm": 0.2036368554567803,
"learning_rate": 4.716981132075472e-06,
"loss": 0.2562,
"step": 1941
},
{
"epoch": 2.7434628975265016,
"grad_norm": 0.20113266308214173,
"learning_rate": 4.69077568134172e-06,
"loss": 0.2655,
"step": 1942
},
{
"epoch": 2.744876325088339,
"grad_norm": 0.20897050504377154,
"learning_rate": 4.664570230607967e-06,
"loss": 0.2654,
"step": 1943
},
{
"epoch": 2.746289752650177,
"grad_norm": 0.2016510658561997,
"learning_rate": 4.638364779874214e-06,
"loss": 0.259,
"step": 1944
},
{
"epoch": 2.747703180212014,
"grad_norm": 0.19124264307610603,
"learning_rate": 4.612159329140462e-06,
"loss": 0.2485,
"step": 1945
},
{
"epoch": 2.7491166077738516,
"grad_norm": 0.2043271265913991,
"learning_rate": 4.585953878406709e-06,
"loss": 0.2479,
"step": 1946
},
{
"epoch": 2.750530035335689,
"grad_norm": 0.2164416969799556,
"learning_rate": 4.559748427672956e-06,
"loss": 0.2708,
"step": 1947
},
{
"epoch": 2.7519434628975263,
"grad_norm": 0.19103257007821284,
"learning_rate": 4.533542976939204e-06,
"loss": 0.268,
"step": 1948
},
{
"epoch": 2.753356890459364,
"grad_norm": 0.18924997326279927,
"learning_rate": 4.507337526205451e-06,
"loss": 0.2773,
"step": 1949
},
{
"epoch": 2.7547703180212015,
"grad_norm": 0.18799293233577408,
"learning_rate": 4.481132075471698e-06,
"loss": 0.2561,
"step": 1950
},
{
"epoch": 2.756183745583039,
"grad_norm": 0.2033901547137542,
"learning_rate": 4.454926624737946e-06,
"loss": 0.2614,
"step": 1951
},
{
"epoch": 2.7575971731448763,
"grad_norm": 0.2009991093062796,
"learning_rate": 4.428721174004193e-06,
"loss": 0.2759,
"step": 1952
},
{
"epoch": 2.7590106007067137,
"grad_norm": 0.2033246337462216,
"learning_rate": 4.40251572327044e-06,
"loss": 0.2852,
"step": 1953
},
{
"epoch": 2.7604240282685515,
"grad_norm": 0.1960184889302944,
"learning_rate": 4.376310272536688e-06,
"loss": 0.257,
"step": 1954
},
{
"epoch": 2.7618374558303884,
"grad_norm": 0.20893474627273104,
"learning_rate": 4.350104821802935e-06,
"loss": 0.2698,
"step": 1955
},
{
"epoch": 2.7632508833922262,
"grad_norm": 0.19749857207921542,
"learning_rate": 4.323899371069182e-06,
"loss": 0.2821,
"step": 1956
},
{
"epoch": 2.7646643109540636,
"grad_norm": 0.19556992623332792,
"learning_rate": 4.29769392033543e-06,
"loss": 0.2608,
"step": 1957
},
{
"epoch": 2.766077738515901,
"grad_norm": 0.18885708959578315,
"learning_rate": 4.271488469601677e-06,
"loss": 0.2551,
"step": 1958
},
{
"epoch": 2.7674911660777384,
"grad_norm": 0.20100595058938528,
"learning_rate": 4.245283018867925e-06,
"loss": 0.2674,
"step": 1959
},
{
"epoch": 2.7689045936395758,
"grad_norm": 0.18622826035547804,
"learning_rate": 4.2190775681341725e-06,
"loss": 0.253,
"step": 1960
},
{
"epoch": 2.7703180212014136,
"grad_norm": 0.2100710164979557,
"learning_rate": 4.19287211740042e-06,
"loss": 0.2862,
"step": 1961
},
{
"epoch": 2.771731448763251,
"grad_norm": 0.21624694792422516,
"learning_rate": 4.166666666666667e-06,
"loss": 0.2654,
"step": 1962
},
{
"epoch": 2.7731448763250883,
"grad_norm": 0.19214541303674557,
"learning_rate": 4.1404612159329145e-06,
"loss": 0.263,
"step": 1963
},
{
"epoch": 2.7745583038869257,
"grad_norm": 0.18750492058244628,
"learning_rate": 4.114255765199162e-06,
"loss": 0.2583,
"step": 1964
},
{
"epoch": 2.775971731448763,
"grad_norm": 0.19252162720488705,
"learning_rate": 4.088050314465409e-06,
"loss": 0.281,
"step": 1965
},
{
"epoch": 2.777385159010601,
"grad_norm": 0.19259456073777542,
"learning_rate": 4.0618448637316565e-06,
"loss": 0.2577,
"step": 1966
},
{
"epoch": 2.7787985865724383,
"grad_norm": 0.20902073796058573,
"learning_rate": 4.035639412997904e-06,
"loss": 0.2638,
"step": 1967
},
{
"epoch": 2.7802120141342757,
"grad_norm": 0.20265534056341541,
"learning_rate": 4.009433962264151e-06,
"loss": 0.2525,
"step": 1968
},
{
"epoch": 2.781625441696113,
"grad_norm": 0.19702857298061338,
"learning_rate": 3.9832285115303985e-06,
"loss": 0.2713,
"step": 1969
},
{
"epoch": 2.7830388692579504,
"grad_norm": 0.19365987462494205,
"learning_rate": 3.957023060796646e-06,
"loss": 0.2614,
"step": 1970
},
{
"epoch": 2.7844522968197882,
"grad_norm": 0.19852627372838846,
"learning_rate": 3.930817610062893e-06,
"loss": 0.2567,
"step": 1971
},
{
"epoch": 2.7858657243816256,
"grad_norm": 0.19420014280825842,
"learning_rate": 3.9046121593291405e-06,
"loss": 0.2384,
"step": 1972
},
{
"epoch": 2.787279151943463,
"grad_norm": 0.20726366724281955,
"learning_rate": 3.878406708595388e-06,
"loss": 0.2647,
"step": 1973
},
{
"epoch": 2.7886925795053004,
"grad_norm": 0.2026127864668435,
"learning_rate": 3.852201257861635e-06,
"loss": 0.2646,
"step": 1974
},
{
"epoch": 2.7901060070671377,
"grad_norm": 0.20093965835818348,
"learning_rate": 3.8259958071278825e-06,
"loss": 0.2821,
"step": 1975
},
{
"epoch": 2.791519434628975,
"grad_norm": 0.2126191991846697,
"learning_rate": 3.7997903563941306e-06,
"loss": 0.2709,
"step": 1976
},
{
"epoch": 2.7929328621908125,
"grad_norm": 0.19574913230082228,
"learning_rate": 3.7735849056603773e-06,
"loss": 0.265,
"step": 1977
},
{
"epoch": 2.7943462897526503,
"grad_norm": 0.20527277069994063,
"learning_rate": 3.747379454926625e-06,
"loss": 0.2534,
"step": 1978
},
{
"epoch": 2.7957597173144877,
"grad_norm": 0.19516004039011584,
"learning_rate": 3.7211740041928726e-06,
"loss": 0.2559,
"step": 1979
},
{
"epoch": 2.797173144876325,
"grad_norm": 0.1953784495991323,
"learning_rate": 3.6949685534591198e-06,
"loss": 0.2693,
"step": 1980
},
{
"epoch": 2.7985865724381624,
"grad_norm": 0.1972487176962256,
"learning_rate": 3.6687631027253674e-06,
"loss": 0.2577,
"step": 1981
},
{
"epoch": 2.8,
"grad_norm": 0.20332737861765957,
"learning_rate": 3.642557651991614e-06,
"loss": 0.2751,
"step": 1982
},
{
"epoch": 2.8014134275618376,
"grad_norm": 0.1839383400906213,
"learning_rate": 3.6163522012578618e-06,
"loss": 0.257,
"step": 1983
},
{
"epoch": 2.802826855123675,
"grad_norm": 0.20104824614991842,
"learning_rate": 3.5901467505241094e-06,
"loss": 0.2618,
"step": 1984
},
{
"epoch": 2.8042402826855124,
"grad_norm": 0.22142050684233686,
"learning_rate": 3.563941299790356e-06,
"loss": 0.2651,
"step": 1985
},
{
"epoch": 2.8056537102473498,
"grad_norm": 0.19900474947948332,
"learning_rate": 3.5377358490566038e-06,
"loss": 0.2604,
"step": 1986
},
{
"epoch": 2.807067137809187,
"grad_norm": 0.19685095019146118,
"learning_rate": 3.5115303983228514e-06,
"loss": 0.272,
"step": 1987
},
{
"epoch": 2.808480565371025,
"grad_norm": 0.20618451153267317,
"learning_rate": 3.4853249475890986e-06,
"loss": 0.2719,
"step": 1988
},
{
"epoch": 2.8098939929328623,
"grad_norm": 0.18829408776299097,
"learning_rate": 3.459119496855346e-06,
"loss": 0.2703,
"step": 1989
},
{
"epoch": 2.8113074204946997,
"grad_norm": 0.19147926645963975,
"learning_rate": 3.432914046121594e-06,
"loss": 0.2616,
"step": 1990
},
{
"epoch": 2.812720848056537,
"grad_norm": 0.1904258926759524,
"learning_rate": 3.4067085953878406e-06,
"loss": 0.2523,
"step": 1991
},
{
"epoch": 2.8141342756183745,
"grad_norm": 0.2014474907028523,
"learning_rate": 3.380503144654088e-06,
"loss": 0.2648,
"step": 1992
},
{
"epoch": 2.815547703180212,
"grad_norm": 0.2010831890859877,
"learning_rate": 3.354297693920336e-06,
"loss": 0.2593,
"step": 1993
},
{
"epoch": 2.8169611307420492,
"grad_norm": 0.18910761397048476,
"learning_rate": 3.3280922431865826e-06,
"loss": 0.2499,
"step": 1994
},
{
"epoch": 2.818374558303887,
"grad_norm": 0.1897251062446274,
"learning_rate": 3.30188679245283e-06,
"loss": 0.2423,
"step": 1995
},
{
"epoch": 2.8197879858657244,
"grad_norm": 0.19230888468540824,
"learning_rate": 3.275681341719078e-06,
"loss": 0.2606,
"step": 1996
},
{
"epoch": 2.821201413427562,
"grad_norm": 0.19624887264402938,
"learning_rate": 3.249475890985325e-06,
"loss": 0.2592,
"step": 1997
},
{
"epoch": 2.822614840989399,
"grad_norm": 0.19809845880041374,
"learning_rate": 3.2232704402515726e-06,
"loss": 0.2545,
"step": 1998
},
{
"epoch": 2.8240282685512366,
"grad_norm": 0.18577075492900788,
"learning_rate": 3.1970649895178203e-06,
"loss": 0.2543,
"step": 1999
},
{
"epoch": 2.8254416961130744,
"grad_norm": 0.18186020383409718,
"learning_rate": 3.170859538784067e-06,
"loss": 0.2679,
"step": 2000
},
{
"epoch": 2.8268551236749118,
"grad_norm": 0.18549174402283922,
"learning_rate": 3.1446540880503146e-06,
"loss": 0.2664,
"step": 2001
},
{
"epoch": 2.828268551236749,
"grad_norm": 0.1929206742509979,
"learning_rate": 3.118448637316562e-06,
"loss": 0.2649,
"step": 2002
},
{
"epoch": 2.8296819787985865,
"grad_norm": 0.19860339243507685,
"learning_rate": 3.0922431865828095e-06,
"loss": 0.2713,
"step": 2003
},
{
"epoch": 2.831095406360424,
"grad_norm": 0.40202133842109994,
"learning_rate": 3.0660377358490567e-06,
"loss": 0.2838,
"step": 2004
},
{
"epoch": 2.8325088339222617,
"grad_norm": 0.19126582287162652,
"learning_rate": 3.039832285115304e-06,
"loss": 0.2579,
"step": 2005
},
{
"epoch": 2.833922261484099,
"grad_norm": 0.18142757561847356,
"learning_rate": 3.0136268343815515e-06,
"loss": 0.2545,
"step": 2006
},
{
"epoch": 2.8353356890459365,
"grad_norm": 0.19595620456024404,
"learning_rate": 2.987421383647799e-06,
"loss": 0.2696,
"step": 2007
},
{
"epoch": 2.836749116607774,
"grad_norm": 0.1937722028429237,
"learning_rate": 2.9612159329140463e-06,
"loss": 0.2429,
"step": 2008
},
{
"epoch": 2.8381625441696112,
"grad_norm": 0.19582262057664096,
"learning_rate": 2.935010482180294e-06,
"loss": 0.2532,
"step": 2009
},
{
"epoch": 2.8395759717314486,
"grad_norm": 0.19037191439408682,
"learning_rate": 2.908805031446541e-06,
"loss": 0.2711,
"step": 2010
},
{
"epoch": 2.840989399293286,
"grad_norm": 0.212821667604591,
"learning_rate": 2.8825995807127883e-06,
"loss": 0.2724,
"step": 2011
},
{
"epoch": 2.842402826855124,
"grad_norm": 0.20593987654682933,
"learning_rate": 2.8563941299790355e-06,
"loss": 0.2741,
"step": 2012
},
{
"epoch": 2.843816254416961,
"grad_norm": 0.19478999741430214,
"learning_rate": 2.830188679245283e-06,
"loss": 0.2509,
"step": 2013
},
{
"epoch": 2.8452296819787986,
"grad_norm": 0.19586587423540036,
"learning_rate": 2.8039832285115303e-06,
"loss": 0.2626,
"step": 2014
},
{
"epoch": 2.846643109540636,
"grad_norm": 0.19486845761362925,
"learning_rate": 2.777777777777778e-06,
"loss": 0.2574,
"step": 2015
},
{
"epoch": 2.8480565371024733,
"grad_norm": 0.20775944084738007,
"learning_rate": 2.7515723270440255e-06,
"loss": 0.2807,
"step": 2016
},
{
"epoch": 2.849469964664311,
"grad_norm": 0.20298539230380555,
"learning_rate": 2.7253668763102727e-06,
"loss": 0.2691,
"step": 2017
},
{
"epoch": 2.8508833922261485,
"grad_norm": 0.18489732448898485,
"learning_rate": 2.69916142557652e-06,
"loss": 0.2539,
"step": 2018
},
{
"epoch": 2.852296819787986,
"grad_norm": 0.1910424811989875,
"learning_rate": 2.6729559748427675e-06,
"loss": 0.2605,
"step": 2019
},
{
"epoch": 2.8537102473498233,
"grad_norm": 0.20344024343013004,
"learning_rate": 2.6467505241090147e-06,
"loss": 0.2589,
"step": 2020
},
{
"epoch": 2.8551236749116606,
"grad_norm": 0.1970629919336399,
"learning_rate": 2.620545073375262e-06,
"loss": 0.2576,
"step": 2021
},
{
"epoch": 2.8565371024734985,
"grad_norm": 0.19651554782678599,
"learning_rate": 2.5943396226415095e-06,
"loss": 0.2799,
"step": 2022
},
{
"epoch": 2.857950530035336,
"grad_norm": 0.20827447906640198,
"learning_rate": 2.5681341719077567e-06,
"loss": 0.283,
"step": 2023
},
{
"epoch": 2.859363957597173,
"grad_norm": 0.20139685006077426,
"learning_rate": 2.5419287211740044e-06,
"loss": 0.2866,
"step": 2024
},
{
"epoch": 2.8607773851590106,
"grad_norm": 0.1834284434188632,
"learning_rate": 2.515723270440252e-06,
"loss": 0.247,
"step": 2025
},
{
"epoch": 2.862190812720848,
"grad_norm": 0.19127316243243989,
"learning_rate": 2.489517819706499e-06,
"loss": 0.2609,
"step": 2026
},
{
"epoch": 2.863604240282686,
"grad_norm": 0.21898396746987633,
"learning_rate": 2.4633123689727464e-06,
"loss": 0.2694,
"step": 2027
},
{
"epoch": 2.8650176678445227,
"grad_norm": 0.18257697664817718,
"learning_rate": 2.437106918238994e-06,
"loss": 0.2735,
"step": 2028
},
{
"epoch": 2.8664310954063605,
"grad_norm": 0.17868569722706218,
"learning_rate": 2.410901467505241e-06,
"loss": 0.2472,
"step": 2029
},
{
"epoch": 2.867844522968198,
"grad_norm": 0.1962951038805954,
"learning_rate": 2.3846960167714884e-06,
"loss": 0.2666,
"step": 2030
},
{
"epoch": 2.8692579505300353,
"grad_norm": 0.19412021604526272,
"learning_rate": 2.358490566037736e-06,
"loss": 0.2656,
"step": 2031
},
{
"epoch": 2.8706713780918727,
"grad_norm": 0.18325364359670562,
"learning_rate": 2.3322851153039836e-06,
"loss": 0.2614,
"step": 2032
},
{
"epoch": 2.87208480565371,
"grad_norm": 0.19396832292865296,
"learning_rate": 2.306079664570231e-06,
"loss": 0.2727,
"step": 2033
},
{
"epoch": 2.873498233215548,
"grad_norm": 0.19390679044718392,
"learning_rate": 2.279874213836478e-06,
"loss": 0.2651,
"step": 2034
},
{
"epoch": 2.8749116607773852,
"grad_norm": 0.1851533548661732,
"learning_rate": 2.2536687631027256e-06,
"loss": 0.2658,
"step": 2035
},
{
"epoch": 2.8763250883392226,
"grad_norm": 0.18822098380601565,
"learning_rate": 2.227463312368973e-06,
"loss": 0.281,
"step": 2036
},
{
"epoch": 2.87773851590106,
"grad_norm": 0.1840985342924395,
"learning_rate": 2.20125786163522e-06,
"loss": 0.2643,
"step": 2037
},
{
"epoch": 2.8791519434628974,
"grad_norm": 0.20000456583301818,
"learning_rate": 2.1750524109014676e-06,
"loss": 0.2707,
"step": 2038
},
{
"epoch": 2.880565371024735,
"grad_norm": 0.18211998109441505,
"learning_rate": 2.148846960167715e-06,
"loss": 0.25,
"step": 2039
},
{
"epoch": 2.8819787985865726,
"grad_norm": 0.2148826537072035,
"learning_rate": 2.1226415094339624e-06,
"loss": 0.2719,
"step": 2040
},
{
"epoch": 2.88339222614841,
"grad_norm": 0.17943958041453964,
"learning_rate": 2.09643605870021e-06,
"loss": 0.2584,
"step": 2041
},
{
"epoch": 2.8848056537102473,
"grad_norm": 0.18045324741166588,
"learning_rate": 2.0702306079664572e-06,
"loss": 0.2614,
"step": 2042
},
{
"epoch": 2.8862190812720847,
"grad_norm": 0.19402945130037488,
"learning_rate": 2.0440251572327044e-06,
"loss": 0.2596,
"step": 2043
},
{
"epoch": 2.8876325088339225,
"grad_norm": 0.18283370514350436,
"learning_rate": 2.017819706498952e-06,
"loss": 0.2615,
"step": 2044
},
{
"epoch": 2.8890459363957595,
"grad_norm": 0.19071781807915697,
"learning_rate": 1.9916142557651992e-06,
"loss": 0.2657,
"step": 2045
},
{
"epoch": 2.8904593639575973,
"grad_norm": 0.20268241368258638,
"learning_rate": 1.9654088050314464e-06,
"loss": 0.2631,
"step": 2046
},
{
"epoch": 2.8918727915194347,
"grad_norm": 0.17987086070018066,
"learning_rate": 1.939203354297694e-06,
"loss": 0.2537,
"step": 2047
},
{
"epoch": 2.893286219081272,
"grad_norm": 0.18635892226535067,
"learning_rate": 1.9129979035639413e-06,
"loss": 0.2622,
"step": 2048
},
{
"epoch": 2.8946996466431094,
"grad_norm": 0.1897729442367103,
"learning_rate": 1.8867924528301887e-06,
"loss": 0.2643,
"step": 2049
},
{
"epoch": 2.896113074204947,
"grad_norm": 0.2003388719400763,
"learning_rate": 1.8605870020964363e-06,
"loss": 0.2609,
"step": 2050
},
{
"epoch": 2.8975265017667846,
"grad_norm": 0.2150326814802859,
"learning_rate": 1.8343815513626837e-06,
"loss": 0.2745,
"step": 2051
},
{
"epoch": 2.898939929328622,
"grad_norm": 0.18618171778152465,
"learning_rate": 1.8081761006289309e-06,
"loss": 0.2684,
"step": 2052
},
{
"epoch": 2.9003533568904594,
"grad_norm": 0.188879908148273,
"learning_rate": 1.781970649895178e-06,
"loss": 0.2584,
"step": 2053
},
{
"epoch": 2.9017667844522967,
"grad_norm": 0.18570695178194174,
"learning_rate": 1.7557651991614257e-06,
"loss": 0.2598,
"step": 2054
},
{
"epoch": 2.903180212014134,
"grad_norm": 0.19205130106298623,
"learning_rate": 1.729559748427673e-06,
"loss": 0.268,
"step": 2055
},
{
"epoch": 2.904593639575972,
"grad_norm": 0.19017267621003683,
"learning_rate": 1.7033542976939203e-06,
"loss": 0.2626,
"step": 2056
},
{
"epoch": 2.9060070671378093,
"grad_norm": 0.19235724434203175,
"learning_rate": 1.677148846960168e-06,
"loss": 0.2549,
"step": 2057
},
{
"epoch": 2.9074204946996467,
"grad_norm": 0.17784544132982397,
"learning_rate": 1.650943396226415e-06,
"loss": 0.2592,
"step": 2058
},
{
"epoch": 2.908833922261484,
"grad_norm": 0.17706036607654418,
"learning_rate": 1.6247379454926625e-06,
"loss": 0.255,
"step": 2059
},
{
"epoch": 2.9102473498233214,
"grad_norm": 0.19294234552286246,
"learning_rate": 1.5985324947589101e-06,
"loss": 0.2588,
"step": 2060
},
{
"epoch": 2.9116607773851593,
"grad_norm": 0.1841755730061229,
"learning_rate": 1.5723270440251573e-06,
"loss": 0.2616,
"step": 2061
},
{
"epoch": 2.913074204946996,
"grad_norm": 0.19733468693986114,
"learning_rate": 1.5461215932914047e-06,
"loss": 0.2618,
"step": 2062
},
{
"epoch": 2.914487632508834,
"grad_norm": 0.18921096360037726,
"learning_rate": 1.519916142557652e-06,
"loss": 0.2741,
"step": 2063
},
{
"epoch": 2.9159010600706714,
"grad_norm": 0.17481184982623868,
"learning_rate": 1.4937106918238995e-06,
"loss": 0.2476,
"step": 2064
},
{
"epoch": 2.917314487632509,
"grad_norm": 0.1867008324580803,
"learning_rate": 1.467505241090147e-06,
"loss": 0.2693,
"step": 2065
},
{
"epoch": 2.918727915194346,
"grad_norm": 0.17989212999594018,
"learning_rate": 1.4412997903563941e-06,
"loss": 0.2554,
"step": 2066
},
{
"epoch": 2.9201413427561835,
"grad_norm": 0.19575675483240412,
"learning_rate": 1.4150943396226415e-06,
"loss": 0.2726,
"step": 2067
},
{
"epoch": 2.9215547703180214,
"grad_norm": 0.2057998177338158,
"learning_rate": 1.388888888888889e-06,
"loss": 0.2868,
"step": 2068
},
{
"epoch": 2.9229681978798587,
"grad_norm": 0.18266422872996527,
"learning_rate": 1.3626834381551364e-06,
"loss": 0.2535,
"step": 2069
},
{
"epoch": 2.924381625441696,
"grad_norm": 0.17783312089992542,
"learning_rate": 1.3364779874213838e-06,
"loss": 0.2613,
"step": 2070
},
{
"epoch": 2.9257950530035335,
"grad_norm": 0.20397958766020902,
"learning_rate": 1.310272536687631e-06,
"loss": 0.262,
"step": 2071
},
{
"epoch": 2.927208480565371,
"grad_norm": 0.18369496083750017,
"learning_rate": 1.2840670859538784e-06,
"loss": 0.2681,
"step": 2072
},
{
"epoch": 2.9286219081272087,
"grad_norm": 0.21791290196265936,
"learning_rate": 1.257861635220126e-06,
"loss": 0.2732,
"step": 2073
},
{
"epoch": 2.930035335689046,
"grad_norm": 0.19114829604287598,
"learning_rate": 1.2316561844863732e-06,
"loss": 0.2676,
"step": 2074
},
{
"epoch": 2.9314487632508834,
"grad_norm": 0.18789597383617893,
"learning_rate": 1.2054507337526206e-06,
"loss": 0.263,
"step": 2075
},
{
"epoch": 2.932862190812721,
"grad_norm": 0.1797811429247796,
"learning_rate": 1.179245283018868e-06,
"loss": 0.2706,
"step": 2076
},
{
"epoch": 2.934275618374558,
"grad_norm": 0.20492554092040738,
"learning_rate": 1.1530398322851154e-06,
"loss": 0.2652,
"step": 2077
},
{
"epoch": 2.935689045936396,
"grad_norm": 0.19385700363568287,
"learning_rate": 1.1268343815513628e-06,
"loss": 0.2831,
"step": 2078
},
{
"epoch": 2.937102473498233,
"grad_norm": 0.1978777530615443,
"learning_rate": 1.10062893081761e-06,
"loss": 0.2805,
"step": 2079
},
{
"epoch": 2.9385159010600708,
"grad_norm": 0.18375830696928333,
"learning_rate": 1.0744234800838574e-06,
"loss": 0.251,
"step": 2080
},
{
"epoch": 2.939929328621908,
"grad_norm": 0.18937103151806278,
"learning_rate": 1.048218029350105e-06,
"loss": 0.2793,
"step": 2081
},
{
"epoch": 2.9413427561837455,
"grad_norm": 0.18353284424300498,
"learning_rate": 1.0220125786163522e-06,
"loss": 0.2688,
"step": 2082
},
{
"epoch": 2.942756183745583,
"grad_norm": 0.18869531533502673,
"learning_rate": 9.958071278825996e-07,
"loss": 0.2588,
"step": 2083
},
{
"epoch": 2.9441696113074203,
"grad_norm": 0.1843300989096968,
"learning_rate": 9.69601677148847e-07,
"loss": 0.258,
"step": 2084
},
{
"epoch": 2.945583038869258,
"grad_norm": 0.1723677595940564,
"learning_rate": 9.433962264150943e-07,
"loss": 0.2513,
"step": 2085
},
{
"epoch": 2.9469964664310955,
"grad_norm": 0.1943974749517005,
"learning_rate": 9.171907756813418e-07,
"loss": 0.2604,
"step": 2086
},
{
"epoch": 2.948409893992933,
"grad_norm": 0.20274972402727273,
"learning_rate": 8.90985324947589e-07,
"loss": 0.2679,
"step": 2087
},
{
"epoch": 2.9498233215547702,
"grad_norm": 0.1928414438102834,
"learning_rate": 8.647798742138365e-07,
"loss": 0.2739,
"step": 2088
},
{
"epoch": 2.9512367491166076,
"grad_norm": 0.1851537598002352,
"learning_rate": 8.38574423480084e-07,
"loss": 0.2645,
"step": 2089
},
{
"epoch": 2.9526501766784454,
"grad_norm": 0.1978548415086648,
"learning_rate": 8.123689727463313e-07,
"loss": 0.27,
"step": 2090
},
{
"epoch": 2.954063604240283,
"grad_norm": 0.19178742907166327,
"learning_rate": 7.861635220125787e-07,
"loss": 0.2703,
"step": 2091
},
{
"epoch": 2.95547703180212,
"grad_norm": 0.19113915466569348,
"learning_rate": 7.59958071278826e-07,
"loss": 0.2632,
"step": 2092
},
{
"epoch": 2.9568904593639576,
"grad_norm": 0.20160551285309256,
"learning_rate": 7.337526205450735e-07,
"loss": 0.2799,
"step": 2093
},
{
"epoch": 2.958303886925795,
"grad_norm": 0.20015020701224018,
"learning_rate": 7.075471698113208e-07,
"loss": 0.2661,
"step": 2094
},
{
"epoch": 2.9597173144876328,
"grad_norm": 0.18369661990088462,
"learning_rate": 6.813417190775682e-07,
"loss": 0.2785,
"step": 2095
},
{
"epoch": 2.9611307420494697,
"grad_norm": 0.184023084046079,
"learning_rate": 6.551362683438155e-07,
"loss": 0.2735,
"step": 2096
},
{
"epoch": 2.9625441696113075,
"grad_norm": 0.18406227305956177,
"learning_rate": 6.28930817610063e-07,
"loss": 0.2634,
"step": 2097
},
{
"epoch": 2.963957597173145,
"grad_norm": 0.18502727981270534,
"learning_rate": 6.027253668763103e-07,
"loss": 0.2626,
"step": 2098
},
{
"epoch": 2.9653710247349823,
"grad_norm": 0.18870096460256883,
"learning_rate": 5.765199161425577e-07,
"loss": 0.268,
"step": 2099
},
{
"epoch": 2.9667844522968196,
"grad_norm": 0.1948844260686584,
"learning_rate": 5.50314465408805e-07,
"loss": 0.2624,
"step": 2100
},
{
"epoch": 2.968197879858657,
"grad_norm": 0.18925167057706413,
"learning_rate": 5.241090146750525e-07,
"loss": 0.2607,
"step": 2101
},
{
"epoch": 2.969611307420495,
"grad_norm": 0.18820367372409674,
"learning_rate": 4.979035639412998e-07,
"loss": 0.268,
"step": 2102
},
{
"epoch": 2.971024734982332,
"grad_norm": 0.19177922432375683,
"learning_rate": 4.7169811320754717e-07,
"loss": 0.2729,
"step": 2103
},
{
"epoch": 2.9724381625441696,
"grad_norm": 0.18224912448893008,
"learning_rate": 4.454926624737945e-07,
"loss": 0.2719,
"step": 2104
},
{
"epoch": 2.973851590106007,
"grad_norm": 0.18298724199157484,
"learning_rate": 4.19287211740042e-07,
"loss": 0.2543,
"step": 2105
},
{
"epoch": 2.9752650176678443,
"grad_norm": 0.18540754874541907,
"learning_rate": 3.9308176100628933e-07,
"loss": 0.2678,
"step": 2106
},
{
"epoch": 2.976678445229682,
"grad_norm": 0.17875864123219445,
"learning_rate": 3.6687631027253674e-07,
"loss": 0.251,
"step": 2107
},
{
"epoch": 2.9780918727915195,
"grad_norm": 0.17886169386634168,
"learning_rate": 3.406708595387841e-07,
"loss": 0.2525,
"step": 2108
},
{
"epoch": 2.979505300353357,
"grad_norm": 0.189954060660477,
"learning_rate": 3.144654088050315e-07,
"loss": 0.2768,
"step": 2109
},
{
"epoch": 2.9809187279151943,
"grad_norm": 0.17528440442759247,
"learning_rate": 2.8825995807127885e-07,
"loss": 0.2551,
"step": 2110
},
{
"epoch": 2.9823321554770317,
"grad_norm": 0.1875563983060983,
"learning_rate": 2.6205450733752626e-07,
"loss": 0.2774,
"step": 2111
},
{
"epoch": 2.9837455830388695,
"grad_norm": 0.18867350915591768,
"learning_rate": 2.3584905660377358e-07,
"loss": 0.2793,
"step": 2112
},
{
"epoch": 2.985159010600707,
"grad_norm": 0.17648699192275705,
"learning_rate": 2.09643605870021e-07,
"loss": 0.2589,
"step": 2113
},
{
"epoch": 2.9865724381625443,
"grad_norm": 0.18157847966594196,
"learning_rate": 1.8343815513626837e-07,
"loss": 0.2474,
"step": 2114
},
{
"epoch": 2.9879858657243816,
"grad_norm": 0.18968751764556246,
"learning_rate": 1.5723270440251575e-07,
"loss": 0.2727,
"step": 2115
},
{
"epoch": 2.989399293286219,
"grad_norm": 0.18750276826284276,
"learning_rate": 1.3102725366876313e-07,
"loss": 0.2715,
"step": 2116
},
{
"epoch": 2.9908127208480564,
"grad_norm": 0.17719662700140035,
"learning_rate": 1.048218029350105e-07,
"loss": 0.2478,
"step": 2117
},
{
"epoch": 2.9922261484098938,
"grad_norm": 0.1855754506387633,
"learning_rate": 7.861635220125787e-08,
"loss": 0.2708,
"step": 2118
},
{
"epoch": 2.9936395759717316,
"grad_norm": 0.18124155972304776,
"learning_rate": 5.241090146750525e-08,
"loss": 0.2568,
"step": 2119
},
{
"epoch": 2.995053003533569,
"grad_norm": 0.18604015539805957,
"learning_rate": 2.6205450733752624e-08,
"loss": 0.2633,
"step": 2120
},
{
"epoch": 2.9964664310954063,
"grad_norm": 0.20215878920416785,
"learning_rate": 0.0,
"loss": 0.2646,
"step": 2121
},
{
"epoch": 2.9964664310954063,
"step": 2121,
"total_flos": 1.8119240085227438e+18,
"train_loss": 0.4189752566398511,
"train_runtime": 123070.2087,
"train_samples_per_second": 0.276,
"train_steps_per_second": 0.017
}
],
"logging_steps": 1,
"max_steps": 2121,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 1.8119240085227438e+18,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}