{
"best_global_step": 40000,
"best_metric": 0.983751846381093,
"best_model_checkpoint": "./outputs/checkpoint-40000",
"epoch": 2.3806802964021068,
"eval_steps": 5000,
"global_step": 40000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.005951849537243699,
"grad_norm": 14.474434852600098,
"learning_rate": 4.95e-07,
"loss": 1.738,
"step": 100
},
{
"epoch": 0.011903699074487397,
"grad_norm": 8.228065490722656,
"learning_rate": 9.950000000000002e-07,
"loss": 0.9352,
"step": 200
},
{
"epoch": 0.017855548611731095,
"grad_norm": 11.45659351348877,
"learning_rate": 1.495e-06,
"loss": 0.6597,
"step": 300
},
{
"epoch": 0.023807398148974795,
"grad_norm": 14.828703880310059,
"learning_rate": 1.9950000000000004e-06,
"loss": 0.5121,
"step": 400
},
{
"epoch": 0.02975924768621849,
"grad_norm": 3.7912120819091797,
"learning_rate": 2.4950000000000003e-06,
"loss": 0.3892,
"step": 500
},
{
"epoch": 0.03571109722346219,
"grad_norm": 312.57928466796875,
"learning_rate": 2.995e-06,
"loss": 0.3349,
"step": 600
},
{
"epoch": 0.041662946760705886,
"grad_norm": 10.562920570373535,
"learning_rate": 3.495e-06,
"loss": 0.3293,
"step": 700
},
{
"epoch": 0.04761479629794959,
"grad_norm": 21.415306091308594,
"learning_rate": 3.995000000000001e-06,
"loss": 0.286,
"step": 800
},
{
"epoch": 0.053566645835193286,
"grad_norm": 5.152000904083252,
"learning_rate": 4.495e-06,
"loss": 0.2326,
"step": 900
},
{
"epoch": 0.05951849537243698,
"grad_norm": 12.22340202331543,
"learning_rate": 4.9950000000000005e-06,
"loss": 0.2202,
"step": 1000
},
{
"epoch": 0.06547034490968068,
"grad_norm": 25.30424690246582,
"learning_rate": 5.495000000000001e-06,
"loss": 0.245,
"step": 1100
},
{
"epoch": 0.07142219444692438,
"grad_norm": 15.469476699829102,
"learning_rate": 5.995000000000001e-06,
"loss": 0.2306,
"step": 1200
},
{
"epoch": 0.07737404398416808,
"grad_norm": 7.698344707489014,
"learning_rate": 6.4950000000000005e-06,
"loss": 0.1827,
"step": 1300
},
{
"epoch": 0.08332589352141177,
"grad_norm": 1.8406133651733398,
"learning_rate": 6.995000000000001e-06,
"loss": 0.2019,
"step": 1400
},
{
"epoch": 0.08927774305865548,
"grad_norm": 7.113662242889404,
"learning_rate": 7.495000000000001e-06,
"loss": 0.1878,
"step": 1500
},
{
"epoch": 0.09522959259589918,
"grad_norm": 60.53151321411133,
"learning_rate": 7.995e-06,
"loss": 0.2029,
"step": 1600
},
{
"epoch": 0.10118144213314287,
"grad_norm": 9.212539672851562,
"learning_rate": 8.495e-06,
"loss": 0.169,
"step": 1700
},
{
"epoch": 0.10713329167038657,
"grad_norm": 4.615962028503418,
"learning_rate": 8.995000000000001e-06,
"loss": 0.2175,
"step": 1800
},
{
"epoch": 0.11308514120763027,
"grad_norm": 18.574737548828125,
"learning_rate": 9.495000000000001e-06,
"loss": 0.1756,
"step": 1900
},
{
"epoch": 0.11903699074487396,
"grad_norm": 1.425147294998169,
"learning_rate": 9.995000000000002e-06,
"loss": 0.143,
"step": 2000
},
{
"epoch": 0.12498884028211767,
"grad_norm": 0.27230679988861084,
"learning_rate": 1.0495000000000002e-05,
"loss": 0.1585,
"step": 2100
},
{
"epoch": 0.13094068981936136,
"grad_norm": 1.6913175582885742,
"learning_rate": 1.0995e-05,
"loss": 0.1642,
"step": 2200
},
{
"epoch": 0.13689253935660506,
"grad_norm": 3.160975456237793,
"learning_rate": 1.1495000000000001e-05,
"loss": 0.1311,
"step": 2300
},
{
"epoch": 0.14284438889384876,
"grad_norm": 4.693823337554932,
"learning_rate": 1.1995000000000001e-05,
"loss": 0.1641,
"step": 2400
},
{
"epoch": 0.14879623843109246,
"grad_norm": 0.5840986371040344,
"learning_rate": 1.2495000000000001e-05,
"loss": 0.1653,
"step": 2500
},
{
"epoch": 0.15474808796833617,
"grad_norm": 1.879236102104187,
"learning_rate": 1.2995000000000002e-05,
"loss": 0.1382,
"step": 2600
},
{
"epoch": 0.16069993750557987,
"grad_norm": 0.4364721477031708,
"learning_rate": 1.3495e-05,
"loss": 0.1754,
"step": 2700
},
{
"epoch": 0.16665178704282355,
"grad_norm": 3.1517608165740967,
"learning_rate": 1.3995e-05,
"loss": 0.1523,
"step": 2800
},
{
"epoch": 0.17260363658006725,
"grad_norm": 1.0098434686660767,
"learning_rate": 1.4495000000000001e-05,
"loss": 0.158,
"step": 2900
},
{
"epoch": 0.17855548611731095,
"grad_norm": 11.904523849487305,
"learning_rate": 1.4995000000000001e-05,
"loss": 0.1262,
"step": 3000
},
{
"epoch": 0.18450733565455466,
"grad_norm": 7.423610687255859,
"learning_rate": 1.5495000000000003e-05,
"loss": 0.156,
"step": 3100
},
{
"epoch": 0.19045918519179836,
"grad_norm": 1.702813982963562,
"learning_rate": 1.5995000000000002e-05,
"loss": 0.1257,
"step": 3200
},
{
"epoch": 0.19641103472904206,
"grad_norm": 5.983730316162109,
"learning_rate": 1.6495e-05,
"loss": 0.1414,
"step": 3300
},
{
"epoch": 0.20236288426628574,
"grad_norm": 0.07421358674764633,
"learning_rate": 1.6995000000000002e-05,
"loss": 0.1159,
"step": 3400
},
{
"epoch": 0.20831473380352944,
"grad_norm": 9.500596046447754,
"learning_rate": 1.7495e-05,
"loss": 0.1362,
"step": 3500
},
{
"epoch": 0.21426658334077314,
"grad_norm": 2.0403008460998535,
"learning_rate": 1.7995000000000003e-05,
"loss": 0.1183,
"step": 3600
},
{
"epoch": 0.22021843287801685,
"grad_norm": 19.424882888793945,
"learning_rate": 1.8495e-05,
"loss": 0.1235,
"step": 3700
},
{
"epoch": 0.22617028241526055,
"grad_norm": 22.792057037353516,
"learning_rate": 1.8995e-05,
"loss": 0.154,
"step": 3800
},
{
"epoch": 0.23212213195250425,
"grad_norm": 7.459188938140869,
"learning_rate": 1.9495000000000002e-05,
"loss": 0.1506,
"step": 3900
},
{
"epoch": 0.23807398148974793,
"grad_norm": 3.574876308441162,
"learning_rate": 1.9995e-05,
"loss": 0.1039,
"step": 4000
},
{
"epoch": 0.24402583102699163,
"grad_norm": 14.251306533813477,
"learning_rate": 1.9975253093363332e-05,
"loss": 0.1174,
"step": 4100
},
{
"epoch": 0.24997768056423533,
"grad_norm": 6.622380256652832,
"learning_rate": 1.9950256217972756e-05,
"loss": 0.1266,
"step": 4200
},
{
"epoch": 0.255929530101479,
"grad_norm": 56.632022857666016,
"learning_rate": 1.992525934258218e-05,
"loss": 0.1186,
"step": 4300
},
{
"epoch": 0.2618813796387227,
"grad_norm": 29.00509262084961,
"learning_rate": 1.9900262467191602e-05,
"loss": 0.1225,
"step": 4400
},
{
"epoch": 0.2678332291759664,
"grad_norm": 2.0864224433898926,
"learning_rate": 1.9875265591801026e-05,
"loss": 0.1123,
"step": 4500
},
{
"epoch": 0.2737850787132101,
"grad_norm": 0.5069118142127991,
"learning_rate": 1.985026871641045e-05,
"loss": 0.1047,
"step": 4600
},
{
"epoch": 0.2797369282504538,
"grad_norm": 9.549971580505371,
"learning_rate": 1.9825271841019876e-05,
"loss": 0.104,
"step": 4700
},
{
"epoch": 0.2856887777876975,
"grad_norm": 9.610527992248535,
"learning_rate": 1.98002749656293e-05,
"loss": 0.1267,
"step": 4800
},
{
"epoch": 0.2916406273249412,
"grad_norm": 1.1918669939041138,
"learning_rate": 1.977527809023872e-05,
"loss": 0.1135,
"step": 4900
},
{
"epoch": 0.29759247686218493,
"grad_norm": 4.561688423156738,
"learning_rate": 1.9750281214848146e-05,
"loss": 0.0959,
"step": 5000
},
{
"epoch": 0.29759247686218493,
"eval_accuracy": 0.9856438825798157,
"eval_f1": 0.9667512130569034,
"eval_loss": 0.050987500697374344,
"eval_precision": 0.9792783735478106,
"eval_recall": 0.9545405052264808,
"eval_runtime": 716.3292,
"eval_samples_per_second": 117.273,
"eval_steps_per_second": 3.666,
"step": 5000
},
{
"epoch": 0.30354432639942863,
"grad_norm": 21.709821701049805,
"learning_rate": 1.972528433945757e-05,
"loss": 0.1186,
"step": 5100
},
{
"epoch": 0.30949617593667234,
"grad_norm": 4.825197219848633,
"learning_rate": 1.9700287464066993e-05,
"loss": 0.1197,
"step": 5200
},
{
"epoch": 0.31544802547391604,
"grad_norm": 0.676392138004303,
"learning_rate": 1.9675290588676416e-05,
"loss": 0.1138,
"step": 5300
},
{
"epoch": 0.32139987501115974,
"grad_norm": 3.394355297088623,
"learning_rate": 1.9650293713285843e-05,
"loss": 0.1044,
"step": 5400
},
{
"epoch": 0.3273517245484034,
"grad_norm": 0.6152952909469604,
"learning_rate": 1.9625296837895263e-05,
"loss": 0.0888,
"step": 5500
},
{
"epoch": 0.3333035740856471,
"grad_norm": 3.3491861820220947,
"learning_rate": 1.9600299962504687e-05,
"loss": 0.0983,
"step": 5600
},
{
"epoch": 0.3392554236228908,
"grad_norm": 0.6051576733589172,
"learning_rate": 1.9575303087114113e-05,
"loss": 0.1086,
"step": 5700
},
{
"epoch": 0.3452072731601345,
"grad_norm": 0.05122395604848862,
"learning_rate": 1.9550306211723537e-05,
"loss": 0.0887,
"step": 5800
},
{
"epoch": 0.3511591226973782,
"grad_norm": 1.614094853401184,
"learning_rate": 1.952530933633296e-05,
"loss": 0.1085,
"step": 5900
},
{
"epoch": 0.3571109722346219,
"grad_norm": 0.3445202112197876,
"learning_rate": 1.9500312460942384e-05,
"loss": 0.0806,
"step": 6000
},
{
"epoch": 0.3630628217718656,
"grad_norm": 0.30529269576072693,
"learning_rate": 1.9475315585551807e-05,
"loss": 0.0717,
"step": 6100
},
{
"epoch": 0.3690146713091093,
"grad_norm": 6.663509368896484,
"learning_rate": 1.945031871016123e-05,
"loss": 0.0943,
"step": 6200
},
{
"epoch": 0.374966520846353,
"grad_norm": 53.11376953125,
"learning_rate": 1.9425321834770657e-05,
"loss": 0.1148,
"step": 6300
},
{
"epoch": 0.3809183703835967,
"grad_norm": 30.515066146850586,
"learning_rate": 1.940032495938008e-05,
"loss": 0.084,
"step": 6400
},
{
"epoch": 0.3868702199208404,
"grad_norm": 7.43032693862915,
"learning_rate": 1.93753280839895e-05,
"loss": 0.0716,
"step": 6500
},
{
"epoch": 0.3928220694580841,
"grad_norm": 0.10134006291627884,
"learning_rate": 1.9350331208598927e-05,
"loss": 0.1122,
"step": 6600
},
{
"epoch": 0.3987739189953278,
"grad_norm": 1.9222484827041626,
"learning_rate": 1.932533433320835e-05,
"loss": 0.116,
"step": 6700
},
{
"epoch": 0.4047257685325715,
"grad_norm": 5.026548385620117,
"learning_rate": 1.9300337457817774e-05,
"loss": 0.1087,
"step": 6800
},
{
"epoch": 0.4106776180698152,
"grad_norm": 5.591833591461182,
"learning_rate": 1.9275340582427198e-05,
"loss": 0.1,
"step": 6900
},
{
"epoch": 0.4166294676070589,
"grad_norm": 4.234807014465332,
"learning_rate": 1.9250343707036624e-05,
"loss": 0.08,
"step": 7000
},
{
"epoch": 0.4225813171443026,
"grad_norm": 0.3862815797328949,
"learning_rate": 1.9225346831646044e-05,
"loss": 0.1021,
"step": 7100
},
{
"epoch": 0.4285331666815463,
"grad_norm": 2.0624468326568604,
"learning_rate": 1.920034995625547e-05,
"loss": 0.1024,
"step": 7200
},
{
"epoch": 0.43448501621879,
"grad_norm": 0.02359202690422535,
"learning_rate": 1.9175353080864895e-05,
"loss": 0.0734,
"step": 7300
},
{
"epoch": 0.4404368657560337,
"grad_norm": 3.037367582321167,
"learning_rate": 1.9150356205474318e-05,
"loss": 0.0949,
"step": 7400
},
{
"epoch": 0.4463887152932774,
"grad_norm": 0.7619612812995911,
"learning_rate": 1.912535933008374e-05,
"loss": 0.0829,
"step": 7500
},
{
"epoch": 0.4523405648305211,
"grad_norm": 7.246056079864502,
"learning_rate": 1.9100362454693165e-05,
"loss": 0.0865,
"step": 7600
},
{
"epoch": 0.4582924143677648,
"grad_norm": 0.07375836372375488,
"learning_rate": 1.9075365579302588e-05,
"loss": 0.0657,
"step": 7700
},
{
"epoch": 0.4642442639050085,
"grad_norm": 0.25850656628608704,
"learning_rate": 1.905036870391201e-05,
"loss": 0.0958,
"step": 7800
},
{
"epoch": 0.4701961134422522,
"grad_norm": 6.8354692459106445,
"learning_rate": 1.902537182852144e-05,
"loss": 0.1001,
"step": 7900
},
{
"epoch": 0.47614796297949585,
"grad_norm": 6.075274467468262,
"learning_rate": 1.900037495313086e-05,
"loss": 0.0875,
"step": 8000
},
{
"epoch": 0.48209981251673956,
"grad_norm": 0.09836622327566147,
"learning_rate": 1.8975378077740282e-05,
"loss": 0.0638,
"step": 8100
},
{
"epoch": 0.48805166205398326,
"grad_norm": 4.014862060546875,
"learning_rate": 1.895038120234971e-05,
"loss": 0.1102,
"step": 8200
},
{
"epoch": 0.49400351159122696,
"grad_norm": 1.1304417848587036,
"learning_rate": 1.8925384326959132e-05,
"loss": 0.0893,
"step": 8300
},
{
"epoch": 0.49995536112847067,
"grad_norm": 4.048980712890625,
"learning_rate": 1.8900387451568555e-05,
"loss": 0.0771,
"step": 8400
},
{
"epoch": 0.5059072106657144,
"grad_norm": 0.1943063884973526,
"learning_rate": 1.887539057617798e-05,
"loss": 0.0802,
"step": 8500
},
{
"epoch": 0.511859060202958,
"grad_norm": 3.3784682750701904,
"learning_rate": 1.8850393700787402e-05,
"loss": 0.0859,
"step": 8600
},
{
"epoch": 0.5178109097402017,
"grad_norm": 3.344090700149536,
"learning_rate": 1.8825396825396826e-05,
"loss": 0.0816,
"step": 8700
},
{
"epoch": 0.5237627592774454,
"grad_norm": 0.16063201427459717,
"learning_rate": 1.8800399950006252e-05,
"loss": 0.0806,
"step": 8800
},
{
"epoch": 0.5297146088146891,
"grad_norm": 0.22334939241409302,
"learning_rate": 1.8775403074615676e-05,
"loss": 0.0802,
"step": 8900
},
{
"epoch": 0.5356664583519328,
"grad_norm": 0.22063274681568146,
"learning_rate": 1.8750406199225096e-05,
"loss": 0.0934,
"step": 9000
},
{
"epoch": 0.5416183078891765,
"grad_norm": 3.237891435623169,
"learning_rate": 1.8725409323834523e-05,
"loss": 0.0603,
"step": 9100
},
{
"epoch": 0.5475701574264202,
"grad_norm": 0.5304477214813232,
"learning_rate": 1.8700412448443946e-05,
"loss": 0.0862,
"step": 9200
},
{
"epoch": 0.5535220069636639,
"grad_norm": 0.03754859045147896,
"learning_rate": 1.867541557305337e-05,
"loss": 0.0811,
"step": 9300
},
{
"epoch": 0.5594738565009076,
"grad_norm": 5.268212795257568,
"learning_rate": 1.8650418697662793e-05,
"loss": 0.0613,
"step": 9400
},
{
"epoch": 0.5654257060381513,
"grad_norm": 0.6994486451148987,
"learning_rate": 1.862542182227222e-05,
"loss": 0.0721,
"step": 9500
},
{
"epoch": 0.571377555575395,
"grad_norm": 7.512162208557129,
"learning_rate": 1.860042494688164e-05,
"loss": 0.052,
"step": 9600
},
{
"epoch": 0.5773294051126387,
"grad_norm": 5.643643379211426,
"learning_rate": 1.8575428071491066e-05,
"loss": 0.107,
"step": 9700
},
{
"epoch": 0.5832812546498825,
"grad_norm": 5.92059850692749,
"learning_rate": 1.855043119610049e-05,
"loss": 0.0575,
"step": 9800
},
{
"epoch": 0.5892331041871262,
"grad_norm": 4.476712226867676,
"learning_rate": 1.8525434320709913e-05,
"loss": 0.0685,
"step": 9900
},
{
"epoch": 0.5951849537243699,
"grad_norm": 7.443460941314697,
"learning_rate": 1.8500437445319337e-05,
"loss": 0.0972,
"step": 10000
},
{
"epoch": 0.5951849537243699,
"eval_accuracy": 0.9890126895697926,
"eval_f1": 0.9745751039858965,
"eval_loss": 0.036706309765577316,
"eval_precision": 0.9863395595204907,
"eval_recall": 0.9630879790940766,
"eval_runtime": 699.1336,
"eval_samples_per_second": 120.157,
"eval_steps_per_second": 3.756,
"step": 10000
},
{
"epoch": 0.6011368032616136,
"grad_norm": 1.4378024339675903,
"learning_rate": 1.847544056992876e-05,
"loss": 0.0755,
"step": 10100
},
{
"epoch": 0.6070886527988573,
"grad_norm": 3.3750197887420654,
"learning_rate": 1.8450443694538183e-05,
"loss": 0.0656,
"step": 10200
},
{
"epoch": 0.613040502336101,
"grad_norm": 1.403449535369873,
"learning_rate": 1.8425446819147607e-05,
"loss": 0.0736,
"step": 10300
},
{
"epoch": 0.6189923518733447,
"grad_norm": 0.6102975606918335,
"learning_rate": 1.8400449943757034e-05,
"loss": 0.0575,
"step": 10400
},
{
"epoch": 0.6249442014105884,
"grad_norm": 11.931497573852539,
"learning_rate": 1.8375453068366457e-05,
"loss": 0.083,
"step": 10500
},
{
"epoch": 0.6308960509478321,
"grad_norm": 2.4090306758880615,
"learning_rate": 1.8350456192975877e-05,
"loss": 0.0939,
"step": 10600
},
{
"epoch": 0.6368479004850758,
"grad_norm": 9.093697547912598,
"learning_rate": 1.8325459317585304e-05,
"loss": 0.0792,
"step": 10700
},
{
"epoch": 0.6427997500223195,
"grad_norm": 5.047466278076172,
"learning_rate": 1.8300462442194727e-05,
"loss": 0.0644,
"step": 10800
},
{
"epoch": 0.6487515995595632,
"grad_norm": 0.031459350138902664,
"learning_rate": 1.827546556680415e-05,
"loss": 0.0853,
"step": 10900
},
{
"epoch": 0.6547034490968068,
"grad_norm": 4.757169723510742,
"learning_rate": 1.8250468691413574e-05,
"loss": 0.0831,
"step": 11000
},
{
"epoch": 0.6606552986340505,
"grad_norm": 5.49468994140625,
"learning_rate": 1.8225471816023e-05,
"loss": 0.0744,
"step": 11100
},
{
"epoch": 0.6666071481712942,
"grad_norm": 0.1340179443359375,
"learning_rate": 1.820047494063242e-05,
"loss": 0.0685,
"step": 11200
},
{
"epoch": 0.6725589977085379,
"grad_norm": 5.28952693939209,
"learning_rate": 1.8175478065241848e-05,
"loss": 0.0676,
"step": 11300
},
{
"epoch": 0.6785108472457816,
"grad_norm": 0.032111797481775284,
"learning_rate": 1.815048118985127e-05,
"loss": 0.0751,
"step": 11400
},
{
"epoch": 0.6844626967830253,
"grad_norm": 0.06453489512205124,
"learning_rate": 1.8125484314460694e-05,
"loss": 0.0696,
"step": 11500
},
{
"epoch": 0.690414546320269,
"grad_norm": 1.1088204383850098,
"learning_rate": 1.8100487439070118e-05,
"loss": 0.0569,
"step": 11600
},
{
"epoch": 0.6963663958575127,
"grad_norm": 3.420901298522949,
"learning_rate": 1.807549056367954e-05,
"loss": 0.0831,
"step": 11700
},
{
"epoch": 0.7023182453947564,
"grad_norm": 0.044298429042100906,
"learning_rate": 1.8050493688288965e-05,
"loss": 0.0811,
"step": 11800
},
{
"epoch": 0.7082700949320001,
"grad_norm": 0.17848756909370422,
"learning_rate": 1.8025496812898388e-05,
"loss": 0.064,
"step": 11900
},
{
"epoch": 0.7142219444692438,
"grad_norm": 1.6905055046081543,
"learning_rate": 1.8000499937507815e-05,
"loss": 0.1141,
"step": 12000
},
{
"epoch": 0.7201737940064875,
"grad_norm": 0.1147763580083847,
"learning_rate": 1.7975503062117238e-05,
"loss": 0.0787,
"step": 12100
},
{
"epoch": 0.7261256435437312,
"grad_norm": 5.295955181121826,
"learning_rate": 1.795050618672666e-05,
"loss": 0.0777,
"step": 12200
},
{
"epoch": 0.7320774930809749,
"grad_norm": 1.885426640510559,
"learning_rate": 1.7925509311336085e-05,
"loss": 0.0556,
"step": 12300
},
{
"epoch": 0.7380293426182186,
"grad_norm": 0.2982531785964966,
"learning_rate": 1.790051243594551e-05,
"loss": 0.0704,
"step": 12400
},
{
"epoch": 0.7439811921554623,
"grad_norm": 10.546908378601074,
"learning_rate": 1.7875515560554932e-05,
"loss": 0.0505,
"step": 12500
},
{
"epoch": 0.749933041692706,
"grad_norm": 0.013311674818396568,
"learning_rate": 1.7850518685164355e-05,
"loss": 0.0622,
"step": 12600
},
{
"epoch": 0.7558848912299497,
"grad_norm": 20.8616886138916,
"learning_rate": 1.782552180977378e-05,
"loss": 0.0588,
"step": 12700
},
{
"epoch": 0.7618367407671934,
"grad_norm": 0.5590173006057739,
"learning_rate": 1.7800524934383202e-05,
"loss": 0.0748,
"step": 12800
},
{
"epoch": 0.7677885903044371,
"grad_norm": 0.053356532007455826,
"learning_rate": 1.777552805899263e-05,
"loss": 0.0866,
"step": 12900
},
{
"epoch": 0.7737404398416808,
"grad_norm": 8.997873306274414,
"learning_rate": 1.7750531183602052e-05,
"loss": 0.0614,
"step": 13000
},
{
"epoch": 0.7796922893789245,
"grad_norm": 0.15272608399391174,
"learning_rate": 1.7725534308211472e-05,
"loss": 0.0579,
"step": 13100
},
{
"epoch": 0.7856441389161682,
"grad_norm": 9.847796440124512,
"learning_rate": 1.77005374328209e-05,
"loss": 0.0717,
"step": 13200
},
{
"epoch": 0.791595988453412,
"grad_norm": 0.4942564070224762,
"learning_rate": 1.7675540557430322e-05,
"loss": 0.0713,
"step": 13300
},
{
"epoch": 0.7975478379906557,
"grad_norm": 12.623053550720215,
"learning_rate": 1.7650543682039746e-05,
"loss": 0.0601,
"step": 13400
},
{
"epoch": 0.8034996875278992,
"grad_norm": 0.47039860486984253,
"learning_rate": 1.762554680664917e-05,
"loss": 0.0706,
"step": 13500
},
{
"epoch": 0.809451537065143,
"grad_norm": 0.06241678074002266,
"learning_rate": 1.7600549931258596e-05,
"loss": 0.0739,
"step": 13600
},
{
"epoch": 0.8154033866023866,
"grad_norm": 0.5157163739204407,
"learning_rate": 1.7575553055868016e-05,
"loss": 0.0711,
"step": 13700
},
{
"epoch": 0.8213552361396304,
"grad_norm": 0.684218168258667,
"learning_rate": 1.7550556180477443e-05,
"loss": 0.0704,
"step": 13800
},
{
"epoch": 0.827307085676874,
"grad_norm": 5.712099075317383,
"learning_rate": 1.7525559305086866e-05,
"loss": 0.069,
"step": 13900
},
{
"epoch": 0.8332589352141178,
"grad_norm": 10.816957473754883,
"learning_rate": 1.750056242969629e-05,
"loss": 0.0641,
"step": 14000
},
{
"epoch": 0.8392107847513615,
"grad_norm": 5.686164855957031,
"learning_rate": 1.7475565554305713e-05,
"loss": 0.068,
"step": 14100
},
{
"epoch": 0.8451626342886052,
"grad_norm": 0.052303824573755264,
"learning_rate": 1.7450568678915136e-05,
"loss": 0.0861,
"step": 14200
},
{
"epoch": 0.8511144838258489,
"grad_norm": 2.4506478309631348,
"learning_rate": 1.742557180352456e-05,
"loss": 0.0586,
"step": 14300
},
{
"epoch": 0.8570663333630926,
"grad_norm": 4.940361499786377,
"learning_rate": 1.7400574928133983e-05,
"loss": 0.058,
"step": 14400
},
{
"epoch": 0.8630181829003363,
"grad_norm": 0.027969790622591972,
"learning_rate": 1.737557805274341e-05,
"loss": 0.0603,
"step": 14500
},
{
"epoch": 0.86897003243758,
"grad_norm": 7.126792907714844,
"learning_rate": 1.7350581177352833e-05,
"loss": 0.0718,
"step": 14600
},
{
"epoch": 0.8749218819748237,
"grad_norm": 1.7231388092041016,
"learning_rate": 1.7325584301962257e-05,
"loss": 0.0553,
"step": 14700
},
{
"epoch": 0.8808737315120674,
"grad_norm": 2.9911932945251465,
"learning_rate": 1.730058742657168e-05,
"loss": 0.0671,
"step": 14800
},
{
"epoch": 0.8868255810493111,
"grad_norm": 0.11182364076375961,
"learning_rate": 1.7275590551181104e-05,
"loss": 0.0722,
"step": 14900
},
{
"epoch": 0.8927774305865548,
"grad_norm": 1.7560580968856812,
"learning_rate": 1.7250593675790527e-05,
"loss": 0.0773,
"step": 15000
},
{
"epoch": 0.8927774305865548,
"eval_accuracy": 0.9907744684903459,
"eval_f1": 0.9786730509920472,
"eval_loss": 0.030510300770401955,
"eval_precision": 0.9894830560347226,
"eval_recall": 0.9680966898954704,
"eval_runtime": 637.3433,
"eval_samples_per_second": 131.807,
"eval_steps_per_second": 4.12,
"step": 15000
},
{
"epoch": 0.8987292801237985,
"grad_norm": 24.16592025756836,
"learning_rate": 1.722559680039995e-05,
"loss": 0.0547,
"step": 15100
},
{
"epoch": 0.9046811296610422,
"grad_norm": 0.27272409200668335,
"learning_rate": 1.7200599925009377e-05,
"loss": 0.0766,
"step": 15200
},
{
"epoch": 0.9106329791982859,
"grad_norm": 0.11689332127571106,
"learning_rate": 1.7175603049618797e-05,
"loss": 0.0585,
"step": 15300
},
{
"epoch": 0.9165848287355296,
"grad_norm": 0.033309925347566605,
"learning_rate": 1.7150606174228224e-05,
"loss": 0.0508,
"step": 15400
},
{
"epoch": 0.9225366782727733,
"grad_norm": 0.1792755424976349,
"learning_rate": 1.7125609298837647e-05,
"loss": 0.0676,
"step": 15500
},
{
"epoch": 0.928488527810017,
"grad_norm": 0.009599496610462666,
"learning_rate": 1.710061242344707e-05,
"loss": 0.0582,
"step": 15600
},
{
"epoch": 0.9344403773472607,
"grad_norm": 5.908741474151611,
"learning_rate": 1.7075615548056494e-05,
"loss": 0.0531,
"step": 15700
},
{
"epoch": 0.9403922268845044,
"grad_norm": 0.12097036838531494,
"learning_rate": 1.7050618672665918e-05,
"loss": 0.0701,
"step": 15800
},
{
"epoch": 0.946344076421748,
"grad_norm": 0.04996073991060257,
"learning_rate": 1.702562179727534e-05,
"loss": 0.0797,
"step": 15900
},
{
"epoch": 0.9522959259589917,
"grad_norm": 3.689929485321045,
"learning_rate": 1.7000624921884764e-05,
"loss": 0.0783,
"step": 16000
},
{
"epoch": 0.9582477754962354,
"grad_norm": 3.7698171138763428,
"learning_rate": 1.697562804649419e-05,
"loss": 0.0608,
"step": 16100
},
{
"epoch": 0.9641996250334791,
"grad_norm": 0.3943725824356079,
"learning_rate": 1.6950631171103615e-05,
"loss": 0.068,
"step": 16200
},
{
"epoch": 0.9701514745707228,
"grad_norm": 0.24215468764305115,
"learning_rate": 1.6925634295713038e-05,
"loss": 0.0588,
"step": 16300
},
{
"epoch": 0.9761033241079665,
"grad_norm": 1.7841726541519165,
"learning_rate": 1.690063742032246e-05,
"loss": 0.0621,
"step": 16400
},
{
"epoch": 0.9820551736452102,
"grad_norm": 0.02200796641409397,
"learning_rate": 1.6875640544931885e-05,
"loss": 0.0658,
"step": 16500
},
{
"epoch": 0.9880070231824539,
"grad_norm": 5.905091762542725,
"learning_rate": 1.6850643669541308e-05,
"loss": 0.0477,
"step": 16600
},
{
"epoch": 0.9939588727196976,
"grad_norm": 6.53129243850708,
"learning_rate": 1.682564679415073e-05,
"loss": 0.0612,
"step": 16700
},
{
"epoch": 0.9999107222569413,
"grad_norm": 8.548293113708496,
"learning_rate": 1.680064991876016e-05,
"loss": 0.0594,
"step": 16800
},
{
"epoch": 1.0058328125464988,
"grad_norm": 0.3448663353919983,
"learning_rate": 1.677565304336958e-05,
"loss": 0.0327,
"step": 16900
},
{
"epoch": 1.0117846620837425,
"grad_norm": 0.6257154941558838,
"learning_rate": 1.6750656167979005e-05,
"loss": 0.0306,
"step": 17000
},
{
"epoch": 1.0177365116209862,
"grad_norm": 4.9479193687438965,
"learning_rate": 1.672565929258843e-05,
"loss": 0.0346,
"step": 17100
},
{
"epoch": 1.02368836115823,
"grad_norm": 0.009480413980782032,
"learning_rate": 1.6700662417197852e-05,
"loss": 0.0268,
"step": 17200
},
{
"epoch": 1.0296402106954736,
"grad_norm": 0.9463269114494324,
"learning_rate": 1.6675665541807275e-05,
"loss": 0.0586,
"step": 17300
},
{
"epoch": 1.0355920602327173,
"grad_norm": 0.07529614865779877,
"learning_rate": 1.66506686664167e-05,
"loss": 0.0294,
"step": 17400
},
{
"epoch": 1.041543909769961,
"grad_norm": 0.006725461222231388,
"learning_rate": 1.6625671791026122e-05,
"loss": 0.0387,
"step": 17500
},
{
"epoch": 1.0474957593072047,
"grad_norm": 0.6210471987724304,
"learning_rate": 1.6600674915635546e-05,
"loss": 0.0422,
"step": 17600
},
{
"epoch": 1.0534476088444484,
"grad_norm": 0.21577982604503632,
"learning_rate": 1.6575678040244972e-05,
"loss": 0.0241,
"step": 17700
},
{
"epoch": 1.0593994583816921,
"grad_norm": 13.052763938903809,
"learning_rate": 1.6550681164854392e-05,
"loss": 0.0321,
"step": 17800
},
{
"epoch": 1.0653513079189358,
"grad_norm": 0.019462615251541138,
"learning_rate": 1.652568428946382e-05,
"loss": 0.0374,
"step": 17900
},
{
"epoch": 1.0713031574561795,
"grad_norm": 0.008586418814957142,
"learning_rate": 1.6500687414073243e-05,
"loss": 0.0441,
"step": 18000
},
{
"epoch": 1.0772550069934232,
"grad_norm": 0.0021827963646501303,
"learning_rate": 1.6475690538682666e-05,
"loss": 0.0219,
"step": 18100
},
{
"epoch": 1.083206856530667,
"grad_norm": 0.019988901913166046,
"learning_rate": 1.645069366329209e-05,
"loss": 0.0363,
"step": 18200
},
{
"epoch": 1.0891587060679107,
"grad_norm": 0.3322964012622833,
"learning_rate": 1.6425696787901513e-05,
"loss": 0.041,
"step": 18300
},
{
"epoch": 1.0951105556051544,
"grad_norm": 3.1195592880249023,
"learning_rate": 1.6400699912510936e-05,
"loss": 0.0349,
"step": 18400
},
{
"epoch": 1.101062405142398,
"grad_norm": 0.020526066422462463,
"learning_rate": 1.637570303712036e-05,
"loss": 0.0583,
"step": 18500
},
{
"epoch": 1.1070142546796418,
"grad_norm": 0.026518339291214943,
"learning_rate": 1.6350706161729786e-05,
"loss": 0.0328,
"step": 18600
},
{
"epoch": 1.1129661042168855,
"grad_norm": 0.02615894377231598,
"learning_rate": 1.632570928633921e-05,
"loss": 0.0233,
"step": 18700
},
{
"epoch": 1.1189179537541292,
"grad_norm": 0.036855828016996384,
"learning_rate": 1.6300712410948633e-05,
"loss": 0.025,
"step": 18800
},
{
"epoch": 1.1248698032913729,
"grad_norm": 0.07263686507940292,
"learning_rate": 1.6275715535558057e-05,
"loss": 0.0318,
"step": 18900
},
{
"epoch": 1.1308216528286166,
"grad_norm": 0.12782253324985504,
"learning_rate": 1.625071866016748e-05,
"loss": 0.0588,
"step": 19000
},
{
"epoch": 1.1367735023658603,
"grad_norm": 0.012232950888574123,
"learning_rate": 1.6225721784776903e-05,
"loss": 0.0283,
"step": 19100
},
{
"epoch": 1.142725351903104,
"grad_norm": 0.22422778606414795,
"learning_rate": 1.6200724909386327e-05,
"loss": 0.0454,
"step": 19200
},
{
"epoch": 1.1486772014403477,
"grad_norm": 0.37581029534339905,
"learning_rate": 1.6175728033995754e-05,
"loss": 0.0475,
"step": 19300
},
{
"epoch": 1.1546290509775914,
"grad_norm": 0.06439394503831863,
"learning_rate": 1.6150731158605174e-05,
"loss": 0.0484,
"step": 19400
},
{
"epoch": 1.160580900514835,
"grad_norm": 0.09931550920009613,
"learning_rate": 1.61257342832146e-05,
"loss": 0.0386,
"step": 19500
},
{
"epoch": 1.1665327500520788,
"grad_norm": 4.9056291580200195,
"learning_rate": 1.6100737407824024e-05,
"loss": 0.0499,
"step": 19600
},
{
"epoch": 1.1724845995893225,
"grad_norm": 0.15619812905788422,
"learning_rate": 1.6075740532433447e-05,
"loss": 0.0324,
"step": 19700
},
{
"epoch": 1.1784364491265662,
"grad_norm": 0.42920124530792236,
"learning_rate": 1.605074365704287e-05,
"loss": 0.0321,
"step": 19800
},
{
"epoch": 1.18438829866381,
"grad_norm": 0.014962832443416119,
"learning_rate": 1.6025746781652294e-05,
"loss": 0.0256,
"step": 19900
},
{
"epoch": 1.1903401482010534,
"grad_norm": 1.7344650030136108,
"learning_rate": 1.6000749906261717e-05,
"loss": 0.0416,
"step": 20000
},
{
"epoch": 1.1903401482010534,
"eval_accuracy": 0.9913934718948646,
"eval_f1": 0.9800788030749731,
"eval_loss": 0.03799758478999138,
"eval_precision": 0.992189679218968,
"eval_recall": 0.9682600174216028,
"eval_runtime": 634.1459,
"eval_samples_per_second": 132.471,
"eval_steps_per_second": 4.141,
"step": 20000
},
{
"epoch": 1.196291997738297,
"grad_norm": 0.008463047444820404,
"learning_rate": 1.597575303087114e-05,
"loss": 0.0374,
"step": 20100
},
{
"epoch": 1.2022438472755408,
"grad_norm": 0.055202700197696686,
"learning_rate": 1.5950756155480568e-05,
"loss": 0.0376,
"step": 20200
},
{
"epoch": 1.2081956968127845,
"grad_norm": 0.8383081555366516,
"learning_rate": 1.592575928008999e-05,
"loss": 0.0391,
"step": 20300
},
{
"epoch": 1.2141475463500282,
"grad_norm": 4.7434868812561035,
"learning_rate": 1.5900762404699414e-05,
"loss": 0.0561,
"step": 20400
},
{
"epoch": 1.220099395887272,
"grad_norm": 0.18934893608093262,
"learning_rate": 1.5875765529308838e-05,
"loss": 0.0323,
"step": 20500
},
{
"epoch": 1.2260512454245156,
"grad_norm": 0.02613821066915989,
"learning_rate": 1.585076865391826e-05,
"loss": 0.0425,
"step": 20600
},
{
"epoch": 1.2320030949617593,
"grad_norm": 0.04844895005226135,
"learning_rate": 1.5825771778527685e-05,
"loss": 0.0315,
"step": 20700
},
{
"epoch": 1.237954944499003,
"grad_norm": 0.2447776347398758,
"learning_rate": 1.5800774903137108e-05,
"loss": 0.0205,
"step": 20800
},
{
"epoch": 1.2439067940362467,
"grad_norm": 6.23702335357666,
"learning_rate": 1.5775778027746535e-05,
"loss": 0.046,
"step": 20900
},
{
"epoch": 1.2498586435734904,
"grad_norm": 9.4482421875,
"learning_rate": 1.5750781152355955e-05,
"loss": 0.0252,
"step": 21000
},
{
"epoch": 1.2558104931107341,
"grad_norm": 5.085052013397217,
"learning_rate": 1.572578427696538e-05,
"loss": 0.0391,
"step": 21100
},
{
"epoch": 1.2617623426479778,
"grad_norm": 6.614001274108887,
"learning_rate": 1.5700787401574805e-05,
"loss": 0.0312,
"step": 21200
},
{
"epoch": 1.2677141921852215,
"grad_norm": 0.14532746374607086,
"learning_rate": 1.567579052618423e-05,
"loss": 0.0416,
"step": 21300
},
{
"epoch": 1.2736660417224652,
"grad_norm": 0.16996897757053375,
"learning_rate": 1.5650793650793652e-05,
"loss": 0.0409,
"step": 21400
},
{
"epoch": 1.279617891259709,
"grad_norm": 21.97540283203125,
"learning_rate": 1.5625796775403075e-05,
"loss": 0.0279,
"step": 21500
},
{
"epoch": 1.2855697407969526,
"grad_norm": 0.021660545840859413,
"learning_rate": 1.56007999000125e-05,
"loss": 0.0387,
"step": 21600
},
{
"epoch": 1.2915215903341963,
"grad_norm": 4.85373592376709,
"learning_rate": 1.5575803024621922e-05,
"loss": 0.0438,
"step": 21700
},
{
"epoch": 1.29747343987144,
"grad_norm": 0.12195076793432236,
"learning_rate": 1.555080614923135e-05,
"loss": 0.0397,
"step": 21800
},
{
"epoch": 1.3034252894086837,
"grad_norm": 0.004497983027249575,
"learning_rate": 1.5525809273840772e-05,
"loss": 0.0357,
"step": 21900
},
{
"epoch": 1.3093771389459274,
"grad_norm": 0.6372514367103577,
"learning_rate": 1.5500812398450196e-05,
"loss": 0.0341,
"step": 22000
},
{
"epoch": 1.3153289884831711,
"grad_norm": 0.04463673010468483,
"learning_rate": 1.547581552305962e-05,
"loss": 0.0339,
"step": 22100
},
{
"epoch": 1.3212808380204148,
"grad_norm": 0.5814944505691528,
"learning_rate": 1.5450818647669042e-05,
"loss": 0.0417,
"step": 22200
},
{
"epoch": 1.3272326875576586,
"grad_norm": 16.33363151550293,
"learning_rate": 1.5425821772278466e-05,
"loss": 0.0514,
"step": 22300
},
{
"epoch": 1.3331845370949023,
"grad_norm": 6.866737365722656,
"learning_rate": 1.540082489688789e-05,
"loss": 0.0261,
"step": 22400
},
{
"epoch": 1.339136386632146,
"grad_norm": 0.016487309709191322,
"learning_rate": 1.5375828021497313e-05,
"loss": 0.032,
"step": 22500
},
{
"epoch": 1.3450882361693897,
"grad_norm": 0.08337634056806564,
"learning_rate": 1.5350831146106736e-05,
"loss": 0.0493,
"step": 22600
},
{
"epoch": 1.3510400857066334,
"grad_norm": 0.01637539267539978,
"learning_rate": 1.5325834270716163e-05,
"loss": 0.0405,
"step": 22700
},
{
"epoch": 1.356991935243877,
"grad_norm": 0.48449915647506714,
"learning_rate": 1.5300837395325586e-05,
"loss": 0.0408,
"step": 22800
},
{
"epoch": 1.3629437847811208,
"grad_norm": 0.05106478929519653,
"learning_rate": 1.527584051993501e-05,
"loss": 0.0393,
"step": 22900
},
{
"epoch": 1.3688956343183645,
"grad_norm": 0.01775064878165722,
"learning_rate": 1.5250843644544433e-05,
"loss": 0.0507,
"step": 23000
},
{
"epoch": 1.3748474838556082,
"grad_norm": 0.027698859572410583,
"learning_rate": 1.5225846769153858e-05,
"loss": 0.0227,
"step": 23100
},
{
"epoch": 1.3807993333928519,
"grad_norm": 0.004283764399588108,
"learning_rate": 1.5200849893763282e-05,
"loss": 0.0381,
"step": 23200
},
{
"epoch": 1.3867511829300956,
"grad_norm": 0.008574152365326881,
"learning_rate": 1.5175853018372703e-05,
"loss": 0.0505,
"step": 23300
},
{
"epoch": 1.3927030324673393,
"grad_norm": 0.3766544759273529,
"learning_rate": 1.5150856142982128e-05,
"loss": 0.0245,
"step": 23400
},
{
"epoch": 1.398654882004583,
"grad_norm": 0.003701981622725725,
"learning_rate": 1.5125859267591552e-05,
"loss": 0.0315,
"step": 23500
},
{
"epoch": 1.4046067315418267,
"grad_norm": 0.049905925989151,
"learning_rate": 1.5100862392200977e-05,
"loss": 0.0494,
"step": 23600
},
{
"epoch": 1.4105585810790704,
"grad_norm": 0.01804766058921814,
"learning_rate": 1.50758655168104e-05,
"loss": 0.0425,
"step": 23700
},
{
"epoch": 1.416510430616314,
"grad_norm": 7.34988260269165,
"learning_rate": 1.5050868641419825e-05,
"loss": 0.0519,
"step": 23800
},
{
"epoch": 1.4224622801535578,
"grad_norm": 27.711393356323242,
"learning_rate": 1.5025871766029247e-05,
"loss": 0.0351,
"step": 23900
},
{
"epoch": 1.4284141296908013,
"grad_norm": 6.535654544830322,
"learning_rate": 1.500087489063867e-05,
"loss": 0.0398,
"step": 24000
},
{
"epoch": 1.434365979228045,
"grad_norm": 0.5379288196563721,
"learning_rate": 1.4975878015248096e-05,
"loss": 0.0263,
"step": 24100
},
{
"epoch": 1.4403178287652887,
"grad_norm": 2.396723747253418,
"learning_rate": 1.4950881139857519e-05,
"loss": 0.0355,
"step": 24200
},
{
"epoch": 1.4462696783025324,
"grad_norm": 0.052881885319948196,
"learning_rate": 1.4925884264466944e-05,
"loss": 0.0381,
"step": 24300
},
{
"epoch": 1.452221527839776,
"grad_norm": 0.042416125535964966,
"learning_rate": 1.4900887389076366e-05,
"loss": 0.0268,
"step": 24400
},
{
"epoch": 1.4581733773770198,
"grad_norm": 0.05842834711074829,
"learning_rate": 1.4875890513685791e-05,
"loss": 0.0508,
"step": 24500
},
{
"epoch": 1.4641252269142635,
"grad_norm": 0.1875191628932953,
"learning_rate": 1.4850893638295214e-05,
"loss": 0.0578,
"step": 24600
},
{
"epoch": 1.4700770764515072,
"grad_norm": 2.026597499847412,
"learning_rate": 1.482589676290464e-05,
"loss": 0.0272,
"step": 24700
},
{
"epoch": 1.476028925988751,
"grad_norm": 0.9349521398544312,
"learning_rate": 1.4800899887514063e-05,
"loss": 0.0338,
"step": 24800
},
{
"epoch": 1.4819807755259946,
"grad_norm": 7.7040791511535645,
"learning_rate": 1.4775903012123484e-05,
"loss": 0.0288,
"step": 24900
},
{
"epoch": 1.4879326250632383,
"grad_norm": 0.027851175516843796,
"learning_rate": 1.475090613673291e-05,
"loss": 0.0412,
"step": 25000
},
{
"epoch": 1.4879326250632383,
"eval_accuracy": 0.9921910339737637,
"eval_f1": 0.982066703116457,
"eval_loss": 0.031415630131959915,
"eval_precision": 0.9862727871732924,
"eval_recall": 0.9778963414634146,
"eval_runtime": 634.7831,
"eval_samples_per_second": 132.338,
"eval_steps_per_second": 4.137,
"step": 25000
},
{
"epoch": 1.493884474600482,
"grad_norm": 0.005522654391825199,
"learning_rate": 1.4725909261342333e-05,
"loss": 0.0185,
"step": 25100
},
{
"epoch": 1.4998363241377257,
"grad_norm": 5.522494316101074,
"learning_rate": 1.4700912385951758e-05,
"loss": 0.0452,
"step": 25200
},
{
"epoch": 1.5057881736749694,
"grad_norm": 18.661855697631836,
"learning_rate": 1.467591551056118e-05,
"loss": 0.031,
"step": 25300
},
{
"epoch": 1.5117400232122131,
"grad_norm": 0.14226514101028442,
"learning_rate": 1.4650918635170605e-05,
"loss": 0.0354,
"step": 25400
},
{
"epoch": 1.5176918727494568,
"grad_norm": 0.0307607501745224,
"learning_rate": 1.4625921759780028e-05,
"loss": 0.0351,
"step": 25500
},
{
"epoch": 1.5236437222867005,
"grad_norm": 0.018097123131155968,
"learning_rate": 1.4600924884389453e-05,
"loss": 0.0463,
"step": 25600
},
{
"epoch": 1.5295955718239442,
"grad_norm": 0.016355447471141815,
"learning_rate": 1.4575928008998877e-05,
"loss": 0.0275,
"step": 25700
},
{
"epoch": 1.535547421361188,
"grad_norm": 0.03597331792116165,
"learning_rate": 1.4550931133608298e-05,
"loss": 0.0231,
"step": 25800
},
{
"epoch": 1.5414992708984316,
"grad_norm": 0.02727796509861946,
"learning_rate": 1.4525934258217724e-05,
"loss": 0.0346,
"step": 25900
},
{
"epoch": 1.5474511204356753,
"grad_norm": 20.804296493530273,
"learning_rate": 1.4500937382827147e-05,
"loss": 0.0397,
"step": 26000
},
{
"epoch": 1.553402969972919,
"grad_norm": 0.07538951188325882,
"learning_rate": 1.4475940507436572e-05,
"loss": 0.049,
"step": 26100
},
{
"epoch": 1.5593548195101627,
"grad_norm": 5.466484546661377,
"learning_rate": 1.4450943632045995e-05,
"loss": 0.0454,
"step": 26200
},
{
"epoch": 1.5653066690474065,
"grad_norm": 5.512643814086914,
"learning_rate": 1.442594675665542e-05,
"loss": 0.0464,
"step": 26300
},
{
"epoch": 1.5712585185846502,
"grad_norm": 0.0186602845788002,
"learning_rate": 1.4400949881264842e-05,
"loss": 0.0301,
"step": 26400
},
{
"epoch": 1.5772103681218939,
"grad_norm": 0.031637031584978104,
"learning_rate": 1.4375953005874266e-05,
"loss": 0.0349,
"step": 26500
},
{
"epoch": 1.5831622176591376,
"grad_norm": 0.016410792246460915,
"learning_rate": 1.435095613048369e-05,
"loss": 0.0468,
"step": 26600
},
{
"epoch": 1.5891140671963813,
"grad_norm": 0.05681620165705681,
"learning_rate": 1.4325959255093114e-05,
"loss": 0.0251,
"step": 26700
},
{
"epoch": 1.595065916733625,
"grad_norm": 0.594913125038147,
"learning_rate": 1.430096237970254e-05,
"loss": 0.0334,
"step": 26800
},
{
"epoch": 1.6010177662708687,
"grad_norm": 1.608115553855896,
"learning_rate": 1.4275965504311961e-05,
"loss": 0.0366,
"step": 26900
},
{
"epoch": 1.6069696158081124,
"grad_norm": 6.881460189819336,
"learning_rate": 1.4250968628921386e-05,
"loss": 0.04,
"step": 27000
},
{
"epoch": 1.612921465345356,
"grad_norm": 0.032269809395074844,
"learning_rate": 1.422597175353081e-05,
"loss": 0.0332,
"step": 27100
},
{
"epoch": 1.6188733148825998,
"grad_norm": 1.2177642583847046,
"learning_rate": 1.4200974878140235e-05,
"loss": 0.0265,
"step": 27200
},
{
"epoch": 1.6248251644198435,
"grad_norm": 0.016585221514105797,
"learning_rate": 1.4175978002749658e-05,
"loss": 0.0275,
"step": 27300
},
{
"epoch": 1.6307770139570872,
"grad_norm": 7.614233493804932,
"learning_rate": 1.415098112735908e-05,
"loss": 0.0411,
"step": 27400
},
{
"epoch": 1.6367288634943309,
"grad_norm": 0.0856470912694931,
"learning_rate": 1.4125984251968505e-05,
"loss": 0.0569,
"step": 27500
},
{
"epoch": 1.6426807130315746,
"grad_norm": 0.03266504406929016,
"learning_rate": 1.4100987376577928e-05,
"loss": 0.0253,
"step": 27600
},
{
"epoch": 1.6486325625688183,
"grad_norm": 0.08693056553602219,
"learning_rate": 1.4075990501187353e-05,
"loss": 0.0375,
"step": 27700
},
{
"epoch": 1.654584412106062,
"grad_norm": 0.016319314017891884,
"learning_rate": 1.4050993625796777e-05,
"loss": 0.0442,
"step": 27800
},
{
"epoch": 1.6605362616433057,
"grad_norm": 0.11942635476589203,
"learning_rate": 1.4025996750406202e-05,
"loss": 0.0372,
"step": 27900
},
{
"epoch": 1.6664881111805494,
"grad_norm": 5.282145977020264,
"learning_rate": 1.4000999875015623e-05,
"loss": 0.053,
"step": 28000
},
{
"epoch": 1.672439960717793,
"grad_norm": 0.020808018743991852,
"learning_rate": 1.3976002999625049e-05,
"loss": 0.0336,
"step": 28100
},
{
"epoch": 1.6783918102550368,
"grad_norm": 3.7852814197540283,
"learning_rate": 1.3951006124234472e-05,
"loss": 0.0457,
"step": 28200
},
{
"epoch": 1.6843436597922805,
"grad_norm": 0.013235555961728096,
"learning_rate": 1.3926009248843895e-05,
"loss": 0.0386,
"step": 28300
},
{
"epoch": 1.6902955093295242,
"grad_norm": 0.05768481642007828,
"learning_rate": 1.390101237345332e-05,
"loss": 0.0493,
"step": 28400
},
{
"epoch": 1.696247358866768,
"grad_norm": 2.631303548812866,
"learning_rate": 1.3876015498062742e-05,
"loss": 0.0439,
"step": 28500
},
{
"epoch": 1.7021992084040116,
"grad_norm": 0.5090872645378113,
"learning_rate": 1.3851018622672167e-05,
"loss": 0.0282,
"step": 28600
},
{
"epoch": 1.7081510579412553,
"grad_norm": 0.014639855362474918,
"learning_rate": 1.382602174728159e-05,
"loss": 0.0451,
"step": 28700
},
{
"epoch": 1.714102907478499,
"grad_norm": 0.010227348655462265,
"learning_rate": 1.3801024871891016e-05,
"loss": 0.0304,
"step": 28800
},
{
"epoch": 1.7200547570157427,
"grad_norm": 0.06359577178955078,
"learning_rate": 1.377602799650044e-05,
"loss": 0.0531,
"step": 28900
},
{
"epoch": 1.7260066065529864,
"grad_norm": 0.1433858424425125,
"learning_rate": 1.3751031121109864e-05,
"loss": 0.0407,
"step": 29000
},
{
"epoch": 1.7319584560902301,
"grad_norm": 0.5865179896354675,
"learning_rate": 1.3726034245719286e-05,
"loss": 0.0351,
"step": 29100
},
{
"epoch": 1.7379103056274738,
"grad_norm": 0.008081269450485706,
"learning_rate": 1.370103737032871e-05,
"loss": 0.0359,
"step": 29200
},
{
"epoch": 1.7438621551647175,
"grad_norm": 0.0038693081587553024,
"learning_rate": 1.3676040494938134e-05,
"loss": 0.029,
"step": 29300
},
{
"epoch": 1.7498140047019612,
"grad_norm": 0.011708752252161503,
"learning_rate": 1.3651043619547558e-05,
"loss": 0.02,
"step": 29400
},
{
"epoch": 1.755765854239205,
"grad_norm": 13.56950569152832,
"learning_rate": 1.3626046744156983e-05,
"loss": 0.0261,
"step": 29500
},
{
"epoch": 1.7617177037764487,
"grad_norm": 0.07075086236000061,
"learning_rate": 1.3601049868766405e-05,
"loss": 0.0336,
"step": 29600
},
{
"epoch": 1.7676695533136924,
"grad_norm": 0.03655830770730972,
"learning_rate": 1.357605299337583e-05,
"loss": 0.0383,
"step": 29700
},
{
"epoch": 1.773621402850936,
"grad_norm": 0.29130032658576965,
"learning_rate": 1.3551056117985253e-05,
"loss": 0.0324,
"step": 29800
},
{
"epoch": 1.7795732523881798,
"grad_norm": 0.023573417216539383,
"learning_rate": 1.3526059242594677e-05,
"loss": 0.0366,
"step": 29900
},
{
"epoch": 1.7855251019254235,
"grad_norm": 3.8324155807495117,
"learning_rate": 1.35010623672041e-05,
"loss": 0.0391,
"step": 30000
},
{
"epoch": 1.7855251019254235,
"eval_accuracy": 0.9919529557412565,
"eval_f1": 0.9815602836879432,
"eval_loss": 0.031384389847517014,
"eval_precision": 0.9835993877104745,
"eval_recall": 0.9795296167247387,
"eval_runtime": 634.8585,
"eval_samples_per_second": 132.322,
"eval_steps_per_second": 4.136,
"step": 30000
},
{
"epoch": 1.7914769514626672,
"grad_norm": 3.4548158645629883,
"learning_rate": 1.3476065491813523e-05,
"loss": 0.0359,
"step": 30100
},
{
"epoch": 1.7974288009999109,
"grad_norm": 0.14029207825660706,
"learning_rate": 1.3451068616422948e-05,
"loss": 0.0263,
"step": 30200
},
{
"epoch": 1.8033806505371546,
"grad_norm": 0.004503784701228142,
"learning_rate": 1.3426071741032372e-05,
"loss": 0.0393,
"step": 30300
},
{
"epoch": 1.8093325000743983,
"grad_norm": 1.7882189750671387,
"learning_rate": 1.3401074865641797e-05,
"loss": 0.0489,
"step": 30400
},
{
"epoch": 1.815284349611642,
"grad_norm": 1.591253399848938,
"learning_rate": 1.3376077990251219e-05,
"loss": 0.0433,
"step": 30500
},
{
"epoch": 1.8212361991488857,
"grad_norm": 0.012020041234791279,
"learning_rate": 1.3351081114860644e-05,
"loss": 0.0304,
"step": 30600
},
{
"epoch": 1.8271880486861292,
"grad_norm": 1.5531669855117798,
"learning_rate": 1.3326084239470067e-05,
"loss": 0.0335,
"step": 30700
},
{
"epoch": 1.8331398982233729,
"grad_norm": 3.4099249839782715,
"learning_rate": 1.330108736407949e-05,
"loss": 0.0513,
"step": 30800
},
{
"epoch": 1.8390917477606166,
"grad_norm": 0.0514712817966938,
"learning_rate": 1.3276090488688916e-05,
"loss": 0.0492,
"step": 30900
},
{
"epoch": 1.8450435972978603,
"grad_norm": 0.018144380301237106,
"learning_rate": 1.3251093613298337e-05,
"loss": 0.0348,
"step": 31000
},
{
"epoch": 1.850995446835104,
"grad_norm": 0.016865933313965797,
"learning_rate": 1.3226096737907762e-05,
"loss": 0.0387,
"step": 31100
},
{
"epoch": 1.8569472963723477,
"grad_norm": 1.086006760597229,
"learning_rate": 1.3201099862517186e-05,
"loss": 0.0531,
"step": 31200
},
{
"epoch": 1.8628991459095914,
"grad_norm": 0.2081891894340515,
"learning_rate": 1.3176102987126611e-05,
"loss": 0.0541,
"step": 31300
},
{
"epoch": 1.868850995446835,
"grad_norm": 0.14017155766487122,
"learning_rate": 1.3151106111736034e-05,
"loss": 0.0425,
"step": 31400
},
{
"epoch": 1.8748028449840788,
"grad_norm": 0.3818506598472595,
"learning_rate": 1.312610923634546e-05,
"loss": 0.0418,
"step": 31500
},
{
"epoch": 1.8807546945213225,
"grad_norm": 0.06837693601846695,
"learning_rate": 1.3101112360954881e-05,
"loss": 0.0346,
"step": 31600
},
{
"epoch": 1.8867065440585662,
"grad_norm": 9.042508125305176,
"learning_rate": 1.3076115485564305e-05,
"loss": 0.0395,
"step": 31700
},
{
"epoch": 1.89265839359581,
"grad_norm": 2.3299245834350586,
"learning_rate": 1.305111861017373e-05,
"loss": 0.036,
"step": 31800
},
{
"epoch": 1.8986102431330536,
"grad_norm": 0.08258947730064392,
"learning_rate": 1.3026121734783153e-05,
"loss": 0.0509,
"step": 31900
},
{
"epoch": 1.9045620926702973,
"grad_norm": 0.054314155131578445,
"learning_rate": 1.3001124859392578e-05,
"loss": 0.036,
"step": 32000
},
{
"epoch": 1.910513942207541,
"grad_norm": 0.047836799174547195,
"learning_rate": 1.2976127984002e-05,
"loss": 0.0353,
"step": 32100
},
{
"epoch": 1.9164657917447847,
"grad_norm": 0.2258683145046234,
"learning_rate": 1.2951131108611425e-05,
"loss": 0.0344,
"step": 32200
},
{
"epoch": 1.9224176412820284,
"grad_norm": 2.757575273513794,
"learning_rate": 1.2926134233220848e-05,
"loss": 0.0295,
"step": 32300
},
{
"epoch": 1.9283694908192721,
"grad_norm": 0.5833088159561157,
"learning_rate": 1.2901137357830272e-05,
"loss": 0.037,
"step": 32400
},
{
"epoch": 1.9343213403565158,
"grad_norm": 0.00616218289360404,
"learning_rate": 1.2876140482439697e-05,
"loss": 0.0335,
"step": 32500
},
{
"epoch": 1.9402731898937595,
"grad_norm": 0.11325003206729889,
"learning_rate": 1.2851143607049119e-05,
"loss": 0.0405,
"step": 32600
},
{
"epoch": 1.9462250394310032,
"grad_norm": 13.159346580505371,
"learning_rate": 1.2826146731658544e-05,
"loss": 0.0299,
"step": 32700
},
{
"epoch": 1.952176888968247,
"grad_norm": 0.03671230375766754,
"learning_rate": 1.2801149856267967e-05,
"loss": 0.0479,
"step": 32800
},
{
"epoch": 1.9581287385054906,
"grad_norm": 0.011833186261355877,
"learning_rate": 1.2776152980877392e-05,
"loss": 0.0293,
"step": 32900
},
{
"epoch": 1.9640805880427343,
"grad_norm": 0.021998632699251175,
"learning_rate": 1.2751156105486816e-05,
"loss": 0.0355,
"step": 33000
},
{
"epoch": 1.9700324375799778,
"grad_norm": 0.020362643525004387,
"learning_rate": 1.272615923009624e-05,
"loss": 0.046,
"step": 33100
},
{
"epoch": 1.9759842871172215,
"grad_norm": 3.637277603149414,
"learning_rate": 1.2701162354705662e-05,
"loss": 0.0314,
"step": 33200
},
{
"epoch": 1.9819361366544652,
"grad_norm": 0.03707759082317352,
"learning_rate": 1.2676165479315086e-05,
"loss": 0.03,
"step": 33300
},
{
"epoch": 1.987887986191709,
"grad_norm": 0.071082204580307,
"learning_rate": 1.2651168603924511e-05,
"loss": 0.0311,
"step": 33400
},
{
"epoch": 1.9938398357289526,
"grad_norm": 1.5680729150772095,
"learning_rate": 1.2626171728533934e-05,
"loss": 0.0405,
"step": 33500
},
{
"epoch": 1.9997916852661963,
"grad_norm": 0.09194811433553696,
"learning_rate": 1.260117485314336e-05,
"loss": 0.0528,
"step": 33600
},
{
"epoch": 2.005713775555754,
"grad_norm": 0.04092830792069435,
"learning_rate": 1.2576177977752781e-05,
"loss": 0.0103,
"step": 33700
},
{
"epoch": 2.0116656250929976,
"grad_norm": 0.017966441810131073,
"learning_rate": 1.2551181102362206e-05,
"loss": 0.0045,
"step": 33800
},
{
"epoch": 2.0176174746302413,
"grad_norm": 0.002570417709648609,
"learning_rate": 1.252618422697163e-05,
"loss": 0.0185,
"step": 33900
},
{
"epoch": 2.023569324167485,
"grad_norm": 9.743237495422363,
"learning_rate": 1.2501187351581055e-05,
"loss": 0.0095,
"step": 34000
},
{
"epoch": 2.0295211737047287,
"grad_norm": 0.00426062848418951,
"learning_rate": 1.2476190476190478e-05,
"loss": 0.0206,
"step": 34100
},
{
"epoch": 2.0354730232419724,
"grad_norm": 0.010534725151956081,
"learning_rate": 1.24511936007999e-05,
"loss": 0.0112,
"step": 34200
},
{
"epoch": 2.041424872779216,
"grad_norm": 0.0059084463864564896,
"learning_rate": 1.2426196725409325e-05,
"loss": 0.0136,
"step": 34300
},
{
"epoch": 2.04737672231646,
"grad_norm": 0.02636191062629223,
"learning_rate": 1.2401199850018748e-05,
"loss": 0.0173,
"step": 34400
},
{
"epoch": 2.0533285718537035,
"grad_norm": 0.010172510519623756,
"learning_rate": 1.2376202974628173e-05,
"loss": 0.0158,
"step": 34500
},
{
"epoch": 2.0592804213909472,
"grad_norm": 0.04961102828383446,
"learning_rate": 1.2351206099237597e-05,
"loss": 0.0243,
"step": 34600
},
{
"epoch": 2.065232270928191,
"grad_norm": 0.0027994669508188963,
"learning_rate": 1.232620922384702e-05,
"loss": 0.0152,
"step": 34700
},
{
"epoch": 2.0711841204654347,
"grad_norm": 0.0040953196585178375,
"learning_rate": 1.2301212348456444e-05,
"loss": 0.0076,
"step": 34800
},
{
"epoch": 2.0771359700026784,
"grad_norm": 0.019047342240810394,
"learning_rate": 1.2276215473065867e-05,
"loss": 0.0089,
"step": 34900
},
{
"epoch": 2.083087819539922,
"grad_norm": 0.0023104604333639145,
"learning_rate": 1.2251218597675292e-05,
"loss": 0.0119,
"step": 35000
},
{
"epoch": 2.083087819539922,
"eval_accuracy": 0.9928933647596601,
"eval_f1": 0.9836666575469891,
"eval_loss": 0.04034886509180069,
"eval_precision": 0.9886707364021339,
"eval_recall": 0.9787129790940766,
"eval_runtime": 633.4633,
"eval_samples_per_second": 132.614,
"eval_steps_per_second": 4.145,
"step": 35000
},
{
"epoch": 2.0890396690771658,
"grad_norm": 0.04810173809528351,
"learning_rate": 1.2226221722284714e-05,
"loss": 0.0185,
"step": 35100
},
{
"epoch": 2.0949915186144095,
"grad_norm": 0.013579961843788624,
"learning_rate": 1.2201224846894139e-05,
"loss": 0.0147,
"step": 35200
},
{
"epoch": 2.100943368151653,
"grad_norm": 0.001705506001599133,
"learning_rate": 1.2176227971503562e-05,
"loss": 0.014,
"step": 35300
},
{
"epoch": 2.106895217688897,
"grad_norm": 0.12017501890659332,
"learning_rate": 1.2151231096112987e-05,
"loss": 0.0258,
"step": 35400
},
{
"epoch": 2.1128470672261406,
"grad_norm": 0.017923153936862946,
"learning_rate": 1.212623422072241e-05,
"loss": 0.0043,
"step": 35500
},
{
"epoch": 2.1187989167633843,
"grad_norm": 0.026006320491433144,
"learning_rate": 1.2101237345331836e-05,
"loss": 0.0113,
"step": 35600
},
{
"epoch": 2.124750766300628,
"grad_norm": 0.009411540813744068,
"learning_rate": 1.2076240469941258e-05,
"loss": 0.0191,
"step": 35700
},
{
"epoch": 2.1307026158378717,
"grad_norm": 0.037772323936223984,
"learning_rate": 1.2051243594550681e-05,
"loss": 0.0106,
"step": 35800
},
{
"epoch": 2.1366544653751154,
"grad_norm": 0.0031770714558660984,
"learning_rate": 1.2026246719160106e-05,
"loss": 0.0178,
"step": 35900
},
{
"epoch": 2.142606314912359,
"grad_norm": 0.0042376103810966015,
"learning_rate": 1.200124984376953e-05,
"loss": 0.0204,
"step": 36000
},
{
"epoch": 2.148558164449603,
"grad_norm": 0.0013943923404440284,
"learning_rate": 1.1976252968378955e-05,
"loss": 0.0148,
"step": 36100
},
{
"epoch": 2.1545100139868465,
"grad_norm": 0.0036634367424994707,
"learning_rate": 1.1951256092988376e-05,
"loss": 0.0203,
"step": 36200
},
{
"epoch": 2.16046186352409,
"grad_norm": 0.015139013528823853,
"learning_rate": 1.1926259217597801e-05,
"loss": 0.0143,
"step": 36300
},
{
"epoch": 2.166413713061334,
"grad_norm": 0.0035298485308885574,
"learning_rate": 1.1901262342207225e-05,
"loss": 0.0071,
"step": 36400
},
{
"epoch": 2.1723655625985776,
"grad_norm": 0.007681310176849365,
"learning_rate": 1.187626546681665e-05,
"loss": 0.0211,
"step": 36500
},
{
"epoch": 2.1783174121358213,
"grad_norm": 1.1557055711746216,
"learning_rate": 1.1851268591426073e-05,
"loss": 0.0111,
"step": 36600
},
{
"epoch": 2.184269261673065,
"grad_norm": 0.7144343256950378,
"learning_rate": 1.1826271716035495e-05,
"loss": 0.0136,
"step": 36700
},
{
"epoch": 2.1902211112103087,
"grad_norm": 0.021098671481013298,
"learning_rate": 1.180127484064492e-05,
"loss": 0.0241,
"step": 36800
},
{
"epoch": 2.1961729607475524,
"grad_norm": 0.0722559317946434,
"learning_rate": 1.1776277965254343e-05,
"loss": 0.027,
"step": 36900
},
{
"epoch": 2.202124810284796,
"grad_norm": 0.0044738114811480045,
"learning_rate": 1.1751281089863769e-05,
"loss": 0.0142,
"step": 37000
},
{
"epoch": 2.20807665982204,
"grad_norm": 0.0008085681474767625,
"learning_rate": 1.1726284214473192e-05,
"loss": 0.0206,
"step": 37100
},
{
"epoch": 2.2140285093592835,
"grad_norm": 0.0013074972666800022,
"learning_rate": 1.1701287339082617e-05,
"loss": 0.0104,
"step": 37200
},
{
"epoch": 2.2199803588965272,
"grad_norm": 0.006557020358741283,
"learning_rate": 1.1676290463692039e-05,
"loss": 0.0208,
"step": 37300
},
{
"epoch": 2.225932208433771,
"grad_norm": 0.007766306400299072,
"learning_rate": 1.1651293588301462e-05,
"loss": 0.0314,
"step": 37400
},
{
"epoch": 2.2318840579710146,
"grad_norm": 0.05710221454501152,
"learning_rate": 1.1626296712910887e-05,
"loss": 0.0184,
"step": 37500
},
{
"epoch": 2.2378359075082583,
"grad_norm": 0.00156008405610919,
"learning_rate": 1.160129983752031e-05,
"loss": 0.0127,
"step": 37600
},
{
"epoch": 2.243787757045502,
"grad_norm": 0.022134894505143166,
"learning_rate": 1.1576302962129736e-05,
"loss": 0.0268,
"step": 37700
},
{
"epoch": 2.2497396065827457,
"grad_norm": 0.4784717559814453,
"learning_rate": 1.1551306086739157e-05,
"loss": 0.0169,
"step": 37800
},
{
"epoch": 2.2556914561199894,
"grad_norm": 0.02040729857981205,
"learning_rate": 1.1526309211348583e-05,
"loss": 0.0235,
"step": 37900
},
{
"epoch": 2.261643305657233,
"grad_norm": 0.006299525499343872,
"learning_rate": 1.1501312335958006e-05,
"loss": 0.0251,
"step": 38000
},
{
"epoch": 2.267595155194477,
"grad_norm": 0.017164984717965126,
"learning_rate": 1.1476315460567431e-05,
"loss": 0.0161,
"step": 38100
},
{
"epoch": 2.2735470047317206,
"grad_norm": 0.006945526693016291,
"learning_rate": 1.1451318585176854e-05,
"loss": 0.0141,
"step": 38200
},
{
"epoch": 2.2794988542689643,
"grad_norm": 0.012456955388188362,
"learning_rate": 1.1426321709786276e-05,
"loss": 0.02,
"step": 38300
},
{
"epoch": 2.285450703806208,
"grad_norm": 0.2582660913467407,
"learning_rate": 1.1401324834395701e-05,
"loss": 0.0118,
"step": 38400
},
{
"epoch": 2.2914025533434517,
"grad_norm": 0.008593305014073849,
"learning_rate": 1.1376327959005125e-05,
"loss": 0.0172,
"step": 38500
},
{
"epoch": 2.2973544028806954,
"grad_norm": 0.3093462884426117,
"learning_rate": 1.135133108361455e-05,
"loss": 0.0177,
"step": 38600
},
{
"epoch": 2.303306252417939,
"grad_norm": 0.03443511947989464,
"learning_rate": 1.1326334208223973e-05,
"loss": 0.0288,
"step": 38700
},
{
"epoch": 2.3092581019551828,
"grad_norm": 0.002232662169262767,
"learning_rate": 1.1301337332833398e-05,
"loss": 0.0129,
"step": 38800
},
{
"epoch": 2.3152099514924265,
"grad_norm": 0.08788257837295532,
"learning_rate": 1.127634045744282e-05,
"loss": 0.0298,
"step": 38900
},
{
"epoch": 2.32116180102967,
"grad_norm": 0.06080511957406998,
"learning_rate": 1.1251343582052245e-05,
"loss": 0.0193,
"step": 39000
},
{
"epoch": 2.327113650566914,
"grad_norm": 0.03741913661360741,
"learning_rate": 1.1226346706661668e-05,
"loss": 0.0065,
"step": 39100
},
{
"epoch": 2.3330655001041576,
"grad_norm": 4.331652641296387,
"learning_rate": 1.1201349831271092e-05,
"loss": 0.0087,
"step": 39200
},
{
"epoch": 2.3390173496414013,
"grad_norm": 7.526216983795166,
"learning_rate": 1.1176352955880517e-05,
"loss": 0.0157,
"step": 39300
},
{
"epoch": 2.344969199178645,
"grad_norm": 0.0066286916844546795,
"learning_rate": 1.1151356080489939e-05,
"loss": 0.012,
"step": 39400
},
{
"epoch": 2.3509210487158887,
"grad_norm": 0.03812452405691147,
"learning_rate": 1.1126359205099364e-05,
"loss": 0.0265,
"step": 39500
},
{
"epoch": 2.3568728982531324,
"grad_norm": 0.063834048807621,
"learning_rate": 1.1101362329708787e-05,
"loss": 0.0212,
"step": 39600
},
{
"epoch": 2.362824747790376,
"grad_norm": 0.0014151857467368245,
"learning_rate": 1.1076365454318212e-05,
"loss": 0.0141,
"step": 39700
},
{
"epoch": 2.36877659732762,
"grad_norm": 10.587772369384766,
"learning_rate": 1.1051368578927634e-05,
"loss": 0.0297,
"step": 39800
},
{
"epoch": 2.374728446864863,
"grad_norm": 0.0303042009472847,
"learning_rate": 1.1026371703537059e-05,
"loss": 0.0132,
"step": 39900
},
{
"epoch": 2.3806802964021068,
"grad_norm": 0.008887387812137604,
"learning_rate": 1.1001374828146482e-05,
"loss": 0.0195,
"step": 40000
},
{
"epoch": 2.3806802964021068,
"eval_accuracy": 0.9929290764945361,
"eval_f1": 0.983751846381093,
"eval_loss": 0.0369524210691452,
"eval_precision": 0.988565145684442,
"eval_recall": 0.9789851916376306,
"eval_runtime": 635.2271,
"eval_samples_per_second": 132.246,
"eval_steps_per_second": 4.134,
"step": 40000
}
],
"logging_steps": 100,
"max_steps": 84010,
"num_input_tokens_seen": 0,
"num_train_epochs": 5,
"save_steps": 5000,
"stateful_callbacks": {
"EarlyStoppingCallback": {
"args": {
"early_stopping_patience": 3,
"early_stopping_threshold": 0.0
},
"attributes": {
"early_stopping_patience_counter": 0
}
},
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 1.0903801272155259e+18,
"train_batch_size": 20,
"trial_name": null,
"trial_params": null
}