{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 1072,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0046641791044776115,
      "grad_norm": 1.9758090459480515,
      "learning_rate": 4.6296296296296296e-06,
      "loss": 0.8462,
      "step": 5
    },
    {
      "epoch": 0.009328358208955223,
      "grad_norm": 1.4311203664430652,
      "learning_rate": 9.259259259259259e-06,
      "loss": 0.8235,
      "step": 10
    },
    {
      "epoch": 0.013992537313432836,
      "grad_norm": 1.1270739610351956,
      "learning_rate": 1.388888888888889e-05,
      "loss": 0.7682,
      "step": 15
    },
    {
      "epoch": 0.018656716417910446,
      "grad_norm": 0.677912080630837,
      "learning_rate": 1.8518518518518518e-05,
      "loss": 0.7273,
      "step": 20
    },
    {
      "epoch": 0.02332089552238806,
      "grad_norm": 0.5110522342084258,
      "learning_rate": 2.314814814814815e-05,
      "loss": 0.6958,
      "step": 25
    },
    {
      "epoch": 0.027985074626865673,
      "grad_norm": 0.47140530283590526,
      "learning_rate": 2.777777777777778e-05,
      "loss": 0.6736,
      "step": 30
    },
    {
      "epoch": 0.03264925373134328,
      "grad_norm": 0.4237814291987174,
      "learning_rate": 3.240740740740741e-05,
      "loss": 0.6447,
      "step": 35
    },
    {
      "epoch": 0.03731343283582089,
      "grad_norm": 0.4386058681539761,
      "learning_rate": 3.7037037037037037e-05,
      "loss": 0.6379,
      "step": 40
    },
    {
      "epoch": 0.04197761194029851,
      "grad_norm": 0.41746910366160445,
      "learning_rate": 4.166666666666667e-05,
      "loss": 0.6333,
      "step": 45
    },
    {
      "epoch": 0.04664179104477612,
      "grad_norm": 0.4109568020163113,
      "learning_rate": 4.62962962962963e-05,
      "loss": 0.6148,
      "step": 50
    },
    {
      "epoch": 0.051305970149253734,
      "grad_norm": 0.4243376531383993,
      "learning_rate": 4.999989285883431e-05,
      "loss": 0.6341,
      "step": 55
    },
    {
      "epoch": 0.055970149253731345,
      "grad_norm": 0.4706209794387523,
      "learning_rate": 4.999614302517356e-05,
      "loss": 0.6272,
      "step": 60
    },
    {
      "epoch": 0.06063432835820896,
      "grad_norm": 0.41951138935679394,
      "learning_rate": 4.99870371535606e-05,
      "loss": 0.6061,
      "step": 65
    },
    {
      "epoch": 0.06529850746268656,
      "grad_norm": 0.4228681972291175,
      "learning_rate": 4.997257741198456e-05,
      "loss": 0.6351,
      "step": 70
    },
    {
      "epoch": 0.06996268656716417,
      "grad_norm": 0.5483252374903946,
      "learning_rate": 4.9952767243121146e-05,
      "loss": 0.6124,
      "step": 75
    },
    {
      "epoch": 0.07462686567164178,
      "grad_norm": 0.5016302010500509,
      "learning_rate": 4.992761136351291e-05,
      "loss": 0.6077,
      "step": 80
    },
    {
      "epoch": 0.07929104477611941,
      "grad_norm": 0.5666402593067416,
      "learning_rate": 4.989711576244639e-05,
      "loss": 0.6002,
      "step": 85
    },
    {
      "epoch": 0.08395522388059702,
      "grad_norm": 0.44871650573083804,
      "learning_rate": 4.986128770052603e-05,
      "loss": 0.5896,
      "step": 90
    },
    {
      "epoch": 0.08861940298507463,
      "grad_norm": 0.5416624788799568,
      "learning_rate": 4.9820135707945634e-05,
      "loss": 0.5916,
      "step": 95
    },
    {
      "epoch": 0.09328358208955224,
      "grad_norm": 0.5901020019152606,
      "learning_rate": 4.9773669582457364e-05,
      "loss": 0.6047,
      "step": 100
    },
    {
      "epoch": 0.09794776119402986,
      "grad_norm": 0.45563119871555746,
      "learning_rate": 4.972190038703905e-05,
      "loss": 0.61,
      "step": 105
    },
    {
      "epoch": 0.10261194029850747,
      "grad_norm": 0.47772042985166674,
      "learning_rate": 4.966484044726024e-05,
      "loss": 0.597,
      "step": 110
    },
    {
      "epoch": 0.10727611940298508,
      "grad_norm": 0.4871302886461948,
      "learning_rate": 4.9602503348347625e-05,
      "loss": 0.5875,
      "step": 115
    },
    {
      "epoch": 0.11194029850746269,
      "grad_norm": 0.5281372638064636,
      "learning_rate": 4.953490393195063e-05,
      "loss": 0.5948,
      "step": 120
    },
    {
      "epoch": 0.1166044776119403,
      "grad_norm": 0.5118855628060364,
      "learning_rate": 4.9462058292607735e-05,
      "loss": 0.5921,
      "step": 125
    },
    {
      "epoch": 0.12126865671641791,
      "grad_norm": 0.46099021566430204,
      "learning_rate": 4.938398377391461e-05,
      "loss": 0.5849,
      "step": 130
    },
    {
      "epoch": 0.1259328358208955,
      "grad_norm": 0.3995491160765474,
      "learning_rate": 4.930069896439485e-05,
      "loss": 0.5846,
      "step": 135
    },
    {
      "epoch": 0.13059701492537312,
      "grad_norm": 0.5537933591705182,
      "learning_rate": 4.921222369307427e-05,
      "loss": 0.5867,
      "step": 140
    },
    {
      "epoch": 0.13526119402985073,
      "grad_norm": 0.5289822134215199,
      "learning_rate": 4.9118579024759854e-05,
      "loss": 0.5892,
      "step": 145
    },
    {
      "epoch": 0.13992537313432835,
      "grad_norm": 0.5169173210156138,
      "learning_rate": 4.901978725502454e-05,
      "loss": 0.5731,
      "step": 150
    },
    {
      "epoch": 0.14458955223880596,
      "grad_norm": 0.4536434111297343,
      "learning_rate": 4.891587190489891e-05,
      "loss": 0.5775,
      "step": 155
    },
    {
      "epoch": 0.14925373134328357,
      "grad_norm": 0.4725716102470668,
      "learning_rate": 4.880685771527114e-05,
      "loss": 0.5853,
      "step": 160
    },
    {
      "epoch": 0.15391791044776118,
      "grad_norm": 0.40833470515432263,
      "learning_rate": 4.869277064099654e-05,
      "loss": 0.5812,
      "step": 165
    },
    {
      "epoch": 0.15858208955223882,
      "grad_norm": 0.4085254798617572,
      "learning_rate": 4.8573637844718e-05,
      "loss": 0.5915,
      "step": 170
    },
    {
      "epoch": 0.16324626865671643,
      "grad_norm": 0.3885679664159616,
      "learning_rate": 4.844948769039896e-05,
      "loss": 0.5856,
      "step": 175
    },
    {
      "epoch": 0.16791044776119404,
      "grad_norm": 0.40258516114305937,
      "learning_rate": 4.83203497365703e-05,
      "loss": 0.5813,
      "step": 180
    },
    {
      "epoch": 0.17257462686567165,
      "grad_norm": 0.4297281730288888,
      "learning_rate": 4.818625472929286e-05,
      "loss": 0.5741,
      "step": 185
    },
    {
      "epoch": 0.17723880597014927,
      "grad_norm": 0.4002787357087474,
      "learning_rate": 4.8047234594837143e-05,
      "loss": 0.5678,
      "step": 190
    },
    {
      "epoch": 0.18190298507462688,
      "grad_norm": 0.4477952031357522,
      "learning_rate": 4.7903322432082185e-05,
      "loss": 0.5681,
      "step": 195
    },
    {
      "epoch": 0.1865671641791045,
      "grad_norm": 0.3947334250070109,
      "learning_rate": 4.775455250463507e-05,
      "loss": 0.5679,
      "step": 200
    },
    {
      "epoch": 0.1912313432835821,
      "grad_norm": 0.47514070355109034,
      "learning_rate": 4.760096023267322e-05,
      "loss": 0.5816,
      "step": 205
    },
    {
      "epoch": 0.1958955223880597,
      "grad_norm": 0.42373816797872005,
      "learning_rate": 4.744258218451135e-05,
      "loss": 0.5735,
      "step": 210
    },
    {
      "epoch": 0.20055970149253732,
      "grad_norm": 0.6325082441249004,
      "learning_rate": 4.7279456067895e-05,
      "loss": 0.5712,
      "step": 215
    },
    {
      "epoch": 0.20522388059701493,
      "grad_norm": 0.5253649891188522,
      "learning_rate": 4.71116207210228e-05,
      "loss": 0.567,
      "step": 220
    },
    {
      "epoch": 0.20988805970149255,
      "grad_norm": 0.4391181672828864,
      "learning_rate": 4.6939116103299655e-05,
      "loss": 0.5596,
      "step": 225
    },
    {
      "epoch": 0.21455223880597016,
      "grad_norm": 0.48073619855075617,
      "learning_rate": 4.676198328582288e-05,
      "loss": 0.5687,
      "step": 230
    },
    {
      "epoch": 0.21921641791044777,
      "grad_norm": 0.43188709388496127,
      "learning_rate": 4.6580264441603724e-05,
      "loss": 0.5756,
      "step": 235
    },
    {
      "epoch": 0.22388059701492538,
      "grad_norm": 0.43533127078485306,
      "learning_rate": 4.6394002835526535e-05,
      "loss": 0.5747,
      "step": 240
    },
    {
      "epoch": 0.228544776119403,
      "grad_norm": 0.45197249866544914,
      "learning_rate": 4.6203242814047946e-05,
      "loss": 0.5598,
      "step": 245
    },
    {
      "epoch": 0.2332089552238806,
      "grad_norm": 0.4708406669228157,
      "learning_rate": 4.6008029794638596e-05,
      "loss": 0.5601,
      "step": 250
    },
    {
      "epoch": 0.23787313432835822,
      "grad_norm": 0.3788147740469829,
      "learning_rate": 4.580841025496974e-05,
      "loss": 0.5575,
      "step": 255
    },
    {
      "epoch": 0.24253731343283583,
      "grad_norm": 0.39832021921351807,
      "learning_rate": 4.560443172184763e-05,
      "loss": 0.5668,
      "step": 260
    },
    {
      "epoch": 0.24720149253731344,
      "grad_norm": 0.3796232471227291,
      "learning_rate": 4.539614275989793e-05,
      "loss": 0.5554,
      "step": 265
    },
    {
      "epoch": 0.251865671641791,
      "grad_norm": 0.38983766864721225,
      "learning_rate": 4.5183592960003104e-05,
      "loss": 0.5541,
      "step": 270
    },
    {
      "epoch": 0.25652985074626866,
      "grad_norm": 0.4355247239008026,
      "learning_rate": 4.496683292749555e-05,
      "loss": 0.5585,
      "step": 275
    },
    {
      "epoch": 0.26119402985074625,
      "grad_norm": 0.486141033194627,
      "learning_rate": 4.4745914270109055e-05,
      "loss": 0.562,
      "step": 280
    },
    {
      "epoch": 0.2658582089552239,
      "grad_norm": 0.3997719744423428,
      "learning_rate": 4.4520889585691705e-05,
      "loss": 0.5671,
      "step": 285
    },
    {
      "epoch": 0.27052238805970147,
      "grad_norm": 0.3712456764688162,
      "learning_rate": 4.429181244968301e-05,
      "loss": 0.5512,
      "step": 290
    },
    {
      "epoch": 0.2751865671641791,
      "grad_norm": 0.33420016126238833,
      "learning_rate": 4.4058737402358295e-05,
      "loss": 0.5454,
      "step": 295
    },
    {
      "epoch": 0.2798507462686567,
      "grad_norm": 0.42877559880462685,
      "learning_rate": 4.38217199358434e-05,
      "loss": 0.5613,
      "step": 300
    },
    {
      "epoch": 0.28451492537313433,
      "grad_norm": 0.3883567888846464,
      "learning_rate": 4.3580816480902656e-05,
      "loss": 0.5456,
      "step": 305
    },
    {
      "epoch": 0.2891791044776119,
      "grad_norm": 0.35952103908629107,
      "learning_rate": 4.3336084393503545e-05,
      "loss": 0.5539,
      "step": 310
    },
    {
      "epoch": 0.29384328358208955,
      "grad_norm": 0.4006187854029051,
      "learning_rate": 4.308758194116094e-05,
      "loss": 0.5566,
      "step": 315
    },
    {
      "epoch": 0.29850746268656714,
      "grad_norm": 0.4377288166205896,
      "learning_rate": 4.283536828906436e-05,
      "loss": 0.5681,
      "step": 320
    },
    {
      "epoch": 0.3031716417910448,
      "grad_norm": 0.479723537665308,
      "learning_rate": 4.2579503485991567e-05,
      "loss": 0.5525,
      "step": 325
    },
    {
      "epoch": 0.30783582089552236,
      "grad_norm": 0.45114833691747125,
      "learning_rate": 4.2320048450011684e-05,
      "loss": 0.5593,
      "step": 330
    },
    {
      "epoch": 0.3125,
      "grad_norm": 0.43084209549655134,
      "learning_rate": 4.205706495398143e-05,
      "loss": 0.5548,
      "step": 335
    },
    {
      "epoch": 0.31716417910447764,
      "grad_norm": 0.43679277321127774,
      "learning_rate": 4.179061561083777e-05,
      "loss": 0.5409,
      "step": 340
    },
    {
      "epoch": 0.3218283582089552,
      "grad_norm": 0.38395193302678027,
      "learning_rate": 4.1520763858690644e-05,
      "loss": 0.5555,
      "step": 345
    },
    {
      "epoch": 0.32649253731343286,
      "grad_norm": 0.41454057296858093,
      "learning_rate": 4.124757394571914e-05,
      "loss": 0.5519,
      "step": 350
    },
    {
      "epoch": 0.33115671641791045,
      "grad_norm": 0.38586970653277064,
      "learning_rate": 4.097111091487486e-05,
      "loss": 0.5458,
      "step": 355
    },
    {
      "epoch": 0.3358208955223881,
      "grad_norm": 0.40062138427554544,
      "learning_rate": 4.069144058839605e-05,
      "loss": 0.5633,
      "step": 360
    },
    {
      "epoch": 0.34048507462686567,
      "grad_norm": 0.395450557781568,
      "learning_rate": 4.040862955213615e-05,
      "loss": 0.5574,
      "step": 365
    },
    {
      "epoch": 0.3451492537313433,
      "grad_norm": 0.3504896852533603,
      "learning_rate": 4.012274513971061e-05,
      "loss": 0.5595,
      "step": 370
    },
    {
      "epoch": 0.3498134328358209,
      "grad_norm": 0.3998859028513085,
      "learning_rate": 3.9833855416465624e-05,
      "loss": 0.5468,
      "step": 375
    },
    {
      "epoch": 0.35447761194029853,
      "grad_norm": 0.42412732408291715,
      "learning_rate": 3.954202916327264e-05,
      "loss": 0.546,
      "step": 380
    },
    {
      "epoch": 0.3591417910447761,
      "grad_norm": 0.331243600336611,
      "learning_rate": 3.924733586015257e-05,
      "loss": 0.5446,
      "step": 385
    },
    {
      "epoch": 0.36380597014925375,
      "grad_norm": 0.3475840800994299,
      "learning_rate": 3.894984566973346e-05,
      "loss": 0.548,
      "step": 390
    },
    {
      "epoch": 0.36847014925373134,
      "grad_norm": 0.3693437580108704,
      "learning_rate": 3.864962942054572e-05,
      "loss": 0.5544,
      "step": 395
    },
    {
      "epoch": 0.373134328358209,
      "grad_norm": 0.3569504930907783,
      "learning_rate": 3.834675859015876e-05,
      "loss": 0.5462,
      "step": 400
    },
    {
      "epoch": 0.37779850746268656,
      "grad_norm": 0.3738692499486398,
      "learning_rate": 3.804130528816312e-05,
      "loss": 0.5486,
      "step": 405
    },
    {
      "epoch": 0.3824626865671642,
      "grad_norm": 0.4273252518697797,
      "learning_rate": 3.77333422390021e-05,
      "loss": 0.5343,
      "step": 410
    },
    {
      "epoch": 0.3871268656716418,
      "grad_norm": 0.3421772246038431,
      "learning_rate": 3.7422942764657054e-05,
      "loss": 0.5457,
      "step": 415
    },
    {
      "epoch": 0.3917910447761194,
      "grad_norm": 0.38314646569144656,
      "learning_rate": 3.711018076719034e-05,
      "loss": 0.5508,
      "step": 420
    },
    {
      "epoch": 0.396455223880597,
      "grad_norm": 0.3863226048836933,
      "learning_rate": 3.679513071115025e-05,
      "loss": 0.5539,
      "step": 425
    },
    {
      "epoch": 0.40111940298507465,
      "grad_norm": 0.39491043076054533,
      "learning_rate": 3.647786760584194e-05,
      "loss": 0.5509,
      "step": 430
    },
    {
      "epoch": 0.40578358208955223,
      "grad_norm": 0.3965233515414594,
      "learning_rate": 3.615846698746869e-05,
      "loss": 0.5546,
      "step": 435
    },
    {
      "epoch": 0.41044776119402987,
      "grad_norm": 0.3912089184238815,
      "learning_rate": 3.583700490114776e-05,
      "loss": 0.547,
      "step": 440
    },
    {
      "epoch": 0.41511194029850745,
      "grad_norm": 0.32145572767001085,
      "learning_rate": 3.5513557882805e-05,
      "loss": 0.5454,
      "step": 445
    },
    {
      "epoch": 0.4197761194029851,
      "grad_norm": 0.42448068761931496,
      "learning_rate": 3.518820294095267e-05,
      "loss": 0.5479,
      "step": 450
    },
    {
      "epoch": 0.4244402985074627,
      "grad_norm": 0.38615690137848707,
      "learning_rate": 3.486101753835468e-05,
      "loss": 0.5407,
      "step": 455
    },
    {
      "epoch": 0.4291044776119403,
      "grad_norm": 0.3615989793832084,
      "learning_rate": 3.453207957358377e-05,
      "loss": 0.5365,
      "step": 460
    },
    {
      "epoch": 0.4337686567164179,
      "grad_norm": 0.2971209837786286,
      "learning_rate": 3.420146736247487e-05,
      "loss": 0.5274,
      "step": 465
    },
    {
      "epoch": 0.43843283582089554,
      "grad_norm": 0.3988695188963396,
      "learning_rate": 3.386925961947906e-05,
      "loss": 0.5514,
      "step": 470
    },
    {
      "epoch": 0.4430970149253731,
      "grad_norm": 0.36890972403201877,
      "learning_rate": 3.353553543892277e-05,
      "loss": 0.5509,
      "step": 475
    },
    {
      "epoch": 0.44776119402985076,
      "grad_norm": 0.38553607498940795,
      "learning_rate": 3.320037427617639e-05,
      "loss": 0.5429,
      "step": 480
    },
    {
      "epoch": 0.45242537313432835,
      "grad_norm": 0.3332697347381256,
      "learning_rate": 3.2863855928737026e-05,
      "loss": 0.5377,
      "step": 485
    },
    {
      "epoch": 0.457089552238806,
      "grad_norm": 0.32566097057035065,
      "learning_rate": 3.252606051722972e-05,
      "loss": 0.5523,
      "step": 490
    },
    {
      "epoch": 0.46175373134328357,
      "grad_norm": 0.34648905143931663,
      "learning_rate": 3.218706846633183e-05,
      "loss": 0.5363,
      "step": 495
    },
    {
      "epoch": 0.4664179104477612,
      "grad_norm": 0.3958967307108548,
      "learning_rate": 3.1846960485624886e-05,
      "loss": 0.5323,
      "step": 500
    },
    {
      "epoch": 0.4710820895522388,
      "grad_norm": 0.3732373162068215,
      "learning_rate": 3.150581755037877e-05,
      "loss": 0.5446,
      "step": 505
    },
    {
      "epoch": 0.47574626865671643,
      "grad_norm": 0.30584061440617794,
      "learning_rate": 3.1163720882272516e-05,
      "loss": 0.5393,
      "step": 510
    },
    {
      "epoch": 0.480410447761194,
      "grad_norm": 0.30587419898718043,
      "learning_rate": 3.08207519300565e-05,
      "loss": 0.5381,
      "step": 515
    },
    {
      "epoch": 0.48507462686567165,
      "grad_norm": 0.3078167857914096,
      "learning_rate": 3.047699235016056e-05,
      "loss": 0.5296,
      "step": 520
    },
    {
      "epoch": 0.48973880597014924,
      "grad_norm": 0.3763160137060326,
      "learning_rate": 3.0132523987252658e-05,
      "loss": 0.533,
      "step": 525
    },
    {
      "epoch": 0.4944029850746269,
      "grad_norm": 0.32451377249607954,
      "learning_rate": 2.9787428854752736e-05,
      "loss": 0.523,
      "step": 530
    },
    {
      "epoch": 0.49906716417910446,
      "grad_norm": 0.33157085351771526,
      "learning_rate": 2.9441789115306402e-05,
      "loss": 0.5446,
      "step": 535
    },
    {
      "epoch": 0.503731343283582,
      "grad_norm": 0.32049542675343645,
      "learning_rate": 2.9095687061223058e-05,
      "loss": 0.5407,
      "step": 540
    },
    {
      "epoch": 0.5083955223880597,
      "grad_norm": 0.35063165698426135,
      "learning_rate": 2.874920509488319e-05,
      "loss": 0.5437,
      "step": 545
    },
    {
      "epoch": 0.5130597014925373,
      "grad_norm": 0.3667880823680995,
      "learning_rate": 2.8402425709119435e-05,
      "loss": 0.5389,
      "step": 550
    },
    {
      "epoch": 0.5177238805970149,
      "grad_norm": 0.32354831842780596,
      "learning_rate": 2.8055431467576106e-05,
      "loss": 0.5367,
      "step": 555
    },
    {
      "epoch": 0.5223880597014925,
      "grad_norm": 0.33938180829200487,
      "learning_rate": 2.7708304985051868e-05,
      "loss": 0.5402,
      "step": 560
    },
    {
      "epoch": 0.5270522388059702,
      "grad_norm": 0.35282251833933853,
      "learning_rate": 2.7361128907830253e-05,
      "loss": 0.5311,
      "step": 565
    },
    {
      "epoch": 0.5317164179104478,
      "grad_norm": 0.31972090659363583,
      "learning_rate": 2.7013985894002623e-05,
      "loss": 0.5215,
      "step": 570
    },
    {
      "epoch": 0.5363805970149254,
      "grad_norm": 0.3174151600355697,
      "learning_rate": 2.6666958593788405e-05,
      "loss": 0.5364,
      "step": 575
    },
    {
      "epoch": 0.5410447761194029,
      "grad_norm": 0.33876423049788124,
      "learning_rate": 2.6320129629857093e-05,
      "loss": 0.5411,
      "step": 580
    },
    {
      "epoch": 0.5457089552238806,
      "grad_norm": 0.29718906820328433,
      "learning_rate": 2.597358157765692e-05,
      "loss": 0.5289,
      "step": 585
    },
    {
      "epoch": 0.5503731343283582,
      "grad_norm": 0.2869861395346561,
      "learning_rate": 2.56273969457547e-05,
      "loss": 0.5264,
      "step": 590
    },
    {
      "epoch": 0.5550373134328358,
      "grad_norm": 0.35750595786875244,
      "learning_rate": 2.528165815619162e-05,
      "loss": 0.5363,
      "step": 595
    },
    {
      "epoch": 0.5597014925373134,
      "grad_norm": 0.30578766469673174,
      "learning_rate": 2.4936447524859625e-05,
      "loss": 0.5274,
      "step": 600
    },
    {
      "epoch": 0.5643656716417911,
      "grad_norm": 0.2953155890310072,
      "learning_rate": 2.459184724190308e-05,
      "loss": 0.5196,
      "step": 605
    },
    {
      "epoch": 0.5690298507462687,
      "grad_norm": 0.33223357909885076,
      "learning_rate": 2.4247939352150386e-05,
      "loss": 0.5341,
      "step": 610
    },
    {
      "epoch": 0.5736940298507462,
      "grad_norm": 0.3181104656570763,
      "learning_rate": 2.390480573558012e-05,
      "loss": 0.5407,
      "step": 615
    },
    {
      "epoch": 0.5783582089552238,
      "grad_norm": 0.33684068564791786,
      "learning_rate": 2.3562528087826573e-05,
      "loss": 0.5253,
      "step": 620
    },
    {
      "epoch": 0.5830223880597015,
      "grad_norm": 0.30311641590639177,
      "learning_rate": 2.3221187900729003e-05,
      "loss": 0.529,
      "step": 625
    },
    {
      "epoch": 0.5876865671641791,
      "grad_norm": 0.3154356658715104,
      "learning_rate": 2.2880866442929544e-05,
      "loss": 0.5306,
      "step": 630
    },
    {
      "epoch": 0.5923507462686567,
      "grad_norm": 0.3037068003922746,
      "learning_rate": 2.254164474052416e-05,
      "loss": 0.5337,
      "step": 635
    },
    {
      "epoch": 0.5970149253731343,
      "grad_norm": 0.30921050319634213,
      "learning_rate": 2.2203603557771447e-05,
      "loss": 0.5182,
      "step": 640
    },
    {
      "epoch": 0.601679104477612,
      "grad_norm": 0.30128343015135456,
      "learning_rate": 2.186682337786365e-05,
      "loss": 0.5303,
      "step": 645
    },
    {
      "epoch": 0.6063432835820896,
      "grad_norm": 0.29432973674145474,
      "learning_rate": 2.153138438376473e-05,
      "loss": 0.5179,
      "step": 650
    },
    {
      "epoch": 0.6110074626865671,
      "grad_norm": 0.3507922671216128,
      "learning_rate": 2.119736643911979e-05,
      "loss": 0.5267,
      "step": 655
    },
    {
      "epoch": 0.6156716417910447,
      "grad_norm": 0.31281788509171565,
      "learning_rate": 2.0864849069240645e-05,
      "loss": 0.5305,
      "step": 660
    },
    {
      "epoch": 0.6203358208955224,
      "grad_norm": 0.2996979980996351,
      "learning_rate": 2.0533911442171805e-05,
      "loss": 0.5266,
      "step": 665
    },
    {
      "epoch": 0.625,
      "grad_norm": 0.3434082805928599,
      "learning_rate": 2.0204632349841667e-05,
      "loss": 0.5239,
      "step": 670
    },
    {
      "epoch": 0.6296641791044776,
      "grad_norm": 0.29647748744493213,
      "learning_rate": 1.9877090189303182e-05,
      "loss": 0.5312,
      "step": 675
    },
    {
      "epoch": 0.6343283582089553,
      "grad_norm": 0.2953288139907128,
      "learning_rate": 1.9551362944068462e-05,
      "loss": 0.5239,
      "step": 680
    },
    {
      "epoch": 0.6389925373134329,
      "grad_norm": 0.295178795028809,
      "learning_rate": 1.922752816554204e-05,
      "loss": 0.5236,
      "step": 685
    },
    {
      "epoch": 0.6436567164179104,
      "grad_norm": 0.27981806393912667,
      "learning_rate": 1.890566295455678e-05,
      "loss": 0.5176,
      "step": 690
    },
    {
      "epoch": 0.648320895522388,
      "grad_norm": 0.27816063483253595,
      "learning_rate": 1.858584394301728e-05,
      "loss": 0.5217,
      "step": 695
    },
    {
      "epoch": 0.6529850746268657,
      "grad_norm": 0.3123242454081955,
      "learning_rate": 1.8268147275654707e-05,
      "loss": 0.5385,
      "step": 700
    },
    {
      "epoch": 0.6576492537313433,
      "grad_norm": 0.28709889828339485,
      "learning_rate": 1.7952648591897858e-05,
      "loss": 0.5214,
      "step": 705
    },
    {
      "epoch": 0.6623134328358209,
      "grad_norm": 0.3668112563917202,
      "learning_rate": 1.7639423007864252e-05,
      "loss": 0.5133,
      "step": 710
    },
    {
      "epoch": 0.6669776119402985,
      "grad_norm": 0.29742974973894337,
      "learning_rate": 1.7328545098476106e-05,
      "loss": 0.5169,
      "step": 715
    },
    {
      "epoch": 0.6716417910447762,
      "grad_norm": 0.2925207230319102,
      "learning_rate": 1.702008887970491e-05,
      "loss": 0.5193,
      "step": 720
    },
    {
      "epoch": 0.6763059701492538,
      "grad_norm": 0.3071808107671139,
      "learning_rate": 1.671412779094926e-05,
      "loss": 0.5231,
      "step": 725
    },
    {
      "epoch": 0.6809701492537313,
      "grad_norm": 0.2950744904887601,
      "learning_rate": 1.6410734677549872e-05,
      "loss": 0.5193,
      "step": 730
    },
    {
      "epoch": 0.6856343283582089,
      "grad_norm": 0.2816774125768958,
      "learning_rate": 1.6109981773446036e-05,
      "loss": 0.5206,
      "step": 735
    },
    {
      "epoch": 0.6902985074626866,
      "grad_norm": 0.2652132182427412,
      "learning_rate": 1.58119406839777e-05,
      "loss": 0.519,
      "step": 740
    },
    {
      "epoch": 0.6949626865671642,
      "grad_norm": 0.27011209734380043,
      "learning_rate": 1.5516682368837133e-05,
      "loss": 0.5144,
      "step": 745
    },
    {
      "epoch": 0.6996268656716418,
      "grad_norm": 0.30018495912676124,
      "learning_rate": 1.5224277125174388e-05,
      "loss": 0.5308,
      "step": 750
    },
    {
      "epoch": 0.7042910447761194,
      "grad_norm": 0.2999514150211457,
      "learning_rate": 1.4934794570860416e-05,
      "loss": 0.5316,
      "step": 755
    },
    {
      "epoch": 0.7089552238805971,
      "grad_norm": 0.2921376925832542,
      "learning_rate": 1.464830362791204e-05,
      "loss": 0.518,
      "step": 760
    },
    {
      "epoch": 0.7136194029850746,
      "grad_norm": 0.2848170051758577,
      "learning_rate": 1.4364872506082425e-05,
      "loss": 0.5258,
      "step": 765
    },
    {
      "epoch": 0.7182835820895522,
      "grad_norm": 0.29637465642307265,
      "learning_rate": 1.4084568686621314e-05,
      "loss": 0.5288,
      "step": 770
    },
    {
      "epoch": 0.7229477611940298,
      "grad_norm": 0.2868790941186257,
      "learning_rate": 1.3807458906208546e-05,
      "loss": 0.5356,
      "step": 775
    },
    {
      "epoch": 0.7276119402985075,
      "grad_norm": 0.2949651720632738,
      "learning_rate": 1.3533609141065008e-05,
      "loss": 0.515,
      "step": 780
    },
    {
      "epoch": 0.7322761194029851,
      "grad_norm": 0.28879653042722686,
      "learning_rate": 1.326308459124447e-05,
      "loss": 0.5181,
      "step": 785
    },
    {
      "epoch": 0.7369402985074627,
      "grad_norm": 0.31103503096489793,
      "learning_rate": 1.299594966511038e-05,
      "loss": 0.5267,
      "step": 790
    },
    {
      "epoch": 0.7416044776119403,
      "grad_norm": 0.28007490231478543,
      "learning_rate": 1.2732267964001033e-05,
      "loss": 0.5149,
      "step": 795
    },
    {
      "epoch": 0.746268656716418,
      "grad_norm": 0.31007299015276557,
      "learning_rate": 1.2472102267086904e-05,
      "loss": 0.5212,
      "step": 800
    },
    {
      "epoch": 0.7509328358208955,
      "grad_norm": 0.2744709213405645,
      "learning_rate": 1.2215514516423813e-05,
      "loss": 0.5353,
      "step": 805
    },
    {
      "epoch": 0.7555970149253731,
      "grad_norm": 0.3081049377050525,
      "learning_rate": 1.1962565802205255e-05,
      "loss": 0.5141,
      "step": 810
    },
    {
      "epoch": 0.7602611940298507,
      "grad_norm": 0.2734600525913403,
      "learning_rate": 1.1713316348217673e-05,
      "loss": 0.5189,
      "step": 815
    },
    {
      "epoch": 0.7649253731343284,
      "grad_norm": 0.28749468701202524,
      "learning_rate": 1.1467825497501954e-05,
      "loss": 0.5035,
      "step": 820
    },
    {
      "epoch": 0.769589552238806,
      "grad_norm": 0.272461788008537,
      "learning_rate": 1.1226151698224597e-05,
      "loss": 0.5244,
      "step": 825
    },
    {
      "epoch": 0.7742537313432836,
      "grad_norm": 0.2768855490444309,
      "learning_rate": 1.0988352489762006e-05,
      "loss": 0.5286,
      "step": 830
    },
    {
      "epoch": 0.7789179104477612,
      "grad_norm": 0.26984601645578105,
      "learning_rate": 1.0754484489001085e-05,
      "loss": 0.5187,
      "step": 835
    },
    {
      "epoch": 0.7835820895522388,
      "grad_norm": 0.2869169085896112,
      "learning_rate": 1.052460337685951e-05,
      "loss": 0.5175,
      "step": 840
    },
    {
      "epoch": 0.7882462686567164,
      "grad_norm": 0.2658391392164394,
      "learning_rate": 1.0298763885028839e-05,
      "loss": 0.5137,
      "step": 845
    },
    {
      "epoch": 0.792910447761194,
      "grad_norm": 0.2592122836661767,
      "learning_rate": 1.0077019782943584e-05,
      "loss": 0.5195,
      "step": 850
    },
    {
      "epoch": 0.7975746268656716,
      "grad_norm": 0.25369333199896593,
      "learning_rate": 9.859423864979441e-06,
      "loss": 0.5077,
      "step": 855
    },
    {
      "epoch": 0.8022388059701493,
      "grad_norm": 0.2807004671522522,
      "learning_rate": 9.646027937883622e-06,
      "loss": 0.5268,
      "step": 860
    },
    {
      "epoch": 0.8069029850746269,
      "grad_norm": 0.29876031015981147,
      "learning_rate": 9.436882808440334e-06,
      "loss": 0.5281,
      "step": 865
    },
    {
      "epoch": 0.8115671641791045,
      "grad_norm": 0.2552797444232342,
      "learning_rate": 9.232038271374377e-06,
      "loss": 0.5101,
      "step": 870
    },
    {
      "epoch": 0.816231343283582,
      "grad_norm": 0.25714669471191914,
      "learning_rate": 9.031543097495638e-06,
      "loss": 0.5146,
      "step": 875
    },
    {
      "epoch": 0.8208955223880597,
      "grad_norm": 0.28443818799388493,
      "learning_rate": 8.835445022087426e-06,
      "loss": 0.5127,
      "step": 880
    },
    {
      "epoch": 0.8255597014925373,
      "grad_norm": 0.2811267607916848,
      "learning_rate": 8.6437907335413e-06,
      "loss": 0.5226,
      "step": 885
    },
    {
      "epoch": 0.8302238805970149,
      "grad_norm": 0.24918571196666064,
      "learning_rate": 8.456625862241193e-06,
      "loss": 0.525,
      "step": 890
    },
    {
      "epoch": 0.8348880597014925,
      "grad_norm": 0.2628330074151525,
      "learning_rate": 8.273994969699394e-06,
      "loss": 0.5191,
      "step": 895
    },
    {
      "epoch": 0.8395522388059702,
      "grad_norm": 0.24224127635385673,
      "learning_rate": 8.095941537947057e-06,
      "loss": 0.522,
      "step": 900
    },
    {
      "epoch": 0.8442164179104478,
      "grad_norm": 0.25919034374317823,
      "learning_rate": 7.922507959181673e-06,
      "loss": 0.5103,
      "step": 905
    },
    {
      "epoch": 0.8488805970149254,
      "grad_norm": 0.25048199005874255,
      "learning_rate": 7.753735525674059e-06,
      "loss": 0.5165,
      "step": 910
    },
    {
      "epoch": 0.8535447761194029,
      "grad_norm": 0.2942539424494046,
      "learning_rate": 7.58966441993719e-06,
      "loss": 0.5131,
      "step": 915
    },
    {
      "epoch": 0.8582089552238806,
      "grad_norm": 0.280150753783428,
      "learning_rate": 7.430333705159286e-06,
      "loss": 0.534,
      "step": 920
    },
    {
      "epoch": 0.8628731343283582,
      "grad_norm": 0.29700862209226553,
      "learning_rate": 7.275781315903374e-06,
      "loss": 0.5326,
      "step": 925
    },
    {
      "epoch": 0.8675373134328358,
      "grad_norm": 0.26562031569986255,
      "learning_rate": 7.126044049075548e-06,
      "loss": 0.5238,
      "step": 930
    },
    {
      "epoch": 0.8722014925373134,
      "grad_norm": 0.2525705211514053,
      "learning_rate": 6.9811575551641224e-06,
      "loss": 0.5119,
      "step": 935
    },
    {
      "epoch": 0.8768656716417911,
      "grad_norm": 0.26217000703599885,
      "learning_rate": 6.8411563297516995e-06,
      "loss": 0.5056,
      "step": 940
    },
    {
      "epoch": 0.8815298507462687,
      "grad_norm": 0.2842531580761581,
      "learning_rate": 6.706073705302254e-06,
      "loss": 0.5172,
      "step": 945
    },
    {
      "epoch": 0.8861940298507462,
      "grad_norm": 0.2645345280529236,
      "learning_rate": 6.575941843225068e-06,
      "loss": 0.5074,
      "step": 950
    },
    {
      "epoch": 0.8908582089552238,
      "grad_norm": 0.25388011418956324,
      "learning_rate": 6.450791726217538e-06,
      "loss": 0.5149,
      "step": 955
    },
    {
      "epoch": 0.8955223880597015,
      "grad_norm": 0.24544518817511415,
      "learning_rate": 6.330653150888617e-06,
      "loss": 0.5187,
      "step": 960
    },
    {
      "epoch": 0.9001865671641791,
      "grad_norm": 0.2521004631753791,
      "learning_rate": 6.215554720664598e-06,
      "loss": 0.5247,
      "step": 965
    },
    {
      "epoch": 0.9048507462686567,
      "grad_norm": 0.26393317318573556,
      "learning_rate": 6.105523838979022e-06,
      "loss": 0.5245,
      "step": 970
    },
    {
      "epoch": 0.9095149253731343,
      "grad_norm": 0.2542640717204462,
      "learning_rate": 6.000586702748301e-06,
      "loss": 0.5101,
      "step": 975
    },
    {
      "epoch": 0.914179104477612,
      "grad_norm": 0.26268921726771643,
      "learning_rate": 5.900768296134551e-06,
      "loss": 0.5122,
      "step": 980
    },
    {
      "epoch": 0.9188432835820896,
      "grad_norm": 0.2588415092397899,
      "learning_rate": 5.8060923845971825e-06,
      "loss": 0.5204,
      "step": 985
    },
    {
      "epoch": 0.9235074626865671,
      "grad_norm": 0.24621495506492652,
      "learning_rate": 5.7165815092346825e-06,
      "loss": 0.506,
      "step": 990
    },
    {
      "epoch": 0.9281716417910447,
      "grad_norm": 0.273533414817668,
      "learning_rate": 5.632256981417845e-06,
      "loss": 0.5142,
      "step": 995
    },
    {
      "epoch": 0.9328358208955224,
      "grad_norm": 0.25568849262973614,
      "learning_rate": 5.553138877715833e-06,
      "loss": 0.5164,
      "step": 1000
    },
    {
      "epoch": 0.9375,
      "grad_norm": 0.24726065850772955,
      "learning_rate": 5.479246035116201e-06,
      "loss": 0.5092,
      "step": 1005
    },
    {
      "epoch": 0.9421641791044776,
      "grad_norm": 0.252585044507956,
      "learning_rate": 5.410596046540051e-06,
      "loss": 0.5115,
      "step": 1010
    },
    {
      "epoch": 0.9468283582089553,
      "grad_norm": 0.27436517322468823,
      "learning_rate": 5.347205256653387e-06,
      "loss": 0.5142,
      "step": 1015
    },
    {
      "epoch": 0.9514925373134329,
      "grad_norm": 0.27436008430884,
      "learning_rate": 5.28908875797568e-06,
      "loss": 0.5074,
      "step": 1020
    },
    {
      "epoch": 0.9561567164179104,
      "grad_norm": 0.2504038189040886,
      "learning_rate": 5.236260387286509e-06,
      "loss": 0.5136,
      "step": 1025
    },
    {
      "epoch": 0.960820895522388,
      "grad_norm": 0.2517117370901398,
      "learning_rate": 5.1887327223312296e-06,
      "loss": 0.5103,
      "step": 1030
    },
    {
      "epoch": 0.9654850746268657,
      "grad_norm": 0.24813549404796453,
      "learning_rate": 5.1465170788263595e-06,
      "loss": 0.5182,
      "step": 1035
    },
    {
      "epoch": 0.9701492537313433,
      "grad_norm": 0.2540938670475621,
      "learning_rate": 5.109623507765466e-06,
      "loss": 0.5139,
      "step": 1040
    },
    {
      "epoch": 0.9748134328358209,
      "grad_norm": 0.2536067876872605,
      "learning_rate": 5.07806079302615e-06,
      "loss": 0.521,
      "step": 1045
    },
    {
      "epoch": 0.9794776119402985,
      "grad_norm": 0.24694114472632778,
      "learning_rate": 5.051836449278715e-06,
      "loss": 0.5156,
      "step": 1050
    },
    {
      "epoch": 0.9841417910447762,
      "grad_norm": 0.2641635688392549,
      "learning_rate": 5.030956720197035e-06,
      "loss": 0.5081,
      "step": 1055
    },
    {
      "epoch": 0.9888059701492538,
      "grad_norm": 0.26400774317053205,
      "learning_rate": 5.015426576972003e-06,
      "loss": 0.5258,
      "step": 1060
    },
    {
      "epoch": 0.9934701492537313,
      "grad_norm": 0.26897181865120834,
      "learning_rate": 5.005249717127964e-06,
      "loss": 0.5169,
      "step": 1065
    },
    {
      "epoch": 0.9981343283582089,
      "grad_norm": 0.2598340737075146,
      "learning_rate": 5.000428563642382e-06,
      "loss": 0.5173,
      "step": 1070
    },
    {
      "epoch": 1.0,
      "step": 1072,
      "total_flos": 488621249396736.0,
      "train_loss": 0.5497299346016414,
      "train_runtime": 26829.3105,
      "train_samples_per_second": 1.278,
      "train_steps_per_second": 0.04
    }
  ],
  "logging_steps": 5,
  "max_steps": 1072,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 488621249396736.0,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}