{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 88.78504672897196,
  "eval_steps": 500,
  "global_step": 9500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.09345794392523364,
      "grad_norm": 11.088369369506836,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 1.7742,
      "step": 10
    },
    {
      "epoch": 0.18691588785046728,
      "grad_norm": 11.834894180297852,
      "learning_rate": 4.000000000000001e-06,
      "loss": 1.6346,
      "step": 20
    },
    {
      "epoch": 0.2803738317757009,
      "grad_norm": 7.324471473693848,
      "learning_rate": 6e-06,
      "loss": 1.3161,
      "step": 30
    },
    {
      "epoch": 0.37383177570093457,
      "grad_norm": 2.9169681072235107,
      "learning_rate": 8.000000000000001e-06,
      "loss": 0.6619,
      "step": 40
    },
    {
      "epoch": 0.4672897196261682,
      "grad_norm": 2.7369236946105957,
      "learning_rate": 1e-05,
      "loss": 0.3784,
      "step": 50
    },
    {
      "epoch": 0.5607476635514018,
      "grad_norm": 2.0095913410186768,
      "learning_rate": 1.2e-05,
      "loss": 0.2843,
      "step": 60
    },
    {
      "epoch": 0.6542056074766355,
      "grad_norm": 1.0399119853973389,
      "learning_rate": 1.4000000000000001e-05,
      "loss": 0.2026,
      "step": 70
    },
    {
      "epoch": 0.7476635514018691,
      "grad_norm": 1.0228025913238525,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 0.171,
      "step": 80
    },
    {
      "epoch": 0.8411214953271028,
      "grad_norm": 1.065486192703247,
      "learning_rate": 1.8e-05,
      "loss": 0.143,
      "step": 90
    },
    {
      "epoch": 0.9345794392523364,
      "grad_norm": 0.994318962097168,
      "learning_rate": 2e-05,
      "loss": 0.123,
      "step": 100
    },
    {
      "epoch": 1.02803738317757,
      "grad_norm": 1.089653730392456,
      "learning_rate": 2.2000000000000003e-05,
      "loss": 0.1088,
      "step": 110
    },
    {
      "epoch": 1.1214953271028036,
      "grad_norm": 0.7751863598823547,
      "learning_rate": 2.4e-05,
      "loss": 0.1128,
      "step": 120
    },
    {
      "epoch": 1.2149532710280373,
      "grad_norm": 0.7448062896728516,
      "learning_rate": 2.6000000000000002e-05,
      "loss": 0.0817,
      "step": 130
    },
    {
      "epoch": 1.308411214953271,
      "grad_norm": 1.0464024543762207,
      "learning_rate": 2.8000000000000003e-05,
      "loss": 0.0797,
      "step": 140
    },
    {
      "epoch": 1.4018691588785046,
      "grad_norm": 0.945536196231842,
      "learning_rate": 3e-05,
      "loss": 0.0854,
      "step": 150
    },
    {
      "epoch": 1.4953271028037383,
      "grad_norm": 0.4769207239151001,
      "learning_rate": 3.2000000000000005e-05,
      "loss": 0.0774,
      "step": 160
    },
    {
      "epoch": 1.588785046728972,
      "grad_norm": 0.6780673265457153,
      "learning_rate": 3.4000000000000007e-05,
      "loss": 0.0676,
      "step": 170
    },
    {
      "epoch": 1.6822429906542056,
      "grad_norm": 0.4976266622543335,
      "learning_rate": 3.6e-05,
      "loss": 0.0605,
      "step": 180
    },
    {
      "epoch": 1.7757009345794392,
      "grad_norm": 0.5387341380119324,
      "learning_rate": 3.8e-05,
      "loss": 0.0666,
      "step": 190
    },
    {
      "epoch": 1.8691588785046729,
      "grad_norm": 1.1668950319290161,
      "learning_rate": 4e-05,
      "loss": 0.0668,
      "step": 200
    },
    {
      "epoch": 1.9626168224299065,
      "grad_norm": 0.6982051730155945,
      "learning_rate": 4.2e-05,
      "loss": 0.0619,
      "step": 210
    },
    {
      "epoch": 2.05607476635514,
      "grad_norm": 0.8520908355712891,
      "learning_rate": 4.4000000000000006e-05,
      "loss": 0.0626,
      "step": 220
    },
    {
      "epoch": 2.149532710280374,
      "grad_norm": 0.6880495548248291,
      "learning_rate": 4.600000000000001e-05,
      "loss": 0.0544,
      "step": 230
    },
    {
      "epoch": 2.2429906542056073,
      "grad_norm": 1.1675457954406738,
      "learning_rate": 4.8e-05,
      "loss": 0.0597,
      "step": 240
    },
    {
      "epoch": 2.336448598130841,
      "grad_norm": 0.6466752886772156,
      "learning_rate": 5e-05,
      "loss": 0.0579,
      "step": 250
    },
    {
      "epoch": 2.4299065420560746,
      "grad_norm": 0.8839955925941467,
      "learning_rate": 5.2000000000000004e-05,
      "loss": 0.0574,
      "step": 260
    },
    {
      "epoch": 2.5233644859813085,
      "grad_norm": 0.44215482473373413,
      "learning_rate": 5.4000000000000005e-05,
      "loss": 0.0507,
      "step": 270
    },
    {
      "epoch": 2.616822429906542,
      "grad_norm": 0.6018381118774414,
      "learning_rate": 5.6000000000000006e-05,
      "loss": 0.0602,
      "step": 280
    },
    {
      "epoch": 2.710280373831776,
      "grad_norm": 0.5012953281402588,
      "learning_rate": 5.8e-05,
      "loss": 0.0475,
      "step": 290
    },
    {
      "epoch": 2.803738317757009,
      "grad_norm": 0.5049079060554504,
      "learning_rate": 6e-05,
      "loss": 0.0543,
      "step": 300
    },
    {
      "epoch": 2.897196261682243,
      "grad_norm": 0.6439036130905151,
      "learning_rate": 6.2e-05,
      "loss": 0.0481,
      "step": 310
    },
    {
      "epoch": 2.9906542056074765,
      "grad_norm": 0.760713517665863,
      "learning_rate": 6.400000000000001e-05,
      "loss": 0.0483,
      "step": 320
    },
    {
      "epoch": 3.0841121495327104,
      "grad_norm": 0.8276482224464417,
      "learning_rate": 6.6e-05,
      "loss": 0.0506,
      "step": 330
    },
    {
      "epoch": 3.177570093457944,
      "grad_norm": 0.7145904898643494,
      "learning_rate": 6.800000000000001e-05,
      "loss": 0.039,
      "step": 340
    },
    {
      "epoch": 3.2710280373831777,
      "grad_norm": 0.5840898752212524,
      "learning_rate": 7e-05,
      "loss": 0.0473,
      "step": 350
    },
    {
      "epoch": 3.364485981308411,
      "grad_norm": 0.8430977463722229,
      "learning_rate": 7.2e-05,
      "loss": 0.0499,
      "step": 360
    },
    {
      "epoch": 3.457943925233645,
      "grad_norm": 0.4334861636161804,
      "learning_rate": 7.4e-05,
      "loss": 0.0421,
      "step": 370
    },
    {
      "epoch": 3.5514018691588785,
      "grad_norm": 0.40518927574157715,
      "learning_rate": 7.6e-05,
      "loss": 0.0467,
      "step": 380
    },
    {
      "epoch": 3.6448598130841123,
      "grad_norm": 0.6229057908058167,
      "learning_rate": 7.800000000000001e-05,
      "loss": 0.055,
      "step": 390
    },
    {
      "epoch": 3.7383177570093458,
      "grad_norm": 0.5108091831207275,
      "learning_rate": 8e-05,
      "loss": 0.0399,
      "step": 400
    },
    {
      "epoch": 3.831775700934579,
      "grad_norm": 0.733472466468811,
      "learning_rate": 8.2e-05,
      "loss": 0.0441,
      "step": 410
    },
    {
      "epoch": 3.925233644859813,
      "grad_norm": 0.8219009637832642,
      "learning_rate": 8.4e-05,
      "loss": 0.0491,
      "step": 420
    },
    {
      "epoch": 4.018691588785047,
      "grad_norm": 0.779040515422821,
      "learning_rate": 8.6e-05,
      "loss": 0.0485,
      "step": 430
    },
    {
      "epoch": 4.11214953271028,
      "grad_norm": 0.7235063910484314,
      "learning_rate": 8.800000000000001e-05,
      "loss": 0.0449,
      "step": 440
    },
    {
      "epoch": 4.205607476635514,
      "grad_norm": 0.9473637342453003,
      "learning_rate": 9e-05,
      "loss": 0.0428,
      "step": 450
    },
    {
      "epoch": 4.299065420560748,
      "grad_norm": 0.5876563787460327,
      "learning_rate": 9.200000000000001e-05,
      "loss": 0.0411,
      "step": 460
    },
    {
      "epoch": 4.392523364485982,
      "grad_norm": 0.7518793344497681,
      "learning_rate": 9.4e-05,
      "loss": 0.047,
      "step": 470
    },
    {
      "epoch": 4.485981308411215,
      "grad_norm": 0.47040578722953796,
      "learning_rate": 9.6e-05,
      "loss": 0.0387,
      "step": 480
    },
    {
      "epoch": 4.579439252336448,
      "grad_norm": 0.7008688449859619,
      "learning_rate": 9.8e-05,
      "loss": 0.0391,
      "step": 490
    },
    {
      "epoch": 4.672897196261682,
      "grad_norm": 0.32632163166999817,
      "learning_rate": 0.0001,
      "loss": 0.0345,
      "step": 500
    },
    {
      "epoch": 4.766355140186916,
      "grad_norm": 1.0094679594039917,
      "learning_rate": 9.999972660400536e-05,
      "loss": 0.039,
      "step": 510
    },
    {
      "epoch": 4.859813084112149,
      "grad_norm": 0.382621169090271,
      "learning_rate": 9.999890641901125e-05,
      "loss": 0.0303,
      "step": 520
    },
    {
      "epoch": 4.953271028037383,
      "grad_norm": 0.2948877811431885,
      "learning_rate": 9.999753945398704e-05,
      "loss": 0.0362,
      "step": 530
    },
    {
      "epoch": 5.046728971962617,
      "grad_norm": 0.5779514908790588,
      "learning_rate": 9.99956257238817e-05,
      "loss": 0.0373,
      "step": 540
    },
    {
      "epoch": 5.140186915887851,
      "grad_norm": 0.414638489484787,
      "learning_rate": 9.999316524962345e-05,
      "loss": 0.0308,
      "step": 550
    },
    {
      "epoch": 5.233644859813084,
      "grad_norm": 0.28259772062301636,
      "learning_rate": 9.999015805811965e-05,
      "loss": 0.0353,
      "step": 560
    },
    {
      "epoch": 5.327102803738318,
      "grad_norm": 0.599785566329956,
      "learning_rate": 9.998660418225645e-05,
      "loss": 0.0448,
      "step": 570
    },
    {
      "epoch": 5.420560747663552,
      "grad_norm": 0.3182764947414398,
      "learning_rate": 9.998250366089848e-05,
      "loss": 0.0408,
      "step": 580
    },
    {
      "epoch": 5.5140186915887845,
      "grad_norm": 0.59663325548172,
      "learning_rate": 9.997785653888835e-05,
      "loss": 0.0292,
      "step": 590
    },
    {
      "epoch": 5.607476635514018,
      "grad_norm": 0.6391479969024658,
      "learning_rate": 9.997266286704631e-05,
      "loss": 0.0344,
      "step": 600
    },
    {
      "epoch": 5.700934579439252,
      "grad_norm": 0.6931540369987488,
      "learning_rate": 9.996692270216947e-05,
      "loss": 0.0412,
      "step": 610
    },
    {
      "epoch": 5.794392523364486,
      "grad_norm": 0.4313046932220459,
      "learning_rate": 9.996063610703137e-05,
      "loss": 0.0376,
      "step": 620
    },
    {
      "epoch": 5.88785046728972,
      "grad_norm": 0.42874786257743835,
      "learning_rate": 9.995380315038119e-05,
      "loss": 0.0314,
      "step": 630
    },
    {
      "epoch": 5.981308411214953,
      "grad_norm": 0.24948689341545105,
      "learning_rate": 9.994642390694308e-05,
      "loss": 0.0277,
      "step": 640
    },
    {
      "epoch": 6.074766355140187,
      "grad_norm": 0.3999479413032532,
      "learning_rate": 9.993849845741524e-05,
      "loss": 0.03,
      "step": 650
    },
    {
      "epoch": 6.168224299065421,
      "grad_norm": 0.3755294382572174,
      "learning_rate": 9.993002688846913e-05,
      "loss": 0.0313,
      "step": 660
    },
    {
      "epoch": 6.261682242990654,
      "grad_norm": 0.540371298789978,
      "learning_rate": 9.992100929274846e-05,
      "loss": 0.0239,
      "step": 670
    },
    {
      "epoch": 6.355140186915888,
      "grad_norm": 0.37286096811294556,
      "learning_rate": 9.991144576886823e-05,
      "loss": 0.0375,
      "step": 680
    },
    {
      "epoch": 6.4485981308411215,
      "grad_norm": 0.6131537556648254,
      "learning_rate": 9.990133642141359e-05,
      "loss": 0.0268,
      "step": 690
    },
    {
      "epoch": 6.542056074766355,
      "grad_norm": 0.3708513379096985,
      "learning_rate": 9.989068136093873e-05,
      "loss": 0.0285,
      "step": 700
    },
    {
      "epoch": 6.635514018691588,
      "grad_norm": 0.4660991430282593,
      "learning_rate": 9.987948070396571e-05,
      "loss": 0.0321,
      "step": 710
    },
    {
      "epoch": 6.728971962616822,
      "grad_norm": 0.41945624351501465,
      "learning_rate": 9.986773457298311e-05,
      "loss": 0.0269,
      "step": 720
    },
    {
      "epoch": 6.822429906542056,
      "grad_norm": 0.4975600242614746,
      "learning_rate": 9.985544309644475e-05,
      "loss": 0.0357,
      "step": 730
    },
    {
      "epoch": 6.91588785046729,
      "grad_norm": 0.6201094388961792,
      "learning_rate": 9.984260640876821e-05,
      "loss": 0.0313,
      "step": 740
    },
    {
      "epoch": 7.009345794392523,
      "grad_norm": 0.4306051433086395,
      "learning_rate": 9.98292246503335e-05,
      "loss": 0.0294,
      "step": 750
    },
    {
      "epoch": 7.102803738317757,
      "grad_norm": 0.4569713771343231,
      "learning_rate": 9.981529796748134e-05,
      "loss": 0.029,
      "step": 760
    },
    {
      "epoch": 7.196261682242991,
      "grad_norm": 0.6840571165084839,
      "learning_rate": 9.980082651251175e-05,
      "loss": 0.0289,
      "step": 770
    },
    {
      "epoch": 7.289719626168225,
      "grad_norm": 0.36494606733322144,
      "learning_rate": 9.97858104436822e-05,
      "loss": 0.0314,
      "step": 780
    },
    {
      "epoch": 7.383177570093458,
      "grad_norm": 0.7286158204078674,
      "learning_rate": 9.977024992520602e-05,
      "loss": 0.0248,
      "step": 790
    },
    {
      "epoch": 7.4766355140186915,
      "grad_norm": 0.6263937950134277,
      "learning_rate": 9.975414512725057e-05,
      "loss": 0.0304,
      "step": 800
    },
    {
      "epoch": 7.570093457943925,
      "grad_norm": 0.4872860312461853,
      "learning_rate": 9.973749622593534e-05,
      "loss": 0.0246,
      "step": 810
    },
    {
      "epoch": 7.663551401869158,
      "grad_norm": 0.5235103368759155,
      "learning_rate": 9.972030340333001e-05,
      "loss": 0.0404,
      "step": 820
    },
    {
      "epoch": 7.757009345794392,
      "grad_norm": 0.5448396801948547,
      "learning_rate": 9.970256684745258e-05,
      "loss": 0.0302,
      "step": 830
    },
    {
      "epoch": 7.850467289719626,
      "grad_norm": 0.40461036562919617,
      "learning_rate": 9.968428675226714e-05,
      "loss": 0.0226,
      "step": 840
    },
    {
      "epoch": 7.94392523364486,
      "grad_norm": 0.44990962743759155,
      "learning_rate": 9.966546331768191e-05,
      "loss": 0.0244,
      "step": 850
    },
    {
      "epoch": 8.037383177570094,
      "grad_norm": 0.3754257261753082,
      "learning_rate": 9.964609674954696e-05,
      "loss": 0.0304,
      "step": 860
    },
    {
      "epoch": 8.130841121495328,
      "grad_norm": 0.6377671957015991,
      "learning_rate": 9.962618725965196e-05,
      "loss": 0.0255,
      "step": 870
    },
    {
      "epoch": 8.22429906542056,
      "grad_norm": 0.509066641330719,
      "learning_rate": 9.96057350657239e-05,
      "loss": 0.0286,
      "step": 880
    },
    {
      "epoch": 8.317757009345794,
      "grad_norm": 0.39900103211402893,
      "learning_rate": 9.95847403914247e-05,
      "loss": 0.0258,
      "step": 890
    },
    {
      "epoch": 8.411214953271028,
      "grad_norm": 0.4878309965133667,
      "learning_rate": 9.956320346634876e-05,
      "loss": 0.0276,
      "step": 900
    },
    {
      "epoch": 8.504672897196262,
      "grad_norm": 0.4488677680492401,
      "learning_rate": 9.954112452602045e-05,
      "loss": 0.0286,
      "step": 910
    },
    {
      "epoch": 8.598130841121495,
      "grad_norm": 0.6243172883987427,
      "learning_rate": 9.95185038118915e-05,
      "loss": 0.0318,
      "step": 920
    },
    {
      "epoch": 8.69158878504673,
      "grad_norm": 0.34763044118881226,
      "learning_rate": 9.949534157133844e-05,
      "loss": 0.0403,
      "step": 930
    },
    {
      "epoch": 8.785046728971963,
      "grad_norm": 0.45288386940956116,
      "learning_rate": 9.94716380576598e-05,
      "loss": 0.0217,
      "step": 940
    },
    {
      "epoch": 8.878504672897197,
      "grad_norm": 0.479641854763031,
      "learning_rate": 9.944739353007344e-05,
      "loss": 0.0241,
      "step": 950
    },
    {
      "epoch": 8.97196261682243,
      "grad_norm": 0.7133591175079346,
      "learning_rate": 9.942260825371358e-05,
      "loss": 0.0316,
      "step": 960
    },
    {
      "epoch": 9.065420560747663,
      "grad_norm": 0.47209638357162476,
      "learning_rate": 9.939728249962807e-05,
      "loss": 0.0217,
      "step": 970
    },
    {
      "epoch": 9.158878504672897,
      "grad_norm": 0.4069143235683441,
      "learning_rate": 9.937141654477528e-05,
      "loss": 0.0243,
      "step": 980
    },
    {
      "epoch": 9.25233644859813,
      "grad_norm": 0.45809483528137207,
      "learning_rate": 9.934501067202117e-05,
      "loss": 0.0231,
      "step": 990
    },
    {
      "epoch": 9.345794392523365,
      "grad_norm": 0.3990670144557953,
      "learning_rate": 9.931806517013612e-05,
      "loss": 0.0245,
      "step": 1000
    },
    {
      "epoch": 9.439252336448599,
      "grad_norm": 0.35518234968185425,
      "learning_rate": 9.929058033379181e-05,
      "loss": 0.0338,
      "step": 1010
    },
    {
      "epoch": 9.532710280373832,
      "grad_norm": 0.3511808514595032,
      "learning_rate": 9.926255646355804e-05,
      "loss": 0.0219,
      "step": 1020
    },
    {
      "epoch": 9.626168224299064,
      "grad_norm": 0.36739039421081543,
      "learning_rate": 9.923399386589933e-05,
      "loss": 0.0244,
      "step": 1030
    },
    {
      "epoch": 9.719626168224298,
      "grad_norm": 0.3416367471218109,
      "learning_rate": 9.92048928531717e-05,
      "loss": 0.0208,
      "step": 1040
    },
    {
      "epoch": 9.813084112149532,
      "grad_norm": 0.3843317925930023,
      "learning_rate": 9.917525374361912e-05,
      "loss": 0.0222,
      "step": 1050
    },
    {
      "epoch": 9.906542056074766,
      "grad_norm": 0.3594072163105011,
      "learning_rate": 9.914507686137019e-05,
      "loss": 0.0292,
      "step": 1060
    },
    {
      "epoch": 10.0,
      "grad_norm": 0.29179903864860535,
      "learning_rate": 9.911436253643445e-05,
      "loss": 0.0183,
      "step": 1070
    },
    {
      "epoch": 10.093457943925234,
      "grad_norm": 0.24897705018520355,
      "learning_rate": 9.90831111046988e-05,
      "loss": 0.0236,
      "step": 1080
    },
    {
      "epoch": 10.186915887850468,
      "grad_norm": 0.357282817363739,
      "learning_rate": 9.905132290792394e-05,
      "loss": 0.0224,
      "step": 1090
    },
    {
      "epoch": 10.280373831775702,
      "grad_norm": 0.14735649526119232,
      "learning_rate": 9.901899829374047e-05,
      "loss": 0.0181,
      "step": 1100
    },
    {
      "epoch": 10.373831775700934,
      "grad_norm": 0.30263710021972656,
      "learning_rate": 9.89861376156452e-05,
      "loss": 0.0195,
      "step": 1110
    },
    {
      "epoch": 10.467289719626168,
      "grad_norm": 0.13147243857383728,
      "learning_rate": 9.895274123299723e-05,
      "loss": 0.0185,
      "step": 1120
    },
    {
      "epoch": 10.560747663551401,
      "grad_norm": 0.20842011272907257,
      "learning_rate": 9.891880951101407e-05,
      "loss": 0.0194,
      "step": 1130
    },
    {
      "epoch": 10.654205607476635,
      "grad_norm": 0.28125032782554626,
      "learning_rate": 9.888434282076758e-05,
      "loss": 0.0167,
      "step": 1140
    },
    {
      "epoch": 10.74766355140187,
      "grad_norm": 0.25410014390945435,
      "learning_rate": 9.884934153917997e-05,
      "loss": 0.0221,
      "step": 1150
    },
    {
      "epoch": 10.841121495327103,
      "grad_norm": 0.31990426778793335,
      "learning_rate": 9.881380604901964e-05,
      "loss": 0.0168,
      "step": 1160
    },
    {
      "epoch": 10.934579439252337,
      "grad_norm": 0.1671873927116394,
      "learning_rate": 9.877773673889701e-05,
      "loss": 0.0213,
      "step": 1170
    },
    {
      "epoch": 11.02803738317757,
      "grad_norm": 0.38101059198379517,
      "learning_rate": 9.87411340032603e-05,
      "loss": 0.0209,
      "step": 1180
    },
    {
      "epoch": 11.121495327102803,
      "grad_norm": 0.24433757364749908,
      "learning_rate": 9.870399824239117e-05,
      "loss": 0.0217,
      "step": 1190
    },
    {
      "epoch": 11.214953271028037,
      "grad_norm": 0.35972458124160767,
      "learning_rate": 9.86663298624003e-05,
      "loss": 0.0277,
      "step": 1200
    },
    {
      "epoch": 11.30841121495327,
      "grad_norm": 0.19008149206638336,
      "learning_rate": 9.862812927522309e-05,
      "loss": 0.0279,
      "step": 1210
    },
    {
      "epoch": 11.401869158878505,
      "grad_norm": 0.5246997475624084,
      "learning_rate": 9.858939689861506e-05,
      "loss": 0.0246,
      "step": 1220
    },
    {
      "epoch": 11.495327102803738,
      "grad_norm": 0.31999820470809937,
      "learning_rate": 9.855013315614725e-05,
      "loss": 0.0194,
      "step": 1230
    },
    {
      "epoch": 11.588785046728972,
      "grad_norm": 0.26507657766342163,
      "learning_rate": 9.851033847720166e-05,
      "loss": 0.0222,
      "step": 1240
    },
    {
      "epoch": 11.682242990654206,
      "grad_norm": 0.2151166796684265,
      "learning_rate": 9.847001329696653e-05,
      "loss": 0.0169,
      "step": 1250
    },
    {
      "epoch": 11.77570093457944,
      "grad_norm": 0.22967422008514404,
      "learning_rate": 9.842915805643155e-05,
      "loss": 0.0171,
      "step": 1260
    },
    {
      "epoch": 11.869158878504672,
      "grad_norm": 0.32531777024269104,
      "learning_rate": 9.838777320238312e-05,
      "loss": 0.0173,
      "step": 1270
    },
    {
      "epoch": 11.962616822429906,
      "grad_norm": 0.3821974992752075,
      "learning_rate": 9.834585918739936e-05,
      "loss": 0.0209,
      "step": 1280
    },
    {
      "epoch": 12.05607476635514,
      "grad_norm": 0.295969158411026,
      "learning_rate": 9.830341646984521e-05,
      "loss": 0.0196,
      "step": 1290
    },
    {
      "epoch": 12.149532710280374,
      "grad_norm": 0.43346959352493286,
      "learning_rate": 9.826044551386744e-05,
      "loss": 0.0192,
      "step": 1300
    },
    {
      "epoch": 12.242990654205608,
      "grad_norm": 0.2353905439376831,
      "learning_rate": 9.821694678938953e-05,
      "loss": 0.0187,
      "step": 1310
    },
    {
      "epoch": 12.336448598130842,
      "grad_norm": 0.2748644948005676,
      "learning_rate": 9.817292077210659e-05,
      "loss": 0.0199,
      "step": 1320
    },
    {
      "epoch": 12.429906542056075,
      "grad_norm": 0.32517120242118835,
      "learning_rate": 9.812836794348004e-05,
      "loss": 0.0242,
      "step": 1330
    },
    {
      "epoch": 12.523364485981308,
      "grad_norm": 0.4817813038825989,
      "learning_rate": 9.808328879073251e-05,
      "loss": 0.0268,
      "step": 1340
    },
    {
      "epoch": 12.616822429906541,
      "grad_norm": 0.28131112456321716,
      "learning_rate": 9.803768380684242e-05,
      "loss": 0.0177,
      "step": 1350
    },
    {
      "epoch": 12.710280373831775,
      "grad_norm": 0.23528891801834106,
      "learning_rate": 9.799155349053851e-05,
      "loss": 0.0266,
      "step": 1360
    },
    {
      "epoch": 12.80373831775701,
      "grad_norm": 0.33092162013053894,
      "learning_rate": 9.794489834629455e-05,
      "loss": 0.0194,
      "step": 1370
    },
    {
      "epoch": 12.897196261682243,
      "grad_norm": 0.5667449235916138,
      "learning_rate": 9.789771888432375e-05,
      "loss": 0.0202,
      "step": 1380
    },
    {
      "epoch": 12.990654205607477,
      "grad_norm": 0.5390638113021851,
      "learning_rate": 9.785001562057309e-05,
      "loss": 0.0231,
      "step": 1390
    },
    {
      "epoch": 13.08411214953271,
      "grad_norm": 0.28344106674194336,
      "learning_rate": 9.780178907671789e-05,
      "loss": 0.0176,
      "step": 1400
    },
    {
      "epoch": 13.177570093457945,
      "grad_norm": 0.3115670084953308,
      "learning_rate": 9.775303978015585e-05,
      "loss": 0.0285,
      "step": 1410
    },
    {
      "epoch": 13.271028037383177,
      "grad_norm": 0.2856522798538208,
      "learning_rate": 9.77037682640015e-05,
      "loss": 0.0219,
      "step": 1420
    },
    {
      "epoch": 13.36448598130841,
      "grad_norm": 0.41443973779678345,
      "learning_rate": 9.765397506708023e-05,
      "loss": 0.0235,
      "step": 1430
    },
    {
      "epoch": 13.457943925233645,
      "grad_norm": 0.2849663496017456,
      "learning_rate": 9.760366073392246e-05,
      "loss": 0.0167,
      "step": 1440
    },
    {
      "epoch": 13.551401869158878,
      "grad_norm": 0.28459078073501587,
      "learning_rate": 9.755282581475769e-05,
      "loss": 0.0253,
      "step": 1450
    },
    {
      "epoch": 13.644859813084112,
      "grad_norm": 0.25047361850738525,
      "learning_rate": 9.750147086550844e-05,
      "loss": 0.0186,
      "step": 1460
    },
    {
      "epoch": 13.738317757009346,
      "grad_norm": 0.2597232162952423,
      "learning_rate": 9.744959644778422e-05,
      "loss": 0.0137,
      "step": 1470
    },
    {
      "epoch": 13.83177570093458,
      "grad_norm": 0.14577718079090118,
      "learning_rate": 9.739720312887535e-05,
      "loss": 0.0194,
      "step": 1480
    },
    {
      "epoch": 13.925233644859812,
      "grad_norm": 0.2940736413002014,
      "learning_rate": 9.734429148174675e-05,
      "loss": 0.0165,
      "step": 1490
    },
    {
      "epoch": 14.018691588785046,
      "grad_norm": 0.33982032537460327,
      "learning_rate": 9.729086208503174e-05,
      "loss": 0.0229,
      "step": 1500
    },
    {
      "epoch": 14.11214953271028,
      "grad_norm": 0.2669644355773926,
      "learning_rate": 9.723691552302562e-05,
      "loss": 0.0151,
      "step": 1510
    },
    {
      "epoch": 14.205607476635514,
      "grad_norm": 0.20052188634872437,
      "learning_rate": 9.718245238567939e-05,
      "loss": 0.0169,
      "step": 1520
    },
    {
      "epoch": 14.299065420560748,
      "grad_norm": 0.27926936745643616,
      "learning_rate": 9.712747326859315e-05,
      "loss": 0.0184,
      "step": 1530
    },
    {
      "epoch": 14.392523364485982,
      "grad_norm": 0.38229790329933167,
      "learning_rate": 9.707197877300974e-05,
      "loss": 0.0142,
      "step": 1540
    },
    {
      "epoch": 14.485981308411215,
      "grad_norm": 0.2728178799152374,
      "learning_rate": 9.701596950580806e-05,
      "loss": 0.0231,
      "step": 1550
    },
    {
      "epoch": 14.57943925233645,
      "grad_norm": 0.17084470391273499,
      "learning_rate": 9.695944607949649e-05,
      "loss": 0.0124,
      "step": 1560
    },
    {
      "epoch": 14.672897196261681,
      "grad_norm": 0.24558572471141815,
      "learning_rate": 9.690240911220618e-05,
      "loss": 0.0153,
      "step": 1570
    },
    {
      "epoch": 14.766355140186915,
      "grad_norm": 0.1569851040840149,
      "learning_rate": 9.684485922768422e-05,
      "loss": 0.0179,
      "step": 1580
    },
    {
      "epoch": 14.85981308411215,
      "grad_norm": 0.3535224199295044,
      "learning_rate": 9.6786797055287e-05,
      "loss": 0.02,
      "step": 1590
    },
    {
      "epoch": 14.953271028037383,
      "grad_norm": 0.4264807105064392,
      "learning_rate": 9.672822322997305e-05,
      "loss": 0.0207,
      "step": 1600
    },
    {
      "epoch": 15.046728971962617,
      "grad_norm": 0.3585994839668274,
      "learning_rate": 9.66691383922964e-05,
      "loss": 0.0196,
      "step": 1610
    },
    {
      "epoch": 15.14018691588785,
      "grad_norm": 0.27081626653671265,
      "learning_rate": 9.660954318839933e-05,
      "loss": 0.018,
      "step": 1620
    },
    {
      "epoch": 15.233644859813085,
      "grad_norm": 0.31903624534606934,
      "learning_rate": 9.654943827000548e-05,
      "loss": 0.0221,
      "step": 1630
    },
    {
      "epoch": 15.327102803738319,
      "grad_norm": 0.18142905831336975,
      "learning_rate": 9.648882429441257e-05,
      "loss": 0.0169,
      "step": 1640
    },
    {
      "epoch": 15.42056074766355,
      "grad_norm": 0.23492714762687683,
      "learning_rate": 9.642770192448536e-05,
      "loss": 0.0157,
      "step": 1650
    },
    {
      "epoch": 15.514018691588785,
      "grad_norm": 0.33975672721862793,
      "learning_rate": 9.636607182864827e-05,
      "loss": 0.0249,
      "step": 1660
    },
    {
      "epoch": 15.607476635514018,
      "grad_norm": 0.413004070520401,
      "learning_rate": 9.630393468087818e-05,
      "loss": 0.0156,
      "step": 1670
    },
    {
      "epoch": 15.700934579439252,
      "grad_norm": 0.2392245978116989,
      "learning_rate": 9.624129116069694e-05,
      "loss": 0.0178,
      "step": 1680
    },
    {
      "epoch": 15.794392523364486,
      "grad_norm": 0.2815963327884674,
      "learning_rate": 9.617814195316411e-05,
      "loss": 0.0282,
      "step": 1690
    },
    {
      "epoch": 15.88785046728972,
      "grad_norm": 0.23372851312160492,
      "learning_rate": 9.611448774886924e-05,
      "loss": 0.0164,
      "step": 1700
    },
    {
      "epoch": 15.981308411214954,
      "grad_norm": 0.42078474164009094,
      "learning_rate": 9.605032924392457e-05,
      "loss": 0.0155,
      "step": 1710
    },
    {
      "epoch": 16.074766355140188,
      "grad_norm": 0.35717156529426575,
      "learning_rate": 9.598566713995718e-05,
      "loss": 0.0184,
      "step": 1720
    },
    {
      "epoch": 16.16822429906542,
      "grad_norm": 0.2764424979686737,
      "learning_rate": 9.59205021441015e-05,
      "loss": 0.0147,
      "step": 1730
    },
    {
      "epoch": 16.261682242990656,
      "grad_norm": 0.26016825437545776,
      "learning_rate": 9.58548349689915e-05,
      "loss": 0.0224,
      "step": 1740
    },
    {
      "epoch": 16.35514018691589,
      "grad_norm": 0.27049052715301514,
      "learning_rate": 9.578866633275288e-05,
      "loss": 0.0146,
      "step": 1750
    },
    {
      "epoch": 16.44859813084112,
      "grad_norm": 0.338381826877594,
      "learning_rate": 9.572199695899522e-05,
      "loss": 0.0165,
      "step": 1760
    },
    {
      "epoch": 16.542056074766354,
      "grad_norm": 0.26160427927970886,
      "learning_rate": 9.565482757680415e-05,
      "loss": 0.0157,
      "step": 1770
    },
    {
      "epoch": 16.635514018691588,
      "grad_norm": 0.2207760363817215,
      "learning_rate": 9.558715892073323e-05,
      "loss": 0.013,
      "step": 1780
    },
    {
      "epoch": 16.72897196261682,
      "grad_norm": 0.37250402569770813,
      "learning_rate": 9.551899173079607e-05,
      "loss": 0.0184,
      "step": 1790
    },
    {
      "epoch": 16.822429906542055,
      "grad_norm": 0.28439027070999146,
      "learning_rate": 9.545032675245813e-05,
      "loss": 0.021,
      "step": 1800
    },
    {
      "epoch": 16.91588785046729,
      "grad_norm": 0.3891927897930145,
      "learning_rate": 9.538116473662861e-05,
      "loss": 0.0141,
      "step": 1810
    },
    {
      "epoch": 17.009345794392523,
      "grad_norm": 0.2712792754173279,
      "learning_rate": 9.531150643965223e-05,
      "loss": 0.0108,
      "step": 1820
    },
    {
      "epoch": 17.102803738317757,
      "grad_norm": 0.4965173006057739,
      "learning_rate": 9.524135262330098e-05,
      "loss": 0.0162,
      "step": 1830
    },
    {
      "epoch": 17.19626168224299,
      "grad_norm": 0.360164999961853,
      "learning_rate": 9.517070405476575e-05,
      "loss": 0.0175,
      "step": 1840
    },
    {
      "epoch": 17.289719626168225,
      "grad_norm": 0.3308715522289276,
      "learning_rate": 9.509956150664796e-05,
      "loss": 0.0144,
      "step": 1850
    },
    {
      "epoch": 17.38317757009346,
      "grad_norm": 0.24046167731285095,
      "learning_rate": 9.502792575695112e-05,
      "loss": 0.0142,
      "step": 1860
    },
    {
      "epoch": 17.476635514018692,
      "grad_norm": 0.32182639837265015,
      "learning_rate": 9.49557975890723e-05,
      "loss": 0.0124,
      "step": 1870
    },
    {
      "epoch": 17.570093457943926,
      "grad_norm": 0.19691896438598633,
      "learning_rate": 9.488317779179361e-05,
      "loss": 0.0152,
      "step": 1880
    },
    {
      "epoch": 17.66355140186916,
      "grad_norm": 0.248452827334404,
      "learning_rate": 9.481006715927351e-05,
      "loss": 0.0195,
      "step": 1890
    },
    {
      "epoch": 17.757009345794394,
      "grad_norm": 0.37300196290016174,
      "learning_rate": 9.473646649103818e-05,
      "loss": 0.022,
      "step": 1900
    },
    {
      "epoch": 17.850467289719624,
      "grad_norm": 0.3653255105018616,
      "learning_rate": 9.46623765919727e-05,
      "loss": 0.0155,
      "step": 1910
    },
    {
      "epoch": 17.94392523364486,
      "grad_norm": 0.1721230000257492,
      "learning_rate": 9.458779827231237e-05,
      "loss": 0.0113,
      "step": 1920
    },
    {
      "epoch": 18.037383177570092,
      "grad_norm": 0.3271198868751526,
      "learning_rate": 9.451273234763371e-05,
      "loss": 0.0155,
      "step": 1930
    },
    {
      "epoch": 18.130841121495326,
      "grad_norm": 0.3506911098957062,
      "learning_rate": 9.443717963884569e-05,
      "loss": 0.0113,
      "step": 1940
    },
    {
      "epoch": 18.22429906542056,
      "grad_norm": 0.1824873387813568,
      "learning_rate": 9.43611409721806e-05,
      "loss": 0.0168,
      "step": 1950
    },
    {
      "epoch": 18.317757009345794,
      "grad_norm": 0.3345133066177368,
      "learning_rate": 9.428461717918511e-05,
      "loss": 0.0166,
      "step": 1960
    },
    {
      "epoch": 18.411214953271028,
      "grad_norm": 0.3359324634075165,
      "learning_rate": 9.420760909671118e-05,
      "loss": 0.0165,
      "step": 1970
    },
    {
      "epoch": 18.50467289719626,
      "grad_norm": 0.24985487759113312,
      "learning_rate": 9.413011756690685e-05,
      "loss": 0.0211,
      "step": 1980
    },
    {
      "epoch": 18.598130841121495,
      "grad_norm": 0.2868458926677704,
      "learning_rate": 9.405214343720707e-05,
      "loss": 0.0142,
      "step": 1990
    },
    {
      "epoch": 18.69158878504673,
      "grad_norm": 0.3124500513076782,
      "learning_rate": 9.397368756032445e-05,
      "loss": 0.018,
      "step": 2000
    },
    {
      "epoch": 18.785046728971963,
      "grad_norm": 0.328535795211792,
      "learning_rate": 9.389475079423988e-05,
      "loss": 0.0155,
      "step": 2010
    },
    {
      "epoch": 18.878504672897197,
      "grad_norm": 0.2067783921957016,
      "learning_rate": 9.381533400219318e-05,
      "loss": 0.0125,
      "step": 2020
    },
    {
      "epoch": 18.97196261682243,
      "grad_norm": 0.27102118730545044,
      "learning_rate": 9.373543805267368e-05,
      "loss": 0.0148,
      "step": 2030
    },
    {
      "epoch": 19.065420560747665,
      "grad_norm": 0.28738537430763245,
      "learning_rate": 9.365506381941066e-05,
      "loss": 0.0145,
      "step": 2040
    },
    {
      "epoch": 19.1588785046729,
      "grad_norm": 0.2228069007396698,
      "learning_rate": 9.357421218136386e-05,
      "loss": 0.013,
      "step": 2050
    },
    {
      "epoch": 19.252336448598133,
      "grad_norm": 0.26949775218963623,
      "learning_rate": 9.349288402271388e-05,
      "loss": 0.018,
      "step": 2060
    },
    {
      "epoch": 19.345794392523363,
      "grad_norm": 0.1682933121919632,
      "learning_rate": 9.341108023285238e-05,
      "loss": 0.015,
      "step": 2070
    },
    {
      "epoch": 19.439252336448597,
      "grad_norm": 0.36588406562805176,
      "learning_rate": 9.332880170637252e-05,
      "loss": 0.0167,
      "step": 2080
    },
    {
      "epoch": 19.53271028037383,
      "grad_norm": 0.1443902850151062,
      "learning_rate": 9.32460493430591e-05,
      "loss": 0.0129,
      "step": 2090
    },
    {
      "epoch": 19.626168224299064,
      "grad_norm": 0.1862812638282776,
      "learning_rate": 9.316282404787871e-05,
      "loss": 0.0133,
      "step": 2100
    },
    {
      "epoch": 19.7196261682243,
      "grad_norm": 0.26972711086273193,
      "learning_rate": 9.30791267309698e-05,
      "loss": 0.0183,
      "step": 2110
    },
    {
      "epoch": 19.813084112149532,
      "grad_norm": 0.4285616874694824,
      "learning_rate": 9.299495830763286e-05,
      "loss": 0.0222,
      "step": 2120
    },
    {
      "epoch": 19.906542056074766,
      "grad_norm": 0.38283267617225647,
      "learning_rate": 9.291031969832026e-05,
      "loss": 0.0195,
      "step": 2130
    },
    {
      "epoch": 20.0,
      "grad_norm": 0.29968154430389404,
      "learning_rate": 9.282521182862629e-05,
      "loss": 0.0134,
      "step": 2140
    },
    {
      "epoch": 20.093457943925234,
      "grad_norm": 0.20329944789409637,
      "learning_rate": 9.273963562927695e-05,
      "loss": 0.0123,
      "step": 2150
    },
    {
      "epoch": 20.186915887850468,
      "grad_norm": 0.26469314098358154,
      "learning_rate": 9.265359203611987e-05,
      "loss": 0.0223,
      "step": 2160
    },
    {
      "epoch": 20.2803738317757,
      "grad_norm": 0.19388605654239655,
      "learning_rate": 9.256708199011401e-05,
      "loss": 0.0106,
      "step": 2170
    },
    {
      "epoch": 20.373831775700936,
      "grad_norm": 0.2838001549243927,
      "learning_rate": 9.248010643731935e-05,
      "loss": 0.0093,
      "step": 2180
    },
    {
      "epoch": 20.46728971962617,
      "grad_norm": 0.27465811371803284,
      "learning_rate": 9.239266632888659e-05,
      "loss": 0.0124,
      "step": 2190
    },
    {
      "epoch": 20.560747663551403,
      "grad_norm": 0.17780600488185883,
      "learning_rate": 9.230476262104677e-05,
      "loss": 0.0141,
      "step": 2200
    },
    {
      "epoch": 20.654205607476637,
      "grad_norm": 0.3275097608566284,
      "learning_rate": 9.221639627510076e-05,
      "loss": 0.0173,
      "step": 2210
    },
    {
      "epoch": 20.747663551401867,
      "grad_norm": 0.42499256134033203,
      "learning_rate": 9.212756825740873e-05,
      "loss": 0.0154,
      "step": 2220
    },
    {
      "epoch": 20.8411214953271,
      "grad_norm": 0.28516319394111633,
      "learning_rate": 9.20382795393797e-05,
      "loss": 0.0177,
      "step": 2230
    },
    {
      "epoch": 20.934579439252335,
      "grad_norm": 0.2422408163547516,
      "learning_rate": 9.194853109746074e-05,
      "loss": 0.0133,
      "step": 2240
    },
    {
      "epoch": 21.02803738317757,
      "grad_norm": 0.35381415486335754,
      "learning_rate": 9.185832391312644e-05,
      "loss": 0.0142,
      "step": 2250
    },
    {
      "epoch": 21.121495327102803,
      "grad_norm": 0.11969628930091858,
      "learning_rate": 9.176765897286813e-05,
      "loss": 0.0211,
      "step": 2260
    },
    {
      "epoch": 21.214953271028037,
      "grad_norm": 0.35882630944252014,
      "learning_rate": 9.167653726818305e-05,
      "loss": 0.019,
      "step": 2270
    },
    {
      "epoch": 21.30841121495327,
      "grad_norm": 0.3682778477668762,
      "learning_rate": 9.158495979556358e-05,
      "loss": 0.016,
      "step": 2280
    },
    {
      "epoch": 21.401869158878505,
      "grad_norm": 0.3526158630847931,
      "learning_rate": 9.14929275564863e-05,
      "loss": 0.017,
      "step": 2290
    },
    {
      "epoch": 21.49532710280374,
      "grad_norm": 0.32683178782463074,
      "learning_rate": 9.140044155740101e-05,
      "loss": 0.015,
      "step": 2300
    },
    {
      "epoch": 21.588785046728972,
      "grad_norm": 0.3018406629562378,
      "learning_rate": 9.130750280971978e-05,
      "loss": 0.0131,
      "step": 2310
    },
    {
      "epoch": 21.682242990654206,
      "grad_norm": 0.438232958316803,
      "learning_rate": 9.121411232980588e-05,
      "loss": 0.0183,
      "step": 2320
    },
    {
      "epoch": 21.77570093457944,
      "grad_norm": 0.26934388279914856,
      "learning_rate": 9.112027113896262e-05,
      "loss": 0.0135,
      "step": 2330
    },
    {
      "epoch": 21.869158878504674,
      "grad_norm": 0.2123032808303833,
      "learning_rate": 9.102598026342222e-05,
      "loss": 0.013,
      "step": 2340
    },
    {
      "epoch": 21.962616822429908,
      "grad_norm": 0.35099825263023376,
      "learning_rate": 9.093124073433463e-05,
      "loss": 0.0165,
      "step": 2350
    },
    {
      "epoch": 22.05607476635514,
      "grad_norm": 0.3659217357635498,
      "learning_rate": 9.083605358775612e-05,
      "loss": 0.0117,
      "step": 2360
    },
    {
      "epoch": 22.149532710280372,
      "grad_norm": 0.2593810260295868,
      "learning_rate": 9.074041986463808e-05,
      "loss": 0.0156,
      "step": 2370
    },
    {
      "epoch": 22.242990654205606,
      "grad_norm": 0.20466811954975128,
      "learning_rate": 9.064434061081562e-05,
      "loss": 0.0138,
      "step": 2380
    },
    {
      "epoch": 22.33644859813084,
      "grad_norm": 0.2669541537761688,
      "learning_rate": 9.0547816876996e-05,
      "loss": 0.0187,
      "step": 2390
    },
    {
      "epoch": 22.429906542056074,
      "grad_norm": 0.22927866876125336,
      "learning_rate": 9.045084971874738e-05,
      "loss": 0.0125,
      "step": 2400
    },
    {
      "epoch": 22.523364485981308,
      "grad_norm": 0.28933751583099365,
      "learning_rate": 9.035344019648702e-05,
      "loss": 0.0173,
      "step": 2410
    },
    {
      "epoch": 22.61682242990654,
      "grad_norm": 0.2809857130050659,
      "learning_rate": 9.025558937546988e-05,
      "loss": 0.0143,
      "step": 2420
    },
    {
      "epoch": 22.710280373831775,
      "grad_norm": 0.11940523982048035,
      "learning_rate": 9.015729832577681e-05,
      "loss": 0.011,
      "step": 2430
    },
    {
      "epoch": 22.80373831775701,
      "grad_norm": 0.20271120965480804,
      "learning_rate": 9.005856812230304e-05,
      "loss": 0.0127,
      "step": 2440
    },
    {
      "epoch": 22.897196261682243,
      "grad_norm": 0.23386332392692566,
      "learning_rate": 8.995939984474624e-05,
      "loss": 0.0168,
      "step": 2450
    },
    {
      "epoch": 22.990654205607477,
      "grad_norm": 0.13469015061855316,
      "learning_rate": 8.98597945775948e-05,
      "loss": 0.0142,
      "step": 2460
    },
    {
      "epoch": 23.08411214953271,
      "grad_norm": 0.2923325300216675,
      "learning_rate": 8.975975341011596e-05,
      "loss": 0.0128,
      "step": 2470
    },
    {
      "epoch": 23.177570093457945,
      "grad_norm": 0.2187262773513794,
      "learning_rate": 8.965927743634391e-05,
      "loss": 0.0147,
      "step": 2480
    },
    {
      "epoch": 23.27102803738318,
      "grad_norm": 0.2251782864332199,
      "learning_rate": 8.955836775506776e-05,
      "loss": 0.018,
      "step": 2490
    },
    {
      "epoch": 23.364485981308412,
      "grad_norm": 0.2662113904953003,
      "learning_rate": 8.945702546981969e-05,
      "loss": 0.0171,
      "step": 2500
    },
    {
      "epoch": 23.457943925233646,
      "grad_norm": 0.29429367184638977,
      "learning_rate": 8.935525168886262e-05,
      "loss": 0.0137,
      "step": 2510
    },
    {
      "epoch": 23.55140186915888,
      "grad_norm": 0.19048871099948883,
      "learning_rate": 8.92530475251784e-05,
      "loss": 0.0103,
      "step": 2520
    },
    {
      "epoch": 23.64485981308411,
      "grad_norm": 0.20753830671310425,
      "learning_rate": 8.91504140964553e-05,
      "loss": 0.012,
      "step": 2530
    },
    {
      "epoch": 23.738317757009344,
      "grad_norm": 0.3121615946292877,
      "learning_rate": 8.90473525250761e-05,
      "loss": 0.0202,
      "step": 2540
    },
    {
      "epoch": 23.83177570093458,
      "grad_norm": 0.3104843497276306,
      "learning_rate": 8.894386393810563e-05,
      "loss": 0.011,
      "step": 2550
    },
    {
      "epoch": 23.925233644859812,
      "grad_norm": 0.29644063115119934,
      "learning_rate": 8.883994946727849e-05,
      "loss": 0.0192,
      "step": 2560
    },
    {
      "epoch": 24.018691588785046,
      "grad_norm": 0.32935768365859985,
      "learning_rate": 8.873561024898668e-05,
      "loss": 0.0163,
      "step": 2570
    },
    {
      "epoch": 24.11214953271028,
      "grad_norm": 0.19332408905029297,
      "learning_rate": 8.863084742426719e-05,
      "loss": 0.0163,
      "step": 2580
    },
    {
      "epoch": 24.205607476635514,
      "grad_norm": 0.2649053931236267,
      "learning_rate": 8.852566213878947e-05,
      "loss": 0.0127,
      "step": 2590
    },
    {
      "epoch": 24.299065420560748,
      "grad_norm": 0.3077411949634552,
      "learning_rate": 8.842005554284296e-05,
      "loss": 0.0182,
      "step": 2600
    },
    {
      "epoch": 24.39252336448598,
      "grad_norm": 0.16626809537410736,
      "learning_rate": 8.831402879132446e-05,
      "loss": 0.0138,
      "step": 2610
    },
    {
      "epoch": 24.485981308411215,
      "grad_norm": 0.3031405210494995,
      "learning_rate": 8.820758304372557e-05,
      "loss": 0.0139,
      "step": 2620
    },
    {
      "epoch": 24.57943925233645,
      "grad_norm": 0.6083452105522156,
      "learning_rate": 8.810071946411989e-05,
      "loss": 0.0193,
      "step": 2630
    },
    {
      "epoch": 24.672897196261683,
      "grad_norm": 0.08449734002351761,
      "learning_rate": 8.799343922115044e-05,
      "loss": 0.0171,
      "step": 2640
    },
    {
      "epoch": 24.766355140186917,
      "grad_norm": 0.20243260264396667,
      "learning_rate": 8.788574348801675e-05,
      "loss": 0.0155,
      "step": 2650
    },
    {
      "epoch": 24.85981308411215,
      "grad_norm": 0.29259979724884033,
      "learning_rate": 8.77776334424621e-05,
      "loss": 0.0131,
      "step": 2660
    },
    {
      "epoch": 24.953271028037385,
      "grad_norm": 0.2411697506904602,
      "learning_rate": 8.766911026676064e-05,
      "loss": 0.0186,
      "step": 2670
    },
    {
      "epoch": 25.046728971962615,
      "grad_norm": 0.33529865741729736,
      "learning_rate": 8.756017514770443e-05,
      "loss": 0.0117,
      "step": 2680
    },
    {
      "epoch": 25.14018691588785,
      "grad_norm": 0.23808050155639648,
      "learning_rate": 8.745082927659047e-05,
      "loss": 0.011,
      "step": 2690
    },
    {
      "epoch": 25.233644859813083,
      "grad_norm": 0.1847400665283203,
      "learning_rate": 8.73410738492077e-05,
      "loss": 0.0128,
      "step": 2700
    },
    {
      "epoch": 25.327102803738317,
      "grad_norm": 0.13571485877037048,
      "learning_rate": 8.723091006582389e-05,
      "loss": 0.0103,
      "step": 2710
    },
    {
      "epoch": 25.42056074766355,
      "grad_norm": 0.20057013630867004,
      "learning_rate": 8.71203391311725e-05,
      "loss": 0.0144,
      "step": 2720
    },
    {
      "epoch": 25.514018691588785,
      "grad_norm": 0.18877534568309784,
      "learning_rate": 8.700936225443959e-05,
      "loss": 0.0087,
      "step": 2730
    },
    {
      "epoch": 25.60747663551402,
      "grad_norm": 0.34305885434150696,
      "learning_rate": 8.689798064925049e-05,
      "loss": 0.0111,
      "step": 2740
    },
    {
      "epoch": 25.700934579439252,
      "grad_norm": 0.30774959921836853,
      "learning_rate": 8.678619553365659e-05,
      "loss": 0.0118,
      "step": 2750
    },
    {
      "epoch": 25.794392523364486,
      "grad_norm": 0.20990929007530212,
      "learning_rate": 8.6674008130122e-05,
      "loss": 0.0123,
      "step": 2760
    },
    {
      "epoch": 25.88785046728972,
      "grad_norm": 0.14915867149829865,
      "learning_rate": 8.656141966551019e-05,
      "loss": 0.0104,
      "step": 2770
    },
    {
      "epoch": 25.981308411214954,
      "grad_norm": 0.21087510883808136,
      "learning_rate": 8.644843137107059e-05,
      "loss": 0.0103,
      "step": 2780
    },
    {
      "epoch": 26.074766355140188,
      "grad_norm": 0.3087444007396698,
      "learning_rate": 8.633504448242505e-05,
      "loss": 0.0146,
      "step": 2790
    },
    {
      "epoch": 26.16822429906542,
      "grad_norm": 0.15579691529273987,
      "learning_rate": 8.622126023955446e-05,
      "loss": 0.0134,
      "step": 2800
    },
    {
      "epoch": 26.261682242990656,
      "grad_norm": 0.29436925053596497,
      "learning_rate": 8.610707988678503e-05,
      "loss": 0.0124,
      "step": 2810
    },
    {
      "epoch": 26.35514018691589,
      "grad_norm": 0.3273058533668518,
      "learning_rate": 8.599250467277483e-05,
      "loss": 0.0101,
      "step": 2820
    },
    {
      "epoch": 26.44859813084112,
      "grad_norm": 0.28451430797576904,
      "learning_rate": 8.587753585050004e-05,
      "loss": 0.0195,
      "step": 2830
    },
    {
      "epoch": 26.542056074766354,
      "grad_norm": 0.255038857460022,
      "learning_rate": 8.576217467724128e-05,
      "loss": 0.0142,
      "step": 2840
    },
    {
      "epoch": 26.635514018691588,
      "grad_norm": 0.18231236934661865,
      "learning_rate": 8.564642241456986e-05,
      "loss": 0.0135,
      "step": 2850
    },
    {
      "epoch": 26.72897196261682,
      "grad_norm": 0.28548383712768555,
      "learning_rate": 8.553028032833397e-05,
      "loss": 0.0142,
      "step": 2860
    },
    {
      "epoch": 26.822429906542055,
      "grad_norm": 0.21996966004371643,
      "learning_rate": 8.541374968864487e-05,
      "loss": 0.0153,
      "step": 2870
    },
    {
      "epoch": 26.91588785046729,
      "grad_norm": 0.18419910967350006,
      "learning_rate": 8.529683176986295e-05,
      "loss": 0.0114,
      "step": 2880
    },
    {
      "epoch": 27.009345794392523,
      "grad_norm": 0.2264631688594818,
      "learning_rate": 8.517952785058385e-05,
      "loss": 0.0136,
      "step": 2890
    },
    {
      "epoch": 27.102803738317757,
      "grad_norm": 0.28802967071533203,
      "learning_rate": 8.506183921362443e-05,
      "loss": 0.0147,
      "step": 2900
    },
    {
      "epoch": 27.19626168224299,
      "grad_norm": 0.3631379306316376,
      "learning_rate": 8.494376714600878e-05,
      "loss": 0.0144,
      "step": 2910
    },
    {
      "epoch": 27.289719626168225,
      "grad_norm": 0.24260710179805756,
      "learning_rate": 8.482531293895412e-05,
      "loss": 0.0148,
      "step": 2920
    },
    {
      "epoch": 27.38317757009346,
      "grad_norm": 0.1798403412103653,
      "learning_rate": 8.470647788785665e-05,
      "loss": 0.0147,
      "step": 2930
    },
    {
      "epoch": 27.476635514018692,
      "grad_norm": 0.34532681107521057,
      "learning_rate": 8.458726329227747e-05,
      "loss": 0.017,
      "step": 2940
    },
    {
      "epoch": 27.570093457943926,
      "grad_norm": 0.13224120438098907,
      "learning_rate": 8.44676704559283e-05,
      "loss": 0.0231,
      "step": 2950
    },
    {
      "epoch": 27.66355140186916,
      "grad_norm": 0.3429749011993408,
      "learning_rate": 8.434770068665723e-05,
      "loss": 0.0164,
      "step": 2960
    },
    {
      "epoch": 27.757009345794394,
      "grad_norm": 0.24342355132102966,
      "learning_rate": 8.422735529643444e-05,
      "loss": 0.0138,
      "step": 2970
    },
    {
      "epoch": 27.850467289719624,
      "grad_norm": 0.21670503914356232,
      "learning_rate": 8.410663560133784e-05,
      "loss": 0.0119,
      "step": 2980
    },
    {
      "epoch": 27.94392523364486,
      "grad_norm": 0.21148911118507385,
      "learning_rate": 8.398554292153866e-05,
      "loss": 0.0139,
      "step": 2990
    },
    {
      "epoch": 28.037383177570092,
      "grad_norm": 0.24043697118759155,
      "learning_rate": 8.386407858128706e-05,
      "loss": 0.0215,
      "step": 3000
    },
    {
      "epoch": 28.130841121495326,
      "grad_norm": 0.25758662819862366,
      "learning_rate": 8.37422439088976e-05,
      "loss": 0.0136,
      "step": 3010
    },
    {
      "epoch": 28.22429906542056,
      "grad_norm": 0.1965651512145996,
      "learning_rate": 8.362004023673474e-05,
      "loss": 0.0139,
      "step": 3020
    },
    {
      "epoch": 28.317757009345794,
      "grad_norm": 0.1873561143875122,
      "learning_rate": 8.349746890119826e-05,
      "loss": 0.014,
      "step": 3030
    },
    {
      "epoch": 28.411214953271028,
      "grad_norm": 0.3306177258491516,
      "learning_rate": 8.337453124270863e-05,
      "loss": 0.0149,
      "step": 3040
    },
    {
      "epoch": 28.50467289719626,
      "grad_norm": 0.5167064666748047,
      "learning_rate": 8.32512286056924e-05,
      "loss": 0.0183,
      "step": 3050
    },
    {
      "epoch": 28.598130841121495,
      "grad_norm": 0.28625836968421936,
      "learning_rate": 8.31275623385675e-05,
      "loss": 0.0129,
      "step": 3060
    },
    {
      "epoch": 28.69158878504673,
      "grad_norm": 0.17390304803848267,
      "learning_rate": 8.300353379372834e-05,
      "loss": 0.0115,
      "step": 3070
    },
    {
      "epoch": 28.785046728971963,
      "grad_norm": 0.2651899456977844,
      "learning_rate": 8.287914432753123e-05,
      "loss": 0.0245,
      "step": 3080
    },
    {
      "epoch": 28.878504672897197,
      "grad_norm": 0.16162163019180298,
      "learning_rate": 8.275439530027948e-05,
      "loss": 0.0152,
      "step": 3090
    },
    {
      "epoch": 28.97196261682243,
      "grad_norm": 0.10550884157419205,
      "learning_rate": 8.262928807620843e-05,
      "loss": 0.0145,
      "step": 3100
    },
    {
      "epoch": 29.065420560747665,
      "grad_norm": 0.17049351334571838,
      "learning_rate": 8.250382402347065e-05,
      "loss": 0.0118,
      "step": 3110
    },
    {
      "epoch": 29.1588785046729,
      "grad_norm": 0.3678361475467682,
      "learning_rate": 8.237800451412095e-05,
      "loss": 0.0141,
      "step": 3120
    },
    {
      "epoch": 29.252336448598133,
      "grad_norm": 0.3102375268936157,
      "learning_rate": 8.225183092410128e-05,
      "loss": 0.0133,
      "step": 3130
    },
    {
      "epoch": 29.345794392523363,
      "grad_norm": 0.18469548225402832,
      "learning_rate": 8.212530463322583e-05,
      "loss": 0.0114,
      "step": 3140
    },
    {
      "epoch": 29.439252336448597,
      "grad_norm": 0.17053820192813873,
      "learning_rate": 8.199842702516583e-05,
      "loss": 0.0157,
      "step": 3150
    },
    {
      "epoch": 29.53271028037383,
      "grad_norm": 0.22760725021362305,
      "learning_rate": 8.18711994874345e-05,
      "loss": 0.0103,
      "step": 3160
    },
    {
      "epoch": 29.626168224299064,
      "grad_norm": 0.212839275598526,
      "learning_rate": 8.174362341137177e-05,
      "loss": 0.014,
      "step": 3170
    },
    {
      "epoch": 29.7196261682243,
      "grad_norm": 0.16992239654064178,
      "learning_rate": 8.161570019212921e-05,
      "loss": 0.0113,
      "step": 3180
    },
    {
      "epoch": 29.813084112149532,
      "grad_norm": 0.2040039300918579,
      "learning_rate": 8.148743122865463e-05,
      "loss": 0.0097,
      "step": 3190
    },
    {
      "epoch": 29.906542056074766,
      "grad_norm": 0.23768551647663116,
      "learning_rate": 8.135881792367686e-05,
      "loss": 0.0135,
      "step": 3200
    },
    {
      "epoch": 30.0,
      "grad_norm": 0.27631258964538574,
      "learning_rate": 8.12298616836904e-05,
      "loss": 0.0111,
      "step": 3210
    },
    {
      "epoch": 30.093457943925234,
      "grad_norm": 0.13083235919475555,
      "learning_rate": 8.110056391894005e-05,
      "loss": 0.0115,
      "step": 3220
    },
    {
      "epoch": 30.186915887850468,
      "grad_norm": 0.21918922662734985,
      "learning_rate": 8.097092604340542e-05,
      "loss": 0.0112,
      "step": 3230
    },
    {
      "epoch": 30.2803738317757,
      "grad_norm": 0.20092561841011047,
      "learning_rate": 8.084094947478556e-05,
      "loss": 0.0109,
      "step": 3240
    },
    {
      "epoch": 30.373831775700936,
      "grad_norm": 0.09447859972715378,
      "learning_rate": 8.07106356344834e-05,
      "loss": 0.0127,
      "step": 3250
    },
    {
      "epoch": 30.46728971962617,
      "grad_norm": 0.29426026344299316,
      "learning_rate": 8.057998594759022e-05,
      "loss": 0.0104,
      "step": 3260
    },
    {
      "epoch": 30.560747663551403,
      "grad_norm": 0.2886331379413605,
      "learning_rate": 8.044900184287007e-05,
      "loss": 0.0133,
      "step": 3270
    },
    {
      "epoch": 30.654205607476637,
      "grad_norm": 0.23450273275375366,
      "learning_rate": 8.031768475274413e-05,
      "loss": 0.0113,
      "step": 3280
    },
    {
      "epoch": 30.747663551401867,
      "grad_norm": 0.2236676663160324,
      "learning_rate": 8.018603611327504e-05,
      "loss": 0.0144,
      "step": 3290
    },
    {
      "epoch": 30.8411214953271,
      "grad_norm": 0.15358710289001465,
      "learning_rate": 8.005405736415126e-05,
      "loss": 0.0169,
      "step": 3300
    },
    {
      "epoch": 30.934579439252335,
      "grad_norm": 0.20338520407676697,
      "learning_rate": 7.992174994867123e-05,
      "loss": 0.0095,
      "step": 3310
    },
    {
      "epoch": 31.02803738317757,
      "grad_norm": 0.2024795562028885,
      "learning_rate": 7.978911531372765e-05,
      "loss": 0.0086,
      "step": 3320
    },
    {
      "epoch": 31.121495327102803,
      "grad_norm": 0.22821472585201263,
      "learning_rate": 7.965615490979163e-05,
      "loss": 0.0132,
      "step": 3330
    },
    {
      "epoch": 31.214953271028037,
      "grad_norm": 0.1402302086353302,
      "learning_rate": 7.952287019089685e-05,
      "loss": 0.0119,
      "step": 3340
    },
    {
      "epoch": 31.30841121495327,
      "grad_norm": 0.10818405449390411,
      "learning_rate": 7.938926261462366e-05,
      "loss": 0.0115,
      "step": 3350
    },
    {
      "epoch": 31.401869158878505,
      "grad_norm": 0.11518066376447678,
      "learning_rate": 7.925533364208309e-05,
      "loss": 0.013,
      "step": 3360
    },
    {
      "epoch": 31.49532710280374,
      "grad_norm": 0.2154664248228073,
      "learning_rate": 7.912108473790092e-05,
      "loss": 0.0176,
      "step": 3370
    },
    {
      "epoch": 31.588785046728972,
      "grad_norm": 0.1289885938167572,
      "learning_rate": 7.898651737020166e-05,
      "loss": 0.0099,
      "step": 3380
    },
    {
      "epoch": 31.682242990654206,
      "grad_norm": 0.17317914962768555,
      "learning_rate": 7.88516330105925e-05,
      "loss": 0.0113,
      "step": 3390
    },
    {
      "epoch": 31.77570093457944,
      "grad_norm": 0.26972246170043945,
      "learning_rate": 7.871643313414718e-05,
      "loss": 0.0115,
      "step": 3400
    },
    {
      "epoch": 31.869158878504674,
      "grad_norm": 0.21863438189029694,
      "learning_rate": 7.858091921938988e-05,
      "loss": 0.0107,
      "step": 3410
    },
    {
      "epoch": 31.962616822429908,
      "grad_norm": 0.3031831979751587,
      "learning_rate": 7.844509274827907e-05,
      "loss": 0.0117,
      "step": 3420
| }, | |
| { | |
| "epoch": 32.05607476635514, | |
| "grad_norm": 0.20491284132003784, | |
| "learning_rate": 7.830895520619128e-05, | |
| "loss": 0.0101, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 32.149532710280376, | |
| "grad_norm": 0.27927613258361816, | |
| "learning_rate": 7.817250808190483e-05, | |
| "loss": 0.0176, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 32.242990654205606, | |
| "grad_norm": 0.15980954468250275, | |
| "learning_rate": 7.803575286758364e-05, | |
| "loss": 0.013, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 32.33644859813084, | |
| "grad_norm": 0.23138810694217682, | |
| "learning_rate": 7.789869105876083e-05, | |
| "loss": 0.0128, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 32.429906542056074, | |
| "grad_norm": 0.2822995185852051, | |
| "learning_rate": 7.776132415432234e-05, | |
| "loss": 0.0119, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 32.52336448598131, | |
| "grad_norm": 0.23915642499923706, | |
| "learning_rate": 7.762365365649067e-05, | |
| "loss": 0.0097, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 32.61682242990654, | |
| "grad_norm": 0.2594238817691803, | |
| "learning_rate": 7.748568107080832e-05, | |
| "loss": 0.0154, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 32.71028037383178, | |
| "grad_norm": 0.22654129564762115, | |
| "learning_rate": 7.734740790612136e-05, | |
| "loss": 0.0104, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 32.80373831775701, | |
| "grad_norm": 0.2319777011871338, | |
| "learning_rate": 7.720883567456298e-05, | |
| "loss": 0.0155, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 32.89719626168224, | |
| "grad_norm": 0.1848135143518448, | |
| "learning_rate": 7.70699658915369e-05, | |
| "loss": 0.0151, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 32.99065420560748, | |
| "grad_norm": 0.1986227184534073, | |
| "learning_rate": 7.693080007570084e-05, | |
| "loss": 0.0086, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 33.08411214953271, | |
| "grad_norm": 0.21213096380233765, | |
| "learning_rate": 7.679133974894983e-05, | |
| "loss": 0.0155, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 33.177570093457945, | |
| "grad_norm": 0.33364537358283997, | |
| "learning_rate": 7.66515864363997e-05, | |
| "loss": 0.011, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 33.271028037383175, | |
| "grad_norm": 0.1819266825914383, | |
| "learning_rate": 7.651154166637025e-05, | |
| "loss": 0.0123, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 33.36448598130841, | |
| "grad_norm": 0.2073090821504593, | |
| "learning_rate": 7.637120697036866e-05, | |
| "loss": 0.0183, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 33.45794392523364, | |
| "grad_norm": 0.2921043038368225, | |
| "learning_rate": 7.623058388307269e-05, | |
| "loss": 0.0156, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 33.55140186915888, | |
| "grad_norm": 0.3672184348106384, | |
| "learning_rate": 7.608967394231387e-05, | |
| "loss": 0.0111, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 33.64485981308411, | |
| "grad_norm": 0.3048618733882904, | |
| "learning_rate": 7.594847868906076e-05, | |
| "loss": 0.0126, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 33.73831775700935, | |
| "grad_norm": 0.1969730406999588, | |
| "learning_rate": 7.580699966740201e-05, | |
| "loss": 0.0117, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 33.83177570093458, | |
| "grad_norm": 0.1948702484369278, | |
| "learning_rate": 7.566523842452958e-05, | |
| "loss": 0.0098, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 33.925233644859816, | |
| "grad_norm": 0.1653144657611847, | |
| "learning_rate": 7.552319651072164e-05, | |
| "loss": 0.0094, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 34.018691588785046, | |
| "grad_norm": 0.2479052096605301, | |
| "learning_rate": 7.538087547932585e-05, | |
| "loss": 0.008, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 34.11214953271028, | |
| "grad_norm": 0.07916168868541718, | |
| "learning_rate": 7.52382768867422e-05, | |
| "loss": 0.0113, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 34.205607476635514, | |
| "grad_norm": 0.174769327044487, | |
| "learning_rate": 7.509540229240601e-05, | |
| "loss": 0.0093, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 34.299065420560744, | |
| "grad_norm": 0.23629306256771088, | |
| "learning_rate": 7.495225325877103e-05, | |
| "loss": 0.0121, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 34.39252336448598, | |
| "grad_norm": 0.2444513440132141, | |
| "learning_rate": 7.480883135129211e-05, | |
| "loss": 0.0088, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 34.48598130841121, | |
| "grad_norm": 0.10653328895568848, | |
| "learning_rate": 7.466513813840825e-05, | |
| "loss": 0.0105, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 34.57943925233645, | |
| "grad_norm": 0.22507068514823914, | |
| "learning_rate": 7.452117519152542e-05, | |
| "loss": 0.0113, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 34.67289719626168, | |
| "grad_norm": 0.24139828979969025, | |
| "learning_rate": 7.437694408499933e-05, | |
| "loss": 0.0135, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 34.76635514018692, | |
| "grad_norm": 0.22388507425785065, | |
| "learning_rate": 7.423244639611826e-05, | |
| "loss": 0.0124, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 34.85981308411215, | |
| "grad_norm": 0.16689877212047577, | |
| "learning_rate": 7.408768370508576e-05, | |
| "loss": 0.0106, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 34.953271028037385, | |
| "grad_norm": 0.37698590755462646, | |
| "learning_rate": 7.394265759500348e-05, | |
| "loss": 0.0151, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 35.046728971962615, | |
| "grad_norm": 0.24599884450435638, | |
| "learning_rate": 7.379736965185368e-05, | |
| "loss": 0.011, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 35.14018691588785, | |
| "grad_norm": 0.21924257278442383, | |
| "learning_rate": 7.365182146448205e-05, | |
| "loss": 0.008, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 35.23364485981308, | |
| "grad_norm": 0.2563648521900177, | |
| "learning_rate": 7.350601462458024e-05, | |
| "loss": 0.0101, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 35.32710280373832, | |
| "grad_norm": 0.18041463196277618, | |
| "learning_rate": 7.335995072666848e-05, | |
| "loss": 0.0098, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 35.42056074766355, | |
| "grad_norm": 0.1949167102575302, | |
| "learning_rate": 7.32136313680782e-05, | |
| "loss": 0.0158, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 35.51401869158879, | |
| "grad_norm": 0.21856240928173065, | |
| "learning_rate": 7.30670581489344e-05, | |
| "loss": 0.0175, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 35.60747663551402, | |
| "grad_norm": 0.1561160832643509, | |
| "learning_rate": 7.292023267213835e-05, | |
| "loss": 0.0139, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 35.70093457943925, | |
| "grad_norm": 0.26308417320251465, | |
| "learning_rate": 7.277315654334997e-05, | |
| "loss": 0.0121, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 35.794392523364486, | |
| "grad_norm": 0.08401590585708618, | |
| "learning_rate": 7.262583137097018e-05, | |
| "loss": 0.0089, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 35.88785046728972, | |
| "grad_norm": 0.21631336212158203, | |
| "learning_rate": 7.247825876612353e-05, | |
| "loss": 0.0128, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 35.981308411214954, | |
| "grad_norm": 0.12208600342273712, | |
| "learning_rate": 7.233044034264034e-05, | |
| "loss": 0.0085, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 36.074766355140184, | |
| "grad_norm": 0.29350805282592773, | |
| "learning_rate": 7.218237771703921e-05, | |
| "loss": 0.0134, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 36.16822429906542, | |
| "grad_norm": 0.14051783084869385, | |
| "learning_rate": 7.203407250850928e-05, | |
| "loss": 0.0129, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 36.26168224299065, | |
| "grad_norm": 0.17099681496620178, | |
| "learning_rate": 7.188552633889259e-05, | |
| "loss": 0.0114, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 36.35514018691589, | |
| "grad_norm": 0.22999656200408936, | |
| "learning_rate": 7.173674083266624e-05, | |
| "loss": 0.0131, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 36.44859813084112, | |
| "grad_norm": 0.29541051387786865, | |
| "learning_rate": 7.158771761692464e-05, | |
| "loss": 0.0114, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 36.54205607476636, | |
| "grad_norm": 0.20923186838626862, | |
| "learning_rate": 7.143845832136188e-05, | |
| "loss": 0.0094, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 36.63551401869159, | |
| "grad_norm": 0.21942150592803955, | |
| "learning_rate": 7.128896457825364e-05, | |
| "loss": 0.0105, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 36.728971962616825, | |
| "grad_norm": 0.17471730709075928, | |
| "learning_rate": 7.113923802243957e-05, | |
| "loss": 0.0119, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 36.822429906542055, | |
| "grad_norm": 0.1934959888458252, | |
| "learning_rate": 7.09892802913053e-05, | |
| "loss": 0.014, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 36.91588785046729, | |
| "grad_norm": 0.26659610867500305, | |
| "learning_rate": 7.083909302476453e-05, | |
| "loss": 0.0155, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 37.00934579439252, | |
| "grad_norm": 0.2176859825849533, | |
| "learning_rate": 7.068867786524116e-05, | |
| "loss": 0.0175, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 37.10280373831776, | |
| "grad_norm": 0.17019227147102356, | |
| "learning_rate": 7.053803645765128e-05, | |
| "loss": 0.0116, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 37.19626168224299, | |
| "grad_norm": 0.20591872930526733, | |
| "learning_rate": 7.038717044938519e-05, | |
| "loss": 0.0096, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 37.28971962616822, | |
| "grad_norm": 0.1310962587594986, | |
| "learning_rate": 7.023608149028937e-05, | |
| "loss": 0.0093, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 37.38317757009346, | |
| "grad_norm": 0.17182035744190216, | |
| "learning_rate": 7.008477123264848e-05, | |
| "loss": 0.0077, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 37.47663551401869, | |
| "grad_norm": 0.2555645704269409, | |
| "learning_rate": 6.993324133116726e-05, | |
| "loss": 0.0097, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 37.570093457943926, | |
| "grad_norm": 0.1435556560754776, | |
| "learning_rate": 6.978149344295242e-05, | |
| "loss": 0.0077, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 37.66355140186916, | |
| "grad_norm": 0.13911636173725128, | |
| "learning_rate": 6.962952922749457e-05, | |
| "loss": 0.01, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 37.757009345794394, | |
| "grad_norm": 0.2193230539560318, | |
| "learning_rate": 6.947735034665002e-05, | |
| "loss": 0.0133, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 37.850467289719624, | |
| "grad_norm": 0.15190434455871582, | |
| "learning_rate": 6.932495846462261e-05, | |
| "loss": 0.0135, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 37.94392523364486, | |
| "grad_norm": 0.14792603254318237, | |
| "learning_rate": 6.917235524794558e-05, | |
| "loss": 0.0132, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 38.03738317757009, | |
| "grad_norm": 0.14730827510356903, | |
| "learning_rate": 6.901954236546323e-05, | |
| "loss": 0.0164, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 38.13084112149533, | |
| "grad_norm": 0.2186291366815567, | |
| "learning_rate": 6.886652148831279e-05, | |
| "loss": 0.0102, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 38.22429906542056, | |
| "grad_norm": 0.1827789694070816, | |
| "learning_rate": 6.871329428990602e-05, | |
| "loss": 0.0097, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 38.3177570093458, | |
| "grad_norm": 0.18084844946861267, | |
| "learning_rate": 6.855986244591104e-05, | |
| "loss": 0.0101, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 38.41121495327103, | |
| "grad_norm": 0.21352370083332062, | |
| "learning_rate": 6.840622763423391e-05, | |
| "loss": 0.0092, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 38.504672897196265, | |
| "grad_norm": 0.09848731011152267, | |
| "learning_rate": 6.825239153500029e-05, | |
| "loss": 0.0098, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 38.598130841121495, | |
| "grad_norm": 0.15104982256889343, | |
| "learning_rate": 6.809835583053715e-05, | |
| "loss": 0.0081, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 38.691588785046726, | |
| "grad_norm": 0.18223416805267334, | |
| "learning_rate": 6.794412220535426e-05, | |
| "loss": 0.0111, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 38.78504672897196, | |
| "grad_norm": 0.231476828455925, | |
| "learning_rate": 6.778969234612584e-05, | |
| "loss": 0.0092, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 38.87850467289719, | |
| "grad_norm": 0.14167046546936035, | |
| "learning_rate": 6.763506794167208e-05, | |
| "loss": 0.0073, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 38.97196261682243, | |
| "grad_norm": 0.12900766730308533, | |
| "learning_rate": 6.748025068294067e-05, | |
| "loss": 0.0076, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 39.06542056074766, | |
| "grad_norm": 0.16524341702461243, | |
| "learning_rate": 6.732524226298841e-05, | |
| "loss": 0.0091, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 39.1588785046729, | |
| "grad_norm": 0.17994999885559082, | |
| "learning_rate": 6.71700443769625e-05, | |
| "loss": 0.0099, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 39.25233644859813, | |
| "grad_norm": 0.2520671784877777, | |
| "learning_rate": 6.701465872208216e-05, | |
| "loss": 0.0082, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 39.345794392523366, | |
| "grad_norm": 0.2442283183336258, | |
| "learning_rate": 6.685908699762002e-05, | |
| "loss": 0.0098, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 39.4392523364486, | |
| "grad_norm": 0.27867448329925537, | |
| "learning_rate": 6.670333090488356e-05, | |
| "loss": 0.011, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 39.532710280373834, | |
| "grad_norm": 0.1637038290500641, | |
| "learning_rate": 6.654739214719641e-05, | |
| "loss": 0.0089, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 39.626168224299064, | |
| "grad_norm": 0.20169733464717865, | |
| "learning_rate": 6.639127242987988e-05, | |
| "loss": 0.0084, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 39.7196261682243, | |
| "grad_norm": 0.23685987293720245, | |
| "learning_rate": 6.623497346023418e-05, | |
| "loss": 0.0121, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 39.81308411214953, | |
| "grad_norm": 0.09718881547451019, | |
| "learning_rate": 6.607849694751977e-05, | |
| "loss": 0.0068, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 39.90654205607477, | |
| "grad_norm": 0.13366201519966125, | |
| "learning_rate": 6.592184460293877e-05, | |
| "loss": 0.009, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 40.0, | |
| "grad_norm": 0.23680470883846283, | |
| "learning_rate": 6.576501813961609e-05, | |
| "loss": 0.0103, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 40.09345794392523, | |
| "grad_norm": 0.33618593215942383, | |
| "learning_rate": 6.56080192725808e-05, | |
| "loss": 0.0175, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 40.18691588785047, | |
| "grad_norm": 0.18473008275032043, | |
| "learning_rate": 6.545084971874738e-05, | |
| "loss": 0.0083, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 40.2803738317757, | |
| "grad_norm": 0.2896006405353546, | |
| "learning_rate": 6.529351119689688e-05, | |
| "loss": 0.0115, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 40.373831775700936, | |
| "grad_norm": 0.3565766513347626, | |
| "learning_rate": 6.513600542765817e-05, | |
| "loss": 0.0085, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 40.467289719626166, | |
| "grad_norm": 0.1844252496957779, | |
| "learning_rate": 6.497833413348909e-05, | |
| "loss": 0.0107, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 40.5607476635514, | |
| "grad_norm": 0.19039011001586914, | |
| "learning_rate": 6.48204990386577e-05, | |
| "loss": 0.0068, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 40.654205607476634, | |
| "grad_norm": 0.1492714285850525, | |
| "learning_rate": 6.466250186922325e-05, | |
| "loss": 0.0086, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 40.74766355140187, | |
| "grad_norm": 0.1916590929031372, | |
| "learning_rate": 6.450434435301751e-05, | |
| "loss": 0.0144, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 40.8411214953271, | |
| "grad_norm": 0.08804311603307724, | |
| "learning_rate": 6.43460282196257e-05, | |
| "loss": 0.0073, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 40.93457943925234, | |
| "grad_norm": 0.18781927227973938, | |
| "learning_rate": 6.418755520036775e-05, | |
| "loss": 0.0096, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 41.02803738317757, | |
| "grad_norm": 0.12092869728803635, | |
| "learning_rate": 6.402892702827916e-05, | |
| "loss": 0.0135, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 41.12149532710281, | |
| "grad_norm": 0.08973784744739532, | |
| "learning_rate": 6.387014543809223e-05, | |
| "loss": 0.007, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 41.21495327102804, | |
| "grad_norm": 0.2680470049381256, | |
| "learning_rate": 6.371121216621698e-05, | |
| "loss": 0.008, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 41.308411214953274, | |
| "grad_norm": 0.30315253138542175, | |
| "learning_rate": 6.355212895072223e-05, | |
| "loss": 0.0087, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 41.401869158878505, | |
| "grad_norm": 0.13689424097537994, | |
| "learning_rate": 6.339289753131649e-05, | |
| "loss": 0.0075, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 41.495327102803735, | |
| "grad_norm": 0.16243036091327667, | |
| "learning_rate": 6.323351964932908e-05, | |
| "loss": 0.0092, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 41.58878504672897, | |
| "grad_norm": 0.13021892309188843, | |
| "learning_rate": 6.307399704769099e-05, | |
| "loss": 0.0095, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 41.6822429906542, | |
| "grad_norm": 0.27589356899261475, | |
| "learning_rate": 6.291433147091583e-05, | |
| "loss": 0.0108, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 41.77570093457944, | |
| "grad_norm": 0.12132186442613602, | |
| "learning_rate": 6.275452466508077e-05, | |
| "loss": 0.0085, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 41.86915887850467, | |
| "grad_norm": 0.1552329659461975, | |
| "learning_rate": 6.259457837780742e-05, | |
| "loss": 0.0064, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 41.96261682242991, | |
| "grad_norm": 0.20105503499507904, | |
| "learning_rate": 6.243449435824276e-05, | |
| "loss": 0.0069, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 42.05607476635514, | |
| "grad_norm": 0.24328036606311798, | |
| "learning_rate": 6.227427435703997e-05, | |
| "loss": 0.015, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 42.149532710280376, | |
| "grad_norm": 0.27159082889556885, | |
| "learning_rate": 6.211392012633932e-05, | |
| "loss": 0.0084, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 42.242990654205606, | |
| "grad_norm": 0.21687833964824677, | |
| "learning_rate": 6.195343341974899e-05, | |
| "loss": 0.0127, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 42.33644859813084, | |
| "grad_norm": 0.22451163828372955, | |
| "learning_rate": 6.179281599232591e-05, | |
| "loss": 0.0112, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 42.429906542056074, | |
| "grad_norm": 0.10428455471992493, | |
| "learning_rate": 6.163206960055651e-05, | |
| "loss": 0.0065, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 42.52336448598131, | |
| "grad_norm": 0.2592661678791046, | |
| "learning_rate": 6.147119600233758e-05, | |
| "loss": 0.0138, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 42.61682242990654, | |
| "grad_norm": 0.21353816986083984, | |
| "learning_rate": 6.131019695695702e-05, | |
| "loss": 0.0103, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 42.71028037383178, | |
| "grad_norm": 0.12959344685077667, | |
| "learning_rate": 6.11490742250746e-05, | |
| "loss": 0.009, | |
| "step": 4570 | |
| }, | |
| { | |
| "epoch": 42.80373831775701, | |
| "grad_norm": 0.17373885214328766, | |
| "learning_rate": 6.0987829568702656e-05, | |
| "loss": 0.0092, | |
| "step": 4580 | |
| }, | |
| { | |
| "epoch": 42.89719626168224, | |
| "grad_norm": 0.11210908740758896, | |
| "learning_rate": 6.0826464751186994e-05, | |
| "loss": 0.0079, | |
| "step": 4590 | |
| }, | |
| { | |
| "epoch": 42.99065420560748, | |
| "grad_norm": 0.15213441848754883, | |
| "learning_rate": 6.066498153718735e-05, | |
| "loss": 0.0068, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 43.08411214953271, | |
| "grad_norm": 0.20361490547657013, | |
| "learning_rate": 6.05033816926583e-05, | |
| "loss": 0.0149, | |
| "step": 4610 | |
| }, | |
| { | |
| "epoch": 43.177570093457945, | |
| "grad_norm": 0.12980744242668152, | |
| "learning_rate": 6.034166698482984e-05, | |
| "loss": 0.012, | |
| "step": 4620 | |
| }, | |
| { | |
| "epoch": 43.271028037383175, | |
| "grad_norm": 0.1619279980659485, | |
| "learning_rate": 6.017983918218812e-05, | |
| "loss": 0.0075, | |
| "step": 4630 | |
| }, | |
| { | |
| "epoch": 43.36448598130841, | |
| "grad_norm": 0.10983559489250183, | |
| "learning_rate": 6.001790005445607e-05, | |
| "loss": 0.0084, | |
| "step": 4640 | |
| }, | |
| { | |
| "epoch": 43.45794392523364, | |
| "grad_norm": 0.18888936936855316, | |
| "learning_rate": 5.985585137257401e-05, | |
| "loss": 0.0167, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 43.55140186915888, | |
| "grad_norm": 0.3127266466617584, | |
| "learning_rate": 5.969369490868042e-05, | |
| "loss": 0.014, | |
| "step": 4660 | |
| }, | |
| { | |
| "epoch": 43.64485981308411, | |
| "grad_norm": 0.21398553252220154, | |
| "learning_rate": 5.953143243609235e-05, | |
| "loss": 0.0109, | |
| "step": 4670 | |
| }, | |
| { | |
| "epoch": 43.73831775700935, | |
| "grad_norm": 0.1972840428352356, | |
| "learning_rate": 5.9369065729286245e-05, | |
| "loss": 0.0078, | |
| "step": 4680 | |
| }, | |
| { | |
| "epoch": 43.83177570093458, | |
| "grad_norm": 0.17708909511566162, | |
| "learning_rate": 5.9206596563878357e-05, | |
| "loss": 0.0136, | |
| "step": 4690 | |
| }, | |
| { | |
| "epoch": 43.925233644859816, | |
| "grad_norm": 0.25454819202423096, | |
| "learning_rate": 5.90440267166055e-05, | |
| "loss": 0.0107, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 44.018691588785046, | |
| "grad_norm": 0.1953362226486206, | |
| "learning_rate": 5.888135796530544e-05, | |
| "loss": 0.0113, | |
| "step": 4710 | |
| }, | |
| { | |
| "epoch": 44.11214953271028, | |
| "grad_norm": 0.21734946966171265, | |
| "learning_rate": 5.871859208889759e-05, | |
| "loss": 0.007, | |
| "step": 4720 | |
| }, | |
| { | |
| "epoch": 44.205607476635514, | |
| "grad_norm": 0.13528119027614594, | |
| "learning_rate": 5.85557308673635e-05, | |
| "loss": 0.0061, | |
| "step": 4730 | |
| }, | |
| { | |
| "epoch": 44.299065420560744, | |
| "grad_norm": 0.09264456480741501, | |
| "learning_rate": 5.8392776081727385e-05, | |
| "loss": 0.0115, | |
| "step": 4740 | |
| }, | |
| { | |
| "epoch": 44.39252336448598, | |
| "grad_norm": 0.09667211025953293, | |
| "learning_rate": 5.8229729514036705e-05, | |
| "loss": 0.0066, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 44.48598130841121, | |
| "grad_norm": 0.18434172868728638, | |
| "learning_rate": 5.8066592947342555e-05, | |
| "loss": 0.0091, | |
| "step": 4760 | |
| }, | |
| { | |
| "epoch": 44.57943925233645, | |
| "grad_norm": 0.2720284163951874, | |
| "learning_rate": 5.7903368165680327e-05, | |
| "loss": 0.0113, | |
| "step": 4770 | |
| }, | |
| { | |
| "epoch": 44.67289719626168, | |
| "grad_norm": 0.23133063316345215, | |
| "learning_rate": 5.7740056954050084e-05, | |
| "loss": 0.0101, | |
| "step": 4780 | |
| }, | |
| { | |
| "epoch": 44.76635514018692, | |
| "grad_norm": 0.28980597853660583, | |
| "learning_rate": 5.757666109839702e-05, | |
| "loss": 0.0128, | |
| "step": 4790 | |
| }, | |
| { | |
| "epoch": 44.85981308411215, | |
| "grad_norm": 0.2679378390312195, | |
| "learning_rate": 5.74131823855921e-05, | |
| "loss": 0.0075, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 44.953271028037385, | |
| "grad_norm": 0.16744691133499146, | |
| "learning_rate": 5.72496226034123e-05, | |
| "loss": 0.0081, | |
| "step": 4810 | |
| }, | |
| { | |
| "epoch": 45.046728971962615, | |
| "grad_norm": 0.2577405869960785, | |
| "learning_rate": 5.7085983540521216e-05, | |
| "loss": 0.0068, | |
| "step": 4820 | |
| }, | |
| { | |
| "epoch": 45.14018691588785, | |
| "grad_norm": 0.1690078228712082, | |
| "learning_rate": 5.692226698644938e-05, | |
| "loss": 0.0086, | |
| "step": 4830 | |
| }, | |
| { | |
| "epoch": 45.23364485981308, | |
| "grad_norm": 0.11636698246002197, | |
| "learning_rate": 5.675847473157485e-05, | |
| "loss": 0.0092, | |
| "step": 4840 | |
| }, | |
| { | |
| "epoch": 45.32710280373832, | |
| "grad_norm": 0.15227118134498596, | |
| "learning_rate": 5.6594608567103456e-05, | |
| "loss": 0.0075, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 45.42056074766355, | |
| "grad_norm": 0.126779705286026, | |
| "learning_rate": 5.6430670285049314e-05, | |
| "loss": 0.0075, | |
| "step": 4860 | |
| }, | |
| { | |
| "epoch": 45.51401869158879, | |
| "grad_norm": 0.23741380870342255, | |
| "learning_rate": 5.6266661678215216e-05, | |
| "loss": 0.0098, | |
| "step": 4870 | |
| }, | |
| { | |
| "epoch": 45.60747663551402, | |
| "grad_norm": 0.12933692336082458, | |
| "learning_rate": 5.6102584540173006e-05, | |
| "loss": 0.0075, | |
| "step": 4880 | |
| }, | |
| { | |
| "epoch": 45.70093457943925, | |
| "grad_norm": 0.1902732253074646, | |
| "learning_rate": 5.5938440665244006e-05, | |
| "loss": 0.0087, | |
| "step": 4890 | |
| }, | |
| { | |
| "epoch": 45.794392523364486, | |
| "grad_norm": 0.24616940319538116, | |
| "learning_rate": 5.577423184847932e-05, | |
| "loss": 0.0077, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 45.88785046728972, | |
| "grad_norm": 0.16900432109832764, | |
| "learning_rate": 5.560995988564023e-05, | |
| "loss": 0.0097, | |
| "step": 4910 | |
| }, | |
| { | |
| "epoch": 45.981308411214954, | |
| "grad_norm": 0.20487163960933685, | |
| "learning_rate": 5.544562657317863e-05, | |
| "loss": 0.0114, | |
| "step": 4920 | |
| }, | |
| { | |
| "epoch": 46.074766355140184, | |
| "grad_norm": 0.25133049488067627, | |
| "learning_rate": 5.52812337082173e-05, | |
| "loss": 0.0088, | |
| "step": 4930 | |
| }, | |
| { | |
| "epoch": 46.16822429906542, | |
| "grad_norm": 0.1800190508365631, | |
| "learning_rate": 5.511678308853026e-05, | |
| "loss": 0.0096, | |
| "step": 4940 | |
| }, | |
| { | |
| "epoch": 46.26168224299065, | |
| "grad_norm": 0.2863096594810486, | |
| "learning_rate": 5.495227651252315e-05, | |
| "loss": 0.0076, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 46.35514018691589, | |
| "grad_norm": 0.3172850012779236, | |
| "learning_rate": 5.478771577921351e-05, | |
| "loss": 0.0104, | |
| "step": 4960 | |
| }, | |
| { | |
| "epoch": 46.44859813084112, | |
| "grad_norm": 0.13900935649871826, | |
| "learning_rate": 5.462310268821118e-05, | |
| "loss": 0.0077, | |
| "step": 4970 | |
| }, | |
| { | |
| "epoch": 46.54205607476636, | |
| "grad_norm": 0.22267590463161469, | |
| "learning_rate": 5.445843903969854e-05, | |
| "loss": 0.0059, | |
| "step": 4980 | |
| }, | |
| { | |
| "epoch": 46.63551401869159, | |
| "grad_norm": 0.16322097182273865, | |
| "learning_rate": 5.4293726634410855e-05, | |
| "loss": 0.0104, | |
| "step": 4990 | |
| }, | |
| { | |
| "epoch": 46.728971962616825, | |
| "grad_norm": 0.18399694561958313, | |
| "learning_rate": 5.4128967273616625e-05, | |
| "loss": 0.0106, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 46.822429906542055, | |
| "grad_norm": 0.1907566636800766, | |
| "learning_rate": 5.396416275909779e-05, | |
| "loss": 0.0089, | |
| "step": 5010 | |
| }, | |
| { | |
| "epoch": 46.91588785046729, | |
| "grad_norm": 0.2239440530538559, | |
| "learning_rate": 5.379931489313016e-05, | |
| "loss": 0.0086, | |
| "step": 5020 | |
| }, | |
| { | |
| "epoch": 47.00934579439252, | |
| "grad_norm": 0.16346921026706696, | |
| "learning_rate": 5.363442547846356e-05, | |
| "loss": 0.0089, | |
| "step": 5030 | |
| }, | |
| { | |
| "epoch": 47.10280373831776, | |
| "grad_norm": 0.14416854083538055, | |
| "learning_rate": 5.3469496318302204e-05, | |
| "loss": 0.0065, | |
| "step": 5040 | |
| }, | |
| { | |
| "epoch": 47.19626168224299, | |
| "grad_norm": 0.18693040311336517, | |
| "learning_rate": 5.330452921628497e-05, | |
| "loss": 0.0077, | |
| "step": 5050 | |
| }, | |
| { | |
| "epoch": 47.28971962616822, | |
| "grad_norm": 0.10827270895242691, | |
| "learning_rate": 5.313952597646568e-05, | |
| "loss": 0.0056, | |
| "step": 5060 | |
| }, | |
| { | |
| "epoch": 47.38317757009346, | |
| "grad_norm": 0.15888318419456482, | |
| "learning_rate": 5.297448840329329e-05, | |
| "loss": 0.0087, | |
| "step": 5070 | |
| }, | |
| { | |
| "epoch": 47.47663551401869, | |
| "grad_norm": 0.11162968724966049, | |
| "learning_rate": 5.280941830159227e-05, | |
| "loss": 0.0093, | |
| "step": 5080 | |
| }, | |
| { | |
| "epoch": 47.570093457943926, | |
| "grad_norm": 0.11820860952138901, | |
| "learning_rate": 5.264431747654284e-05, | |
| "loss": 0.0108, | |
| "step": 5090 | |
| }, | |
| { | |
| "epoch": 47.66355140186916, | |
| "grad_norm": 0.3276582956314087, | |
| "learning_rate": 5.247918773366112e-05, | |
| "loss": 0.0131, | |
| "step": 5100 | |
| }, | |
| { | |
| "epoch": 47.757009345794394, | |
| "grad_norm": 0.14899931848049164, | |
| "learning_rate": 5.231403087877955e-05, | |
| "loss": 0.0074, | |
| "step": 5110 | |
| }, | |
| { | |
| "epoch": 47.850467289719624, | |
| "grad_norm": 0.07620976120233536, | |
| "learning_rate": 5.214884871802703e-05, | |
| "loss": 0.009, | |
| "step": 5120 | |
| }, | |
| { | |
| "epoch": 47.94392523364486, | |
| "grad_norm": 0.18864719569683075, | |
| "learning_rate": 5.198364305780922e-05, | |
| "loss": 0.0064, | |
| "step": 5130 | |
| }, | |
| { | |
| "epoch": 48.03738317757009, | |
| "grad_norm": 0.13755668699741364, | |
| "learning_rate": 5.1818415704788725e-05, | |
| "loss": 0.0085, | |
| "step": 5140 | |
| }, | |
| { | |
| "epoch": 48.13084112149533, | |
| "grad_norm": 0.22894635796546936, | |
| "learning_rate": 5.165316846586541e-05, | |
| "loss": 0.0113, | |
| "step": 5150 | |
| }, | |
| { | |
| "epoch": 48.22429906542056, | |
| "grad_norm": 0.11449041217565536, | |
| "learning_rate": 5.148790314815663e-05, | |
| "loss": 0.0082, | |
| "step": 5160 | |
| }, | |
| { | |
| "epoch": 48.3177570093458, | |
| "grad_norm": 0.1928773671388626, | |
| "learning_rate": 5.132262155897739e-05, | |
| "loss": 0.0084, | |
| "step": 5170 | |
| }, | |
| { | |
| "epoch": 48.41121495327103, | |
| "grad_norm": 0.15431612730026245, | |
| "learning_rate": 5.1157325505820694e-05, | |
| "loss": 0.0096, | |
| "step": 5180 | |
| }, | |
| { | |
| "epoch": 48.504672897196265, | |
| "grad_norm": 0.21660259366035461, | |
| "learning_rate": 5.0992016796337686e-05, | |
| "loss": 0.008, | |
| "step": 5190 | |
| }, | |
| { | |
| "epoch": 48.598130841121495, | |
| "grad_norm": 0.11220486462116241, | |
| "learning_rate": 5.0826697238317935e-05, | |
| "loss": 0.0072, | |
| "step": 5200 | |
| }, | |
| { | |
| "epoch": 48.691588785046726, | |
| "grad_norm": 0.15329080820083618, | |
| "learning_rate": 5.066136863966963e-05, | |
| "loss": 0.0088, | |
| "step": 5210 | |
| }, | |
| { | |
| "epoch": 48.78504672897196, | |
| "grad_norm": 0.1777033805847168, | |
| "learning_rate": 5.0496032808399815e-05, | |
| "loss": 0.0052, | |
| "step": 5220 | |
| }, | |
| { | |
| "epoch": 48.87850467289719, | |
| "grad_norm": 0.23826682567596436, | |
| "learning_rate": 5.033069155259471e-05, | |
| "loss": 0.0094, | |
| "step": 5230 | |
| }, | |
| { | |
| "epoch": 48.97196261682243, | |
| "grad_norm": 0.13108570873737335, | |
| "learning_rate": 5.016534668039976e-05, | |
| "loss": 0.0062, | |
| "step": 5240 | |
| }, | |
| { | |
| "epoch": 49.06542056074766, | |
| "grad_norm": 0.1880958378314972, | |
| "learning_rate": 5e-05, | |
| "loss": 0.0053, | |
| "step": 5250 | |
| }, | |
| { | |
| "epoch": 49.1588785046729, | |
| "grad_norm": 0.11604796350002289, | |
| "learning_rate": 4.9834653319600246e-05, | |
| "loss": 0.0083, | |
| "step": 5260 | |
| }, | |
| { | |
| "epoch": 49.25233644859813, | |
| "grad_norm": 0.10272664576768875, | |
| "learning_rate": 4.96693084474053e-05, | |
| "loss": 0.0081, | |
| "step": 5270 | |
| }, | |
| { | |
| "epoch": 49.345794392523366, | |
| "grad_norm": 0.1421612948179245, | |
| "learning_rate": 4.950396719160018e-05, | |
| "loss": 0.005, | |
| "step": 5280 | |
| }, | |
| { | |
| "epoch": 49.4392523364486, | |
| "grad_norm": 0.11580304056406021, | |
| "learning_rate": 4.93386313603304e-05, | |
| "loss": 0.0071, | |
| "step": 5290 | |
| }, | |
| { | |
| "epoch": 49.532710280373834, | |
| "grad_norm": 0.0858597457408905, | |
| "learning_rate": 4.917330276168208e-05, | |
| "loss": 0.0107, | |
| "step": 5300 | |
| }, | |
| { | |
| "epoch": 49.626168224299064, | |
| "grad_norm": 0.17755255103111267, | |
| "learning_rate": 4.9007983203662326e-05, | |
| "loss": 0.0188, | |
| "step": 5310 | |
| }, | |
| { | |
| "epoch": 49.7196261682243, | |
| "grad_norm": 0.14021337032318115, | |
| "learning_rate": 4.884267449417931e-05, | |
| "loss": 0.0081, | |
| "step": 5320 | |
| }, | |
| { | |
| "epoch": 49.81308411214953, | |
| "grad_norm": 0.10999365150928497, | |
| "learning_rate": 4.867737844102261e-05, | |
| "loss": 0.0103, | |
| "step": 5330 | |
| }, | |
| { | |
| "epoch": 49.90654205607477, | |
| "grad_norm": 0.11541043221950531, | |
| "learning_rate": 4.851209685184338e-05, | |
| "loss": 0.0072, | |
| "step": 5340 | |
| }, | |
| { | |
| "epoch": 50.0, | |
| "grad_norm": 0.10045769810676575, | |
| "learning_rate": 4.834683153413459e-05, | |
| "loss": 0.0146, | |
| "step": 5350 | |
| }, | |
| { | |
| "epoch": 50.09345794392523, | |
| "grad_norm": 0.1158151924610138, | |
| "learning_rate": 4.818158429521129e-05, | |
| "loss": 0.0078, | |
| "step": 5360 | |
| }, | |
| { | |
| "epoch": 50.18691588785047, | |
| "grad_norm": 0.1430526077747345, | |
| "learning_rate": 4.801635694219079e-05, | |
| "loss": 0.0054, | |
| "step": 5370 | |
| }, | |
| { | |
| "epoch": 50.2803738317757, | |
| "grad_norm": 0.09269680827856064, | |
| "learning_rate": 4.785115128197298e-05, | |
| "loss": 0.0097, | |
| "step": 5380 | |
| }, | |
| { | |
| "epoch": 50.373831775700936, | |
| "grad_norm": 0.21225248277187347, | |
| "learning_rate": 4.7685969121220456e-05, | |
| "loss": 0.0098, | |
| "step": 5390 | |
| }, | |
| { | |
| "epoch": 50.467289719626166, | |
| "grad_norm": 0.18716934323310852, | |
| "learning_rate": 4.7520812266338885e-05, | |
| "loss": 0.0083, | |
| "step": 5400 | |
| }, | |
| { | |
| "epoch": 50.5607476635514, | |
| "grad_norm": 0.24767927825450897, | |
| "learning_rate": 4.735568252345718e-05, | |
| "loss": 0.0075, | |
| "step": 5410 | |
| }, | |
| { | |
| "epoch": 50.654205607476634, | |
| "grad_norm": 0.14961543679237366, | |
| "learning_rate": 4.7190581698407725e-05, | |
| "loss": 0.0106, | |
| "step": 5420 | |
| }, | |
| { | |
| "epoch": 50.74766355140187, | |
| "grad_norm": 0.16245384514331818, | |
| "learning_rate": 4.702551159670672e-05, | |
| "loss": 0.0062, | |
| "step": 5430 | |
| }, | |
| { | |
| "epoch": 50.8411214953271, | |
| "grad_norm": 0.1727730631828308, | |
| "learning_rate": 4.6860474023534335e-05, | |
| "loss": 0.0086, | |
| "step": 5440 | |
| }, | |
| { | |
| "epoch": 50.93457943925234, | |
| "grad_norm": 0.1440618336200714, | |
| "learning_rate": 4.669547078371504e-05, | |
| "loss": 0.009, | |
| "step": 5450 | |
| }, | |
| { | |
| "epoch": 51.02803738317757, | |
| "grad_norm": 0.17170698940753937, | |
| "learning_rate": 4.65305036816978e-05, | |
| "loss": 0.0067, | |
| "step": 5460 | |
| }, | |
| { | |
| "epoch": 51.12149532710281, | |
| "grad_norm": 0.3083714246749878, | |
| "learning_rate": 4.6365574521536445e-05, | |
| "loss": 0.0114, | |
| "step": 5470 | |
| }, | |
| { | |
| "epoch": 51.21495327102804, | |
| "grad_norm": 0.1776435673236847, | |
| "learning_rate": 4.620068510686985e-05, | |
| "loss": 0.0054, | |
| "step": 5480 | |
| }, | |
| { | |
| "epoch": 51.308411214953274, | |
| "grad_norm": 0.13433292508125305, | |
| "learning_rate": 4.60358372409022e-05, | |
| "loss": 0.0065, | |
| "step": 5490 | |
| }, | |
| { | |
| "epoch": 51.401869158878505, | |
| "grad_norm": 0.1641828864812851, | |
| "learning_rate": 4.5871032726383386e-05, | |
| "loss": 0.0059, | |
| "step": 5500 | |
| }, | |
| { | |
| "epoch": 51.495327102803735, | |
| "grad_norm": 0.13108348846435547, | |
| "learning_rate": 4.570627336558915e-05, | |
| "loss": 0.0069, | |
| "step": 5510 | |
| }, | |
| { | |
| "epoch": 51.58878504672897, | |
| "grad_norm": 0.10418945550918579, | |
| "learning_rate": 4.554156096030149e-05, | |
| "loss": 0.007, | |
| "step": 5520 | |
| }, | |
| { | |
| "epoch": 51.6822429906542, | |
| "grad_norm": 0.1613224595785141, | |
| "learning_rate": 4.537689731178883e-05, | |
| "loss": 0.0087, | |
| "step": 5530 | |
| }, | |
| { | |
| "epoch": 51.77570093457944, | |
| "grad_norm": 0.10617467761039734, | |
| "learning_rate": 4.5212284220786494e-05, | |
| "loss": 0.0068, | |
| "step": 5540 | |
| }, | |
| { | |
| "epoch": 51.86915887850467, | |
| "grad_norm": 0.18668167293071747, | |
| "learning_rate": 4.504772348747687e-05, | |
| "loss": 0.0071, | |
| "step": 5550 | |
| }, | |
| { | |
| "epoch": 51.96261682242991, | |
| "grad_norm": 0.1861657351255417, | |
| "learning_rate": 4.488321691146975e-05, | |
| "loss": 0.0078, | |
| "step": 5560 | |
| }, | |
| { | |
| "epoch": 52.05607476635514, | |
| "grad_norm": 0.16021041572093964, | |
| "learning_rate": 4.471876629178273e-05, | |
| "loss": 0.0084, | |
| "step": 5570 | |
| }, | |
| { | |
| "epoch": 52.149532710280376, | |
| "grad_norm": 0.16291090846061707, | |
| "learning_rate": 4.4554373426821374e-05, | |
| "loss": 0.0053, | |
| "step": 5580 | |
| }, | |
| { | |
| "epoch": 52.242990654205606, | |
| "grad_norm": 0.19529825448989868, | |
| "learning_rate": 4.439004011435979e-05, | |
| "loss": 0.0069, | |
| "step": 5590 | |
| }, | |
| { | |
| "epoch": 52.33644859813084, | |
| "grad_norm": 0.10670498758554459, | |
| "learning_rate": 4.4225768151520694e-05, | |
| "loss": 0.0091, | |
| "step": 5600 | |
| }, | |
| { | |
| "epoch": 52.429906542056074, | |
| "grad_norm": 0.14729143679141998, | |
| "learning_rate": 4.406155933475599e-05, | |
| "loss": 0.0073, | |
| "step": 5610 | |
| }, | |
| { | |
| "epoch": 52.52336448598131, | |
| "grad_norm": 0.10457372665405273, | |
| "learning_rate": 4.3897415459827e-05, | |
| "loss": 0.0103, | |
| "step": 5620 | |
| }, | |
| { | |
| "epoch": 52.61682242990654, | |
| "grad_norm": 0.09747841209173203, | |
| "learning_rate": 4.373333832178478e-05, | |
| "loss": 0.0044, | |
| "step": 5630 | |
| }, | |
| { | |
| "epoch": 52.71028037383178, | |
| "grad_norm": 0.2818126082420349, | |
| "learning_rate": 4.3569329714950704e-05, | |
| "loss": 0.0111, | |
| "step": 5640 | |
| }, | |
| { | |
| "epoch": 52.80373831775701, | |
| "grad_norm": 0.1272769421339035, | |
| "learning_rate": 4.3405391432896555e-05, | |
| "loss": 0.0069, | |
| "step": 5650 | |
| }, | |
| { | |
| "epoch": 52.89719626168224, | |
| "grad_norm": 0.08397116512060165, | |
| "learning_rate": 4.324152526842517e-05, | |
| "loss": 0.007, | |
| "step": 5660 | |
| }, | |
| { | |
| "epoch": 52.99065420560748, | |
| "grad_norm": 0.10440504550933838, | |
| "learning_rate": 4.307773301355062e-05, | |
| "loss": 0.0052, | |
| "step": 5670 | |
| }, | |
| { | |
| "epoch": 53.08411214953271, | |
| "grad_norm": 0.16689011454582214, | |
| "learning_rate": 4.291401645947879e-05, | |
| "loss": 0.0111, | |
| "step": 5680 | |
| }, | |
| { | |
| "epoch": 53.177570093457945, | |
| "grad_norm": 0.19110624492168427, | |
| "learning_rate": 4.275037739658771e-05, | |
| "loss": 0.0045, | |
| "step": 5690 | |
| }, | |
| { | |
| "epoch": 53.271028037383175, | |
| "grad_norm": 0.2127784937620163, | |
| "learning_rate": 4.2586817614407895e-05, | |
| "loss": 0.0096, | |
| "step": 5700 | |
| }, | |
| { | |
| "epoch": 53.36448598130841, | |
| "grad_norm": 0.15279392898082733, | |
| "learning_rate": 4.2423338901602985e-05, | |
| "loss": 0.0084, | |
| "step": 5710 | |
| }, | |
| { | |
| "epoch": 53.45794392523364, | |
| "grad_norm": 0.14837054908275604, | |
| "learning_rate": 4.2259943045949934e-05, | |
| "loss": 0.0061, | |
| "step": 5720 | |
| }, | |
| { | |
| "epoch": 53.55140186915888, | |
| "grad_norm": 0.15841516852378845, | |
| "learning_rate": 4.209663183431969e-05, | |
| "loss": 0.0076, | |
| "step": 5730 | |
| }, | |
| { | |
| "epoch": 53.64485981308411, | |
| "grad_norm": 0.13824456930160522, | |
| "learning_rate": 4.1933407052657456e-05, | |
| "loss": 0.0051, | |
| "step": 5740 | |
| }, | |
| { | |
| "epoch": 53.73831775700935, | |
| "grad_norm": 0.16600170731544495, | |
| "learning_rate": 4.17702704859633e-05, | |
| "loss": 0.0065, | |
| "step": 5750 | |
| }, | |
| { | |
| "epoch": 53.83177570093458, | |
| "grad_norm": 0.0763145163655281, | |
| "learning_rate": 4.160722391827262e-05, | |
| "loss": 0.0055, | |
| "step": 5760 | |
| }, | |
| { | |
| "epoch": 53.925233644859816, | |
| "grad_norm": 0.1596927046775818, | |
| "learning_rate": 4.14442691326365e-05, | |
| "loss": 0.0054, | |
| "step": 5770 | |
| }, | |
| { | |
| "epoch": 54.018691588785046, | |
| "grad_norm": 0.07967574149370193, | |
| "learning_rate": 4.1281407911102425e-05, | |
| "loss": 0.0074, | |
| "step": 5780 | |
| }, | |
| { | |
| "epoch": 54.11214953271028, | |
| "grad_norm": 0.12622042000293732, | |
| "learning_rate": 4.111864203469457e-05, | |
| "loss": 0.0071, | |
| "step": 5790 | |
| }, | |
| { | |
| "epoch": 54.205607476635514, | |
| "grad_norm": 0.19472530484199524, | |
| "learning_rate": 4.095597328339452e-05, | |
| "loss": 0.0065, | |
| "step": 5800 | |
| }, | |
| { | |
| "epoch": 54.299065420560744, | |
| "grad_norm": 0.17462767660617828, | |
| "learning_rate": 4.079340343612165e-05, | |
| "loss": 0.0116, | |
| "step": 5810 | |
| }, | |
| { | |
| "epoch": 54.39252336448598, | |
| "grad_norm": 0.1130964532494545, | |
| "learning_rate": 4.063093427071376e-05, | |
| "loss": 0.0116, | |
| "step": 5820 | |
| }, | |
| { | |
| "epoch": 54.48598130841121, | |
| "grad_norm": 0.17297130823135376, | |
| "learning_rate": 4.046856756390767e-05, | |
| "loss": 0.0073, | |
| "step": 5830 | |
| }, | |
| { | |
| "epoch": 54.57943925233645, | |
| "grad_norm": 0.1260395348072052, | |
| "learning_rate": 4.0306305091319595e-05, | |
| "loss": 0.0066, | |
| "step": 5840 | |
| }, | |
| { | |
| "epoch": 54.67289719626168, | |
| "grad_norm": 0.06664054840803146, | |
| "learning_rate": 4.0144148627425993e-05, | |
| "loss": 0.0064, | |
| "step": 5850 | |
| }, | |
| { | |
| "epoch": 54.76635514018692, | |
| "grad_norm": 0.12369495630264282, | |
| "learning_rate": 3.9982099945543945e-05, | |
| "loss": 0.0072, | |
| "step": 5860 | |
| }, | |
| { | |
| "epoch": 54.85981308411215, | |
| "grad_norm": 0.12784737348556519, | |
| "learning_rate": 3.982016081781189e-05, | |
| "loss": 0.0054, | |
| "step": 5870 | |
| }, | |
| { | |
| "epoch": 54.953271028037385, | |
| "grad_norm": 0.1303785741329193, | |
| "learning_rate": 3.965833301517017e-05, | |
| "loss": 0.0084, | |
| "step": 5880 | |
| }, | |
| { | |
| "epoch": 55.046728971962615, | |
| "grad_norm": 0.3063434660434723, | |
| "learning_rate": 3.949661830734172e-05, | |
| "loss": 0.0064, | |
| "step": 5890 | |
| }, | |
| { | |
| "epoch": 55.14018691588785, | |
| "grad_norm": 0.16514930129051208, | |
| "learning_rate": 3.933501846281267e-05, | |
| "loss": 0.0072, | |
| "step": 5900 | |
| }, | |
| { | |
| "epoch": 55.23364485981308, | |
| "grad_norm": 0.13724902272224426, | |
| "learning_rate": 3.917353524881302e-05, | |
| "loss": 0.0094, | |
| "step": 5910 | |
| }, | |
| { | |
| "epoch": 55.32710280373832, | |
| "grad_norm": 0.2272414267063141, | |
| "learning_rate": 3.901217043129735e-05, | |
| "loss": 0.0095, | |
| "step": 5920 | |
| }, | |
| { | |
| "epoch": 55.42056074766355, | |
| "grad_norm": 0.19817429780960083, | |
| "learning_rate": 3.8850925774925425e-05, | |
| "loss": 0.0087, | |
| "step": 5930 | |
| }, | |
| { | |
| "epoch": 55.51401869158879, | |
| "grad_norm": 0.15857623517513275, | |
| "learning_rate": 3.8689803043043e-05, | |
| "loss": 0.0097, | |
| "step": 5940 | |
| }, | |
| { | |
| "epoch": 55.60747663551402, | |
| "grad_norm": 0.10527165979146957, | |
| "learning_rate": 3.852880399766243e-05, | |
| "loss": 0.0059, | |
| "step": 5950 | |
| }, | |
| { | |
| "epoch": 55.70093457943925, | |
| "grad_norm": 0.13487043976783752, | |
| "learning_rate": 3.836793039944349e-05, | |
| "loss": 0.0069, | |
| "step": 5960 | |
| }, | |
| { | |
| "epoch": 55.794392523364486, | |
| "grad_norm": 0.12190820276737213, | |
| "learning_rate": 3.820718400767409e-05, | |
| "loss": 0.0061, | |
| "step": 5970 | |
| }, | |
| { | |
| "epoch": 55.88785046728972, | |
| "grad_norm": 0.15790730714797974, | |
| "learning_rate": 3.8046566580251e-05, | |
| "loss": 0.0049, | |
| "step": 5980 | |
| }, | |
| { | |
| "epoch": 55.981308411214954, | |
| "grad_norm": 0.25264978408813477, | |
| "learning_rate": 3.788607987366069e-05, | |
| "loss": 0.007, | |
| "step": 5990 | |
| }, | |
| { | |
| "epoch": 56.074766355140184, | |
| "grad_norm": 0.23202531039714813, | |
| "learning_rate": 3.772572564296005e-05, | |
| "loss": 0.0098, | |
| "step": 6000 | |
| }, | |
| { | |
| "epoch": 56.16822429906542, | |
| "grad_norm": 0.13732583820819855, | |
| "learning_rate": 3.756550564175727e-05, | |
| "loss": 0.007, | |
| "step": 6010 | |
| }, | |
| { | |
| "epoch": 56.26168224299065, | |
| "grad_norm": 0.20250828564167023, | |
| "learning_rate": 3.74054216221926e-05, | |
| "loss": 0.0081, | |
| "step": 6020 | |
| }, | |
| { | |
| "epoch": 56.35514018691589, | |
| "grad_norm": 0.08914332091808319, | |
| "learning_rate": 3.7245475334919246e-05, | |
| "loss": 0.0082, | |
| "step": 6030 | |
| }, | |
| { | |
| "epoch": 56.44859813084112, | |
| "grad_norm": 0.21319356560707092, | |
| "learning_rate": 3.7085668529084184e-05, | |
| "loss": 0.0103, | |
| "step": 6040 | |
| }, | |
| { | |
| "epoch": 56.54205607476636, | |
| "grad_norm": 0.13997061550617218, | |
| "learning_rate": 3.6926002952309016e-05, | |
| "loss": 0.0066, | |
| "step": 6050 | |
| }, | |
| { | |
| "epoch": 56.63551401869159, | |
| "grad_norm": 0.14500273764133453, | |
| "learning_rate": 3.676648035067093e-05, | |
| "loss": 0.0062, | |
| "step": 6060 | |
| }, | |
| { | |
| "epoch": 56.728971962616825, | |
| "grad_norm": 0.1436031460762024, | |
| "learning_rate": 3.6607102468683526e-05, | |
| "loss": 0.0061, | |
| "step": 6070 | |
| }, | |
| { | |
| "epoch": 56.822429906542055, | |
| "grad_norm": 0.12058055400848389, | |
| "learning_rate": 3.6447871049277796e-05, | |
| "loss": 0.0055, | |
| "step": 6080 | |
| }, | |
| { | |
| "epoch": 56.91588785046729, | |
| "grad_norm": 0.1439819186925888, | |
| "learning_rate": 3.628878783378302e-05, | |
| "loss": 0.0087, | |
| "step": 6090 | |
| }, | |
| { | |
| "epoch": 57.00934579439252, | |
| "grad_norm": 0.09871205687522888, | |
| "learning_rate": 3.612985456190778e-05, | |
| "loss": 0.0087, | |
| "step": 6100 | |
| }, | |
| { | |
| "epoch": 57.10280373831776, | |
| "grad_norm": 0.05546163022518158, | |
| "learning_rate": 3.597107297172084e-05, | |
| "loss": 0.0101, | |
| "step": 6110 | |
| }, | |
| { | |
| "epoch": 57.19626168224299, | |
| "grad_norm": 0.05012049525976181, | |
| "learning_rate": 3.581244479963225e-05, | |
| "loss": 0.0063, | |
| "step": 6120 | |
| }, | |
| { | |
| "epoch": 57.28971962616822, | |
| "grad_norm": 0.23360881209373474, | |
| "learning_rate": 3.5653971780374295e-05, | |
| "loss": 0.0103, | |
| "step": 6130 | |
| }, | |
| { | |
| "epoch": 57.38317757009346, | |
| "grad_norm": 0.1165015920996666, | |
| "learning_rate": 3.5495655646982505e-05, | |
| "loss": 0.0061, | |
| "step": 6140 | |
| }, | |
| { | |
| "epoch": 57.47663551401869, | |
| "grad_norm": 0.16637849807739258, | |
| "learning_rate": 3.533749813077677e-05, | |
| "loss": 0.0063, | |
| "step": 6150 | |
| }, | |
| { | |
| "epoch": 57.570093457943926, | |
| "grad_norm": 0.2202228456735611, | |
| "learning_rate": 3.517950096134232e-05, | |
| "loss": 0.0084, | |
| "step": 6160 | |
| }, | |
| { | |
| "epoch": 57.66355140186916, | |
| "grad_norm": 0.20240144431591034, | |
| "learning_rate": 3.5021665866510925e-05, | |
| "loss": 0.0073, | |
| "step": 6170 | |
| }, | |
| { | |
| "epoch": 57.757009345794394, | |
| "grad_norm": 0.11815023422241211, | |
| "learning_rate": 3.4863994572341843e-05, | |
| "loss": 0.0067, | |
| "step": 6180 | |
| }, | |
| { | |
| "epoch": 57.850467289719624, | |
| "grad_norm": 0.1485767364501953, | |
| "learning_rate": 3.470648880310313e-05, | |
| "loss": 0.005, | |
| "step": 6190 | |
| }, | |
| { | |
| "epoch": 57.94392523364486, | |
| "grad_norm": 0.16621701419353485, | |
| "learning_rate": 3.4549150281252636e-05, | |
| "loss": 0.0077, | |
| "step": 6200 | |
| }, | |
| { | |
| "epoch": 58.03738317757009, | |
| "grad_norm": 0.14100301265716553, | |
| "learning_rate": 3.439198072741921e-05, | |
| "loss": 0.0067, | |
| "step": 6210 | |
| }, | |
| { | |
| "epoch": 58.13084112149533, | |
| "grad_norm": 0.10529860854148865, | |
| "learning_rate": 3.423498186038393e-05, | |
| "loss": 0.011, | |
| "step": 6220 | |
| }, | |
| { | |
| "epoch": 58.22429906542056, | |
| "grad_norm": 0.23530688881874084, | |
| "learning_rate": 3.407815539706124e-05, | |
| "loss": 0.0087, | |
| "step": 6230 | |
| }, | |
| { | |
| "epoch": 58.3177570093458, | |
| "grad_norm": 0.06678273528814316, | |
| "learning_rate": 3.392150305248024e-05, | |
| "loss": 0.0055, | |
| "step": 6240 | |
| }, | |
| { | |
| "epoch": 58.41121495327103, | |
| "grad_norm": 0.08229216933250427, | |
| "learning_rate": 3.3765026539765834e-05, | |
| "loss": 0.0064, | |
| "step": 6250 | |
| }, | |
| { | |
| "epoch": 58.504672897196265, | |
| "grad_norm": 0.16697847843170166, | |
| "learning_rate": 3.360872757012011e-05, | |
| "loss": 0.0092, | |
| "step": 6260 | |
| }, | |
| { | |
| "epoch": 58.598130841121495, | |
| "grad_norm": 0.20726190507411957, | |
| "learning_rate": 3.3452607852803584e-05, | |
| "loss": 0.0087, | |
| "step": 6270 | |
| }, | |
| { | |
| "epoch": 58.691588785046726, | |
| "grad_norm": 0.16936185956001282, | |
| "learning_rate": 3.329666909511645e-05, | |
| "loss": 0.0066, | |
| "step": 6280 | |
| }, | |
| { | |
| "epoch": 58.78504672897196, | |
| "grad_norm": 0.15490904450416565, | |
| "learning_rate": 3.3140913002379995e-05, | |
| "loss": 0.0058, | |
| "step": 6290 | |
| }, | |
| { | |
| "epoch": 58.87850467289719, | |
| "grad_norm": 0.12930889427661896, | |
| "learning_rate": 3.298534127791785e-05, | |
| "loss": 0.0078, | |
| "step": 6300 | |
| }, | |
| { | |
| "epoch": 58.97196261682243, | |
| "grad_norm": 0.10294876247644424, | |
| "learning_rate": 3.282995562303754e-05, | |
| "loss": 0.0047, | |
| "step": 6310 | |
| }, | |
| { | |
| "epoch": 59.06542056074766, | |
| "grad_norm": 0.20665724575519562, | |
| "learning_rate": 3.267475773701161e-05, | |
| "loss": 0.0075, | |
| "step": 6320 | |
| }, | |
| { | |
| "epoch": 59.1588785046729, | |
| "grad_norm": 0.32335957884788513, | |
| "learning_rate": 3.251974931705933e-05, | |
| "loss": 0.0083, | |
| "step": 6330 | |
| }, | |
| { | |
| "epoch": 59.25233644859813, | |
| "grad_norm": 0.16983294486999512, | |
| "learning_rate": 3.236493205832795e-05, | |
| "loss": 0.007, | |
| "step": 6340 | |
| }, | |
| { | |
| "epoch": 59.345794392523366, | |
| "grad_norm": 0.22015579044818878, | |
| "learning_rate": 3.221030765387417e-05, | |
| "loss": 0.0071, | |
| "step": 6350 | |
| }, | |
| { | |
| "epoch": 59.4392523364486, | |
| "grad_norm": 0.18638084828853607, | |
| "learning_rate": 3.205587779464576e-05, | |
| "loss": 0.0055, | |
| "step": 6360 | |
| }, | |
| { | |
| "epoch": 59.532710280373834, | |
| "grad_norm": 0.10701462626457214, | |
| "learning_rate": 3.190164416946285e-05, | |
| "loss": 0.0059, | |
| "step": 6370 | |
| }, | |
| { | |
| "epoch": 59.626168224299064, | |
| "grad_norm": 0.15403886139392853, | |
| "learning_rate": 3.1747608464999725e-05, | |
| "loss": 0.0081, | |
| "step": 6380 | |
| }, | |
| { | |
| "epoch": 59.7196261682243, | |
| "grad_norm": 0.1373993307352066, | |
| "learning_rate": 3.1593772365766105e-05, | |
| "loss": 0.0091, | |
| "step": 6390 | |
| }, | |
| { | |
| "epoch": 59.81308411214953, | |
| "grad_norm": 0.08877486735582352, | |
| "learning_rate": 3.144013755408895e-05, | |
| "loss": 0.0071, | |
| "step": 6400 | |
| }, | |
| { | |
| "epoch": 59.90654205607477, | |
| "grad_norm": 0.10183851420879364, | |
| "learning_rate": 3.128670571009399e-05, | |
| "loss": 0.0075, | |
| "step": 6410 | |
| }, | |
| { | |
| "epoch": 60.0, | |
| "grad_norm": 0.1058053970336914, | |
| "learning_rate": 3.113347851168721e-05, | |
| "loss": 0.0071, | |
| "step": 6420 | |
| }, | |
| { | |
| "epoch": 60.09345794392523, | |
| "grad_norm": 0.16160264611244202, | |
| "learning_rate": 3.098045763453678e-05, | |
| "loss": 0.0064, | |
| "step": 6430 | |
| }, | |
| { | |
| "epoch": 60.18691588785047, | |
| "grad_norm": 0.13019979000091553, | |
| "learning_rate": 3.082764475205442e-05, | |
| "loss": 0.0049, | |
| "step": 6440 | |
| }, | |
| { | |
| "epoch": 60.2803738317757, | |
| "grad_norm": 0.0628226175904274, | |
| "learning_rate": 3.0675041535377405e-05, | |
| "loss": 0.0085, | |
| "step": 6450 | |
| }, | |
| { | |
| "epoch": 60.373831775700936, | |
| "grad_norm": 0.10675778985023499, | |
| "learning_rate": 3.052264965335e-05, | |
| "loss": 0.0041, | |
| "step": 6460 | |
| }, | |
| { | |
| "epoch": 60.467289719626166, | |
| "grad_norm": 0.1395292580127716, | |
| "learning_rate": 3.0370470772505433e-05, | |
| "loss": 0.006, | |
| "step": 6470 | |
| }, | |
| { | |
| "epoch": 60.5607476635514, | |
| "grad_norm": 0.14375652372837067, | |
| "learning_rate": 3.0218506557047598e-05, | |
| "loss": 0.0042, | |
| "step": 6480 | |
| }, | |
| { | |
| "epoch": 60.654205607476634, | |
| "grad_norm": 0.06682026386260986, | |
| "learning_rate": 3.006675866883275e-05, | |
| "loss": 0.0042, | |
| "step": 6490 | |
| }, | |
| { | |
| "epoch": 60.74766355140187, | |
| "grad_norm": 0.15054212510585785, | |
| "learning_rate": 2.991522876735154e-05, | |
| "loss": 0.0051, | |
| "step": 6500 | |
| }, | |
| { | |
| "epoch": 60.8411214953271, | |
| "grad_norm": 0.22600547969341278, | |
| "learning_rate": 2.976391850971065e-05, | |
| "loss": 0.0153, | |
| "step": 6510 | |
| }, | |
| { | |
| "epoch": 60.93457943925234, | |
| "grad_norm": 0.2411378026008606, | |
| "learning_rate": 2.9612829550614836e-05, | |
| "loss": 0.0075, | |
| "step": 6520 | |
| }, | |
| { | |
| "epoch": 61.02803738317757, | |
| "grad_norm": 0.21554498374462128, | |
| "learning_rate": 2.9461963542348737e-05, | |
| "loss": 0.0048, | |
| "step": 6530 | |
| }, | |
| { | |
| "epoch": 61.12149532710281, | |
| "grad_norm": 0.23806604743003845, | |
| "learning_rate": 2.931132213475884e-05, | |
| "loss": 0.0093, | |
| "step": 6540 | |
| }, | |
| { | |
| "epoch": 61.21495327102804, | |
| "grad_norm": 0.10214415937662125, | |
| "learning_rate": 2.916090697523549e-05, | |
| "loss": 0.0069, | |
| "step": 6550 | |
| }, | |
| { | |
| "epoch": 61.308411214953274, | |
| "grad_norm": 0.0819956436753273, | |
| "learning_rate": 2.9010719708694722e-05, | |
| "loss": 0.0039, | |
| "step": 6560 | |
| }, | |
| { | |
| "epoch": 61.401869158878505, | |
| "grad_norm": 0.09245266020298004, | |
| "learning_rate": 2.8860761977560436e-05, | |
| "loss": 0.0077, | |
| "step": 6570 | |
| }, | |
| { | |
| "epoch": 61.495327102803735, | |
| "grad_norm": 0.08326391875743866, | |
| "learning_rate": 2.8711035421746367e-05, | |
| "loss": 0.0045, | |
| "step": 6580 | |
| }, | |
| { | |
| "epoch": 61.58878504672897, | |
| "grad_norm": 0.1491008847951889, | |
| "learning_rate": 2.8561541678638142e-05, | |
| "loss": 0.0061, | |
| "step": 6590 | |
| }, | |
| { | |
| "epoch": 61.6822429906542, | |
| "grad_norm": 0.30525025725364685, | |
| "learning_rate": 2.8412282383075363e-05, | |
| "loss": 0.0052, | |
| "step": 6600 | |
| }, | |
| { | |
| "epoch": 61.77570093457944, | |
| "grad_norm": 0.09467942267656326, | |
| "learning_rate": 2.8263259167333777e-05, | |
| "loss": 0.0033, | |
| "step": 6610 | |
| }, | |
| { | |
| "epoch": 61.86915887850467, | |
| "grad_norm": 0.1267421990633011, | |
| "learning_rate": 2.811447366110741e-05, | |
| "loss": 0.0079, | |
| "step": 6620 | |
| }, | |
| { | |
| "epoch": 61.96261682242991, | |
| "grad_norm": 0.13594943284988403, | |
| "learning_rate": 2.7965927491490705e-05, | |
| "loss": 0.0049, | |
| "step": 6630 | |
| }, | |
| { | |
| "epoch": 62.05607476635514, | |
| "grad_norm": 0.08311861008405685, | |
| "learning_rate": 2.7817622282960815e-05, | |
| "loss": 0.0039, | |
| "step": 6640 | |
| }, | |
| { | |
| "epoch": 62.149532710280376, | |
| "grad_norm": 0.1365499198436737, | |
| "learning_rate": 2.766955965735968e-05, | |
| "loss": 0.0043, | |
| "step": 6650 | |
| }, | |
| { | |
| "epoch": 62.242990654205606, | |
| "grad_norm": 0.2722339630126953, | |
| "learning_rate": 2.7521741233876496e-05, | |
| "loss": 0.007, | |
| "step": 6660 | |
| }, | |
| { | |
| "epoch": 62.33644859813084, | |
| "grad_norm": 0.09474354982376099, | |
| "learning_rate": 2.7374168629029813e-05, | |
| "loss": 0.0058, | |
| "step": 6670 | |
| }, | |
| { | |
| "epoch": 62.429906542056074, | |
| "grad_norm": 0.08654358983039856, | |
| "learning_rate": 2.7226843456650037e-05, | |
| "loss": 0.0045, | |
| "step": 6680 | |
| }, | |
| { | |
| "epoch": 62.52336448598131, | |
| "grad_norm": 0.16651228070259094, | |
| "learning_rate": 2.707976732786166e-05, | |
| "loss": 0.0089, | |
| "step": 6690 | |
| }, | |
| { | |
| "epoch": 62.61682242990654, | |
| "grad_norm": 0.10585249215364456, | |
| "learning_rate": 2.693294185106562e-05, | |
| "loss": 0.0082, | |
| "step": 6700 | |
| }, | |
| { | |
| "epoch": 62.71028037383178, | |
| "grad_norm": 0.09274085611104965, | |
| "learning_rate": 2.6786368631921836e-05, | |
| "loss": 0.0051, | |
| "step": 6710 | |
| }, | |
| { | |
| "epoch": 62.80373831775701, | |
| "grad_norm": 0.12623822689056396, | |
| "learning_rate": 2.6640049273331515e-05, | |
| "loss": 0.0045, | |
| "step": 6720 | |
| }, | |
| { | |
| "epoch": 62.89719626168224, | |
| "grad_norm": 0.11692936718463898, | |
| "learning_rate": 2.6493985375419778e-05, | |
| "loss": 0.0093, | |
| "step": 6730 | |
| }, | |
| { | |
| "epoch": 62.99065420560748, | |
| "grad_norm": 0.22904883325099945, | |
| "learning_rate": 2.6348178535517966e-05, | |
| "loss": 0.0121, | |
| "step": 6740 | |
| }, | |
| { | |
| "epoch": 63.08411214953271, | |
| "grad_norm": 0.13337840139865875, | |
| "learning_rate": 2.6202630348146324e-05, | |
| "loss": 0.0049, | |
| "step": 6750 | |
| }, | |
| { | |
| "epoch": 63.177570093457945, | |
| "grad_norm": 0.1077757328748703, | |
| "learning_rate": 2.6057342404996522e-05, | |
| "loss": 0.0093, | |
| "step": 6760 | |
| }, | |
| { | |
| "epoch": 63.271028037383175, | |
| "grad_norm": 0.12814705073833466, | |
| "learning_rate": 2.591231629491423e-05, | |
| "loss": 0.009, | |
| "step": 6770 | |
| }, | |
| { | |
| "epoch": 63.36448598130841, | |
| "grad_norm": 0.09796059876680374, | |
| "learning_rate": 2.5767553603881767e-05, | |
| "loss": 0.0067, | |
| "step": 6780 | |
| }, | |
| { | |
| "epoch": 63.45794392523364, | |
| "grad_norm": 0.12925131618976593, | |
| "learning_rate": 2.562305591500069e-05, | |
| "loss": 0.005, | |
| "step": 6790 | |
| }, | |
| { | |
| "epoch": 63.55140186915888, | |
| "grad_norm": 0.12728284299373627, | |
| "learning_rate": 2.547882480847461e-05, | |
| "loss": 0.0106, | |
| "step": 6800 | |
| }, | |
| { | |
| "epoch": 63.64485981308411, | |
| "grad_norm": 0.16028623282909393, | |
| "learning_rate": 2.5334861861591753e-05, | |
| "loss": 0.0084, | |
| "step": 6810 | |
| }, | |
| { | |
| "epoch": 63.73831775700935, | |
| "grad_norm": 0.13662315905094147, | |
| "learning_rate": 2.5191168648707887e-05, | |
| "loss": 0.0051, | |
| "step": 6820 | |
| }, | |
| { | |
| "epoch": 63.83177570093458, | |
| "grad_norm": 0.15248225629329681, | |
| "learning_rate": 2.5047746741228978e-05, | |
| "loss": 0.0074, | |
| "step": 6830 | |
| }, | |
| { | |
| "epoch": 63.925233644859816, | |
| "grad_norm": 0.07730412483215332, | |
| "learning_rate": 2.490459770759398e-05, | |
| "loss": 0.0046, | |
| "step": 6840 | |
| }, | |
| { | |
| "epoch": 64.01869158878505, | |
| "grad_norm": 0.15421777963638306, | |
| "learning_rate": 2.476172311325783e-05, | |
| "loss": 0.006, | |
| "step": 6850 | |
| }, | |
| { | |
| "epoch": 64.11214953271028, | |
| "grad_norm": 0.07014045119285583, | |
| "learning_rate": 2.4619124520674146e-05, | |
| "loss": 0.0043, | |
| "step": 6860 | |
| }, | |
| { | |
| "epoch": 64.20560747663552, | |
| "grad_norm": 0.13341595232486725, | |
| "learning_rate": 2.447680348927837e-05, | |
| "loss": 0.0041, | |
| "step": 6870 | |
| }, | |
| { | |
| "epoch": 64.29906542056075, | |
| "grad_norm": 0.3038124442100525, | |
| "learning_rate": 2.433476157547044e-05, | |
| "loss": 0.0105, | |
| "step": 6880 | |
| }, | |
| { | |
| "epoch": 64.39252336448598, | |
| "grad_norm": 0.12947747111320496, | |
| "learning_rate": 2.419300033259798e-05, | |
| "loss": 0.004, | |
| "step": 6890 | |
| }, | |
| { | |
| "epoch": 64.48598130841121, | |
| "grad_norm": 0.12829992175102234, | |
| "learning_rate": 2.405152131093926e-05, | |
| "loss": 0.0051, | |
| "step": 6900 | |
| }, | |
| { | |
| "epoch": 64.57943925233644, | |
| "grad_norm": 0.10023665428161621, | |
| "learning_rate": 2.3910326057686127e-05, | |
| "loss": 0.0096, | |
| "step": 6910 | |
| }, | |
| { | |
| "epoch": 64.67289719626169, | |
| "grad_norm": 0.19918078184127808, | |
| "learning_rate": 2.3769416116927335e-05, | |
| "loss": 0.0076, | |
| "step": 6920 | |
| }, | |
| { | |
| "epoch": 64.76635514018692, | |
| "grad_norm": 0.1109958365559578, | |
| "learning_rate": 2.362879302963135e-05, | |
| "loss": 0.0048, | |
| "step": 6930 | |
| }, | |
| { | |
| "epoch": 64.85981308411215, | |
| "grad_norm": 0.07922573387622833, | |
| "learning_rate": 2.3488458333629777e-05, | |
| "loss": 0.0047, | |
| "step": 6940 | |
| }, | |
| { | |
| "epoch": 64.95327102803738, | |
| "grad_norm": 0.11461448669433594, | |
| "learning_rate": 2.3348413563600325e-05, | |
| "loss": 0.0057, | |
| "step": 6950 | |
| }, | |
| { | |
| "epoch": 65.04672897196262, | |
| "grad_norm": 0.13653631508350372, | |
| "learning_rate": 2.3208660251050158e-05, | |
| "loss": 0.0056, | |
| "step": 6960 | |
| }, | |
| { | |
| "epoch": 65.14018691588785, | |
| "grad_norm": 0.17430157959461212, | |
| "learning_rate": 2.3069199924299174e-05, | |
| "loss": 0.0062, | |
| "step": 6970 | |
| }, | |
| { | |
| "epoch": 65.23364485981308, | |
| "grad_norm": 0.245283842086792, | |
| "learning_rate": 2.29300341084631e-05, | |
| "loss": 0.0054, | |
| "step": 6980 | |
| }, | |
| { | |
| "epoch": 65.32710280373831, | |
| "grad_norm": 0.14271025359630585, | |
| "learning_rate": 2.279116432543705e-05, | |
| "loss": 0.0127, | |
| "step": 6990 | |
| }, | |
| { | |
| "epoch": 65.42056074766356, | |
| "grad_norm": 0.12084395438432693, | |
| "learning_rate": 2.2652592093878666e-05, | |
| "loss": 0.0062, | |
| "step": 7000 | |
| }, | |
| { | |
| "epoch": 65.51401869158879, | |
| "grad_norm": 0.12945696711540222, | |
| "learning_rate": 2.251431892919171e-05, | |
| "loss": 0.0071, | |
| "step": 7010 | |
| }, | |
| { | |
| "epoch": 65.60747663551402, | |
| "grad_norm": 0.10073532909154892, | |
| "learning_rate": 2.237634634350934e-05, | |
| "loss": 0.0049, | |
| "step": 7020 | |
| }, | |
| { | |
| "epoch": 65.70093457943925, | |
| "grad_norm": 0.0651387944817543, | |
| "learning_rate": 2.2238675845677663e-05, | |
| "loss": 0.0048, | |
| "step": 7030 | |
| }, | |
| { | |
| "epoch": 65.79439252336448, | |
| "grad_norm": 0.19357287883758545, | |
| "learning_rate": 2.2101308941239203e-05, | |
| "loss": 0.0056, | |
| "step": 7040 | |
| }, | |
| { | |
| "epoch": 65.88785046728972, | |
| "grad_norm": 0.0946953147649765, | |
| "learning_rate": 2.196424713241637e-05, | |
| "loss": 0.0058, | |
| "step": 7050 | |
| }, | |
| { | |
| "epoch": 65.98130841121495, | |
| "grad_norm": 0.129776269197464, | |
| "learning_rate": 2.182749191809518e-05, | |
| "loss": 0.0051, | |
| "step": 7060 | |
| }, | |
| { | |
| "epoch": 66.07476635514018, | |
| "grad_norm": 0.10389594733715057, | |
| "learning_rate": 2.1691044793808734e-05, | |
| "loss": 0.0079, | |
| "step": 7070 | |
| }, | |
| { | |
| "epoch": 66.16822429906541, | |
| "grad_norm": 0.16596883535385132, | |
| "learning_rate": 2.1554907251720945e-05, | |
| "loss": 0.0078, | |
| "step": 7080 | |
| }, | |
| { | |
| "epoch": 66.26168224299066, | |
| "grad_norm": 0.14865995943546295, | |
| "learning_rate": 2.1419080780610123e-05, | |
| "loss": 0.005, | |
| "step": 7090 | |
| }, | |
| { | |
| "epoch": 66.35514018691589, | |
| "grad_norm": 0.16961994767189026, | |
| "learning_rate": 2.128356686585282e-05, | |
| "loss": 0.0072, | |
| "step": 7100 | |
| }, | |
| { | |
| "epoch": 66.44859813084112, | |
| "grad_norm": 0.11203950643539429, | |
| "learning_rate": 2.1148366989407496e-05, | |
| "loss": 0.006, | |
| "step": 7110 | |
| }, | |
| { | |
| "epoch": 66.54205607476635, | |
| "grad_norm": 0.14505212008953094, | |
| "learning_rate": 2.1013482629798333e-05, | |
| "loss": 0.0047, | |
| "step": 7120 | |
| }, | |
| { | |
| "epoch": 66.6355140186916, | |
| "grad_norm": 0.08906478434801102, | |
| "learning_rate": 2.0878915262099098e-05, | |
| "loss": 0.0058, | |
| "step": 7130 | |
| }, | |
| { | |
| "epoch": 66.72897196261682, | |
| "grad_norm": 0.23099195957183838, | |
| "learning_rate": 2.0744666357916925e-05, | |
| "loss": 0.0059, | |
| "step": 7140 | |
| }, | |
| { | |
| "epoch": 66.82242990654206, | |
| "grad_norm": 0.10707362741231918, | |
| "learning_rate": 2.061073738537635e-05, | |
| "loss": 0.005, | |
| "step": 7150 | |
| }, | |
| { | |
| "epoch": 66.91588785046729, | |
| "grad_norm": 0.055628702044487, | |
| "learning_rate": 2.0477129809103147e-05, | |
| "loss": 0.009, | |
| "step": 7160 | |
| }, | |
| { | |
| "epoch": 67.00934579439253, | |
| "grad_norm": 0.15333709120750427, | |
| "learning_rate": 2.0343845090208368e-05, | |
| "loss": 0.0047, | |
| "step": 7170 | |
| }, | |
| { | |
| "epoch": 67.10280373831776, | |
| "grad_norm": 0.130838081240654, | |
| "learning_rate": 2.0210884686272368e-05, | |
| "loss": 0.0052, | |
| "step": 7180 | |
| }, | |
| { | |
| "epoch": 67.19626168224299, | |
| "grad_norm": 0.08488751947879791, | |
| "learning_rate": 2.0078250051328784e-05, | |
| "loss": 0.0045, | |
| "step": 7190 | |
| }, | |
| { | |
| "epoch": 67.28971962616822, | |
| "grad_norm": 0.15652534365653992, | |
| "learning_rate": 1.9945942635848748e-05, | |
| "loss": 0.0073, | |
| "step": 7200 | |
| }, | |
| { | |
| "epoch": 67.38317757009345, | |
| "grad_norm": 0.08217427134513855, | |
| "learning_rate": 1.981396388672496e-05, | |
| "loss": 0.0039, | |
| "step": 7210 | |
| }, | |
| { | |
| "epoch": 67.4766355140187, | |
| "grad_norm": 0.11081071197986603, | |
| "learning_rate": 1.9682315247255894e-05, | |
| "loss": 0.004, | |
| "step": 7220 | |
| }, | |
| { | |
| "epoch": 67.57009345794393, | |
| "grad_norm": 0.14157460629940033, | |
| "learning_rate": 1.9550998157129946e-05, | |
| "loss": 0.005, | |
| "step": 7230 | |
| }, | |
| { | |
| "epoch": 67.66355140186916, | |
| "grad_norm": 0.13172607123851776, | |
| "learning_rate": 1.942001405240979e-05, | |
| "loss": 0.0049, | |
| "step": 7240 | |
| }, | |
| { | |
| "epoch": 67.75700934579439, | |
| "grad_norm": 0.1288451850414276, | |
| "learning_rate": 1.928936436551661e-05, | |
| "loss": 0.0063, | |
| "step": 7250 | |
| }, | |
| { | |
| "epoch": 67.85046728971963, | |
| "grad_norm": 0.061469387263059616, | |
| "learning_rate": 1.9159050525214452e-05, | |
| "loss": 0.0043, | |
| "step": 7260 | |
| }, | |
| { | |
| "epoch": 67.94392523364486, | |
| "grad_norm": 0.19150938093662262, | |
| "learning_rate": 1.9029073956594606e-05, | |
| "loss": 0.0077, | |
| "step": 7270 | |
| }, | |
| { | |
| "epoch": 68.03738317757009, | |
| "grad_norm": 0.11590924859046936, | |
| "learning_rate": 1.8899436081059975e-05, | |
| "loss": 0.0039, | |
| "step": 7280 | |
| }, | |
| { | |
| "epoch": 68.13084112149532, | |
| "grad_norm": 0.18850106000900269, | |
| "learning_rate": 1.877013831630961e-05, | |
| "loss": 0.0045, | |
| "step": 7290 | |
| }, | |
| { | |
| "epoch": 68.22429906542057, | |
| "grad_norm": 0.07713674753904343, | |
| "learning_rate": 1.8641182076323148e-05, | |
| "loss": 0.0108, | |
| "step": 7300 | |
| }, | |
| { | |
| "epoch": 68.3177570093458, | |
| "grad_norm": 0.15120600163936615, | |
| "learning_rate": 1.851256877134538e-05, | |
| "loss": 0.0038, | |
| "step": 7310 | |
| }, | |
| { | |
| "epoch": 68.41121495327103, | |
| "grad_norm": 0.16040542721748352, | |
| "learning_rate": 1.838429980787081e-05, | |
| "loss": 0.0036, | |
| "step": 7320 | |
| }, | |
| { | |
| "epoch": 68.50467289719626, | |
| "grad_norm": 0.10460657626390457, | |
| "learning_rate": 1.8256376588628238e-05, | |
| "loss": 0.0045, | |
| "step": 7330 | |
| }, | |
| { | |
| "epoch": 68.59813084112149, | |
| "grad_norm": 0.13424362242221832, | |
| "learning_rate": 1.8128800512565513e-05, | |
| "loss": 0.0055, | |
| "step": 7340 | |
| }, | |
| { | |
| "epoch": 68.69158878504673, | |
| "grad_norm": 0.18600434064865112, | |
| "learning_rate": 1.800157297483417e-05, | |
| "loss": 0.0063, | |
| "step": 7350 | |
| }, | |
| { | |
| "epoch": 68.78504672897196, | |
| "grad_norm": 0.08261007815599442, | |
| "learning_rate": 1.787469536677419e-05, | |
| "loss": 0.0075, | |
| "step": 7360 | |
| }, | |
| { | |
| "epoch": 68.8785046728972, | |
| "grad_norm": 0.08533482998609543, | |
| "learning_rate": 1.774816907589873e-05, | |
| "loss": 0.0072, | |
| "step": 7370 | |
| }, | |
| { | |
| "epoch": 68.97196261682242, | |
| "grad_norm": 0.06679965555667877, | |
| "learning_rate": 1.7621995485879062e-05, | |
| "loss": 0.0064, | |
| "step": 7380 | |
| }, | |
| { | |
| "epoch": 69.06542056074767, | |
| "grad_norm": 0.0962221547961235, | |
| "learning_rate": 1.749617597652934e-05, | |
| "loss": 0.0048, | |
| "step": 7390 | |
| }, | |
| { | |
| "epoch": 69.1588785046729, | |
| "grad_norm": 0.15369288623332977, | |
| "learning_rate": 1.7370711923791567e-05, | |
| "loss": 0.0043, | |
| "step": 7400 | |
| }, | |
| { | |
| "epoch": 69.25233644859813, | |
| "grad_norm": 0.09784958511590958, | |
| "learning_rate": 1.7245604699720535e-05, | |
| "loss": 0.0045, | |
| "step": 7410 | |
| }, | |
| { | |
| "epoch": 69.34579439252336, | |
| "grad_norm": 0.04342583566904068, | |
| "learning_rate": 1.712085567246878e-05, | |
| "loss": 0.0156, | |
| "step": 7420 | |
| }, | |
| { | |
| "epoch": 69.4392523364486, | |
| "grad_norm": 0.09830091148614883, | |
| "learning_rate": 1.699646620627168e-05, | |
| "loss": 0.0092, | |
| "step": 7430 | |
| }, | |
| { | |
| "epoch": 69.53271028037383, | |
| "grad_norm": 0.15314213931560516, | |
| "learning_rate": 1.6872437661432517e-05, | |
| "loss": 0.0048, | |
| "step": 7440 | |
| }, | |
| { | |
| "epoch": 69.62616822429906, | |
| "grad_norm": 0.1179867535829544, | |
| "learning_rate": 1.6748771394307585e-05, | |
| "loss": 0.0045, | |
| "step": 7450 | |
| }, | |
| { | |
| "epoch": 69.7196261682243, | |
| "grad_norm": 0.10799465328454971, | |
| "learning_rate": 1.662546875729138e-05, | |
| "loss": 0.0045, | |
| "step": 7460 | |
| }, | |
| { | |
| "epoch": 69.81308411214954, | |
| "grad_norm": 0.07715349644422531, | |
| "learning_rate": 1.6502531098801753e-05, | |
| "loss": 0.0061, | |
| "step": 7470 | |
| }, | |
| { | |
| "epoch": 69.90654205607477, | |
| "grad_norm": 0.2066328078508377, | |
| "learning_rate": 1.637995976326527e-05, | |
| "loss": 0.0121, | |
| "step": 7480 | |
| }, | |
| { | |
| "epoch": 70.0, | |
| "grad_norm": 0.10284323245286942, | |
| "learning_rate": 1.62577560911024e-05, | |
| "loss": 0.0056, | |
| "step": 7490 | |
| }, | |
| { | |
| "epoch": 70.09345794392523, | |
| "grad_norm": 0.11130893975496292, | |
| "learning_rate": 1.6135921418712956e-05, | |
| "loss": 0.0058, | |
| "step": 7500 | |
| }, | |
| { | |
| "epoch": 70.18691588785046, | |
| "grad_norm": 0.19642308354377747, | |
| "learning_rate": 1.6014457078461353e-05, | |
| "loss": 0.0038, | |
| "step": 7510 | |
| }, | |
| { | |
| "epoch": 70.2803738317757, | |
| "grad_norm": 0.08608820289373398, | |
| "learning_rate": 1.5893364398662176e-05, | |
| "loss": 0.0082, | |
| "step": 7520 | |
| }, | |
| { | |
| "epoch": 70.37383177570094, | |
| "grad_norm": 0.13513289391994476, | |
| "learning_rate": 1.5772644703565565e-05, | |
| "loss": 0.0106, | |
| "step": 7530 | |
| }, | |
| { | |
| "epoch": 70.46728971962617, | |
| "grad_norm": 0.0810285359621048, | |
| "learning_rate": 1.5652299313342773e-05, | |
| "loss": 0.0059, | |
| "step": 7540 | |
| }, | |
| { | |
| "epoch": 70.5607476635514, | |
| "grad_norm": 0.0977608859539032, | |
| "learning_rate": 1.553232954407171e-05, | |
| "loss": 0.0054, | |
| "step": 7550 | |
| }, | |
| { | |
| "epoch": 70.65420560747664, | |
| "grad_norm": 0.0938141942024231, | |
| "learning_rate": 1.5412736707722537e-05, | |
| "loss": 0.0042, | |
| "step": 7560 | |
| }, | |
| { | |
| "epoch": 70.74766355140187, | |
| "grad_norm": 0.06880947202444077, | |
| "learning_rate": 1.5293522112143373e-05, | |
| "loss": 0.0036, | |
| "step": 7570 | |
| }, | |
| { | |
| "epoch": 70.8411214953271, | |
| "grad_norm": 0.1963202804327011, | |
| "learning_rate": 1.517468706104589e-05, | |
| "loss": 0.0055, | |
| "step": 7580 | |
| }, | |
| { | |
| "epoch": 70.93457943925233, | |
| "grad_norm": 0.054219506680965424, | |
| "learning_rate": 1.5056232853991209e-05, | |
| "loss": 0.0069, | |
| "step": 7590 | |
| }, | |
| { | |
| "epoch": 71.02803738317758, | |
| "grad_norm": 0.1340658813714981, | |
| "learning_rate": 1.4938160786375572e-05, | |
| "loss": 0.006, | |
| "step": 7600 | |
| }, | |
| { | |
| "epoch": 71.1214953271028, | |
| "grad_norm": 0.1702129989862442, | |
| "learning_rate": 1.4820472149416154e-05, | |
| "loss": 0.0051, | |
| "step": 7610 | |
| }, | |
| { | |
| "epoch": 71.21495327102804, | |
| "grad_norm": 0.09247569739818573, | |
| "learning_rate": 1.470316823013707e-05, | |
| "loss": 0.0029, | |
| "step": 7620 | |
| }, | |
| { | |
| "epoch": 71.30841121495327, | |
| "grad_norm": 0.1598488688468933, | |
| "learning_rate": 1.4586250311355132e-05, | |
| "loss": 0.0109, | |
| "step": 7630 | |
| }, | |
| { | |
| "epoch": 71.40186915887851, | |
| "grad_norm": 0.07022323459386826, | |
| "learning_rate": 1.4469719671666043e-05, | |
| "loss": 0.0038, | |
| "step": 7640 | |
| }, | |
| { | |
| "epoch": 71.49532710280374, | |
| "grad_norm": 0.08927827328443527, | |
| "learning_rate": 1.435357758543015e-05, | |
| "loss": 0.0065, | |
| "step": 7650 | |
| }, | |
| { | |
| "epoch": 71.58878504672897, | |
| "grad_norm": 0.06398759037256241, | |
| "learning_rate": 1.4237825322758736e-05, | |
| "loss": 0.0066, | |
| "step": 7660 | |
| }, | |
| { | |
| "epoch": 71.6822429906542, | |
| "grad_norm": 0.06025317683815956, | |
| "learning_rate": 1.412246414949997e-05, | |
| "loss": 0.004, | |
| "step": 7670 | |
| }, | |
| { | |
| "epoch": 71.77570093457943, | |
| "grad_norm": 0.05556010454893112, | |
| "learning_rate": 1.4007495327225162e-05, | |
| "loss": 0.0048, | |
| "step": 7680 | |
| }, | |
| { | |
| "epoch": 71.86915887850468, | |
| "grad_norm": 0.06115364655852318, | |
| "learning_rate": 1.389292011321498e-05, | |
| "loss": 0.0051, | |
| "step": 7690 | |
| }, | |
| { | |
| "epoch": 71.96261682242991, | |
| "grad_norm": 0.12382921576499939, | |
| "learning_rate": 1.3778739760445552e-05, | |
| "loss": 0.0046, | |
| "step": 7700 | |
| }, | |
| { | |
| "epoch": 72.05607476635514, | |
| "grad_norm": 0.08170247077941895, | |
| "learning_rate": 1.3664955517574968e-05, | |
| "loss": 0.0039, | |
| "step": 7710 | |
| }, | |
| { | |
| "epoch": 72.14953271028037, | |
| "grad_norm": 0.08965415507555008, | |
| "learning_rate": 1.3551568628929434e-05, | |
| "loss": 0.0074, | |
| "step": 7720 | |
| }, | |
| { | |
| "epoch": 72.24299065420561, | |
| "grad_norm": 0.18327778577804565, | |
| "learning_rate": 1.343858033448982e-05, | |
| "loss": 0.0058, | |
| "step": 7730 | |
| }, | |
| { | |
| "epoch": 72.33644859813084, | |
| "grad_norm": 0.2206907570362091, | |
| "learning_rate": 1.3325991869878013e-05, | |
| "loss": 0.0046, | |
| "step": 7740 | |
| }, | |
| { | |
| "epoch": 72.42990654205607, | |
| "grad_norm": 0.14453735947608948, | |
| "learning_rate": 1.3213804466343421e-05, | |
| "loss": 0.0053, | |
| "step": 7750 | |
| }, | |
| { | |
| "epoch": 72.5233644859813, | |
| "grad_norm": 0.0630757138133049, | |
| "learning_rate": 1.3102019350749528e-05, | |
| "loss": 0.004, | |
| "step": 7760 | |
| }, | |
| { | |
| "epoch": 72.61682242990655, | |
| "grad_norm": 0.07396890968084335, | |
| "learning_rate": 1.299063774556042e-05, | |
| "loss": 0.0036, | |
| "step": 7770 | |
| }, | |
| { | |
| "epoch": 72.71028037383178, | |
| "grad_norm": 0.07948428392410278, | |
| "learning_rate": 1.2879660868827508e-05, | |
| "loss": 0.0045, | |
| "step": 7780 | |
| }, | |
| { | |
| "epoch": 72.80373831775701, | |
| "grad_norm": 0.13974665105342865, | |
| "learning_rate": 1.2769089934176126e-05, | |
| "loss": 0.0066, | |
| "step": 7790 | |
| }, | |
| { | |
| "epoch": 72.89719626168224, | |
| "grad_norm": 0.0720183327794075, | |
| "learning_rate": 1.2658926150792322e-05, | |
| "loss": 0.0035, | |
| "step": 7800 | |
| }, | |
| { | |
| "epoch": 72.99065420560747, | |
| "grad_norm": 0.0702391117811203, | |
| "learning_rate": 1.2549170723409549e-05, | |
| "loss": 0.0041, | |
| "step": 7810 | |
| }, | |
| { | |
| "epoch": 73.08411214953271, | |
| "grad_norm": 0.10197412967681885, | |
| "learning_rate": 1.243982485229559e-05, | |
| "loss": 0.0045, | |
| "step": 7820 | |
| }, | |
| { | |
| "epoch": 73.17757009345794, | |
| "grad_norm": 0.20599843561649323, | |
| "learning_rate": 1.233088973323937e-05, | |
| "loss": 0.0047, | |
| "step": 7830 | |
| }, | |
| { | |
| "epoch": 73.27102803738318, | |
| "grad_norm": 0.0804935023188591, | |
| "learning_rate": 1.2222366557537911e-05, | |
| "loss": 0.0035, | |
| "step": 7840 | |
| }, | |
| { | |
| "epoch": 73.3644859813084, | |
| "grad_norm": 0.12421528249979019, | |
| "learning_rate": 1.2114256511983274e-05, | |
| "loss": 0.004, | |
| "step": 7850 | |
| }, | |
| { | |
| "epoch": 73.45794392523365, | |
| "grad_norm": 0.09544029086828232, | |
| "learning_rate": 1.2006560778849578e-05, | |
| "loss": 0.0041, | |
| "step": 7860 | |
| }, | |
| { | |
| "epoch": 73.55140186915888, | |
| "grad_norm": 0.10466738790273666, | |
| "learning_rate": 1.1899280535880119e-05, | |
| "loss": 0.0064, | |
| "step": 7870 | |
| }, | |
| { | |
| "epoch": 73.64485981308411, | |
| "grad_norm": 0.14154809713363647, | |
| "learning_rate": 1.1792416956274444e-05, | |
| "loss": 0.0062, | |
| "step": 7880 | |
| }, | |
| { | |
| "epoch": 73.73831775700934, | |
| "grad_norm": 0.058023110032081604, | |
| "learning_rate": 1.1685971208675539e-05, | |
| "loss": 0.0051, | |
| "step": 7890 | |
| }, | |
| { | |
| "epoch": 73.83177570093459, | |
| "grad_norm": 0.09609735012054443, | |
| "learning_rate": 1.157994445715706e-05, | |
| "loss": 0.0091, | |
| "step": 7900 | |
| }, | |
| { | |
| "epoch": 73.92523364485982, | |
| "grad_norm": 0.09363383799791336, | |
| "learning_rate": 1.1474337861210543e-05, | |
| "loss": 0.0041, | |
| "step": 7910 | |
| }, | |
| { | |
| "epoch": 74.01869158878505, | |
| "grad_norm": 0.14261376857757568, | |
| "learning_rate": 1.1369152575732822e-05, | |
| "loss": 0.0034, | |
| "step": 7920 | |
| }, | |
| { | |
| "epoch": 74.11214953271028, | |
| "grad_norm": 0.06502056121826172, | |
| "learning_rate": 1.1264389751013326e-05, | |
| "loss": 0.0042, | |
| "step": 7930 | |
| }, | |
| { | |
| "epoch": 74.20560747663552, | |
| "grad_norm": 0.08161542564630508, | |
| "learning_rate": 1.1160050532721528e-05, | |
| "loss": 0.0063, | |
| "step": 7940 | |
| }, | |
| { | |
| "epoch": 74.29906542056075, | |
| "grad_norm": 0.05485191568732262, | |
| "learning_rate": 1.1056136061894384e-05, | |
| "loss": 0.0036, | |
| "step": 7950 | |
| }, | |
| { | |
| "epoch": 74.39252336448598, | |
| "grad_norm": 0.14355270564556122, | |
| "learning_rate": 1.095264747492391e-05, | |
| "loss": 0.0047, | |
| "step": 7960 | |
| }, | |
| { | |
| "epoch": 74.48598130841121, | |
| "grad_norm": 0.08420294523239136, | |
| "learning_rate": 1.0849585903544706e-05, | |
| "loss": 0.0053, | |
| "step": 7970 | |
| }, | |
| { | |
| "epoch": 74.57943925233644, | |
| "grad_norm": 0.08172670006752014, | |
| "learning_rate": 1.0746952474821614e-05, | |
| "loss": 0.003, | |
| "step": 7980 | |
| }, | |
| { | |
| "epoch": 74.67289719626169, | |
| "grad_norm": 0.07927118986845016, | |
| "learning_rate": 1.0644748311137376e-05, | |
| "loss": 0.0052, | |
| "step": 7990 | |
| }, | |
| { | |
| "epoch": 74.76635514018692, | |
| "grad_norm": 0.10906045883893967, | |
| "learning_rate": 1.0542974530180327e-05, | |
| "loss": 0.0053, | |
| "step": 8000 | |
| }, | |
| { | |
| "epoch": 74.85981308411215, | |
| "grad_norm": 0.10352658480405807, | |
| "learning_rate": 1.0441632244932237e-05, | |
| "loss": 0.0043, | |
| "step": 8010 | |
| }, | |
| { | |
| "epoch": 74.95327102803738, | |
| "grad_norm": 0.13669471442699432, | |
| "learning_rate": 1.0340722563656107e-05, | |
| "loss": 0.0048, | |
| "step": 8020 | |
| }, | |
| { | |
| "epoch": 75.04672897196262, | |
| "grad_norm": 0.12065304070711136, | |
| "learning_rate": 1.0240246589884044e-05, | |
| "loss": 0.0037, | |
| "step": 8030 | |
| }, | |
| { | |
| "epoch": 75.14018691588785, | |
| "grad_norm": 0.11262913048267365, | |
| "learning_rate": 1.0140205422405214e-05, | |
| "loss": 0.0042, | |
| "step": 8040 | |
| }, | |
| { | |
| "epoch": 75.23364485981308, | |
| "grad_norm": 0.06449510157108307, | |
| "learning_rate": 1.0040600155253765e-05, | |
| "loss": 0.0045, | |
| "step": 8050 | |
| }, | |
| { | |
| "epoch": 75.32710280373831, | |
| "grad_norm": 0.15835675597190857, | |
| "learning_rate": 9.941431877696955e-06, | |
| "loss": 0.0061, | |
| "step": 8060 | |
| }, | |
| { | |
| "epoch": 75.42056074766356, | |
| "grad_norm": 0.10386522859334946, | |
| "learning_rate": 9.842701674223187e-06, | |
| "loss": 0.004, | |
| "step": 8070 | |
| }, | |
| { | |
| "epoch": 75.51401869158879, | |
| "grad_norm": 0.07288730889558792, | |
| "learning_rate": 9.744410624530148e-06, | |
| "loss": 0.0065, | |
| "step": 8080 | |
| }, | |
| { | |
| "epoch": 75.60747663551402, | |
| "grad_norm": 0.05738939344882965, | |
| "learning_rate": 9.646559803512994e-06, | |
| "loss": 0.0034, | |
| "step": 8090 | |
| }, | |
| { | |
| "epoch": 75.70093457943925, | |
| "grad_norm": 0.0958879292011261, | |
| "learning_rate": 9.549150281252633e-06, | |
| "loss": 0.0045, | |
| "step": 8100 | |
| }, | |
| { | |
| "epoch": 75.79439252336448, | |
| "grad_norm": 0.0466642864048481, | |
| "learning_rate": 9.452183123004e-06, | |
| "loss": 0.0055, | |
| "step": 8110 | |
| }, | |
| { | |
| "epoch": 75.88785046728972, | |
| "grad_norm": 0.1655125766992569, | |
| "learning_rate": 9.355659389184396e-06, | |
| "loss": 0.0076, | |
| "step": 8120 | |
| }, | |
| { | |
| "epoch": 75.98130841121495, | |
| "grad_norm": 0.12360979616641998, | |
| "learning_rate": 9.259580135361929e-06, | |
| "loss": 0.0047, | |
| "step": 8130 | |
| }, | |
| { | |
| "epoch": 76.07476635514018, | |
| "grad_norm": 0.06821216642856598, | |
| "learning_rate": 9.163946412243896e-06, | |
| "loss": 0.005, | |
| "step": 8140 | |
| }, | |
| { | |
| "epoch": 76.16822429906541, | |
| "grad_norm": 0.04745886102318764, | |
| "learning_rate": 9.068759265665384e-06, | |
| "loss": 0.0034, | |
| "step": 8150 | |
| }, | |
| { | |
| "epoch": 76.26168224299066, | |
| "grad_norm": 0.1106477677822113, | |
| "learning_rate": 8.974019736577777e-06, | |
| "loss": 0.0051, | |
| "step": 8160 | |
| }, | |
| { | |
| "epoch": 76.35514018691589, | |
| "grad_norm": 0.06387385725975037, | |
| "learning_rate": 8.879728861037384e-06, | |
| "loss": 0.006, | |
| "step": 8170 | |
| }, | |
| { | |
| "epoch": 76.44859813084112, | |
| "grad_norm": 0.08066016435623169, | |
| "learning_rate": 8.785887670194138e-06, | |
| "loss": 0.0037, | |
| "step": 8180 | |
| }, | |
| { | |
| "epoch": 76.54205607476635, | |
| "grad_norm": 0.06591822952032089, | |
| "learning_rate": 8.692497190280224e-06, | |
| "loss": 0.0053, | |
| "step": 8190 | |
| }, | |
| { | |
| "epoch": 76.6355140186916, | |
| "grad_norm": 0.1523038148880005, | |
| "learning_rate": 8.599558442598998e-06, | |
| "loss": 0.006, | |
| "step": 8200 | |
| }, | |
| { | |
| "epoch": 76.72897196261682, | |
| "grad_norm": 0.08181269466876984, | |
| "learning_rate": 8.507072443513702e-06, | |
| "loss": 0.0046, | |
| "step": 8210 | |
| }, | |
| { | |
| "epoch": 76.82242990654206, | |
| "grad_norm": 0.12298338115215302, | |
| "learning_rate": 8.415040204436426e-06, | |
| "loss": 0.0045, | |
| "step": 8220 | |
| }, | |
| { | |
| "epoch": 76.91588785046729, | |
| "grad_norm": 0.28609699010849, | |
| "learning_rate": 8.323462731816961e-06, | |
| "loss": 0.0046, | |
| "step": 8230 | |
| }, | |
| { | |
| "epoch": 77.00934579439253, | |
| "grad_norm": 0.11661451309919357, | |
| "learning_rate": 8.232341027131885e-06, | |
| "loss": 0.005, | |
| "step": 8240 | |
| }, | |
| { | |
| "epoch": 77.10280373831776, | |
| "grad_norm": 0.13034674525260925, | |
| "learning_rate": 8.141676086873572e-06, | |
| "loss": 0.0043, | |
| "step": 8250 | |
| }, | |
| { | |
| "epoch": 77.19626168224299, | |
| "grad_norm": 0.09487629681825638, | |
| "learning_rate": 8.051468902539272e-06, | |
| "loss": 0.0042, | |
| "step": 8260 | |
| }, | |
| { | |
| "epoch": 77.28971962616822, | |
| "grad_norm": 0.12193930894136429, | |
| "learning_rate": 7.96172046062032e-06, | |
| "loss": 0.0047, | |
| "step": 8270 | |
| }, | |
| { | |
| "epoch": 77.38317757009345, | |
| "grad_norm": 0.14566880464553833, | |
| "learning_rate": 7.872431742591268e-06, | |
| "loss": 0.0056, | |
| "step": 8280 | |
| }, | |
| { | |
| "epoch": 77.4766355140187, | |
| "grad_norm": 0.09443622827529907, | |
| "learning_rate": 7.783603724899257e-06, | |
| "loss": 0.0066, | |
| "step": 8290 | |
| }, | |
| { | |
| "epoch": 77.57009345794393, | |
| "grad_norm": 0.03891506791114807, | |
| "learning_rate": 7.695237378953223e-06, | |
| "loss": 0.0022, | |
| "step": 8300 | |
| }, | |
| { | |
| "epoch": 77.66355140186916, | |
| "grad_norm": 0.05303707346320152, | |
| "learning_rate": 7.607333671113409e-06, | |
| "loss": 0.002, | |
| "step": 8310 | |
| }, | |
| { | |
| "epoch": 77.75700934579439, | |
| "grad_norm": 0.1763453185558319, | |
| "learning_rate": 7.519893562680663e-06, | |
| "loss": 0.0049, | |
| "step": 8320 | |
| }, | |
| { | |
| "epoch": 77.85046728971963, | |
| "grad_norm": 0.045349784195423126, | |
| "learning_rate": 7.432918009885997e-06, | |
| "loss": 0.0044, | |
| "step": 8330 | |
| }, | |
| { | |
| "epoch": 77.94392523364486, | |
| "grad_norm": 0.12692664563655853, | |
| "learning_rate": 7.3464079638801365e-06, | |
| "loss": 0.0095, | |
| "step": 8340 | |
| }, | |
| { | |
| "epoch": 78.03738317757009, | |
| "grad_norm": 0.07811149954795837, | |
| "learning_rate": 7.260364370723044e-06, | |
| "loss": 0.003, | |
| "step": 8350 | |
| }, | |
| { | |
| "epoch": 78.13084112149532, | |
| "grad_norm": 0.15021850168704987, | |
| "learning_rate": 7.174788171373731e-06, | |
| "loss": 0.0069, | |
| "step": 8360 | |
| }, | |
| { | |
| "epoch": 78.22429906542057, | |
| "grad_norm": 0.0827387198805809, | |
| "learning_rate": 7.089680301679752e-06, | |
| "loss": 0.0081, | |
| "step": 8370 | |
| }, | |
| { | |
| "epoch": 78.3177570093458, | |
| "grad_norm": 0.08250360190868378, | |
| "learning_rate": 7.005041692367154e-06, | |
| "loss": 0.0054, | |
| "step": 8380 | |
| }, | |
| { | |
| "epoch": 78.41121495327103, | |
| "grad_norm": 0.1127791777253151, | |
| "learning_rate": 6.92087326903022e-06, | |
| "loss": 0.0035, | |
| "step": 8390 | |
| }, | |
| { | |
| "epoch": 78.50467289719626, | |
| "grad_norm": 0.05626427009701729, | |
| "learning_rate": 6.837175952121306e-06, | |
| "loss": 0.0032, | |
| "step": 8400 | |
| }, | |
| { | |
| "epoch": 78.59813084112149, | |
| "grad_norm": 0.11432872712612152, | |
| "learning_rate": 6.753950656940905e-06, | |
| "loss": 0.003, | |
| "step": 8410 | |
| }, | |
| { | |
| "epoch": 78.69158878504673, | |
| "grad_norm": 0.0607433021068573, | |
| "learning_rate": 6.671198293627479e-06, | |
| "loss": 0.0028, | |
| "step": 8420 | |
| }, | |
| { | |
| "epoch": 78.78504672897196, | |
| "grad_norm": 0.05074009671807289, | |
| "learning_rate": 6.588919767147639e-06, | |
| "loss": 0.0047, | |
| "step": 8430 | |
| }, | |
| { | |
| "epoch": 78.8785046728972, | |
| "grad_norm": 0.07059110701084137, | |
| "learning_rate": 6.5071159772861436e-06, | |
| "loss": 0.0024, | |
| "step": 8440 | |
| }, | |
| { | |
| "epoch": 78.97196261682242, | |
| "grad_norm": 0.07970912009477615, | |
| "learning_rate": 6.425787818636131e-06, | |
| "loss": 0.003, | |
| "step": 8450 | |
| }, | |
| { | |
| "epoch": 79.06542056074767, | |
| "grad_norm": 0.04442933201789856, | |
| "learning_rate": 6.344936180589351e-06, | |
| "loss": 0.0031, | |
| "step": 8460 | |
| }, | |
| { | |
| "epoch": 79.1588785046729, | |
| "grad_norm": 0.0632733404636383, | |
| "learning_rate": 6.264561947326331e-06, | |
| "loss": 0.004, | |
| "step": 8470 | |
| }, | |
| { | |
| "epoch": 79.25233644859813, | |
| "grad_norm": 0.2247694879770279, | |
| "learning_rate": 6.184665997806832e-06, | |
| "loss": 0.0062, | |
| "step": 8480 | |
| }, | |
| { | |
| "epoch": 79.34579439252336, | |
| "grad_norm": 0.08065646141767502, | |
| "learning_rate": 6.1052492057601275e-06, | |
| "loss": 0.0039, | |
| "step": 8490 | |
| }, | |
| { | |
| "epoch": 79.4392523364486, | |
| "grad_norm": 0.1812344342470169, | |
| "learning_rate": 6.026312439675552e-06, | |
| "loss": 0.0057, | |
| "step": 8500 | |
| }, | |
| { | |
| "epoch": 79.53271028037383, | |
| "grad_norm": 0.1381814181804657, | |
| "learning_rate": 5.947856562792925e-06, | |
| "loss": 0.0101, | |
| "step": 8510 | |
| }, | |
| { | |
| "epoch": 79.62616822429906, | |
| "grad_norm": 0.04312485083937645, | |
| "learning_rate": 5.869882433093155e-06, | |
| "loss": 0.0028, | |
| "step": 8520 | |
| }, | |
| { | |
| "epoch": 79.7196261682243, | |
| "grad_norm": 0.24571475386619568, | |
| "learning_rate": 5.79239090328883e-06, | |
| "loss": 0.0066, | |
| "step": 8530 | |
| }, | |
| { | |
| "epoch": 79.81308411214954, | |
| "grad_norm": 0.06329802423715591, | |
| "learning_rate": 5.715382820814885e-06, | |
| "loss": 0.003, | |
| "step": 8540 | |
| }, | |
| { | |
| "epoch": 79.90654205607477, | |
| "grad_norm": 0.05647183954715729, | |
| "learning_rate": 5.6388590278194096e-06, | |
| "loss": 0.0031, | |
| "step": 8550 | |
| }, | |
| { | |
| "epoch": 80.0, | |
| "grad_norm": 0.0694732517004013, | |
| "learning_rate": 5.562820361154314e-06, | |
| "loss": 0.0038, | |
| "step": 8560 | |
| }, | |
| { | |
| "epoch": 80.09345794392523, | |
| "grad_norm": 0.09058716893196106, | |
| "learning_rate": 5.48726765236629e-06, | |
| "loss": 0.0026, | |
| "step": 8570 | |
| }, | |
| { | |
| "epoch": 80.18691588785046, | |
| "grad_norm": 0.12245356291532516, | |
| "learning_rate": 5.412201727687644e-06, | |
| "loss": 0.0044, | |
| "step": 8580 | |
| }, | |
| { | |
| "epoch": 80.2803738317757, | |
| "grad_norm": 0.11849050968885422, | |
| "learning_rate": 5.337623408027293e-06, | |
| "loss": 0.0045, | |
| "step": 8590 | |
| }, | |
| { | |
| "epoch": 80.37383177570094, | |
| "grad_norm": 0.13533665239810944, | |
| "learning_rate": 5.263533508961827e-06, | |
| "loss": 0.0037, | |
| "step": 8600 | |
| }, | |
| { | |
| "epoch": 80.46728971962617, | |
| "grad_norm": 0.05687372013926506, | |
| "learning_rate": 5.1899328407264855e-06, | |
| "loss": 0.0029, | |
| "step": 8610 | |
| }, | |
| { | |
| "epoch": 80.5607476635514, | |
| "grad_norm": 0.19712159037590027, | |
| "learning_rate": 5.116822208206396e-06, | |
| "loss": 0.003, | |
| "step": 8620 | |
| }, | |
| { | |
| "epoch": 80.65420560747664, | |
| "grad_norm": 0.058728091418743134, | |
| "learning_rate": 5.044202410927706e-06, | |
| "loss": 0.0073, | |
| "step": 8630 | |
| }, | |
| { | |
| "epoch": 80.74766355140187, | |
| "grad_norm": 0.05174853280186653, | |
| "learning_rate": 4.972074243048897e-06, | |
| "loss": 0.0025, | |
| "step": 8640 | |
| }, | |
| { | |
| "epoch": 80.8411214953271, | |
| "grad_norm": 0.046942222863435745, | |
| "learning_rate": 4.900438493352055e-06, | |
| "loss": 0.0044, | |
| "step": 8650 | |
| }, | |
| { | |
| "epoch": 80.93457943925233, | |
| "grad_norm": 0.06713097542524338, | |
| "learning_rate": 4.829295945234258e-06, | |
| "loss": 0.0025, | |
| "step": 8660 | |
| }, | |
| { | |
| "epoch": 81.02803738317758, | |
| "grad_norm": 0.058530379086732864, | |
| "learning_rate": 4.758647376699032e-06, | |
| "loss": 0.0029, | |
| "step": 8670 | |
| }, | |
| { | |
| "epoch": 81.1214953271028, | |
| "grad_norm": 0.0929771289229393, | |
| "learning_rate": 4.688493560347773e-06, | |
| "loss": 0.0034, | |
| "step": 8680 | |
| }, | |
| { | |
| "epoch": 81.21495327102804, | |
| "grad_norm": 0.10967829823493958, | |
| "learning_rate": 4.618835263371396e-06, | |
| "loss": 0.0032, | |
| "step": 8690 | |
| }, | |
| { | |
| "epoch": 81.30841121495327, | |
| "grad_norm": 0.05041574314236641, | |
| "learning_rate": 4.549673247541875e-06, | |
| "loss": 0.0039, | |
| "step": 8700 | |
| }, | |
| { | |
| "epoch": 81.40186915887851, | |
| "grad_norm": 0.1108449250459671, | |
| "learning_rate": 4.48100826920394e-06, | |
| "loss": 0.0029, | |
| "step": 8710 | |
| }, | |
| { | |
| "epoch": 81.49532710280374, | |
| "grad_norm": 0.09053438901901245, | |
| "learning_rate": 4.412841079266777e-06, | |
| "loss": 0.0044, | |
| "step": 8720 | |
| }, | |
| { | |
| "epoch": 81.58878504672897, | |
| "grad_norm": 0.1006326973438263, | |
| "learning_rate": 4.3451724231958644e-06, | |
| "loss": 0.0033, | |
| "step": 8730 | |
| }, | |
| { | |
| "epoch": 81.6822429906542, | |
| "grad_norm": 0.11440656334161758, | |
| "learning_rate": 4.27800304100478e-06, | |
| "loss": 0.0031, | |
| "step": 8740 | |
| }, | |
| { | |
| "epoch": 81.77570093457943, | |
| "grad_norm": 0.048498962074518204, | |
| "learning_rate": 4.2113336672471245e-06, | |
| "loss": 0.0025, | |
| "step": 8750 | |
| }, | |
| { | |
| "epoch": 81.86915887850468, | |
| "grad_norm": 0.08707248419523239, | |
| "learning_rate": 4.145165031008508e-06, | |
| "loss": 0.0041, | |
| "step": 8760 | |
| }, | |
| { | |
| "epoch": 81.96261682242991, | |
| "grad_norm": 0.0621667355298996, | |
| "learning_rate": 4.079497855898501e-06, | |
| "loss": 0.0041, | |
| "step": 8770 | |
| }, | |
| { | |
| "epoch": 82.05607476635514, | |
| "grad_norm": 0.12233584374189377, | |
| "learning_rate": 4.01433286004283e-06, | |
| "loss": 0.007, | |
| "step": 8780 | |
| }, | |
| { | |
| "epoch": 82.14953271028037, | |
| "grad_norm": 0.14951051771640778, | |
| "learning_rate": 3.949670756075447e-06, | |
| "loss": 0.0045, | |
| "step": 8790 | |
| }, | |
| { | |
| "epoch": 82.24299065420561, | |
| "grad_norm": 0.3771739900112152, | |
| "learning_rate": 3.885512251130763e-06, | |
| "loss": 0.0073, | |
| "step": 8800 | |
| }, | |
| { | |
| "epoch": 82.33644859813084, | |
| "grad_norm": 0.04400696977972984, | |
| "learning_rate": 3.821858046835913e-06, | |
| "loss": 0.0045, | |
| "step": 8810 | |
| }, | |
| { | |
| "epoch": 82.42990654205607, | |
| "grad_norm": 0.12610068917274475, | |
| "learning_rate": 3.75870883930306e-06, | |
| "loss": 0.0029, | |
| "step": 8820 | |
| }, | |
| { | |
| "epoch": 82.5233644859813, | |
| "grad_norm": 0.26768192648887634, | |
| "learning_rate": 3.696065319121833e-06, | |
| "loss": 0.0068, | |
| "step": 8830 | |
| }, | |
| { | |
| "epoch": 82.61682242990655, | |
| "grad_norm": 0.13012675940990448, | |
| "learning_rate": 3.6339281713517303e-06, | |
| "loss": 0.0049, | |
| "step": 8840 | |
| }, | |
| { | |
| "epoch": 82.71028037383178, | |
| "grad_norm": 0.08191750943660736, | |
| "learning_rate": 3.5722980755146517e-06, | |
| "loss": 0.0068, | |
| "step": 8850 | |
| }, | |
| { | |
| "epoch": 82.80373831775701, | |
| "grad_norm": 0.11502333730459213, | |
| "learning_rate": 3.511175705587433e-06, | |
| "loss": 0.0043, | |
| "step": 8860 | |
| }, | |
| { | |
| "epoch": 82.89719626168224, | |
| "grad_norm": 0.14655309915542603, | |
| "learning_rate": 3.4505617299945336e-06, | |
| "loss": 0.0062, | |
| "step": 8870 | |
| }, | |
| { | |
| "epoch": 82.99065420560747, | |
| "grad_norm": 0.1569114327430725, | |
| "learning_rate": 3.390456811600673e-06, | |
| "loss": 0.008, | |
| "step": 8880 | |
| }, | |
| { | |
| "epoch": 83.08411214953271, | |
| "grad_norm": 0.08708454668521881, | |
| "learning_rate": 3.3308616077036115e-06, | |
| "loss": 0.0046, | |
| "step": 8890 | |
| }, | |
| { | |
| "epoch": 83.17757009345794, | |
| "grad_norm": 0.05366307497024536, | |
| "learning_rate": 3.271776770026963e-06, | |
| "loss": 0.0022, | |
| "step": 8900 | |
| }, | |
| { | |
| "epoch": 83.27102803738318, | |
| "grad_norm": 0.15384458005428314, | |
| "learning_rate": 3.213202944713023e-06, | |
| "loss": 0.0049, | |
| "step": 8910 | |
| }, | |
| { | |
| "epoch": 83.3644859813084, | |
| "grad_norm": 0.06727658212184906, | |
| "learning_rate": 3.155140772315773e-06, | |
| "loss": 0.0031, | |
| "step": 8920 | |
| }, | |
| { | |
| "epoch": 83.45794392523365, | |
| "grad_norm": 0.08927454799413681, | |
| "learning_rate": 3.0975908877938277e-06, | |
| "loss": 0.0033, | |
| "step": 8930 | |
| }, | |
| { | |
| "epoch": 83.55140186915888, | |
| "grad_norm": 0.12398143857717514, | |
| "learning_rate": 3.040553920503503e-06, | |
| "loss": 0.0033, | |
| "step": 8940 | |
| }, | |
| { | |
| "epoch": 83.64485981308411, | |
| "grad_norm": 0.16793964803218842, | |
| "learning_rate": 2.9840304941919415e-06, | |
| "loss": 0.0032, | |
| "step": 8950 | |
| }, | |
| { | |
| "epoch": 83.73831775700934, | |
| "grad_norm": 0.1104469820857048, | |
| "learning_rate": 2.928021226990263e-06, | |
| "loss": 0.0032, | |
| "step": 8960 | |
| }, | |
| { | |
| "epoch": 83.83177570093459, | |
| "grad_norm": 0.07358328253030777, | |
| "learning_rate": 2.8725267314068495e-06, | |
| "loss": 0.0028, | |
| "step": 8970 | |
| }, | |
| { | |
| "epoch": 83.92523364485982, | |
| "grad_norm": 0.13550950586795807, | |
| "learning_rate": 2.817547614320615e-06, | |
| "loss": 0.0072, | |
| "step": 8980 | |
| }, | |
| { | |
| "epoch": 84.01869158878505, | |
| "grad_norm": 0.09289432317018509, | |
| "learning_rate": 2.7630844769743757e-06, | |
| "loss": 0.0053, | |
| "step": 8990 | |
| }, | |
| { | |
| "epoch": 84.11214953271028, | |
| "grad_norm": 0.0311606265604496, | |
| "learning_rate": 2.7091379149682685e-06, | |
| "loss": 0.0035, | |
| "step": 9000 | |
| }, | |
| { | |
| "epoch": 84.20560747663552, | |
| "grad_norm": 0.07517659664154053, | |
| "learning_rate": 2.6557085182532582e-06, | |
| "loss": 0.0061, | |
| "step": 9010 | |
| }, | |
| { | |
| "epoch": 84.29906542056075, | |
| "grad_norm": 0.07934467494487762, | |
| "learning_rate": 2.602796871124663e-06, | |
| "loss": 0.0055, | |
| "step": 9020 | |
| }, | |
| { | |
| "epoch": 84.39252336448598, | |
| "grad_norm": 0.0909305214881897, | |
| "learning_rate": 2.5504035522157854e-06, | |
| "loss": 0.0034, | |
| "step": 9030 | |
| }, | |
| { | |
| "epoch": 84.48598130841121, | |
| "grad_norm": 0.05545186623930931, | |
| "learning_rate": 2.4985291344915674e-06, | |
| "loss": 0.0032, | |
| "step": 9040 | |
| }, | |
| { | |
| "epoch": 84.57943925233644, | |
| "grad_norm": 0.14734113216400146, | |
| "learning_rate": 2.4471741852423237e-06, | |
| "loss": 0.0045, | |
| "step": 9050 | |
| }, | |
| { | |
| "epoch": 84.67289719626169, | |
| "grad_norm": 0.10017460584640503, | |
| "learning_rate": 2.3963392660775575e-06, | |
| "loss": 0.0028, | |
| "step": 9060 | |
| }, | |
| { | |
| "epoch": 84.76635514018692, | |
| "grad_norm": 0.04614974930882454, | |
| "learning_rate": 2.3460249329197824e-06, | |
| "loss": 0.0031, | |
| "step": 9070 | |
| }, | |
| { | |
| "epoch": 84.85981308411215, | |
| "grad_norm": 0.1374596208333969, | |
| "learning_rate": 2.296231735998511e-06, | |
| "loss": 0.0031, | |
| "step": 9080 | |
| }, | |
| { | |
| "epoch": 84.95327102803738, | |
| "grad_norm": 0.07840343564748764, | |
| "learning_rate": 2.2469602198441573e-06, | |
| "loss": 0.0031, | |
| "step": 9090 | |
| }, | |
| { | |
| "epoch": 85.04672897196262, | |
| "grad_norm": 0.04728247597813606, | |
| "learning_rate": 2.1982109232821178e-06, | |
| "loss": 0.0035, | |
| "step": 9100 | |
| }, | |
| { | |
| "epoch": 85.14018691588785, | |
| "grad_norm": 0.05790605768561363, | |
| "learning_rate": 2.149984379426906e-06, | |
| "loss": 0.0028, | |
| "step": 9110 | |
| }, | |
| { | |
| "epoch": 85.23364485981308, | |
| "grad_norm": 0.09767275303602219, | |
| "learning_rate": 2.102281115676258e-06, | |
| "loss": 0.0027, | |
| "step": 9120 | |
| }, | |
| { | |
| "epoch": 85.32710280373831, | |
| "grad_norm": 0.05615156143903732, | |
| "learning_rate": 2.0551016537054493e-06, | |
| "loss": 0.0049, | |
| "step": 9130 | |
| }, | |
| { | |
| "epoch": 85.42056074766356, | |
| "grad_norm": 0.0627375990152359, | |
| "learning_rate": 2.008446509461498e-06, | |
| "loss": 0.004, | |
| "step": 9140 | |
| }, | |
| { | |
| "epoch": 85.51401869158879, | |
| "grad_norm": 0.06630418449640274, | |
| "learning_rate": 1.962316193157593e-06, | |
| "loss": 0.0032, | |
| "step": 9150 | |
| }, | |
| { | |
| "epoch": 85.60747663551402, | |
| "grad_norm": 0.08775442093610764, | |
| "learning_rate": 1.91671120926748e-06, | |
| "loss": 0.0051, | |
| "step": 9160 | |
| }, | |
| { | |
| "epoch": 85.70093457943925, | |
| "grad_norm": 0.061658404767513275, | |
| "learning_rate": 1.8716320565199618e-06, | |
| "loss": 0.0033, | |
| "step": 9170 | |
| }, | |
| { | |
| "epoch": 85.79439252336448, | |
| "grad_norm": 0.054096709936857224, | |
| "learning_rate": 1.8270792278934302e-06, | |
| "loss": 0.0032, | |
| "step": 9180 | |
| }, | |
| { | |
| "epoch": 85.88785046728972, | |
| "grad_norm": 0.06938756257295609, | |
| "learning_rate": 1.7830532106104747e-06, | |
| "loss": 0.0029, | |
| "step": 9190 | |
| }, | |
| { | |
| "epoch": 85.98130841121495, | |
| "grad_norm": 0.13099592924118042, | |
| "learning_rate": 1.7395544861325718e-06, | |
| "loss": 0.0044, | |
| "step": 9200 | |
| }, | |
| { | |
| "epoch": 86.07476635514018, | |
| "grad_norm": 0.08681978285312653, | |
| "learning_rate": 1.696583530154794e-06, | |
| "loss": 0.0051, | |
| "step": 9210 | |
| }, | |
| { | |
| "epoch": 86.16822429906541, | |
| "grad_norm": 0.06871863454580307, | |
| "learning_rate": 1.6541408126006463e-06, | |
| "loss": 0.0023, | |
| "step": 9220 | |
| }, | |
| { | |
| "epoch": 86.26168224299066, | |
| "grad_norm": 0.11858540028333664, | |
| "learning_rate": 1.6122267976168781e-06, | |
| "loss": 0.0054, | |
| "step": 9230 | |
| }, | |
| { | |
| "epoch": 86.35514018691589, | |
| "grad_norm": 0.0753912627696991, | |
| "learning_rate": 1.5708419435684462e-06, | |
| "loss": 0.0045, | |
| "step": 9240 | |
| }, | |
| { | |
| "epoch": 86.44859813084112, | |
| "grad_norm": 0.04186966270208359, | |
| "learning_rate": 1.5299867030334814e-06, | |
| "loss": 0.0058, | |
| "step": 9250 | |
| }, | |
| { | |
| "epoch": 86.54205607476635, | |
| "grad_norm": 0.1056654155254364, | |
| "learning_rate": 1.4896615227983468e-06, | |
| "loss": 0.0027, | |
| "step": 9260 | |
| }, | |
| { | |
| "epoch": 86.6355140186916, | |
| "grad_norm": 0.04532716050744057, | |
| "learning_rate": 1.4498668438527597e-06, | |
| "loss": 0.0031, | |
| "step": 9270 | |
| }, | |
| { | |
| "epoch": 86.72897196261682, | |
| "grad_norm": 0.036859266459941864, | |
| "learning_rate": 1.4106031013849496e-06, | |
| "loss": 0.0063, | |
| "step": 9280 | |
| }, | |
| { | |
| "epoch": 86.82242990654206, | |
| "grad_norm": 0.11480679363012314, | |
| "learning_rate": 1.3718707247769135e-06, | |
| "loss": 0.0029, | |
| "step": 9290 | |
| }, | |
| { | |
| "epoch": 86.91588785046729, | |
| "grad_norm": 0.2239740788936615, | |
| "learning_rate": 1.333670137599713e-06, | |
| "loss": 0.0075, | |
| "step": 9300 | |
| }, | |
| { | |
| "epoch": 87.00934579439253, | |
| "grad_norm": 0.14973869919776917, | |
| "learning_rate": 1.2960017576088446e-06, | |
| "loss": 0.0045, | |
| "step": 9310 | |
| }, | |
| { | |
| "epoch": 87.10280373831776, | |
| "grad_norm": 0.10877890884876251, | |
| "learning_rate": 1.2588659967397e-06, | |
| "loss": 0.011, | |
| "step": 9320 | |
| }, | |
| { | |
| "epoch": 87.19626168224299, | |
| "grad_norm": 0.08974131941795349, | |
| "learning_rate": 1.222263261102985e-06, | |
| "loss": 0.0037, | |
| "step": 9330 | |
| }, | |
| { | |
| "epoch": 87.28971962616822, | |
| "grad_norm": 0.12366478145122528, | |
| "learning_rate": 1.1861939509803687e-06, | |
| "loss": 0.0044, | |
| "step": 9340 | |
| }, | |
| { | |
| "epoch": 87.38317757009345, | |
| "grad_norm": 0.06801088899374008, | |
| "learning_rate": 1.1506584608200367e-06, | |
| "loss": 0.0023, | |
| "step": 9350 | |
| }, | |
| { | |
| "epoch": 87.4766355140187, | |
| "grad_norm": 0.055119484663009644, | |
| "learning_rate": 1.1156571792324211e-06, | |
| "loss": 0.0024, | |
| "step": 9360 | |
| }, | |
| { | |
| "epoch": 87.57009345794393, | |
| "grad_norm": 0.03374645486474037, | |
| "learning_rate": 1.0811904889859336e-06, | |
| "loss": 0.0082, | |
| "step": 9370 | |
| }, | |
| { | |
| "epoch": 87.66355140186916, | |
| "grad_norm": 0.18118692934513092, | |
| "learning_rate": 1.0472587670027678e-06, | |
| "loss": 0.0026, | |
| "step": 9380 | |
| }, | |
| { | |
| "epoch": 87.75700934579439, | |
| "grad_norm": 0.17593815922737122, | |
| "learning_rate": 1.0138623843548078e-06, | |
| "loss": 0.0115, | |
| "step": 9390 | |
| }, | |
| { | |
| "epoch": 87.85046728971963, | |
| "grad_norm": 0.12268546968698502, | |
| "learning_rate": 9.810017062595322e-07, | |
| "loss": 0.003, | |
| "step": 9400 | |
| }, | |
| { | |
| "epoch": 87.94392523364486, | |
| "grad_norm": 0.08846314996480942, | |
| "learning_rate": 9.486770920760668e-07, | |
| "loss": 0.0036, | |
| "step": 9410 | |
| }, | |
| { | |
| "epoch": 88.03738317757009, | |
| "grad_norm": 0.09881670773029327, | |
| "learning_rate": 9.168888953011989e-07, | |
| "loss": 0.0026, | |
| "step": 9420 | |
| }, | |
| { | |
| "epoch": 88.13084112149532, | |
| "grad_norm": 0.05135083198547363, | |
| "learning_rate": 8.856374635655695e-07, | |
| "loss": 0.0043, | |
| "step": 9430 | |
| }, | |
| { | |
| "epoch": 88.22429906542057, | |
| "grad_norm": 0.04362175986170769, | |
| "learning_rate": 8.549231386298151e-07, | |
| "loss": 0.0037, | |
| "step": 9440 | |
| }, | |
| { | |
| "epoch": 88.3177570093458, | |
| "grad_norm": 0.0345948152244091, | |
| "learning_rate": 8.247462563808817e-07, | |
| "loss": 0.0071, | |
| "step": 9450 | |
| }, | |
| { | |
| "epoch": 88.41121495327103, | |
| "grad_norm": 0.09612035006284714, | |
| "learning_rate": 7.951071468283167e-07, | |
| "loss": 0.0021, | |
| "step": 9460 | |
| }, | |
| { | |
| "epoch": 88.50467289719626, | |
| "grad_norm": 0.04552985727787018, | |
| "learning_rate": 7.66006134100672e-07, | |
| "loss": 0.003, | |
| "step": 9470 | |
| }, | |
| { | |
| "epoch": 88.59813084112149, | |
| "grad_norm": 0.1042826697230339, | |
| "learning_rate": 7.374435364419674e-07, | |
| "loss": 0.0035, | |
| "step": 9480 | |
| }, | |
| { | |
| "epoch": 88.69158878504673, | |
| "grad_norm": 0.15467211604118347, | |
| "learning_rate": 7.094196662081831e-07, | |
| "loss": 0.0034, | |
| "step": 9490 | |
| }, | |
| { | |
| "epoch": 88.78504672897196, | |
| "grad_norm": 0.22603380680084229, | |
| "learning_rate": 6.819348298638839e-07, | |
| "loss": 0.0038, | |
| "step": 9500 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 10000, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 94, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": false | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 2.69573051908096e+18, | |
| "train_batch_size": 16, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
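
The state above covers global steps 10 through 9500 of a 10000-step budget (`max_steps`), logged every 10 steps and checkpointed every 500, with a train batch size of 16; by the final entries the loss has flattened into the 0.002–0.008 range while the cosine-annealed learning rate has dropped below 1e-6. Below is a minimal sketch for inspecting such a state offline. It assumes the JSON is saved as `trainer_state.json` (the default filename the Hugging Face Trainer writes into each checkpoint directory) and uses matplotlib only for the plot; the field names (`log_history`, `loss`, `learning_rate`, `step`, `logging_steps`) are exactly those appearing in the file above.

```python
# Minimal sketch: parse a Trainer state file and summarize/plot the loss curve.
# Assumes the JSON above is saved as "trainer_state.json" (Trainer's default
# filename inside a checkpoint directory).
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only training-log entries; eval entries (if any) carry "eval_loss"
# instead of "loss" and would otherwise raise a KeyError below.
logs = [e for e in state["log_history"] if "loss" in e]

steps = [e["step"] for e in logs]
losses = [e["loss"] for e in logs]
lrs = [e["learning_rate"] for e in logs]

print(f"logged steps {steps[0]}..{steps[-1]} "
      f"({len(logs)} entries, every {state['logging_steps']} steps)")
print(f"final loss {losses[-1]:.4f}, final lr {lrs[-1]:.3e}")

# Log-scale loss makes the late-training plateau visible alongside the
# learning-rate schedule.
fig, (ax_loss, ax_lr) = plt.subplots(2, 1, sharex=True)
ax_loss.plot(steps, losses)
ax_loss.set_yscale("log")
ax_loss.set_ylabel("training loss")
ax_lr.plot(steps, lrs)
ax_lr.set_ylabel("learning rate")
ax_lr.set_xlabel("global step")
fig.tight_layout()
fig.savefig("loss_curve.png")
```

Run against this state, the script would report 950 logged entries ending at step 9500 and save a two-panel plot; filtering on the `"loss"` key is a small robustness choice, since states produced with `eval_steps` active interleave evaluation records into the same `log_history` list.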