{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.7138122673736808,
  "eval_steps": 500,
  "global_step": 14000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0005098659052669148,
      "grad_norm": 316.62525281525757,
      "learning_rate": 7.640067911714771e-08,
      "loss": 10.4749,
      "step": 10
    },
    {
      "epoch": 0.0010197318105338296,
      "grad_norm": 285.7401162496275,
      "learning_rate": 1.6129032258064518e-07,
      "loss": 10.6201,
      "step": 20
    },
    {
      "epoch": 0.0015295977158007444,
      "grad_norm": 312.2873919163466,
      "learning_rate": 2.461799660441426e-07,
      "loss": 9.8721,
      "step": 30
    },
    {
      "epoch": 0.002039463621067659,
      "grad_norm": 243.94179574653327,
      "learning_rate": 3.310696095076401e-07,
      "loss": 8.295,
      "step": 40
    },
    {
      "epoch": 0.002549329526334574,
      "grad_norm": 140.37820408824535,
      "learning_rate": 4.1595925297113753e-07,
      "loss": 6.5498,
      "step": 50
    },
    {
      "epoch": 0.0030591954316014887,
      "grad_norm": 74.21515072344681,
      "learning_rate": 5.00848896434635e-07,
      "loss": 5.3838,
      "step": 60
    },
    {
      "epoch": 0.0035690613368684035,
      "grad_norm": 93.57353432385545,
      "learning_rate": 5.857385398981324e-07,
      "loss": 4.775,
      "step": 70
    },
    {
      "epoch": 0.004078927242135318,
      "grad_norm": 68.41964079965516,
      "learning_rate": 6.706281833616299e-07,
      "loss": 4.4661,
      "step": 80
    },
    {
      "epoch": 0.0045887931474022335,
      "grad_norm": 54.81800032711122,
      "learning_rate": 7.555178268251275e-07,
      "loss": 4.1719,
      "step": 90
    },
    {
      "epoch": 0.005098659052669148,
      "grad_norm": 234.65660116280884,
      "learning_rate": 8.404074702886249e-07,
      "loss": 4.1875,
      "step": 100
    },
    {
      "epoch": 0.005608524957936063,
      "grad_norm": 51.75897318253527,
      "learning_rate": 9.252971137521223e-07,
      "loss": 4.1807,
      "step": 110
    },
    {
      "epoch": 0.0061183908632029774,
      "grad_norm": 32.87813340481878,
      "learning_rate": 1.0101867572156197e-06,
      "loss": 4.0487,
      "step": 120
    },
    {
      "epoch": 0.006628256768469893,
      "grad_norm": 47.12160023497351,
      "learning_rate": 1.0950764006791174e-06,
      "loss": 4.057,
      "step": 130
    },
    {
      "epoch": 0.007138122673736807,
      "grad_norm": 33.36787554080754,
      "learning_rate": 1.1799660441426147e-06,
      "loss": 4.0638,
      "step": 140
    },
    {
      "epoch": 0.007647988579003722,
      "grad_norm": 36.22615493740882,
      "learning_rate": 1.2648556876061122e-06,
      "loss": 3.9743,
      "step": 150
    },
    {
      "epoch": 0.008157854484270637,
      "grad_norm": 25.548896106296255,
      "learning_rate": 1.3497453310696096e-06,
      "loss": 4.0058,
      "step": 160
    },
    {
      "epoch": 0.008667720389537552,
      "grad_norm": 28.71639199269464,
      "learning_rate": 1.434634974533107e-06,
      "loss": 3.9652,
      "step": 170
    },
    {
      "epoch": 0.009177586294804467,
      "grad_norm": 92.81634578219439,
      "learning_rate": 1.5195246179966044e-06,
      "loss": 4.0078,
      "step": 180
    },
    {
      "epoch": 0.00968745220007138,
      "grad_norm": 46.254937054936384,
      "learning_rate": 1.604414261460102e-06,
      "loss": 3.9596,
      "step": 190
    },
    {
      "epoch": 0.010197318105338296,
      "grad_norm": 936.4754280338494,
      "learning_rate": 1.6893039049235995e-06,
      "loss": 3.855,
      "step": 200
    },
    {
      "epoch": 0.010707184010605211,
      "grad_norm": 496.26665459390637,
      "learning_rate": 1.774193548387097e-06,
      "loss": 3.7976,
      "step": 210
    },
    {
      "epoch": 0.011217049915872126,
      "grad_norm": 59.013534542348424,
      "learning_rate": 1.8590831918505945e-06,
      "loss": 3.7332,
      "step": 220
    },
    {
      "epoch": 0.01172691582113904,
      "grad_norm": 697.3633039430886,
      "learning_rate": 1.943972835314092e-06,
      "loss": 3.6656,
      "step": 230
    },
    {
      "epoch": 0.012236781726405955,
      "grad_norm": 52.874578205264044,
      "learning_rate": 2.028862478777589e-06,
      "loss": 3.5203,
      "step": 240
    },
    {
      "epoch": 0.01274664763167287,
      "grad_norm": 76.57920146938653,
      "learning_rate": 2.113752122241087e-06,
      "loss": 3.2779,
      "step": 250
    },
    {
      "epoch": 0.013256513536939785,
      "grad_norm": 85.19750945653773,
      "learning_rate": 2.1986417657045842e-06,
      "loss": 3.1369,
      "step": 260
    },
    {
      "epoch": 0.013766379442206699,
      "grad_norm": 65.41060472799023,
      "learning_rate": 2.2835314091680816e-06,
      "loss": 2.8289,
      "step": 270
    },
    {
      "epoch": 0.014276245347473614,
      "grad_norm": 53.689335922333726,
      "learning_rate": 2.368421052631579e-06,
      "loss": 2.6487,
      "step": 280
    },
    {
      "epoch": 0.01478611125274053,
      "grad_norm": 53.93921545430968,
      "learning_rate": 2.4533106960950766e-06,
      "loss": 2.3619,
      "step": 290
    },
    {
      "epoch": 0.015295977158007444,
      "grad_norm": 58.944377639899294,
      "learning_rate": 2.538200339558574e-06,
      "loss": 2.1433,
      "step": 300
    },
    {
      "epoch": 0.015805843063274358,
      "grad_norm": 46.41298005691738,
      "learning_rate": 2.6230899830220713e-06,
      "loss": 1.8862,
      "step": 310
    },
    {
      "epoch": 0.016315708968541273,
      "grad_norm": 122.74189427146665,
      "learning_rate": 2.707979626485569e-06,
      "loss": 1.6422,
      "step": 320
    },
    {
      "epoch": 0.01682557487380819,
      "grad_norm": 81.65446781620822,
      "learning_rate": 2.7928692699490667e-06,
      "loss": 1.3886,
      "step": 330
    },
    {
      "epoch": 0.017335440779075104,
      "grad_norm": 38.92723075682003,
      "learning_rate": 2.877758913412564e-06,
      "loss": 1.3017,
      "step": 340
    },
    {
      "epoch": 0.01784530668434202,
      "grad_norm": 27.287077460988606,
      "learning_rate": 2.9626485568760614e-06,
      "loss": 1.151,
      "step": 350
    },
    {
      "epoch": 0.018355172589608934,
      "grad_norm": 32.79014822857911,
      "learning_rate": 3.0475382003395587e-06,
      "loss": 1.0401,
      "step": 360
    },
    {
      "epoch": 0.01886503849487585,
      "grad_norm": 24.31108717981766,
      "learning_rate": 3.1324278438030564e-06,
      "loss": 1.079,
      "step": 370
    },
    {
      "epoch": 0.01937490440014276,
      "grad_norm": 21.523545006358592,
      "learning_rate": 3.2173174872665538e-06,
      "loss": 0.9804,
      "step": 380
    },
    {
      "epoch": 0.019884770305409676,
      "grad_norm": 21.37428335689284,
      "learning_rate": 3.302207130730051e-06,
      "loss": 0.887,
      "step": 390
    },
    {
      "epoch": 0.02039463621067659,
      "grad_norm": 18.688340602869662,
      "learning_rate": 3.3870967741935484e-06,
      "loss": 0.8609,
      "step": 400
    },
    {
      "epoch": 0.020904502115943507,
      "grad_norm": 64.36276305999914,
      "learning_rate": 3.471986417657046e-06,
      "loss": 0.8907,
      "step": 410
    },
    {
      "epoch": 0.021414368021210422,
      "grad_norm": 47.17306058904959,
      "learning_rate": 3.556876061120544e-06,
      "loss": 0.8588,
      "step": 420
    },
    {
      "epoch": 0.021924233926477337,
      "grad_norm": 29.113641286019497,
      "learning_rate": 3.641765704584041e-06,
      "loss": 0.9311,
      "step": 430
    },
    {
      "epoch": 0.022434099831744252,
      "grad_norm": 34.98515248575744,
      "learning_rate": 3.7266553480475385e-06,
      "loss": 0.8532,
      "step": 440
    },
    {
      "epoch": 0.022943965737011168,
      "grad_norm": 17.956084637737842,
      "learning_rate": 3.811544991511036e-06,
      "loss": 0.846,
      "step": 450
    },
    {
      "epoch": 0.02345383164227808,
      "grad_norm": 27.7234720431503,
      "learning_rate": 3.896434634974533e-06,
      "loss": 0.8954,
      "step": 460
    },
    {
      "epoch": 0.023963697547544995,
      "grad_norm": 19.560756884372157,
      "learning_rate": 3.981324278438031e-06,
      "loss": 0.8641,
      "step": 470
    },
    {
      "epoch": 0.02447356345281191,
      "grad_norm": 57.59529673932827,
      "learning_rate": 4.066213921901529e-06,
      "loss": 0.8708,
      "step": 480
    },
    {
      "epoch": 0.024983429358078825,
      "grad_norm": 17.446075098012066,
      "learning_rate": 4.1511035653650255e-06,
      "loss": 0.8501,
      "step": 490
    },
    {
      "epoch": 0.02549329526334574,
      "grad_norm": 30.267237186402376,
      "learning_rate": 4.235993208828523e-06,
      "loss": 0.904,
      "step": 500
    },
    {
      "epoch": 0.026003161168612655,
      "grad_norm": 13.743819054307822,
      "learning_rate": 4.320882852292021e-06,
      "loss": 0.8243,
      "step": 510
    },
    {
      "epoch": 0.02651302707387957,
      "grad_norm": 80.97279138588813,
      "learning_rate": 4.405772495755518e-06,
      "loss": 0.9007,
      "step": 520
    },
    {
      "epoch": 0.027022892979146486,
      "grad_norm": 16.040486362513338,
      "learning_rate": 4.490662139219016e-06,
      "loss": 0.7898,
      "step": 530
    },
    {
      "epoch": 0.027532758884413398,
      "grad_norm": 10.675993103430974,
      "learning_rate": 4.575551782682513e-06,
      "loss": 0.8358,
      "step": 540
    },
    {
      "epoch": 0.028042624789680313,
      "grad_norm": 22.25156917130235,
      "learning_rate": 4.66044142614601e-06,
      "loss": 0.8365,
      "step": 550
    },
    {
      "epoch": 0.028552490694947228,
      "grad_norm": 23.67520091302,
      "learning_rate": 4.745331069609508e-06,
      "loss": 0.8394,
      "step": 560
    },
    {
      "epoch": 0.029062356600214143,
      "grad_norm": 15.025526899274565,
      "learning_rate": 4.830220713073006e-06,
      "loss": 0.7973,
      "step": 570
    },
    {
      "epoch": 0.02957222250548106,
      "grad_norm": 20.041273748960393,
      "learning_rate": 4.915110356536503e-06,
      "loss": 0.8677,
      "step": 580
    },
    {
      "epoch": 0.030082088410747974,
      "grad_norm": 44.45852961277365,
      "learning_rate": 5e-06,
      "loss": 0.8133,
      "step": 590
    },
    {
      "epoch": 0.03059195431601489,
      "grad_norm": 12.016989828995413,
      "learning_rate": 4.999996591164963e-06,
      "loss": 0.8235,
      "step": 600
    },
    {
      "epoch": 0.031101820221281804,
      "grad_norm": 14.441095000429819,
      "learning_rate": 4.999986364669145e-06,
      "loss": 0.7902,
      "step": 610
    },
    {
      "epoch": 0.031611686126548716,
      "grad_norm": 25.24752988501315,
      "learning_rate": 4.999969320540435e-06,
      "loss": 0.7942,
      "step": 620
    },
    {
      "epoch": 0.03212155203181563,
      "grad_norm": 15.168503999969841,
      "learning_rate": 4.999945458825315e-06,
      "loss": 0.7884,
      "step": 630
    },
    {
      "epoch": 0.032631417937082546,
      "grad_norm": 10.83021089779209,
      "learning_rate": 4.9999147795888545e-06,
      "loss": 0.7717,
      "step": 640
    },
    {
      "epoch": 0.03314128384234946,
      "grad_norm": 19.445450635811248,
      "learning_rate": 4.999877282914722e-06,
      "loss": 0.8135,
      "step": 650
    },
    {
      "epoch": 0.03365114974761638,
      "grad_norm": 22.554572824298493,
      "learning_rate": 4.999832968905171e-06,
      "loss": 0.8295,
      "step": 660
    },
    {
      "epoch": 0.03416101565288329,
      "grad_norm": 42.731702899490074,
      "learning_rate": 4.999781837681048e-06,
      "loss": 0.8874,
      "step": 670
    },
    {
      "epoch": 0.03467088155815021,
      "grad_norm": 20.307886633961164,
      "learning_rate": 4.999723889381793e-06,
      "loss": 0.8232,
      "step": 680
    },
    {
      "epoch": 0.03518074746341712,
      "grad_norm": 10.52969344532376,
      "learning_rate": 4.999659124165434e-06,
      "loss": 0.8182,
      "step": 690
    },
    {
      "epoch": 0.03569061336868404,
      "grad_norm": 14.518720996860537,
      "learning_rate": 4.999587542208591e-06,
      "loss": 0.7534,
      "step": 700
    },
    {
      "epoch": 0.03620047927395095,
      "grad_norm": 19.483740313351362,
      "learning_rate": 4.999509143706472e-06,
      "loss": 0.8737,
      "step": 710
    },
    {
      "epoch": 0.03671034517921787,
      "grad_norm": 54.41221044485456,
      "learning_rate": 4.999423928872876e-06,
      "loss": 0.9167,
      "step": 720
    },
    {
      "epoch": 0.03722021108448478,
      "grad_norm": 23.662531439864207,
      "learning_rate": 4.999331897940189e-06,
      "loss": 0.8138,
      "step": 730
    },
    {
      "epoch": 0.0377300769897517,
      "grad_norm": 27.072148977063062,
      "learning_rate": 4.999233051159385e-06,
      "loss": 0.7962,
      "step": 740
    },
    {
      "epoch": 0.03823994289501861,
      "grad_norm": 24.651843711278424,
      "learning_rate": 4.999127388800028e-06,
      "loss": 0.8065,
      "step": 750
    },
    {
      "epoch": 0.03874980880028552,
      "grad_norm": 21.94483810373911,
      "learning_rate": 4.999014911150264e-06,
      "loss": 0.8254,
      "step": 760
    },
    {
      "epoch": 0.03925967470555244,
      "grad_norm": 28.802217532211035,
      "learning_rate": 4.998895618516829e-06,
      "loss": 0.8433,
      "step": 770
    },
    {
      "epoch": 0.03976954061081935,
      "grad_norm": 11.32904166159406,
      "learning_rate": 4.998769511225041e-06,
      "loss": 0.7309,
      "step": 780
    },
    {
      "epoch": 0.04027940651608627,
      "grad_norm": 99.07364050040725,
      "learning_rate": 4.998636589618803e-06,
      "loss": 0.838,
      "step": 790
    },
    {
      "epoch": 0.04078927242135318,
      "grad_norm": 34.42370873450888,
      "learning_rate": 4.998496854060603e-06,
      "loss": 0.7781,
      "step": 800
    },
    {
      "epoch": 0.0412991383266201,
      "grad_norm": 8.488083542574639,
      "learning_rate": 4.998350304931507e-06,
      "loss": 0.8022,
      "step": 810
    },
    {
      "epoch": 0.04180900423188701,
      "grad_norm": 14.994383374892706,
      "learning_rate": 4.998196942631166e-06,
      "loss": 0.7474,
      "step": 820
    },
    {
      "epoch": 0.04231887013715393,
      "grad_norm": 26.626123416012348,
      "learning_rate": 4.998036767577809e-06,
      "loss": 0.7616,
      "step": 830
    },
    {
      "epoch": 0.042828736042420844,
      "grad_norm": 18.654351581860734,
      "learning_rate": 4.997869780208244e-06,
      "loss": 0.8281,
      "step": 840
    },
    {
      "epoch": 0.04333860194768776,
      "grad_norm": 27.303240160352054,
      "learning_rate": 4.997695980977858e-06,
      "loss": 0.7595,
      "step": 850
    },
    {
      "epoch": 0.043848467852954674,
      "grad_norm": 9.408631234143192,
      "learning_rate": 4.997515370360613e-06,
      "loss": 0.7614,
      "step": 860
    },
    {
      "epoch": 0.04435833375822159,
      "grad_norm": 8.49071108290611,
      "learning_rate": 4.997327948849044e-06,
      "loss": 0.715,
      "step": 870
    },
    {
      "epoch": 0.044868199663488505,
      "grad_norm": 17.76405503635825,
      "learning_rate": 4.997133716954266e-06,
      "loss": 0.7866,
      "step": 880
    },
    {
      "epoch": 0.04537806556875542,
      "grad_norm": 15.798376541913443,
      "learning_rate": 4.996932675205961e-06,
      "loss": 0.7586,
      "step": 890
    },
    {
      "epoch": 0.045887931474022335,
      "grad_norm": 13.142262117486272,
      "learning_rate": 4.996724824152382e-06,
      "loss": 0.7853,
      "step": 900
    },
    {
      "epoch": 0.04639779737928925,
      "grad_norm": 85.17749253553107,
      "learning_rate": 4.996510164360355e-06,
      "loss": 0.7684,
      "step": 910
    },
    {
      "epoch": 0.04690766328455616,
      "grad_norm": 34.70487061745637,
      "learning_rate": 4.996288696415271e-06,
      "loss": 0.7657,
      "step": 920
    },
    {
      "epoch": 0.047417529189823074,
      "grad_norm": 11.797913133577842,
      "learning_rate": 4.9960604209210885e-06,
      "loss": 0.7401,
      "step": 930
    },
    {
      "epoch": 0.04792739509508999,
      "grad_norm": 18.459225779080683,
      "learning_rate": 4.99582533850033e-06,
      "loss": 0.7398,
      "step": 940
    },
    {
      "epoch": 0.048437261000356904,
      "grad_norm": 13.742483693841917,
      "learning_rate": 4.995583449794081e-06,
      "loss": 0.7655,
      "step": 950
    },
    {
      "epoch": 0.04894712690562382,
      "grad_norm": 50.003928825953324,
      "learning_rate": 4.99533475546199e-06,
      "loss": 0.6877,
      "step": 960
    },
    {
      "epoch": 0.049456992810890735,
      "grad_norm": 8.082504134207067,
      "learning_rate": 4.995079256182261e-06,
      "loss": 0.7962,
      "step": 970
    },
    {
      "epoch": 0.04996685871615765,
      "grad_norm": 19.5746887217413,
      "learning_rate": 4.99481695265166e-06,
      "loss": 0.7655,
      "step": 980
    },
    {
      "epoch": 0.050476724621424565,
      "grad_norm": 67.6550051771218,
      "learning_rate": 4.994547845585505e-06,
      "loss": 0.7461,
      "step": 990
    },
    {
      "epoch": 0.05098659052669148,
      "grad_norm": 22.637134522224294,
      "learning_rate": 4.994271935717671e-06,
      "loss": 0.803,
      "step": 1000
    },
    {
      "epoch": 0.051496456431958396,
      "grad_norm": 18.105250320469835,
      "learning_rate": 4.99398922380058e-06,
      "loss": 0.8435,
      "step": 1010
    },
    {
      "epoch": 0.05200632233722531,
      "grad_norm": 7.6508740287140125,
      "learning_rate": 4.99369971060521e-06,
      "loss": 0.7596,
      "step": 1020
    },
    {
      "epoch": 0.052516188242492226,
      "grad_norm": 15.513939265611556,
      "learning_rate": 4.993403396921082e-06,
      "loss": 0.7175,
      "step": 1030
    },
    {
      "epoch": 0.05302605414775914,
      "grad_norm": 16.59172849330501,
      "learning_rate": 4.993100283556262e-06,
      "loss": 0.6755,
      "step": 1040
    },
    {
      "epoch": 0.053535920053026056,
      "grad_norm": 14.05251425869156,
      "learning_rate": 4.992790371337363e-06,
      "loss": 0.7593,
      "step": 1050
    },
    {
      "epoch": 0.05404578595829297,
      "grad_norm": 14.57170517221848,
      "learning_rate": 4.992473661109535e-06,
      "loss": 0.7366,
      "step": 1060
    },
    {
      "epoch": 0.05455565186355989,
      "grad_norm": 12.125948500236293,
      "learning_rate": 4.99215015373647e-06,
      "loss": 0.7425,
      "step": 1070
    },
    {
      "epoch": 0.055065517768826795,
      "grad_norm": 8.24534642564121,
      "learning_rate": 4.991819850100394e-06,
      "loss": 0.7838,
      "step": 1080
    },
    {
      "epoch": 0.05557538367409371,
      "grad_norm": 56.96441026445971,
      "learning_rate": 4.991482751102066e-06,
      "loss": 0.6824,
      "step": 1090
    },
    {
      "epoch": 0.056085249579360626,
      "grad_norm": 24.02412886675572,
      "learning_rate": 4.99113885766078e-06,
      "loss": 0.7026,
      "step": 1100
    },
    {
      "epoch": 0.05659511548462754,
      "grad_norm": 52.756101119639034,
      "learning_rate": 4.990788170714355e-06,
      "loss": 0.725,
      "step": 1110
    },
    {
      "epoch": 0.057104981389894456,
      "grad_norm": 17.38614067478378,
      "learning_rate": 4.99043069121914e-06,
      "loss": 0.7609,
      "step": 1120
    },
    {
      "epoch": 0.05761484729516137,
      "grad_norm": 8.055654893368551,
      "learning_rate": 4.990066420150005e-06,
      "loss": 0.6845,
      "step": 1130
    },
    {
      "epoch": 0.05812471320042829,
      "grad_norm": 18.036549926044923,
      "learning_rate": 4.989695358500342e-06,
      "loss": 0.7608,
      "step": 1140
    },
    {
      "epoch": 0.0586345791056952,
      "grad_norm": 79.44498639307135,
      "learning_rate": 4.989317507282061e-06,
      "loss": 0.7873,
      "step": 1150
    },
    {
      "epoch": 0.05914444501096212,
      "grad_norm": 10.75610853089343,
      "learning_rate": 4.988932867525588e-06,
      "loss": 0.6532,
      "step": 1160
    },
    {
      "epoch": 0.05965431091622903,
      "grad_norm": 21.466559907484747,
      "learning_rate": 4.988541440279862e-06,
      "loss": 0.7931,
      "step": 1170
    },
    {
      "epoch": 0.06016417682149595,
      "grad_norm": 39.97021653705112,
      "learning_rate": 4.988143226612333e-06,
      "loss": 0.7459,
      "step": 1180
    },
    {
      "epoch": 0.06067404272676286,
      "grad_norm": 26.23183983177856,
      "learning_rate": 4.987738227608954e-06,
      "loss": 0.7495,
      "step": 1190
    },
    {
      "epoch": 0.06118390863202978,
      "grad_norm": 30.186219658432467,
      "learning_rate": 4.987326444374189e-06,
      "loss": 0.7752,
      "step": 1200
    },
    {
      "epoch": 0.06169377453729669,
      "grad_norm": 19.81958951098629,
      "learning_rate": 4.986907878030994e-06,
      "loss": 0.6576,
      "step": 1210
    },
    {
      "epoch": 0.06220364044256361,
      "grad_norm": 10.29760641694682,
      "learning_rate": 4.986482529720831e-06,
      "loss": 0.7398,
      "step": 1220
    },
    {
      "epoch": 0.06271350634783052,
      "grad_norm": 23.848823577624334,
      "learning_rate": 4.986050400603653e-06,
      "loss": 0.7755,
      "step": 1230
    },
    {
      "epoch": 0.06322337225309743,
      "grad_norm": 16.981268241082418,
      "learning_rate": 4.985611491857906e-06,
      "loss": 0.6892,
      "step": 1240
    },
    {
      "epoch": 0.06373323815836435,
      "grad_norm": 12.203438943223416,
      "learning_rate": 4.9851658046805226e-06,
      "loss": 0.697,
      "step": 1250
    },
    {
      "epoch": 0.06424310406363126,
      "grad_norm": 22.820805465543035,
      "learning_rate": 4.9847133402869235e-06,
      "loss": 0.7316,
      "step": 1260
    },
    {
      "epoch": 0.06475296996889818,
      "grad_norm": 15.965046033606288,
      "learning_rate": 4.984254099911009e-06,
      "loss": 0.6669,
      "step": 1270
    },
    {
      "epoch": 0.06526283587416509,
      "grad_norm": 51.03956406155315,
      "learning_rate": 4.98378808480516e-06,
      "loss": 0.7309,
      "step": 1280
    },
    {
      "epoch": 0.06577270177943201,
      "grad_norm": 11.580015861232175,
      "learning_rate": 4.98331529624023e-06,
      "loss": 0.693,
      "step": 1290
    },
    {
      "epoch": 0.06628256768469892,
      "grad_norm": 23.393195377173306,
      "learning_rate": 4.982835735505545e-06,
      "loss": 0.7701,
      "step": 1300
    },
    {
      "epoch": 0.06679243358996584,
      "grad_norm": 21.230670744931253,
      "learning_rate": 4.982349403908902e-06,
      "loss": 0.7009,
      "step": 1310
    },
    {
      "epoch": 0.06730229949523275,
      "grad_norm": 7.105260679653341,
      "learning_rate": 4.98185630277656e-06,
      "loss": 0.727,
      "step": 1320
    },
    {
      "epoch": 0.06781216540049967,
      "grad_norm": 54.34592659188367,
      "learning_rate": 4.981356433453238e-06,
      "loss": 0.6632,
      "step": 1330
    },
    {
      "epoch": 0.06832203130576658,
      "grad_norm": 9.535587626528804,
      "learning_rate": 4.9808497973021146e-06,
      "loss": 0.7859,
      "step": 1340
    },
    {
      "epoch": 0.0688318972110335,
      "grad_norm": 16.49512859613625,
      "learning_rate": 4.98033639570482e-06,
      "loss": 0.7587,
      "step": 1350
    },
    {
      "epoch": 0.06934176311630041,
      "grad_norm": 14.125512381840414,
      "learning_rate": 4.979816230061436e-06,
      "loss": 0.7243,
      "step": 1360
    },
    {
      "epoch": 0.06985162902156733,
      "grad_norm": 17.80662494813945,
      "learning_rate": 4.97928930179049e-06,
      "loss": 0.7113,
      "step": 1370
    },
    {
      "epoch": 0.07036149492683424,
      "grad_norm": 18.57586486945156,
      "learning_rate": 4.978755612328951e-06,
      "loss": 0.7561,
      "step": 1380
    },
    {
      "epoch": 0.07087136083210116,
      "grad_norm": 8.283766059803575,
      "learning_rate": 4.978215163132226e-06,
      "loss": 0.7463,
      "step": 1390
    },
    {
      "epoch": 0.07138122673736808,
      "grad_norm": 14.653696663915117,
      "learning_rate": 4.977667955674158e-06,
      "loss": 0.7461,
      "step": 1400
    },
    {
      "epoch": 0.07189109264263499,
      "grad_norm": 13.507517215885278,
      "learning_rate": 4.977113991447017e-06,
      "loss": 0.7121,
      "step": 1410
    },
    {
      "epoch": 0.0724009585479019,
      "grad_norm": 9.288908938652135,
      "learning_rate": 4.976553271961503e-06,
      "loss": 0.8075,
      "step": 1420
    },
    {
      "epoch": 0.07291082445316882,
      "grad_norm": 21.236905071975933,
      "learning_rate": 4.975985798746736e-06,
      "loss": 0.7781,
      "step": 1430
    },
    {
      "epoch": 0.07342069035843574,
      "grad_norm": 13.572369167896932,
      "learning_rate": 4.975411573350252e-06,
      "loss": 0.7895,
      "step": 1440
    },
    {
      "epoch": 0.07393055626370265,
      "grad_norm": 13.55096914139037,
      "learning_rate": 4.974830597338004e-06,
      "loss": 0.736,
      "step": 1450
    },
    {
      "epoch": 0.07444042216896957,
      "grad_norm": 13.542801484459083,
      "learning_rate": 4.974242872294354e-06,
      "loss": 0.7303,
      "step": 1460
    },
    {
      "epoch": 0.07495028807423648,
      "grad_norm": 40.44101408661269,
      "learning_rate": 4.973648399822068e-06,
      "loss": 0.7475,
      "step": 1470
    },
    {
      "epoch": 0.0754601539795034,
      "grad_norm": 25.97183290799639,
      "learning_rate": 4.9730471815423124e-06,
      "loss": 0.7864,
      "step": 1480
    },
    {
      "epoch": 0.07597001988477031,
      "grad_norm": 26.74042060051638,
      "learning_rate": 4.972439219094649e-06,
      "loss": 0.6983,
      "step": 1490
    },
    {
      "epoch": 0.07647988579003721,
      "grad_norm": 11.80538671359037,
      "learning_rate": 4.971824514137035e-06,
      "loss": 0.6881,
      "step": 1500
    },
    {
      "epoch": 0.07698975169530413,
      "grad_norm": 8.2222875767639,
      "learning_rate": 4.971203068345811e-06,
      "loss": 0.7066,
      "step": 1510
    },
    {
      "epoch": 0.07749961760057104,
      "grad_norm": 8.370648963437903,
      "learning_rate": 4.970574883415704e-06,
      "loss": 0.7455,
      "step": 1520
    },
    {
      "epoch": 0.07800948350583796,
      "grad_norm": 16.048489718931286,
      "learning_rate": 4.969939961059814e-06,
      "loss": 0.7628,
      "step": 1530
    },
    {
      "epoch": 0.07851934941110487,
      "grad_norm": 7.234659506066708,
      "learning_rate": 4.969298303009621e-06,
      "loss": 0.7262,
      "step": 1540
    },
    {
      "epoch": 0.07902921531637179,
      "grad_norm": 31.829241494176976,
      "learning_rate": 4.968649911014967e-06,
      "loss": 0.6757,
      "step": 1550
    },
    {
      "epoch": 0.0795390812216387,
      "grad_norm": 19.35414711975273,
      "learning_rate": 4.9679947868440625e-06,
      "loss": 0.7596,
      "step": 1560
    },
    {
      "epoch": 0.08004894712690562,
      "grad_norm": 8.711492612161729,
      "learning_rate": 4.967332932283476e-06,
      "loss": 0.6841,
      "step": 1570
    },
    {
      "epoch": 0.08055881303217254,
      "grad_norm": 8.342431288830609,
      "learning_rate": 4.966664349138129e-06,
      "loss": 0.6874,
      "step": 1580
    },
    {
      "epoch": 0.08106867893743945,
      "grad_norm": 10.392016002654769,
      "learning_rate": 4.9659890392312935e-06,
      "loss": 0.7102,
      "step": 1590
    },
    {
      "epoch": 0.08157854484270637,
      "grad_norm": 7.781469416808468,
      "learning_rate": 4.965307004404586e-06,
      "loss": 0.7509,
      "step": 1600
    },
    {
      "epoch": 0.08208841074797328,
      "grad_norm": 7.184310384526179,
      "learning_rate": 4.964618246517962e-06,
      "loss": 0.7089,
      "step": 1610
    },
    {
      "epoch": 0.0825982766532402,
      "grad_norm": 28.801020894955254,
      "learning_rate": 4.96392276744971e-06,
      "loss": 0.6766,
      "step": 1620
    },
    {
      "epoch": 0.08310814255850711,
      "grad_norm": 11.015804077658716,
      "learning_rate": 4.9632205690964505e-06,
      "loss": 0.6858,
      "step": 1630
    },
    {
      "epoch": 0.08361800846377403,
      "grad_norm": 8.76079381481738,
      "learning_rate": 4.962511653373124e-06,
      "loss": 0.6786,
      "step": 1640
    },
    {
      "epoch": 0.08412787436904094,
      "grad_norm": 6.02245638719929,
      "learning_rate": 4.961796022212994e-06,
      "loss": 0.7022,
      "step": 1650
    },
    {
      "epoch": 0.08463774027430786,
      "grad_norm": 15.612214027251115,
      "learning_rate": 4.961073677567634e-06,
      "loss": 0.6816,
      "step": 1660
    },
    {
      "epoch": 0.08514760617957477,
      "grad_norm": 16.85101160492237,
      "learning_rate": 4.960344621406927e-06,
      "loss": 0.8359,
      "step": 1670
    },
    {
      "epoch": 0.08565747208484169,
      "grad_norm": 19.376664337749983,
      "learning_rate": 4.959608855719059e-06,
      "loss": 0.6726,
      "step": 1680
    },
    {
      "epoch": 0.0861673379901086,
      "grad_norm": 21.493545021821046,
      "learning_rate": 4.958866382510515e-06,
      "loss": 0.7395,
      "step": 1690
    },
    {
      "epoch": 0.08667720389537552,
      "grad_norm": 23.71442741073946,
      "learning_rate": 4.958117203806067e-06,
      "loss": 0.6864,
      "step": 1700
    },
    {
      "epoch": 0.08718706980064243,
      "grad_norm": 33.989133046895674,
      "learning_rate": 4.957361321648777e-06,
      "loss": 0.6745,
      "step": 1710
    },
    {
      "epoch": 0.08769693570590935,
      "grad_norm": 26.31554851843956,
      "learning_rate": 4.956598738099988e-06,
      "loss": 0.7354,
      "step": 1720
    },
    {
      "epoch": 0.08820680161117626,
      "grad_norm": 29.998869144940723,
      "learning_rate": 4.955829455239316e-06,
      "loss": 0.7275,
      "step": 1730
    },
    {
      "epoch": 0.08871666751644318,
      "grad_norm": 23.577129784492897,
      "learning_rate": 4.95505347516465e-06,
      "loss": 0.7156,
      "step": 1740
    },
    {
      "epoch": 0.0892265334217101,
      "grad_norm": 12.752943472931328,
      "learning_rate": 4.954270799992138e-06,
      "loss": 0.7123,
      "step": 1750
    },
    {
      "epoch": 0.08973639932697701,
      "grad_norm": 14.685020491827581,
      "learning_rate": 4.953481431856189e-06,
      "loss": 0.7625,
      "step": 1760
    },
    {
      "epoch": 0.09024626523224392,
      "grad_norm": 8.228343997618332,
      "learning_rate": 4.952685372909465e-06,
      "loss": 0.6541,
      "step": 1770
    },
    {
      "epoch": 0.09075613113751084,
      "grad_norm": 9.63598214058154,
      "learning_rate": 4.951882625322871e-06,
      "loss": 0.7265,
      "step": 1780
    },
    {
      "epoch": 0.09126599704277776,
      "grad_norm": 14.152116737412488,
      "learning_rate": 4.951073191285555e-06,
      "loss": 0.727,
      "step": 1790
    },
    {
      "epoch": 0.09177586294804467,
      "grad_norm": 17.44279955167637,
      "learning_rate": 4.9502570730048995e-06,
      "loss": 0.7907,
      "step": 1800
    },
    {
      "epoch": 0.09228572885331159,
      "grad_norm": 13.60771925339826,
      "learning_rate": 4.949434272706514e-06,
      "loss": 0.6227,
      "step": 1810
    },
    {
      "epoch": 0.0927955947585785,
      "grad_norm": 6.367815102413218,
      "learning_rate": 4.9486047926342294e-06,
      "loss": 0.6355,
      "step": 1820
    },
    {
      "epoch": 0.0933054606638454,
      "grad_norm": 15.713131366094526,
      "learning_rate": 4.947768635050098e-06,
      "loss": 0.6764,
      "step": 1830
    },
    {
      "epoch": 0.09381532656911232,
      "grad_norm": 8.563023673169502,
      "learning_rate": 4.946925802234373e-06,
      "loss": 0.6923,
      "step": 1840
    },
    {
      "epoch": 0.09432519247437923,
      "grad_norm": 7.363234533978469,
      "learning_rate": 4.946076296485522e-06,
      "loss": 0.6625,
      "step": 1850
    },
    {
      "epoch": 0.09483505837964615,
      "grad_norm": 20.01812955600952,
      "learning_rate": 4.945220120120203e-06,
      "loss": 0.6543,
      "step": 1860
    },
    {
      "epoch": 0.09534492428491306,
      "grad_norm": 13.172376589466024,
      "learning_rate": 4.9443572754732675e-06,
      "loss": 0.6707,
      "step": 1870
    },
    {
      "epoch": 0.09585479019017998,
      "grad_norm": 6.822990704870548,
      "learning_rate": 4.943487764897749e-06,
      "loss": 0.6938,
      "step": 1880
    },
    {
      "epoch": 0.0963646560954469,
      "grad_norm": 15.932520477808335,
      "learning_rate": 4.942611590764866e-06,
      "loss": 0.6879,
      "step": 1890
    },
    {
      "epoch": 0.09687452200071381,
      "grad_norm": 24.250098360288504,
      "learning_rate": 4.941728755464003e-06,
      "loss": 0.6847,
      "step": 1900
    },
    {
      "epoch": 0.09738438790598072,
      "grad_norm": 17.44623443605144,
      "learning_rate": 4.940839261402711e-06,
      "loss": 0.7219,
      "step": 1910
    },
    {
      "epoch": 0.09789425381124764,
      "grad_norm": 12.45136746215644,
      "learning_rate": 4.939943111006702e-06,
      "loss": 0.6549,
      "step": 1920
    },
    {
      "epoch": 0.09840411971651455,
      "grad_norm": 18.583572409276037,
      "learning_rate": 4.93904030671984e-06,
      "loss": 0.6624,
      "step": 1930
    },
    {
      "epoch": 0.09891398562178147,
      "grad_norm": 7.2053286836275605,
      "learning_rate": 4.938130851004131e-06,
      "loss": 0.7675,
      "step": 1940
    },
    {
      "epoch": 0.09942385152704838,
      "grad_norm": 12.915641303278997,
      "learning_rate": 4.937214746339726e-06,
      "loss": 0.7149,
      "step": 1950
    },
    {
      "epoch": 0.0999337174323153,
      "grad_norm": 11.31073060509851,
      "learning_rate": 4.936291995224902e-06,
      "loss": 0.7683,
      "step": 1960
    },
    {
      "epoch": 0.10044358333758222,
      "grad_norm": 6.40606866544193,
      "learning_rate": 4.935362600176064e-06,
      "loss": 0.7258,
      "step": 1970
    },
    {
      "epoch": 0.10095344924284913,
      "grad_norm": 11.827179756342,
      "learning_rate": 4.934426563727739e-06,
      "loss": 0.691,
      "step": 1980
    },
    {
      "epoch": 0.10146331514811605,
      "grad_norm": 13.898857798518174,
      "learning_rate": 4.933483888432558e-06,
      "loss": 0.6964,
      "step": 1990
    },
    {
      "epoch": 0.10197318105338296,
      "grad_norm": 14.73925860829306,
      "learning_rate": 4.932534576861263e-06,
      "loss": 0.6815,
      "step": 2000
    },
    {
      "epoch": 0.10248304695864988,
      "grad_norm": 10.503261101844386,
      "learning_rate": 4.931578631602691e-06,
      "loss": 0.7233,
      "step": 2010
    },
    {
      "epoch": 0.10299291286391679,
      "grad_norm": 19.799601878435443,
      "learning_rate": 4.930616055263768e-06,
      "loss": 0.6772,
      "step": 2020
    },
    {
      "epoch": 0.1035027787691837,
      "grad_norm": 16.215950711270004,
      "learning_rate": 4.9296468504695075e-06,
      "loss": 0.6837,
      "step": 2030
    },
    {
      "epoch": 0.10401264467445062,
      "grad_norm": 6.487253559165473,
      "learning_rate": 4.928671019862995e-06,
      "loss": 0.6732,
      "step": 2040
    },
    {
      "epoch": 0.10452251057971754,
      "grad_norm": 66.35910128060324,
      "learning_rate": 4.927688566105388e-06,
      "loss": 0.6883,
      "step": 2050
    },
    {
      "epoch": 0.10503237648498445,
      "grad_norm": 22.70308591251781,
      "learning_rate": 4.926699491875905e-06,
      "loss": 0.73,
      "step": 2060
    },
    {
      "epoch": 0.10554224239025137,
      "grad_norm": 14.473695567395094,
      "learning_rate": 4.925703799871818e-06,
      "loss": 0.6991,
      "step": 2070
    },
    {
      "epoch": 0.10605210829551828,
      "grad_norm": 15.581472109798852,
      "learning_rate": 4.924701492808447e-06,
      "loss": 0.6915,
      "step": 2080
    },
    {
      "epoch": 0.1065619742007852,
      "grad_norm": 7.980568405700353,
      "learning_rate": 4.923692573419152e-06,
      "loss": 0.6913,
      "step": 2090
    },
    {
      "epoch": 0.10707184010605211,
      "grad_norm": 23.31011562340674,
      "learning_rate": 4.922677044455324e-06,
      "loss": 0.7805,
      "step": 2100
    },
    {
      "epoch": 0.10758170601131903,
      "grad_norm": 14.87523420325485,
      "learning_rate": 4.921654908686381e-06,
      "loss": 0.6911,
      "step": 2110
    },
    {
      "epoch": 0.10809157191658594,
      "grad_norm": 14.14543897390739,
      "learning_rate": 4.920626168899755e-06,
      "loss": 0.7109,
      "step": 2120
    },
    {
      "epoch": 0.10860143782185286,
      "grad_norm": 10.881342080162556,
      "learning_rate": 4.91959082790089e-06,
      "loss": 0.7369,
      "step": 2130
    },
    {
      "epoch": 0.10911130372711977,
      "grad_norm": 21.17172349808835,
      "learning_rate": 4.918548888513232e-06,
      "loss": 0.6307,
      "step": 2140
    },
    {
      "epoch": 0.10962116963238669,
      "grad_norm": 9.13096463033165,
      "learning_rate": 4.91750035357822e-06,
      "loss": 0.6476,
      "step": 2150
    },
    {
      "epoch": 0.11013103553765359,
      "grad_norm": 11.89326391176972,
      "learning_rate": 4.9164452259552805e-06,
      "loss": 0.7066,
      "step": 2160
    },
    {
      "epoch": 0.1106409014429205,
      "grad_norm": 8.975708683266294,
      "learning_rate": 4.9153835085218175e-06,
      "loss": 0.7041,
      "step": 2170
    },
    {
      "epoch": 0.11115076734818742,
      "grad_norm": 8.585258277188357,
      "learning_rate": 4.9143152041732074e-06,
      "loss": 0.6879,
      "step": 2180
    },
    {
      "epoch": 0.11166063325345434,
      "grad_norm": 11.58529748100363,
      "learning_rate": 4.91324031582279e-06,
      "loss": 0.7314,
      "step": 2190
    },
    {
      "epoch": 0.11217049915872125,
      "grad_norm": 11.908156658951144,
      "learning_rate": 4.9121588464018555e-06,
      "loss": 0.663,
      "step": 2200
    },
    {
      "epoch": 0.11268036506398817,
      "grad_norm": 10.24067908361276,
      "learning_rate": 4.911070798859647e-06,
      "loss": 0.6701,
      "step": 2210
    },
    {
      "epoch": 0.11319023096925508,
      "grad_norm": 23.539758621052474,
      "learning_rate": 4.909976176163345e-06,
      "loss": 0.6944,
      "step": 2220
    },
    {
      "epoch": 0.113700096874522,
      "grad_norm": 19.14611387579224,
      "learning_rate": 4.908874981298058e-06,
      "loss": 0.7148,
      "step": 2230
    },
    {
      "epoch": 0.11420996277978891,
      "grad_norm": 11.887671840791135,
      "learning_rate": 4.90776721726682e-06,
      "loss": 0.6832,
      "step": 2240
    },
    {
      "epoch": 0.11471982868505583,
      "grad_norm": 25.33947936645936,
      "learning_rate": 4.90665288709058e-06,
      "loss": 0.6461,
      "step": 2250
    },
    {
      "epoch": 0.11522969459032274,
      "grad_norm": 19.152392455583414,
      "learning_rate": 4.905531993808191e-06,
      "loss": 0.6192,
      "step": 2260
    },
    {
      "epoch": 0.11573956049558966,
      "grad_norm": 15.300011798292733,
      "learning_rate": 4.904404540476405e-06,
      "loss": 0.6853,
      "step": 2270
    },
    {
      "epoch": 0.11624942640085657,
      "grad_norm": 10.628181469407775,
      "learning_rate": 4.903270530169865e-06,
      "loss": 0.666,
      "step": 2280
    },
    {
      "epoch": 0.11675929230612349,
      "grad_norm": 19.385929263319337,
      "learning_rate": 4.902129965981094e-06,
      "loss": 0.7041,
      "step": 2290
    },
    {
      "epoch": 0.1172691582113904,
      "grad_norm": 57.98147128023247,
      "learning_rate": 4.900982851020487e-06,
      "loss": 0.7214,
      "step": 2300
    },
    {
      "epoch": 0.11777902411665732,
      "grad_norm": 14.01968653085331,
      "learning_rate": 4.899829188416306e-06,
      "loss": 0.653,
      "step": 2310
    },
    {
      "epoch": 0.11828889002192423,
      "grad_norm": 21.742692522690568,
      "learning_rate": 4.898668981314667e-06,
      "loss": 0.6654,
      "step": 2320
    },
    {
      "epoch": 0.11879875592719115,
      "grad_norm": 11.319460979734869,
      "learning_rate": 4.897502232879533e-06,
      "loss": 0.716,
      "step": 2330
    },
    {
      "epoch": 0.11930862183245806,
      "grad_norm": 13.991539570566289,
      "learning_rate": 4.896328946292706e-06,
      "loss": 0.6435,
      "step": 2340
    },
    {
      "epoch": 0.11981848773772498,
      "grad_norm": 21.2320524992128,
      "learning_rate": 4.895149124753821e-06,
      "loss": 0.6812,
      "step": 2350
    },
    {
      "epoch": 0.1203283536429919,
      "grad_norm": 10.28385295525001,
      "learning_rate": 4.893962771480329e-06,
      "loss": 0.6905,
      "step": 2360
    },
    {
      "epoch": 0.12083821954825881,
      "grad_norm": 17.31710761218729,
      "learning_rate": 4.892769889707497e-06,
      "loss": 0.7062,
      "step": 2370
    },
    {
      "epoch": 0.12134808545352573,
      "grad_norm": 13.401798655701699,
      "learning_rate": 4.891570482688395e-06,
      "loss": 0.6135,
      "step": 2380
    },
    {
      "epoch": 0.12185795135879264,
      "grad_norm": 26.789093785209946,
      "learning_rate": 4.890364553693886e-06,
      "loss": 0.6903,
      "step": 2390
    },
    {
      "epoch": 0.12236781726405956,
      "grad_norm": 9.750731917531368,
      "learning_rate": 4.889152106012623e-06,
      "loss": 0.655,
      "step": 2400
    },
    {
      "epoch": 0.12287768316932647,
      "grad_norm": 26.758659449058616,
      "learning_rate": 4.88793314295103e-06,
      "loss": 0.6116,
      "step": 2410
    },
    {
      "epoch": 0.12338754907459339,
      "grad_norm": 13.440739094685654,
      "learning_rate": 4.886707667833306e-06,
      "loss": 0.6829,
      "step": 2420
    },
    {
      "epoch": 0.1238974149798603,
      "grad_norm": 49.978223621638385,
      "learning_rate": 4.885475684001401e-06,
      "loss": 0.6415,
      "step": 2430
    },
    {
      "epoch": 0.12440728088512722,
      "grad_norm": 9.760273316885899,
      "learning_rate": 4.884237194815023e-06,
      "loss": 0.6308,
      "step": 2440
    },
    {
      "epoch": 0.12491714679039413,
      "grad_norm": 27.651331346216452,
      "learning_rate": 4.882992203651613e-06,
      "loss": 0.7362,
      "step": 2450
    },
    {
      "epoch": 0.12542701269566103,
      "grad_norm": 13.634588558328543,
      "learning_rate": 4.881740713906348e-06,
      "loss": 0.7433,
      "step": 2460
    },
    {
      "epoch": 0.12593687860092795,
      "grad_norm": 13.602097173034727,
      "learning_rate": 4.880482728992126e-06,
      "loss": 0.7717,
      "step": 2470
    },
    {
      "epoch": 0.12644674450619486,
      "grad_norm": 13.612161583437892,
      "learning_rate": 4.8792182523395555e-06,
      "loss": 0.644,
      "step": 2480
    },
    {
      "epoch": 0.12695661041146178,
      "grad_norm": 12.619409906208562,
      "learning_rate": 4.877947287396952e-06,
      "loss": 0.6215,
      "step": 2490
    },
    {
      "epoch": 0.1274664763167287,
      "grad_norm": 10.09193885674217,
      "learning_rate": 4.876669837630324e-06,
      "loss": 0.691,
      "step": 2500
    },
    {
      "epoch": 0.1279763422219956,
      "grad_norm": 14.76981467720561,
      "learning_rate": 4.875385906523361e-06,
      "loss": 0.6645,
      "step": 2510
    },
    {
      "epoch": 0.12848620812726252,
      "grad_norm": 7.371746141172926,
      "learning_rate": 4.874095497577434e-06,
      "loss": 0.6466,
      "step": 2520
    },
    {
      "epoch": 0.12899607403252944,
      "grad_norm": 6.835699156756462,
      "learning_rate": 4.872798614311574e-06,
      "loss": 0.6535,
      "step": 2530
    },
    {
      "epoch": 0.12950593993779635,
      "grad_norm": 21.566628352018,
      "learning_rate": 4.87149526026247e-06,
      "loss": 0.6597,
      "step": 2540
    },
    {
      "epoch": 0.13001580584306327,
      "grad_norm": 15.703751182885975,
      "learning_rate": 4.870185438984458e-06,
      "loss": 0.6666,
      "step": 2550
    },
    {
      "epoch": 0.13052567174833019,
      "grad_norm": 9.583980407078485,
      "learning_rate": 4.868869154049509e-06,
      "loss": 0.6803,
      "step": 2560
    },
    {
      "epoch": 0.1310355376535971,
      "grad_norm": 30.731237091152327,
      "learning_rate": 4.867546409047221e-06,
      "loss": 0.7388,
      "step": 2570
    },
    {
      "epoch": 0.13154540355886402,
      "grad_norm": 14.357083066524277,
      "learning_rate": 4.866217207584811e-06,
      "loss": 0.6337,
      "step": 2580
    },
    {
      "epoch": 0.13205526946413093,
      "grad_norm": 8.506886453649118,
      "learning_rate": 4.864881553287101e-06,
      "loss": 0.5825,
      "step": 2590
    },
    {
      "epoch": 0.13256513536939785,
      "grad_norm": 9.014994788890833,
      "learning_rate": 4.863539449796511e-06,
      "loss": 0.6527,
      "step": 2600
    },
    {
      "epoch": 0.13307500127466476,
      "grad_norm": 6.859684932885404,
      "learning_rate": 4.86219090077305e-06,
      "loss": 0.6557,
      "step": 2610
    },
    {
      "epoch": 0.13358486717993168,
      "grad_norm": 10.206698448625279,
      "learning_rate": 4.8608359098943014e-06,
      "loss": 0.6416,
      "step": 2620
    },
    {
      "epoch": 0.1340947330851986,
      "grad_norm": 16.90354656461581,
      "learning_rate": 4.859474480855417e-06,
      "loss": 0.6386,
      "step": 2630
    },
    {
      "epoch": 0.1346045989904655,
      "grad_norm": 8.345581201286654,
      "learning_rate": 4.858106617369108e-06,
      "loss": 0.6757,
      "step": 2640
    },
    {
      "epoch": 0.13511446489573242,
      "grad_norm": 13.073017499813815,
      "learning_rate": 4.85673232316563e-06,
      "loss": 0.7058,
      "step": 2650
    },
    {
      "epoch": 0.13562433080099934,
      "grad_norm": 12.30994463616197,
      "learning_rate": 4.855351601992777e-06,
      "loss": 0.7062,
      "step": 2660
    },
    {
      "epoch": 0.13613419670626625,
      "grad_norm": 18.52389808957781,
      "learning_rate": 4.853964457615871e-06,
      "loss": 0.659,
      "step": 2670
    },
    {
      "epoch": 0.13664406261153317,
      "grad_norm": 8.300091892691642,
      "learning_rate": 4.852570893817747e-06,
      "loss": 0.6531,
      "step": 2680
    },
    {
      "epoch": 0.13715392851680008,
      "grad_norm": 12.03361121338335,
      "learning_rate": 4.851170914398749e-06,
      "loss": 0.645,
      "step": 2690
    },
    {
      "epoch": 0.137663794422067,
      "grad_norm": 15.761500517093225,
      "learning_rate": 4.849764523176716e-06,
      "loss": 0.7158,
      "step": 2700
    },
    {
      "epoch": 0.1381736603273339,
      "grad_norm": 9.61582603163959,
      "learning_rate": 4.848351723986974e-06,
      "loss": 0.7035,
      "step": 2710
    },
    {
      "epoch": 0.13868352623260083,
      "grad_norm": 12.19449916010554,
      "learning_rate": 4.84693252068232e-06,
      "loss": 0.6123,
      "step": 2720
    },
    {
      "epoch": 0.13919339213786774,
      "grad_norm": 10.928920087576591,
      "learning_rate": 4.845506917133021e-06,
      "loss": 0.6498,
      "step": 2730
    },
    {
      "epoch": 0.13970325804313466,
      "grad_norm": 14.276233849917213,
      "learning_rate": 4.844074917226792e-06,
      "loss": 0.6456,
      "step": 2740
    },
    {
      "epoch": 0.14021312394840157,
      "grad_norm": 11.803572897065552,
      "learning_rate": 4.842636524868796e-06,
      "loss": 0.6318,
      "step": 2750
    },
    {
      "epoch": 0.1407229898536685,
      "grad_norm": 7.925735834843732,
      "learning_rate": 4.8411917439816245e-06,
      "loss": 0.6248,
      "step": 2760
    },
    {
      "epoch": 0.1412328557589354,
      "grad_norm": 11.37915018985988,
      "learning_rate": 4.839740578505297e-06,
      "loss": 0.6717,
      "step": 2770
    },
    {
      "epoch": 0.14174272166420232,
      "grad_norm": 28.129852376592233,
      "learning_rate": 4.838283032397237e-06,
      "loss": 0.7101,
      "step": 2780
    },
    {
      "epoch": 0.14225258756946924,
      "grad_norm": 17.754960236458654,
      "learning_rate": 4.8368191096322734e-06,
      "loss": 0.7285,
      "step": 2790
    },
    {
      "epoch": 0.14276245347473615,
      "grad_norm": 11.980565371011531,
      "learning_rate": 4.835348814202624e-06,
      "loss": 0.6997,
      "step": 2800
    },
    {
      "epoch": 0.14327231938000307,
      "grad_norm": 11.536185693713957,
      "learning_rate": 4.833872150117883e-06,
      "loss": 0.6932,
      "step": 2810
    },
    {
      "epoch": 0.14378218528526998,
      "grad_norm": 8.134945726109256,
      "learning_rate": 4.832389121405013e-06,
      "loss": 0.7011,
      "step": 2820
    },
    {
      "epoch": 0.1442920511905369,
      "grad_norm": 13.154971513839428,
      "learning_rate": 4.830899732108337e-06,
      "loss": 0.6592,
      "step": 2830
    },
    {
      "epoch": 0.1448019170958038,
      "grad_norm": 16.74245961522776,
      "learning_rate": 4.829403986289519e-06,
      "loss": 0.6715,
      "step": 2840
    },
    {
      "epoch": 0.14531178300107073,
      "grad_norm": 13.034354542099656,
      "learning_rate": 4.827901888027561e-06,
      "loss": 0.6125,
      "step": 2850
    },
    {
      "epoch": 0.14582164890633764,
      "grad_norm": 27.871553670050698,
      "learning_rate": 4.826393441418785e-06,
      "loss": 0.7305,
      "step": 2860
    },
    {
      "epoch": 0.14633151481160456,
      "grad_norm": 15.95016686949317,
      "learning_rate": 4.824878650576829e-06,
      "loss": 0.676,
      "step": 2870
    },
    {
      "epoch": 0.14684138071687147,
      "grad_norm": 7.303492060471926,
      "learning_rate": 4.823357519632631e-06,
      "loss": 0.6396,
      "step": 2880
    },
    {
      "epoch": 0.1473512466221384,
      "grad_norm": 9.781495067337557,
      "learning_rate": 4.821830052734418e-06,
      "loss": 0.6509,
      "step": 2890
    },
    {
      "epoch": 0.1478611125274053,
      "grad_norm": 29.408307046395446,
      "learning_rate": 4.820296254047695e-06,
      "loss": 0.6778,
      "step": 2900
    },
    {
      "epoch": 0.14837097843267222,
      "grad_norm": 96.84824967607089,
      "learning_rate": 4.8187561277552376e-06,
      "loss": 0.7329,
      "step": 2910
    },
    {
      "epoch": 0.14888084433793913,
      "grad_norm": 6.615844488316659,
      "learning_rate": 4.817209678057073e-06,
      "loss": 0.6763,
      "step": 2920
    },
    {
      "epoch": 0.14939071024320605,
      "grad_norm": 19.12515725566358,
      "learning_rate": 4.815656909170476e-06,
      "loss": 0.6168,
      "step": 2930
    },
    {
      "epoch": 0.14990057614847296,
      "grad_norm": 23.734925698529885,
      "learning_rate": 4.814097825329953e-06,
      "loss": 0.6631,
      "step": 2940
    },
    {
      "epoch": 0.15041044205373988,
      "grad_norm": 11.93938418941973,
      "learning_rate": 4.81253243078723e-06,
      "loss": 0.6259,
      "step": 2950
    },
    {
      "epoch": 0.1509203079590068,
      "grad_norm": 62.39893614222443,
      "learning_rate": 4.810960729811247e-06,
      "loss": 0.6687,
      "step": 2960
    },
    {
      "epoch": 0.1514301738642737,
      "grad_norm": 17.461388805483967,
      "learning_rate": 4.8093827266881375e-06,
      "loss": 0.6018,
      "step": 2970
    },
    {
      "epoch": 0.15194003976954062,
      "grad_norm": 11.072920322971628,
      "learning_rate": 4.807798425721224e-06,
      "loss": 0.6557,
      "step": 2980
    },
    {
      "epoch": 0.1524499056748075,
      "grad_norm": 72.73945006415794,
      "learning_rate": 4.806207831231004e-06,
      "loss": 0.6651,
      "step": 2990
    },
    {
      "epoch": 0.15295977158007443,
      "grad_norm": 136.05019813495343,
      "learning_rate": 4.804610947555135e-06,
      "loss": 0.6657,
      "step": 3000
    },
    {
      "epoch": 0.15346963748534134,
      "grad_norm": 49.62155036405917,
      "learning_rate": 4.803007779048428e-06,
      "loss": 0.6058,
      "step": 3010
    },
    {
      "epoch": 0.15397950339060826,
      "grad_norm": 150.29526061483784,
      "learning_rate": 4.801398330082834e-06,
      "loss": 0.7119,
      "step": 3020
    },
    {
      "epoch": 0.15448936929587517,
      "grad_norm": 39.16164201556088,
      "learning_rate": 4.7997826050474284e-06,
      "loss": 0.6403,
      "step": 3030
    },
    {
      "epoch": 0.1549992352011421,
      "grad_norm": 13.033901470600805,
      "learning_rate": 4.798160608348404e-06,
      "loss": 0.669,
      "step": 3040
    },
    {
      "epoch": 0.155509101106409,
      "grad_norm": 10.436175563590364,
      "learning_rate": 4.796532344409055e-06,
      "loss": 0.6787,
      "step": 3050
    },
    {
      "epoch": 0.15601896701167592,
      "grad_norm": 8.593652019091126,
      "learning_rate": 4.794897817669769e-06,
      "loss": 0.679,
      "step": 3060
    },
    {
      "epoch": 0.15652883291694283,
      "grad_norm": 73.66768580455573,
      "learning_rate": 4.7932570325880114e-06,
      "loss": 0.637,
      "step": 3070
    },
    {
      "epoch": 0.15703869882220975,
      "grad_norm": 9.16547202941871,
      "learning_rate": 4.791609993638315e-06,
      "loss": 0.6113,
      "step": 3080
    },
    {
      "epoch": 0.15754856472747666,
      "grad_norm": 27.807854408085937,
      "learning_rate": 4.789956705312266e-06,
      "loss": 0.7046,
      "step": 3090
    },
    {
      "epoch": 0.15805843063274358,
      "grad_norm": 158.66445861192776,
      "learning_rate": 4.7882971721184955e-06,
      "loss": 0.6328,
      "step": 3100
    },
    {
      "epoch": 0.1585682965380105,
      "grad_norm": 8.77759522628308,
      "learning_rate": 4.786631398582663e-06,
      "loss": 0.5866,
      "step": 3110
    },
    {
      "epoch": 0.1590781624432774,
      "grad_norm": 20.82303103042944,
      "learning_rate": 4.784959389247445e-06,
      "loss": 0.6648,
      "step": 3120
    },
    {
      "epoch": 0.15958802834854433,
      "grad_norm": 30.15207335672682,
      "learning_rate": 4.7832811486725275e-06,
      "loss": 0.7311,
      "step": 3130
    },
    {
      "epoch": 0.16009789425381124,
      "grad_norm": 48.14398580346578,
      "learning_rate": 4.781596681434584e-06,
      "loss": 0.6556,
      "step": 3140
    },
    {
      "epoch": 0.16060776015907816,
      "grad_norm": 240.90567407324733,
      "learning_rate": 4.779905992127273e-06,
      "loss": 0.6415,
      "step": 3150
    },
    {
      "epoch": 0.16111762606434507,
      "grad_norm": 13.587128797890388,
      "learning_rate": 4.778209085361217e-06,
      "loss": 0.7353,
      "step": 3160
    },
    {
      "epoch": 0.16162749196961199,
      "grad_norm": 26.152194406935024,
      "learning_rate": 4.776505965763999e-06,
      "loss": 0.6146,
      "step": 3170
    },
    {
      "epoch": 0.1621373578748789,
      "grad_norm": 28.08752066614632,
      "learning_rate": 4.77479663798014e-06,
      "loss": 0.6987,
      "step": 3180
    },
    {
      "epoch": 0.16264722378014582,
      "grad_norm": 18.4700415367194,
      "learning_rate": 4.773081106671094e-06,
      "loss": 0.6776,
      "step": 3190
    },
    {
      "epoch": 0.16315708968541273,
      "grad_norm": 26.687478554750435,
      "learning_rate": 4.771359376515231e-06,
      "loss": 0.6865,
      "step": 3200
    },
    {
      "epoch": 0.16366695559067965,
      "grad_norm": 10.56334023559032,
      "learning_rate": 4.769631452207828e-06,
      "loss": 0.6037,
      "step": 3210
    },
    {
      "epoch": 0.16417682149594656,
      "grad_norm": 30.24248577609317,
      "learning_rate": 4.76789733846105e-06,
      "loss": 0.6766,
      "step": 3220
    },
    {
      "epoch": 0.16468668740121348,
      "grad_norm": 6.118922498908346,
      "learning_rate": 4.766157040003944e-06,
      "loss": 0.576,
      "step": 3230
    },
    {
      "epoch": 0.1651965533064804,
      "grad_norm": 43.10186647344212,
      "learning_rate": 4.7644105615824226e-06,
      "loss": 0.6617,
      "step": 3240
    },
    {
      "epoch": 0.1657064192117473,
      "grad_norm": 22.58741112886033,
      "learning_rate": 4.7626579079592504e-06,
      "loss": 0.6257,
      "step": 3250
    },
    {
      "epoch": 0.16621628511701422,
      "grad_norm": 12.207542296775456,
      "learning_rate": 4.760899083914035e-06,
      "loss": 0.6313,
      "step": 3260
    },
    {
      "epoch": 0.16672615102228114,
      "grad_norm": 45.71668407841048,
      "learning_rate": 4.759134094243206e-06,
      "loss": 0.6591,
      "step": 3270
    },
    {
      "epoch": 0.16723601692754805,
      "grad_norm": 14.848732025059714,
      "learning_rate": 4.757362943760013e-06,
      "loss": 0.5773,
      "step": 3280
    },
    {
      "epoch": 0.16774588283281497,
      "grad_norm": 17.646815540917046,
      "learning_rate": 4.755585637294503e-06,
      "loss": 0.6431,
      "step": 3290
    },
    {
      "epoch": 0.16825574873808188,
      "grad_norm": 51.21492133232567,
      "learning_rate": 4.753802179693512e-06,
      "loss": 0.6482,
      "step": 3300
    },
    {
      "epoch": 0.1687656146433488,
      "grad_norm": 19.112461161721722,
      "learning_rate": 4.7520125758206495e-06,
      "loss": 0.6671,
      "step": 3310
    },
    {
      "epoch": 0.16927548054861571,
      "grad_norm": 16.191200196997656,
      "learning_rate": 4.750216830556287e-06,
      "loss": 0.6296,
      "step": 3320
    },
    {
      "epoch": 0.16978534645388263,
      "grad_norm": 27.51474330611919,
      "learning_rate": 4.748414948797545e-06,
      "loss": 0.6612,
      "step": 3330
    },
    {
      "epoch": 0.17029521235914954,
      "grad_norm": 23.75272142335703,
      "learning_rate": 4.746606935458277e-06,
      "loss": 0.641,
      "step": 3340
    },
    {
      "epoch": 0.17080507826441646,
      "grad_norm": 11.809633635411776,
      "learning_rate": 4.744792795469058e-06,
      "loss": 0.6834,
      "step": 3350
    },
    {
      "epoch": 0.17131494416968338,
      "grad_norm": 189.33582561579482,
      "learning_rate": 4.742972533777172e-06,
      "loss": 0.6559,
      "step": 3360
    },
    {
      "epoch": 0.1718248100749503,
      "grad_norm": 11.941107362922496,
      "learning_rate": 4.741146155346596e-06,
      "loss": 0.6373,
      "step": 3370
    },
| { | |
| "epoch": 0.1723346759802172, | |
| "grad_norm": 7.436609726007049, | |
| "learning_rate": 4.739313665157988e-06, | |
| "loss": 0.6129, | |
| "step": 3380 | |
| }, | |
| { | |
| "epoch": 0.17284454188548412, | |
| "grad_norm": 23.5332384240844, | |
| "learning_rate": 4.737475068208673e-06, | |
| "loss": 0.6544, | |
| "step": 3390 | |
| }, | |
| { | |
| "epoch": 0.17335440779075104, | |
| "grad_norm": 26.16689346814709, | |
| "learning_rate": 4.7356303695126315e-06, | |
| "loss": 0.6598, | |
| "step": 3400 | |
| }, | |
| { | |
| "epoch": 0.17386427369601795, | |
| "grad_norm": 8.349460123034687, | |
| "learning_rate": 4.733779574100482e-06, | |
| "loss": 0.6285, | |
| "step": 3410 | |
| }, | |
| { | |
| "epoch": 0.17437413960128487, | |
| "grad_norm": 5.208194637835858, | |
| "learning_rate": 4.7319226870194676e-06, | |
| "loss": 0.6597, | |
| "step": 3420 | |
| }, | |
| { | |
| "epoch": 0.17488400550655178, | |
| "grad_norm": 10.345782004711861, | |
| "learning_rate": 4.730059713333448e-06, | |
| "loss": 0.6892, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 0.1753938714118187, | |
| "grad_norm": 21.377764003397456, | |
| "learning_rate": 4.728190658122878e-06, | |
| "loss": 0.58, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 0.1759037373170856, | |
| "grad_norm": 30.6794980257094, | |
| "learning_rate": 4.726315526484799e-06, | |
| "loss": 0.6423, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 0.17641360322235253, | |
| "grad_norm": 13.709883604900671, | |
| "learning_rate": 4.724434323532822e-06, | |
| "loss": 0.6154, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 0.17692346912761944, | |
| "grad_norm": 8.479320584002648, | |
| "learning_rate": 4.722547054397114e-06, | |
| "loss": 0.628, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 0.17743333503288636, | |
| "grad_norm": 13.685563027122981, | |
| "learning_rate": 4.720653724224389e-06, | |
| "loss": 0.6556, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 0.17794320093815327, | |
| "grad_norm": 8.260254371701032, | |
| "learning_rate": 4.718754338177887e-06, | |
| "loss": 0.5759, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 0.1784530668434202, | |
| "grad_norm": 13.913564384357112, | |
| "learning_rate": 4.716848901437361e-06, | |
| "loss": 0.6285, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 0.1789629327486871, | |
| "grad_norm": 45.21035989952079, | |
| "learning_rate": 4.714937419199067e-06, | |
| "loss": 0.6304, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 0.17947279865395402, | |
| "grad_norm": 17.61340217915247, | |
| "learning_rate": 4.713019896675749e-06, | |
| "loss": 0.7353, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 0.17998266455922093, | |
| "grad_norm": 51.73719722197582, | |
| "learning_rate": 4.711096339096619e-06, | |
| "loss": 0.6661, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 0.18049253046448785, | |
| "grad_norm": 41.42271286545349, | |
| "learning_rate": 4.709166751707351e-06, | |
| "loss": 0.6626, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 0.18100239636975476, | |
| "grad_norm": 7.6574270650897605, | |
| "learning_rate": 4.7072311397700605e-06, | |
| "loss": 0.6286, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 0.18151226227502168, | |
| "grad_norm": 55.46468850803111, | |
| "learning_rate": 4.705289508563293e-06, | |
| "loss": 0.6502, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 0.1820221281802886, | |
| "grad_norm": 23.031046436781327, | |
| "learning_rate": 4.703341863382009e-06, | |
| "loss": 0.6367, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 0.1825319940855555, | |
| "grad_norm": 8.278399987077327, | |
| "learning_rate": 4.701388209537569e-06, | |
| "loss": 0.654, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 0.18304185999082243, | |
| "grad_norm": 16.796980125582998, | |
| "learning_rate": 4.69942855235772e-06, | |
| "loss": 0.6853, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 0.18355172589608934, | |
| "grad_norm": 14.118030028068347, | |
| "learning_rate": 4.697462897186581e-06, | |
| "loss": 0.6337, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 0.18406159180135626, | |
| "grad_norm": 15.497810143418386, | |
| "learning_rate": 4.695491249384628e-06, | |
| "loss": 0.6486, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 0.18457145770662317, | |
| "grad_norm": 18.32562759382926, | |
| "learning_rate": 4.693513614328676e-06, | |
| "loss": 0.6651, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 0.1850813236118901, | |
| "grad_norm": 10.993090480177178, | |
| "learning_rate": 4.691529997411873e-06, | |
| "loss": 0.7046, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 0.185591189517157, | |
| "grad_norm": 9.354322331456606, | |
| "learning_rate": 4.689540404043677e-06, | |
| "loss": 0.6626, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 0.1861010554224239, | |
| "grad_norm": 28.01431660577904, | |
| "learning_rate": 4.687544839649842e-06, | |
| "loss": 0.6499, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 0.1866109213276908, | |
| "grad_norm": 9.512985903502441, | |
| "learning_rate": 4.685543309672411e-06, | |
| "loss": 0.5949, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 0.18712078723295772, | |
| "grad_norm": 24.4025944922159, | |
| "learning_rate": 4.683535819569691e-06, | |
| "loss": 0.6727, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 0.18763065313822463, | |
| "grad_norm": 50.8641661183589, | |
| "learning_rate": 4.681522374816244e-06, | |
| "loss": 0.6116, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 0.18814051904349155, | |
| "grad_norm": 17.587190798690532, | |
| "learning_rate": 4.679502980902871e-06, | |
| "loss": 0.6195, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 0.18865038494875846, | |
| "grad_norm": 18.670293843348876, | |
| "learning_rate": 4.6774776433365965e-06, | |
| "loss": 0.6008, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 0.18916025085402538, | |
| "grad_norm": 10.999046092407664, | |
| "learning_rate": 4.6754463676406545e-06, | |
| "loss": 0.6525, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 0.1896701167592923, | |
| "grad_norm": 16.578146994304092, | |
| "learning_rate": 4.6734091593544705e-06, | |
| "loss": 0.6329, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 0.1901799826645592, | |
| "grad_norm": 14.938986449153218, | |
| "learning_rate": 4.671366024033651e-06, | |
| "loss": 0.6031, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 0.19068984856982613, | |
| "grad_norm": 53.66910906959681, | |
| "learning_rate": 4.669316967249966e-06, | |
| "loss": 0.6748, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 0.19119971447509304, | |
| "grad_norm": 38.64005506480217, | |
| "learning_rate": 4.667261994591331e-06, | |
| "loss": 0.643, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 0.19170958038035996, | |
| "grad_norm": 20.83213603104761, | |
| "learning_rate": 4.665201111661797e-06, | |
| "loss": 0.6829, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 0.19221944628562687, | |
| "grad_norm": 7.6025512197039085, | |
| "learning_rate": 4.663134324081533e-06, | |
| "loss": 0.6365, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 0.1927293121908938, | |
| "grad_norm": 17.681281980648198, | |
| "learning_rate": 4.6610616374868066e-06, | |
| "loss": 0.7101, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 0.1932391780961607, | |
| "grad_norm": 5.961528341747031, | |
| "learning_rate": 4.658983057529978e-06, | |
| "loss": 0.5333, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 0.19374904400142762, | |
| "grad_norm": 8.500212586402798, | |
| "learning_rate": 4.656898589879475e-06, | |
| "loss": 0.6249, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 0.19425890990669453, | |
| "grad_norm": 7.489518295782326, | |
| "learning_rate": 4.654808240219782e-06, | |
| "loss": 0.6581, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 0.19476877581196145, | |
| "grad_norm": 6.794369508294625, | |
| "learning_rate": 4.652712014251426e-06, | |
| "loss": 0.6068, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 0.19527864171722836, | |
| "grad_norm": 26.68786642749212, | |
| "learning_rate": 4.650609917690957e-06, | |
| "loss": 0.5868, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 0.19578850762249528, | |
| "grad_norm": 24.634611602664084, | |
| "learning_rate": 4.648501956270936e-06, | |
| "loss": 0.6491, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 0.1962983735277622, | |
| "grad_norm": 7.916826238082026, | |
| "learning_rate": 4.646388135739915e-06, | |
| "loss": 0.6238, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 0.1968082394330291, | |
| "grad_norm": 21.827979294670318, | |
| "learning_rate": 4.64426846186243e-06, | |
| "loss": 0.6028, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 0.19731810533829602, | |
| "grad_norm": 10.817798837200906, | |
| "learning_rate": 4.642142940418973e-06, | |
| "loss": 0.7177, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 0.19782797124356294, | |
| "grad_norm": 7.343593825755392, | |
| "learning_rate": 4.640011577205987e-06, | |
| "loss": 0.6465, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 0.19833783714882985, | |
| "grad_norm": 9.578351561035468, | |
| "learning_rate": 4.637874378035845e-06, | |
| "loss": 0.6709, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 0.19884770305409677, | |
| "grad_norm": 11.509018820886228, | |
| "learning_rate": 4.635731348736832e-06, | |
| "loss": 0.6401, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 0.19935756895936368, | |
| "grad_norm": 7.148943238715311, | |
| "learning_rate": 4.633582495153137e-06, | |
| "loss": 0.6461, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 0.1998674348646306, | |
| "grad_norm": 6.102496878351305, | |
| "learning_rate": 4.631427823144829e-06, | |
| "loss": 0.6007, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 0.20037730076989752, | |
| "grad_norm": 12.872084204664713, | |
| "learning_rate": 4.6292673385878466e-06, | |
| "loss": 0.6813, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 0.20088716667516443, | |
| "grad_norm": 28.92020036551762, | |
| "learning_rate": 4.6271010473739754e-06, | |
| "loss": 0.7501, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 0.20139703258043135, | |
| "grad_norm": 26.1419017695497, | |
| "learning_rate": 4.624928955410841e-06, | |
| "loss": 0.5818, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 0.20190689848569826, | |
| "grad_norm": 8.520817831827328, | |
| "learning_rate": 4.622751068621886e-06, | |
| "loss": 0.619, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 0.20241676439096518, | |
| "grad_norm": 10.60690821999077, | |
| "learning_rate": 4.620567392946355e-06, | |
| "loss": 0.6238, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 0.2029266302962321, | |
| "grad_norm": 6.111505810115736, | |
| "learning_rate": 4.618377934339279e-06, | |
| "loss": 0.6251, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 0.203436496201499, | |
| "grad_norm": 11.501436686181162, | |
| "learning_rate": 4.616182698771463e-06, | |
| "loss": 0.59, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 0.20394636210676592, | |
| "grad_norm": 21.165945980312195, | |
| "learning_rate": 4.613981692229462e-06, | |
| "loss": 0.6075, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 0.20445622801203284, | |
| "grad_norm": 15.671878583013148, | |
| "learning_rate": 4.611774920715572e-06, | |
| "loss": 0.6139, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 0.20496609391729975, | |
| "grad_norm": 10.969313336572679, | |
| "learning_rate": 4.609562390247808e-06, | |
| "loss": 0.6505, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 0.20547595982256667, | |
| "grad_norm": 12.862905465982983, | |
| "learning_rate": 4.607344106859891e-06, | |
| "loss": 0.6395, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 0.20598582572783358, | |
| "grad_norm": 11.382723251669816, | |
| "learning_rate": 4.605120076601231e-06, | |
| "loss": 0.6574, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 0.2064956916331005, | |
| "grad_norm": 35.899532032459916, | |
| "learning_rate": 4.602890305536911e-06, | |
| "loss": 0.665, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 0.2070055575383674, | |
| "grad_norm": 133.21898922751015, | |
| "learning_rate": 4.6006547997476666e-06, | |
| "loss": 0.6725, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 0.20751542344363433, | |
| "grad_norm": 52.27751505587636, | |
| "learning_rate": 4.598413565329876e-06, | |
| "loss": 0.6175, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 0.20802528934890124, | |
| "grad_norm": 22.607365372261278, | |
| "learning_rate": 4.596166608395535e-06, | |
| "loss": 0.6543, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 0.20853515525416816, | |
| "grad_norm": 18.893786224066538, | |
| "learning_rate": 4.593913935072251e-06, | |
| "loss": 0.6588, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 0.20904502115943507, | |
| "grad_norm": 36.925520109780834, | |
| "learning_rate": 4.591655551503215e-06, | |
| "loss": 0.6452, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 0.209554887064702, | |
| "grad_norm": 18.989453127592082, | |
| "learning_rate": 4.589391463847194e-06, | |
| "loss": 0.7321, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 0.2100647529699689, | |
| "grad_norm": 21.559296328550772, | |
| "learning_rate": 4.58712167827851e-06, | |
| "loss": 0.6381, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 0.21057461887523582, | |
| "grad_norm": 7.4422471218088235, | |
| "learning_rate": 4.584846200987022e-06, | |
| "loss": 0.6695, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 0.21108448478050273, | |
| "grad_norm": 8.598103890275697, | |
| "learning_rate": 4.582565038178109e-06, | |
| "loss": 0.6393, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 0.21159435068576965, | |
| "grad_norm": 10.859833811937909, | |
| "learning_rate": 4.58027819607266e-06, | |
| "loss": 0.6611, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 0.21210421659103657, | |
| "grad_norm": 9.455817856990365, | |
| "learning_rate": 4.577985680907049e-06, | |
| "loss": 0.6438, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 0.21261408249630348, | |
| "grad_norm": 13.005173788491543, | |
| "learning_rate": 4.575687498933119e-06, | |
| "loss": 0.6639, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 0.2131239484015704, | |
| "grad_norm": 10.115654248919899, | |
| "learning_rate": 4.573383656418169e-06, | |
| "loss": 0.6944, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 0.2136338143068373, | |
| "grad_norm": 37.56438546169335, | |
| "learning_rate": 4.571074159644936e-06, | |
| "loss": 0.6497, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 0.21414368021210423, | |
| "grad_norm": 5.861898760817187, | |
| "learning_rate": 4.568759014911573e-06, | |
| "loss": 0.6125, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 0.21465354611737114, | |
| "grad_norm": 8.783410942135434, | |
| "learning_rate": 4.566438228531638e-06, | |
| "loss": 0.6775, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 0.21516341202263806, | |
| "grad_norm": 6.639944275088382, | |
| "learning_rate": 4.564111806834073e-06, | |
| "loss": 0.56, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 0.21567327792790497, | |
| "grad_norm": 6.214883955719362, | |
| "learning_rate": 4.5617797561631885e-06, | |
| "loss": 0.689, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 0.2161831438331719, | |
| "grad_norm": 10.229291895282838, | |
| "learning_rate": 4.559442082878645e-06, | |
| "loss": 0.641, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 0.2166930097384388, | |
| "grad_norm": 14.962817683964534, | |
| "learning_rate": 4.557098793355436e-06, | |
| "loss": 0.5747, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 0.21720287564370572, | |
| "grad_norm": 8.53213097749228, | |
| "learning_rate": 4.554749893983874e-06, | |
| "loss": 0.5826, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 0.21771274154897263, | |
| "grad_norm": 16.549243515768183, | |
| "learning_rate": 4.552395391169564e-06, | |
| "loss": 0.6758, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 0.21822260745423955, | |
| "grad_norm": 18.938696755433465, | |
| "learning_rate": 4.550035291333398e-06, | |
| "loss": 0.5707, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 0.21873247335950646, | |
| "grad_norm": 35.395198976128206, | |
| "learning_rate": 4.547669600911527e-06, | |
| "loss": 0.6877, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 0.21924233926477338, | |
| "grad_norm": 8.277494608992038, | |
| "learning_rate": 4.545298326355351e-06, | |
| "loss": 0.629, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 0.21975220517004027, | |
| "grad_norm": 79.25983653731949, | |
| "learning_rate": 4.542921474131497e-06, | |
| "loss": 0.678, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 0.22026207107530718, | |
| "grad_norm": 9.856307804445956, | |
| "learning_rate": 4.540539050721801e-06, | |
| "loss": 0.676, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 0.2207719369805741, | |
| "grad_norm": 8.615502828092712, | |
| "learning_rate": 4.538151062623296e-06, | |
| "loss": 0.5812, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 0.221281802885841, | |
| "grad_norm": 22.168060830576543, | |
| "learning_rate": 4.535757516348186e-06, | |
| "loss": 0.6231, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 0.22179166879110793, | |
| "grad_norm": 11.320048528710553, | |
| "learning_rate": 4.533358418423837e-06, | |
| "loss": 0.6178, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 0.22230153469637484, | |
| "grad_norm": 6.466250445752336, | |
| "learning_rate": 4.530953775392749e-06, | |
| "loss": 0.6187, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 0.22281140060164176, | |
| "grad_norm": 24.18925309517145, | |
| "learning_rate": 4.52854359381255e-06, | |
| "loss": 0.6119, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 0.22332126650690867, | |
| "grad_norm": 8.571095234461708, | |
| "learning_rate": 4.5261278802559675e-06, | |
| "loss": 0.6014, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 0.2238311324121756, | |
| "grad_norm": 5.982288593053768, | |
| "learning_rate": 4.523706641310817e-06, | |
| "loss": 0.6207, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 0.2243409983174425, | |
| "grad_norm": 19.10284583967391, | |
| "learning_rate": 4.521279883579982e-06, | |
| "loss": 0.5721, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 0.22485086422270942, | |
| "grad_norm": 19.058976511095665, | |
| "learning_rate": 4.518847613681397e-06, | |
| "loss": 0.6835, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 0.22536073012797633, | |
| "grad_norm": 17.345690564027926, | |
| "learning_rate": 4.516409838248026e-06, | |
| "loss": 0.6342, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 0.22587059603324325, | |
| "grad_norm": 18.002628820726464, | |
| "learning_rate": 4.513966563927849e-06, | |
| "loss": 0.6207, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 0.22638046193851016, | |
| "grad_norm": 10.76507448906313, | |
| "learning_rate": 4.511517797383841e-06, | |
| "loss": 0.6504, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 0.22689032784377708, | |
| "grad_norm": 37.963122971372236, | |
| "learning_rate": 4.509063545293954e-06, | |
| "loss": 0.6718, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 0.227400193749044, | |
| "grad_norm": 31.081526202955846, | |
| "learning_rate": 4.506603814351103e-06, | |
| "loss": 0.5657, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 0.2279100596543109, | |
| "grad_norm": 9.090363098538898, | |
| "learning_rate": 4.5041386112631394e-06, | |
| "loss": 0.6365, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 0.22841992555957782, | |
| "grad_norm": 29.102701668422192, | |
| "learning_rate": 4.501667942752841e-06, | |
| "loss": 0.565, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 0.22892979146484474, | |
| "grad_norm": 9.40166219606075, | |
| "learning_rate": 4.499191815557888e-06, | |
| "loss": 0.6468, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 0.22943965737011165, | |
| "grad_norm": 24.98331649199063, | |
| "learning_rate": 4.496710236430848e-06, | |
| "loss": 0.6094, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 0.22994952327537857, | |
| "grad_norm": 14.24156171953044, | |
| "learning_rate": 4.4942232121391565e-06, | |
| "loss": 0.618, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 0.23045938918064549, | |
| "grad_norm": 25.888527040924036, | |
| "learning_rate": 4.4917307494650975e-06, | |
| "loss": 0.5663, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 0.2309692550859124, | |
| "grad_norm": 11.32099980777747, | |
| "learning_rate": 4.489232855205787e-06, | |
| "loss": 0.6577, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 0.23147912099117932, | |
| "grad_norm": 7.52041696012304, | |
| "learning_rate": 4.4867295361731515e-06, | |
| "loss": 0.6623, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 0.23198898689644623, | |
| "grad_norm": 86.35249632825519, | |
| "learning_rate": 4.484220799193913e-06, | |
| "loss": 0.6061, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 0.23249885280171315, | |
| "grad_norm": 11.304016464447672, | |
| "learning_rate": 4.481706651109567e-06, | |
| "loss": 0.5761, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 0.23300871870698006, | |
| "grad_norm": 18.086580599155557, | |
| "learning_rate": 4.479187098776368e-06, | |
| "loss": 0.6349, | |
| "step": 4570 | |
| }, | |
| { | |
| "epoch": 0.23351858461224698, | |
| "grad_norm": 12.005880118436764, | |
| "learning_rate": 4.476662149065306e-06, | |
| "loss": 0.6567, | |
| "step": 4580 | |
| }, | |
| { | |
| "epoch": 0.2340284505175139, | |
| "grad_norm": 13.992259943255434, | |
| "learning_rate": 4.474131808862089e-06, | |
| "loss": 0.7011, | |
| "step": 4590 | |
| }, | |
| { | |
| "epoch": 0.2345383164227808, | |
| "grad_norm": 22.858022758700816, | |
| "learning_rate": 4.471596085067129e-06, | |
| "loss": 0.6229, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 0.23504818232804772, | |
| "grad_norm": 17.13933779909115, | |
| "learning_rate": 4.469054984595517e-06, | |
| "loss": 0.5982, | |
| "step": 4610 | |
| }, | |
| { | |
| "epoch": 0.23555804823331464, | |
| "grad_norm": 11.220115247254418, | |
| "learning_rate": 4.466508514377006e-06, | |
| "loss": 0.6181, | |
| "step": 4620 | |
| }, | |
| { | |
| "epoch": 0.23606791413858155, | |
| "grad_norm": 8.87668293968533, | |
| "learning_rate": 4.463956681355993e-06, | |
| "loss": 0.6309, | |
| "step": 4630 | |
| }, | |
| { | |
| "epoch": 0.23657778004384847, | |
| "grad_norm": 8.331451854861665, | |
| "learning_rate": 4.461399492491502e-06, | |
| "loss": 0.6389, | |
| "step": 4640 | |
| }, | |
| { | |
| "epoch": 0.23708764594911538, | |
| "grad_norm": 25.657125436012297, | |
| "learning_rate": 4.458836954757161e-06, | |
| "loss": 0.6381, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 0.2375975118543823, | |
| "grad_norm": 18.084465806692844, | |
| "learning_rate": 4.456269075141183e-06, | |
| "loss": 0.6064, | |
| "step": 4660 | |
| }, | |
| { | |
| "epoch": 0.2381073777596492, | |
| "grad_norm": 12.540649074725133, | |
| "learning_rate": 4.4536958606463506e-06, | |
| "loss": 0.6177, | |
| "step": 4670 | |
| }, | |
| { | |
| "epoch": 0.23861724366491613, | |
| "grad_norm": 10.689133786862486, | |
| "learning_rate": 4.451117318289996e-06, | |
| "loss": 0.646, | |
| "step": 4680 | |
| }, | |
| { | |
| "epoch": 0.23912710957018304, | |
| "grad_norm": 7.958824117114735, | |
| "learning_rate": 4.448533455103979e-06, | |
| "loss": 0.6104, | |
| "step": 4690 | |
| }, | |
| { | |
| "epoch": 0.23963697547544996, | |
| "grad_norm": 10.36267483447428, | |
| "learning_rate": 4.445944278134671e-06, | |
| "loss": 0.6753, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 0.24014684138071687, | |
| "grad_norm": 11.175782587324658, | |
| "learning_rate": 4.4433497944429325e-06, | |
| "loss": 0.5777, | |
| "step": 4710 | |
| }, | |
| { | |
| "epoch": 0.2406567072859838, | |
| "grad_norm": 27.8631592366726, | |
| "learning_rate": 4.440750011104098e-06, | |
| "loss": 0.5881, | |
| "step": 4720 | |
| }, | |
| { | |
| "epoch": 0.2411665731912507, | |
| "grad_norm": 9.81409196930052, | |
| "learning_rate": 4.438144935207953e-06, | |
| "loss": 0.5875, | |
| "step": 4730 | |
| }, | |
| { | |
| "epoch": 0.24167643909651762, | |
| "grad_norm": 9.171704497921569, | |
| "learning_rate": 4.435534573858717e-06, | |
| "loss": 0.6058, | |
| "step": 4740 | |
| }, | |
| { | |
| "epoch": 0.24218630500178454, | |
| "grad_norm": 19.37721728099732, | |
| "learning_rate": 4.432918934175023e-06, | |
| "loss": 0.6167, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 0.24269617090705145, | |
| "grad_norm": 14.658589343659886, | |
| "learning_rate": 4.430298023289897e-06, | |
| "loss": 0.6457, | |
| "step": 4760 | |
| }, | |
| { | |
| "epoch": 0.24320603681231837, | |
| "grad_norm": 17.4652006391887, | |
| "learning_rate": 4.427671848350744e-06, | |
| "loss": 0.6085, | |
| "step": 4770 | |
| }, | |
| { | |
| "epoch": 0.24371590271758528, | |
| "grad_norm": 17.988496277713374, | |
| "learning_rate": 4.425040416519319e-06, | |
| "loss": 0.5356, | |
| "step": 4780 | |
| }, | |
| { | |
| "epoch": 0.2442257686228522, | |
| "grad_norm": 40.73874761370533, | |
| "learning_rate": 4.422403734971718e-06, | |
| "loss": 0.5709, | |
| "step": 4790 | |
| }, | |
| { | |
| "epoch": 0.2447356345281191, | |
| "grad_norm": 11.464972225824136, | |
| "learning_rate": 4.419761810898349e-06, | |
| "loss": 0.5991, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 0.24524550043338603, | |
| "grad_norm": 27.356260220415578, | |
| "learning_rate": 4.4171146515039206e-06, | |
| "loss": 0.6223, | |
| "step": 4810 | |
| }, | |
| { | |
| "epoch": 0.24575536633865294, | |
| "grad_norm": 23.849384135826682, | |
| "learning_rate": 4.414462264007414e-06, | |
| "loss": 0.6609, | |
| "step": 4820 | |
| }, | |
| { | |
| "epoch": 0.24626523224391986, | |
| "grad_norm": 28.00533721425015, | |
| "learning_rate": 4.4118046556420725e-06, | |
| "loss": 0.5876, | |
| "step": 4830 | |
| }, | |
| { | |
| "epoch": 0.24677509814918677, | |
| "grad_norm": 12.977686770179183, | |
| "learning_rate": 4.409141833655375e-06, | |
| "loss": 0.5651, | |
| "step": 4840 | |
| }, | |
| { | |
| "epoch": 0.2472849640544537, | |
| "grad_norm": 15.526266224793934, | |
| "learning_rate": 4.406473805309016e-06, | |
| "loss": 0.657, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 0.2477948299597206, | |
| "grad_norm": 12.62411203239521, | |
| "learning_rate": 4.403800577878892e-06, | |
| "loss": 0.5806, | |
| "step": 4860 | |
| }, | |
| { | |
| "epoch": 0.24830469586498752, | |
| "grad_norm": 21.678019580304962, | |
| "learning_rate": 4.401122158655076e-06, | |
| "loss": 0.6082, | |
| "step": 4870 | |
| }, | |
| { | |
| "epoch": 0.24881456177025443, | |
| "grad_norm": 15.217926723899035, | |
| "learning_rate": 4.3984385549418e-06, | |
| "loss": 0.6143, | |
| "step": 4880 | |
| }, | |
| { | |
| "epoch": 0.24932442767552135, | |
| "grad_norm": 9.534920956087968, | |
| "learning_rate": 4.395749774057432e-06, | |
| "loss": 0.6445, | |
| "step": 4890 | |
| }, | |
| { | |
| "epoch": 0.24983429358078826, | |
| "grad_norm": 20.694698271569113, | |
| "learning_rate": 4.393055823334461e-06, | |
| "loss": 0.6482, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 0.25034415948605515, | |
| "grad_norm": 10.440590354492034, | |
| "learning_rate": 4.390356710119476e-06, | |
| "loss": 0.644, | |
| "step": 4910 | |
| }, | |
| { | |
| "epoch": 0.25085402539132207, | |
| "grad_norm": 14.01886170757573, | |
| "learning_rate": 4.38765244177314e-06, | |
| "loss": 0.61, | |
| "step": 4920 | |
| }, | |
| { | |
| "epoch": 0.251363891296589, | |
| "grad_norm": 12.121295206565357, | |
| "learning_rate": 4.3849430256701765e-06, | |
| "loss": 0.5708, | |
| "step": 4930 | |
| }, | |
| { | |
| "epoch": 0.2518737572018559, | |
| "grad_norm": 11.071313569953784, | |
| "learning_rate": 4.38222846919935e-06, | |
| "loss": 0.6051, | |
| "step": 4940 | |
| }, | |
| { | |
| "epoch": 0.2523836231071228, | |
| "grad_norm": 14.68565361360073, | |
| "learning_rate": 4.379508779763438e-06, | |
| "loss": 0.7019, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 0.2528934890123897, | |
| "grad_norm": 17.274945106451703, | |
| "learning_rate": 4.376783964779221e-06, | |
| "loss": 0.625, | |
| "step": 4960 | |
| }, | |
| { | |
| "epoch": 0.25340335491765664, | |
| "grad_norm": 5.602426927797183, | |
| "learning_rate": 4.3740540316774535e-06, | |
| "loss": 0.5799, | |
| "step": 4970 | |
| }, | |
| { | |
| "epoch": 0.25391322082292356, | |
| "grad_norm": 7.201877021121772, | |
| "learning_rate": 4.3713189879028485e-06, | |
| "loss": 0.5485, | |
| "step": 4980 | |
| }, | |
| { | |
| "epoch": 0.2544230867281905, | |
| "grad_norm": 13.951892269527397, | |
| "learning_rate": 4.3685788409140564e-06, | |
| "loss": 0.645, | |
| "step": 4990 | |
| }, | |
| { | |
| "epoch": 0.2549329526334574, | |
| "grad_norm": 27.947954866045787, | |
| "learning_rate": 4.365833598183645e-06, | |
| "loss": 0.6404, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 0.2554428185387243, | |
| "grad_norm": 10.514953272318815, | |
| "learning_rate": 4.363083267198079e-06, | |
| "loss": 0.5863, | |
| "step": 5010 | |
| }, | |
| { | |
| "epoch": 0.2559526844439912, | |
| "grad_norm": 11.245703293221581, | |
| "learning_rate": 4.360327855457696e-06, | |
| "loss": 0.6097, | |
| "step": 5020 | |
| }, | |
| { | |
| "epoch": 0.25646255034925813, | |
| "grad_norm": 8.599634462633494, | |
| "learning_rate": 4.357567370476693e-06, | |
| "loss": 0.5853, | |
| "step": 5030 | |
| }, | |
| { | |
| "epoch": 0.25697241625452505, | |
| "grad_norm": 6.095105421466429, | |
| "learning_rate": 4.354801819783099e-06, | |
| "loss": 0.5669, | |
| "step": 5040 | |
| }, | |
| { | |
| "epoch": 0.25748228215979196, | |
| "grad_norm": 7.616602354562931, | |
| "learning_rate": 4.35203121091876e-06, | |
| "loss": 0.5345, | |
| "step": 5050 | |
| }, | |
| { | |
| "epoch": 0.2579921480650589, | |
| "grad_norm": 23.42239940092673, | |
| "learning_rate": 4.349255551439314e-06, | |
| "loss": 0.6427, | |
| "step": 5060 | |
| }, | |
| { | |
| "epoch": 0.2585020139703258, | |
| "grad_norm": 22.048908516961838, | |
| "learning_rate": 4.346474848914174e-06, | |
| "loss": 0.6484, | |
| "step": 5070 | |
| }, | |
| { | |
| "epoch": 0.2590118798755927, | |
| "grad_norm": 9.251706608352913, | |
| "learning_rate": 4.343689110926504e-06, | |
| "loss": 0.64, | |
| "step": 5080 | |
| }, | |
| { | |
| "epoch": 0.2595217457808596, | |
| "grad_norm": 13.321339625150785, | |
| "learning_rate": 4.340898345073202e-06, | |
| "loss": 0.7136, | |
| "step": 5090 | |
| }, | |
| { | |
| "epoch": 0.26003161168612654, | |
| "grad_norm": 10.441597949681997, | |
| "learning_rate": 4.338102558964876e-06, | |
| "loss": 0.6608, | |
| "step": 5100 | |
| }, | |
| { | |
| "epoch": 0.26054147759139346, | |
| "grad_norm": 12.745953519040231, | |
| "learning_rate": 4.335301760225824e-06, | |
| "loss": 0.5851, | |
| "step": 5110 | |
| }, | |
| { | |
| "epoch": 0.26105134349666037, | |
| "grad_norm": 65.54384924061196, | |
| "learning_rate": 4.3324959564940165e-06, | |
| "loss": 0.5452, | |
| "step": 5120 | |
| }, | |
| { | |
| "epoch": 0.2615612094019273, | |
| "grad_norm": 6.375930327766279, | |
| "learning_rate": 4.329685155421069e-06, | |
| "loss": 0.5983, | |
| "step": 5130 | |
| }, | |
| { | |
| "epoch": 0.2620710753071942, | |
| "grad_norm": 3.6975754694433514, | |
| "learning_rate": 4.326869364672229e-06, | |
| "loss": 0.6113, | |
| "step": 5140 | |
| }, | |
| { | |
| "epoch": 0.2625809412124611, | |
| "grad_norm": 17.984395739677577, | |
| "learning_rate": 4.324048591926349e-06, | |
| "loss": 0.6007, | |
| "step": 5150 | |
| }, | |
| { | |
| "epoch": 0.26309080711772803, | |
| "grad_norm": 7.288254040012573, | |
| "learning_rate": 4.321222844875869e-06, | |
| "loss": 0.6186, | |
| "step": 5160 | |
| }, | |
| { | |
| "epoch": 0.26360067302299495, | |
| "grad_norm": 5.127848398893114, | |
| "learning_rate": 4.318392131226791e-06, | |
| "loss": 0.6471, | |
| "step": 5170 | |
| }, | |
| { | |
| "epoch": 0.26411053892826186, | |
| "grad_norm": 7.5315117397182645, | |
| "learning_rate": 4.315556458698665e-06, | |
| "loss": 0.6237, | |
| "step": 5180 | |
| }, | |
| { | |
| "epoch": 0.2646204048335288, | |
| "grad_norm": 8.723509314324257, | |
| "learning_rate": 4.312715835024565e-06, | |
| "loss": 0.6453, | |
| "step": 5190 | |
| }, | |
| { | |
| "epoch": 0.2651302707387957, | |
| "grad_norm": 10.824429436685376, | |
| "learning_rate": 4.309870267951061e-06, | |
| "loss": 0.7068, | |
| "step": 5200 | |
| }, | |
| { | |
| "epoch": 0.2656401366440626, | |
| "grad_norm": 8.298367387251412, | |
| "learning_rate": 4.30701976523821e-06, | |
| "loss": 0.6317, | |
| "step": 5210 | |
| }, | |
| { | |
| "epoch": 0.2661500025493295, | |
| "grad_norm": 8.313379507545704, | |
| "learning_rate": 4.3041643346595285e-06, | |
| "loss": 0.5736, | |
| "step": 5220 | |
| }, | |
| { | |
| "epoch": 0.26665986845459644, | |
| "grad_norm": 17.233146557176756, | |
| "learning_rate": 4.3013039840019675e-06, | |
| "loss": 0.6084, | |
| "step": 5230 | |
| }, | |
| { | |
| "epoch": 0.26716973435986335, | |
| "grad_norm": 16.00967256982166, | |
| "learning_rate": 4.298438721065899e-06, | |
| "loss": 0.6074, | |
| "step": 5240 | |
| }, | |
| { | |
| "epoch": 0.26767960026513027, | |
| "grad_norm": 15.624861264682277, | |
| "learning_rate": 4.295568553665089e-06, | |
| "loss": 0.6282, | |
| "step": 5250 | |
| }, | |
| { | |
| "epoch": 0.2681894661703972, | |
| "grad_norm": 21.12100148713912, | |
| "learning_rate": 4.292693489626681e-06, | |
| "loss": 0.6039, | |
| "step": 5260 | |
| }, | |
| { | |
| "epoch": 0.2686993320756641, | |
| "grad_norm": 7.6235473567087535, | |
| "learning_rate": 4.289813536791168e-06, | |
| "loss": 0.5629, | |
| "step": 5270 | |
| }, | |
| { | |
| "epoch": 0.269209197980931, | |
| "grad_norm": 26.078669039774066, | |
| "learning_rate": 4.2869287030123786e-06, | |
| "loss": 0.6521, | |
| "step": 5280 | |
| }, | |
| { | |
| "epoch": 0.26971906388619793, | |
| "grad_norm": 12.187667815535157, | |
| "learning_rate": 4.284038996157451e-06, | |
| "loss": 0.6104, | |
| "step": 5290 | |
| }, | |
| { | |
| "epoch": 0.27022892979146484, | |
| "grad_norm": 7.139360580690056, | |
| "learning_rate": 4.2811444241068115e-06, | |
| "loss": 0.5735, | |
| "step": 5300 | |
| }, | |
| { | |
| "epoch": 0.27073879569673176, | |
| "grad_norm": 13.039442344005854, | |
| "learning_rate": 4.278244994754155e-06, | |
| "loss": 0.5772, | |
| "step": 5310 | |
| }, | |
| { | |
| "epoch": 0.2712486616019987, | |
| "grad_norm": 63.202208365540564, | |
| "learning_rate": 4.275340716006424e-06, | |
| "loss": 0.5382, | |
| "step": 5320 | |
| }, | |
| { | |
| "epoch": 0.2717585275072656, | |
| "grad_norm": 14.792564476876887, | |
| "learning_rate": 4.272431595783783e-06, | |
| "loss": 0.5846, | |
| "step": 5330 | |
| }, | |
| { | |
| "epoch": 0.2722683934125325, | |
| "grad_norm": 36.8084652376033, | |
| "learning_rate": 4.269517642019601e-06, | |
| "loss": 0.6567, | |
| "step": 5340 | |
| }, | |
| { | |
| "epoch": 0.2727782593177994, | |
| "grad_norm": 12.876578464531136, | |
| "learning_rate": 4.2665988626604285e-06, | |
| "loss": 0.5491, | |
| "step": 5350 | |
| }, | |
| { | |
| "epoch": 0.27328812522306634, | |
| "grad_norm": 30.51025196353506, | |
| "learning_rate": 4.2636752656659745e-06, | |
| "loss": 0.6141, | |
| "step": 5360 | |
| }, | |
| { | |
| "epoch": 0.27379799112833325, | |
| "grad_norm": 20.777520310083748, | |
| "learning_rate": 4.260746859009087e-06, | |
| "loss": 0.6084, | |
| "step": 5370 | |
| }, | |
| { | |
| "epoch": 0.27430785703360017, | |
| "grad_norm": 9.044259321645782, | |
| "learning_rate": 4.257813650675732e-06, | |
| "loss": 0.623, | |
| "step": 5380 | |
| }, | |
| { | |
| "epoch": 0.2748177229388671, | |
| "grad_norm": 9.516832146327737, | |
| "learning_rate": 4.254875648664965e-06, | |
| "loss": 0.5885, | |
| "step": 5390 | |
| }, | |
| { | |
| "epoch": 0.275327588844134, | |
| "grad_norm": 30.337005568568806, | |
| "learning_rate": 4.251932860988921e-06, | |
| "loss": 0.5376, | |
| "step": 5400 | |
| }, | |
| { | |
| "epoch": 0.2758374547494009, | |
| "grad_norm": 15.785533045132347, | |
| "learning_rate": 4.24898529567278e-06, | |
| "loss": 0.6343, | |
| "step": 5410 | |
| }, | |
| { | |
| "epoch": 0.2763473206546678, | |
| "grad_norm": 11.074354948231605, | |
| "learning_rate": 4.246032960754753e-06, | |
| "loss": 0.6324, | |
| "step": 5420 | |
| }, | |
| { | |
| "epoch": 0.27685718655993474, | |
| "grad_norm": 7.969861802125547, | |
| "learning_rate": 4.243075864286059e-06, | |
| "loss": 0.5494, | |
| "step": 5430 | |
| }, | |
| { | |
| "epoch": 0.27736705246520166, | |
| "grad_norm": 27.380085381498265, | |
| "learning_rate": 4.2401140143309e-06, | |
| "loss": 0.5518, | |
| "step": 5440 | |
| }, | |
| { | |
| "epoch": 0.2778769183704686, | |
| "grad_norm": 13.480748579865969, | |
| "learning_rate": 4.237147418966444e-06, | |
| "loss": 0.5737, | |
| "step": 5450 | |
| }, | |
| { | |
| "epoch": 0.2783867842757355, | |
| "grad_norm": 10.01561427263943, | |
| "learning_rate": 4.234176086282797e-06, | |
| "loss": 0.5716, | |
| "step": 5460 | |
| }, | |
| { | |
| "epoch": 0.2788966501810024, | |
| "grad_norm": 5.812847097454107, | |
| "learning_rate": 4.231200024382987e-06, | |
| "loss": 0.6192, | |
| "step": 5470 | |
| }, | |
| { | |
| "epoch": 0.2794065160862693, | |
| "grad_norm": 73.32354897034128, | |
| "learning_rate": 4.228219241382936e-06, | |
| "loss": 0.5402, | |
| "step": 5480 | |
| }, | |
| { | |
| "epoch": 0.27991638199153623, | |
| "grad_norm": 20.44580128899934, | |
| "learning_rate": 4.2252337454114426e-06, | |
| "loss": 0.6573, | |
| "step": 5490 | |
| }, | |
| { | |
| "epoch": 0.28042624789680315, | |
| "grad_norm": 12.430764888197537, | |
| "learning_rate": 4.2222435446101555e-06, | |
| "loss": 0.6103, | |
| "step": 5500 | |
| }, | |
| { | |
| "epoch": 0.28093611380207006, | |
| "grad_norm": 16.727645466395536, | |
| "learning_rate": 4.219248647133559e-06, | |
| "loss": 0.5864, | |
| "step": 5510 | |
| }, | |
| { | |
| "epoch": 0.281445979707337, | |
| "grad_norm": 9.940584675976092, | |
| "learning_rate": 4.216249061148939e-06, | |
| "loss": 0.6636, | |
| "step": 5520 | |
| }, | |
| { | |
| "epoch": 0.2819558456126039, | |
| "grad_norm": 24.070584051307645, | |
| "learning_rate": 4.213244794836373e-06, | |
| "loss": 0.6432, | |
| "step": 5530 | |
| }, | |
| { | |
| "epoch": 0.2824657115178708, | |
| "grad_norm": 32.11346155712176, | |
| "learning_rate": 4.210235856388699e-06, | |
| "loss": 0.6097, | |
| "step": 5540 | |
| }, | |
| { | |
| "epoch": 0.2829755774231377, | |
| "grad_norm": 7.822795209697631, | |
| "learning_rate": 4.2072222540114965e-06, | |
| "loss": 0.633, | |
| "step": 5550 | |
| }, | |
| { | |
| "epoch": 0.28348544332840464, | |
| "grad_norm": 8.645615052891959, | |
| "learning_rate": 4.204203995923064e-06, | |
| "loss": 0.5731, | |
| "step": 5560 | |
| }, | |
| { | |
| "epoch": 0.28399530923367156, | |
| "grad_norm": 13.28668413130762, | |
| "learning_rate": 4.201181090354396e-06, | |
| "loss": 0.6167, | |
| "step": 5570 | |
| }, | |
| { | |
| "epoch": 0.28450517513893847, | |
| "grad_norm": 7.542368445511174, | |
| "learning_rate": 4.198153545549164e-06, | |
| "loss": 0.5833, | |
| "step": 5580 | |
| }, | |
| { | |
| "epoch": 0.2850150410442054, | |
| "grad_norm": 13.289497206111815, | |
| "learning_rate": 4.195121369763687e-06, | |
| "loss": 0.6001, | |
| "step": 5590 | |
| }, | |
| { | |
| "epoch": 0.2855249069494723, | |
| "grad_norm": 13.00491506318039, | |
| "learning_rate": 4.192084571266915e-06, | |
| "loss": 0.5839, | |
| "step": 5600 | |
| }, | |
| { | |
| "epoch": 0.2860347728547392, | |
| "grad_norm": 6.256915488118059, | |
| "learning_rate": 4.189043158340403e-06, | |
| "loss": 0.592, | |
| "step": 5610 | |
| }, | |
| { | |
| "epoch": 0.28654463876000613, | |
| "grad_norm": 14.22336372739089, | |
| "learning_rate": 4.185997139278292e-06, | |
| "loss": 0.5594, | |
| "step": 5620 | |
| }, | |
| { | |
| "epoch": 0.28705450466527305, | |
| "grad_norm": 17.9794598260869, | |
| "learning_rate": 4.182946522387283e-06, | |
| "loss": 0.5584, | |
| "step": 5630 | |
| }, | |
| { | |
| "epoch": 0.28756437057053996, | |
| "grad_norm": 26.826093361889214, | |
| "learning_rate": 4.179891315986617e-06, | |
| "loss": 0.5976, | |
| "step": 5640 | |
| }, | |
| { | |
| "epoch": 0.2880742364758069, | |
| "grad_norm": 26.854750297006724, | |
| "learning_rate": 4.1768315284080475e-06, | |
| "loss": 0.5782, | |
| "step": 5650 | |
| }, | |
| { | |
| "epoch": 0.2885841023810738, | |
| "grad_norm": 20.85565602863907, | |
| "learning_rate": 4.173767167995825e-06, | |
| "loss": 0.6018, | |
| "step": 5660 | |
| }, | |
| { | |
| "epoch": 0.2890939682863407, | |
| "grad_norm": 40.44526401777264, | |
| "learning_rate": 4.170698243106668e-06, | |
| "loss": 0.6106, | |
| "step": 5670 | |
| }, | |
| { | |
| "epoch": 0.2896038341916076, | |
| "grad_norm": 11.5937114201011, | |
| "learning_rate": 4.1676247621097445e-06, | |
| "loss": 0.5584, | |
| "step": 5680 | |
| }, | |
| { | |
| "epoch": 0.29011370009687454, | |
| "grad_norm": 18.80756096170308, | |
| "learning_rate": 4.164546733386644e-06, | |
| "loss": 0.5946, | |
| "step": 5690 | |
| }, | |
| { | |
| "epoch": 0.29062356600214145, | |
| "grad_norm": 16.134109720458465, | |
| "learning_rate": 4.161464165331363e-06, | |
| "loss": 0.6808, | |
| "step": 5700 | |
| }, | |
| { | |
| "epoch": 0.29113343190740837, | |
| "grad_norm": 62.99927751669872, | |
| "learning_rate": 4.158377066350273e-06, | |
| "loss": 0.5772, | |
| "step": 5710 | |
| }, | |
| { | |
| "epoch": 0.2916432978126753, | |
| "grad_norm": 9.81929767488598, | |
| "learning_rate": 4.1552854448621025e-06, | |
| "loss": 0.6214, | |
| "step": 5720 | |
| }, | |
| { | |
| "epoch": 0.2921531637179422, | |
| "grad_norm": 22.729864470532355, | |
| "learning_rate": 4.152189309297914e-06, | |
| "loss": 0.5995, | |
| "step": 5730 | |
| }, | |
| { | |
| "epoch": 0.2926630296232091, | |
| "grad_norm": 12.477684258820998, | |
| "learning_rate": 4.14908866810108e-06, | |
| "loss": 0.5958, | |
| "step": 5740 | |
| }, | |
| { | |
| "epoch": 0.29317289552847603, | |
| "grad_norm": 18.84572272547121, | |
| "learning_rate": 4.14598352972726e-06, | |
| "loss": 0.5632, | |
| "step": 5750 | |
| }, | |
| { | |
| "epoch": 0.29368276143374294, | |
| "grad_norm": 25.846100170430304, | |
| "learning_rate": 4.142873902644378e-06, | |
| "loss": 0.6216, | |
| "step": 5760 | |
| }, | |
| { | |
| "epoch": 0.29419262733900986, | |
| "grad_norm": 10.199726471823542, | |
| "learning_rate": 4.139759795332597e-06, | |
| "loss": 0.6134, | |
| "step": 5770 | |
| }, | |
| { | |
| "epoch": 0.2947024932442768, | |
| "grad_norm": 22.019306648150717, | |
| "learning_rate": 4.1366412162843015e-06, | |
| "loss": 0.5522, | |
| "step": 5780 | |
| }, | |
| { | |
| "epoch": 0.2952123591495437, | |
| "grad_norm": 13.552518209230131, | |
| "learning_rate": 4.133518174004068e-06, | |
| "loss": 0.5765, | |
| "step": 5790 | |
| }, | |
| { | |
| "epoch": 0.2957222250548106, | |
| "grad_norm": 8.60015583805641, | |
| "learning_rate": 4.130390677008644e-06, | |
| "loss": 0.6265, | |
| "step": 5800 | |
| }, | |
| { | |
| "epoch": 0.2962320909600775, | |
| "grad_norm": 12.209720854395725, | |
| "learning_rate": 4.127258733826929e-06, | |
| "loss": 0.5497, | |
| "step": 5810 | |
| }, | |
| { | |
| "epoch": 0.29674195686534444, | |
| "grad_norm": 22.49340812520029, | |
| "learning_rate": 4.1241223529999425e-06, | |
| "loss": 0.5843, | |
| "step": 5820 | |
| }, | |
| { | |
| "epoch": 0.29725182277061135, | |
| "grad_norm": 16.498403886804276, | |
| "learning_rate": 4.12098154308081e-06, | |
| "loss": 0.5801, | |
| "step": 5830 | |
| }, | |
| { | |
| "epoch": 0.29776168867587827, | |
| "grad_norm": 24.812587542691467, | |
| "learning_rate": 4.117836312634734e-06, | |
| "loss": 0.5884, | |
| "step": 5840 | |
| }, | |
| { | |
| "epoch": 0.2982715545811452, | |
| "grad_norm": 14.33912906673135, | |
| "learning_rate": 4.114686670238971e-06, | |
| "loss": 0.5561, | |
| "step": 5850 | |
| }, | |
| { | |
| "epoch": 0.2987814204864121, | |
| "grad_norm": 18.845657492416535, | |
| "learning_rate": 4.111532624482811e-06, | |
| "loss": 0.6124, | |
| "step": 5860 | |
| }, | |
| { | |
| "epoch": 0.299291286391679, | |
| "grad_norm": 6.225197949040313, | |
| "learning_rate": 4.108374183967549e-06, | |
| "loss": 0.6058, | |
| "step": 5870 | |
| }, | |
| { | |
| "epoch": 0.2998011522969459, | |
| "grad_norm": 14.94669704332347, | |
| "learning_rate": 4.10521135730647e-06, | |
| "loss": 0.6446, | |
| "step": 5880 | |
| }, | |
| { | |
| "epoch": 0.30031101820221284, | |
| "grad_norm": 11.67084221046296, | |
| "learning_rate": 4.1020441531248165e-06, | |
| "loss": 0.5561, | |
| "step": 5890 | |
| }, | |
| { | |
| "epoch": 0.30082088410747976, | |
| "grad_norm": 13.68291404953535, | |
| "learning_rate": 4.09887258005977e-06, | |
| "loss": 0.5402, | |
| "step": 5900 | |
| }, | |
| { | |
| "epoch": 0.3013307500127467, | |
| "grad_norm": 11.694111191588059, | |
| "learning_rate": 4.095696646760425e-06, | |
| "loss": 0.5883, | |
| "step": 5910 | |
| }, | |
| { | |
| "epoch": 0.3018406159180136, | |
| "grad_norm": 9.147828359283288, | |
| "learning_rate": 4.09251636188777e-06, | |
| "loss": 0.6205, | |
| "step": 5920 | |
| }, | |
| { | |
| "epoch": 0.3023504818232805, | |
| "grad_norm": 6.078824528176967, | |
| "learning_rate": 4.0893317341146545e-06, | |
| "loss": 0.6233, | |
| "step": 5930 | |
| }, | |
| { | |
| "epoch": 0.3028603477285474, | |
| "grad_norm": 10.338227098716464, | |
| "learning_rate": 4.086142772125779e-06, | |
| "loss": 0.5932, | |
| "step": 5940 | |
| }, | |
| { | |
| "epoch": 0.30337021363381433, | |
| "grad_norm": 34.03454922469082, | |
| "learning_rate": 4.082949484617656e-06, | |
| "loss": 0.5736, | |
| "step": 5950 | |
| }, | |
| { | |
| "epoch": 0.30388007953908125, | |
| "grad_norm": 27.717990588485844, | |
| "learning_rate": 4.079751880298601e-06, | |
| "loss": 0.6005, | |
| "step": 5960 | |
| }, | |
| { | |
| "epoch": 0.3043899454443481, | |
| "grad_norm": 20.62906430272811, | |
| "learning_rate": 4.076549967888697e-06, | |
| "loss": 0.625, | |
| "step": 5970 | |
| }, | |
| { | |
| "epoch": 0.304899811349615, | |
| "grad_norm": 8.843859127048827, | |
| "learning_rate": 4.073343756119778e-06, | |
| "loss": 0.5647, | |
| "step": 5980 | |
| }, | |
| { | |
| "epoch": 0.30540967725488194, | |
| "grad_norm": 15.825921945819855, | |
| "learning_rate": 4.070133253735399e-06, | |
| "loss": 0.6106, | |
| "step": 5990 | |
| }, | |
| { | |
| "epoch": 0.30591954316014885, | |
| "grad_norm": 7.034079033908886, | |
| "learning_rate": 4.066918469490822e-06, | |
| "loss": 0.5998, | |
| "step": 6000 | |
| }, | |
| { | |
| "epoch": 0.30642940906541577, | |
| "grad_norm": 10.725159040580532, | |
| "learning_rate": 4.063699412152979e-06, | |
| "loss": 0.5741, | |
| "step": 6010 | |
| }, | |
| { | |
| "epoch": 0.3069392749706827, | |
| "grad_norm": 25.954938413714736, | |
| "learning_rate": 4.060476090500462e-06, | |
| "loss": 0.6323, | |
| "step": 6020 | |
| }, | |
| { | |
| "epoch": 0.3074491408759496, | |
| "grad_norm": 19.147343982242354, | |
| "learning_rate": 4.057248513323484e-06, | |
| "loss": 0.6109, | |
| "step": 6030 | |
| }, | |
| { | |
| "epoch": 0.3079590067812165, | |
| "grad_norm": 11.067841286474719, | |
| "learning_rate": 4.054016689423871e-06, | |
| "loss": 0.5934, | |
| "step": 6040 | |
| }, | |
| { | |
| "epoch": 0.30846887268648343, | |
| "grad_norm": 15.374555769962404, | |
| "learning_rate": 4.050780627615025e-06, | |
| "loss": 0.605, | |
| "step": 6050 | |
| }, | |
| { | |
| "epoch": 0.30897873859175035, | |
| "grad_norm": 8.74236544222671, | |
| "learning_rate": 4.047540336721909e-06, | |
| "loss": 0.5638, | |
| "step": 6060 | |
| }, | |
| { | |
| "epoch": 0.30948860449701726, | |
| "grad_norm": 9.692794262472, | |
| "learning_rate": 4.044295825581013e-06, | |
| "loss": 0.5688, | |
| "step": 6070 | |
| }, | |
| { | |
| "epoch": 0.3099984704022842, | |
| "grad_norm": 19.37396547964373, | |
| "learning_rate": 4.041047103040343e-06, | |
| "loss": 0.5662, | |
| "step": 6080 | |
| }, | |
| { | |
| "epoch": 0.3105083363075511, | |
| "grad_norm": 4.850686203291434, | |
| "learning_rate": 4.0377941779593835e-06, | |
| "loss": 0.5754, | |
| "step": 6090 | |
| }, | |
| { | |
| "epoch": 0.311018202212818, | |
| "grad_norm": 19.351654987382656, | |
| "learning_rate": 4.034537059209085e-06, | |
| "loss": 0.5249, | |
| "step": 6100 | |
| }, | |
| { | |
| "epoch": 0.3115280681180849, | |
| "grad_norm": 48.11582019696415, | |
| "learning_rate": 4.03127575567183e-06, | |
| "loss": 0.5772, | |
| "step": 6110 | |
| }, | |
| { | |
| "epoch": 0.31203793402335184, | |
| "grad_norm": 9.755571290958523, | |
| "learning_rate": 4.028010276241416e-06, | |
| "loss": 0.5537, | |
| "step": 6120 | |
| }, | |
| { | |
| "epoch": 0.31254779992861875, | |
| "grad_norm": 19.376346542781313, | |
| "learning_rate": 4.0247406298230285e-06, | |
| "loss": 0.5969, | |
| "step": 6130 | |
| }, | |
| { | |
| "epoch": 0.31305766583388567, | |
| "grad_norm": 8.574897664066954, | |
| "learning_rate": 4.021466825333215e-06, | |
| "loss": 0.5945, | |
| "step": 6140 | |
| }, | |
| { | |
| "epoch": 0.3135675317391526, | |
| "grad_norm": 11.648947877849336, | |
| "learning_rate": 4.018188871699861e-06, | |
| "loss": 0.6026, | |
| "step": 6150 | |
| }, | |
| { | |
| "epoch": 0.3140773976444195, | |
| "grad_norm": 20.788831004876297, | |
| "learning_rate": 4.014906777862172e-06, | |
| "loss": 0.6057, | |
| "step": 6160 | |
| }, | |
| { | |
| "epoch": 0.3145872635496864, | |
| "grad_norm": 7.0208577775784695, | |
| "learning_rate": 4.01162055277064e-06, | |
| "loss": 0.654, | |
| "step": 6170 | |
| }, | |
| { | |
| "epoch": 0.31509712945495333, | |
| "grad_norm": 8.214032627488498, | |
| "learning_rate": 4.008330205387024e-06, | |
| "loss": 0.5925, | |
| "step": 6180 | |
| }, | |
| { | |
| "epoch": 0.31560699536022024, | |
| "grad_norm": 31.67405111863016, | |
| "learning_rate": 4.005035744684325e-06, | |
| "loss": 0.5596, | |
| "step": 6190 | |
| }, | |
| { | |
| "epoch": 0.31611686126548716, | |
| "grad_norm": 7.266395367405267, | |
| "learning_rate": 4.0017371796467635e-06, | |
| "loss": 0.5927, | |
| "step": 6200 | |
| }, | |
| { | |
| "epoch": 0.3166267271707541, | |
| "grad_norm": 16.510677156143704, | |
| "learning_rate": 3.998434519269749e-06, | |
| "loss": 0.5535, | |
| "step": 6210 | |
| }, | |
| { | |
| "epoch": 0.317136593076021, | |
| "grad_norm": 6.854338605125168, | |
| "learning_rate": 3.9951277725598604e-06, | |
| "loss": 0.6465, | |
| "step": 6220 | |
| }, | |
| { | |
| "epoch": 0.3176464589812879, | |
| "grad_norm": 18.111246303610876, | |
| "learning_rate": 3.991816948534823e-06, | |
| "loss": 0.5545, | |
| "step": 6230 | |
| }, | |
| { | |
| "epoch": 0.3181563248865548, | |
| "grad_norm": 8.117561150966036, | |
| "learning_rate": 3.988502056223477e-06, | |
| "loss": 0.571, | |
| "step": 6240 | |
| }, | |
| { | |
| "epoch": 0.31866619079182174, | |
| "grad_norm": 13.084742587714015, | |
| "learning_rate": 3.98518310466576e-06, | |
| "loss": 0.5753, | |
| "step": 6250 | |
| }, | |
| { | |
| "epoch": 0.31917605669708865, | |
| "grad_norm": 57.142935902173484, | |
| "learning_rate": 3.98186010291268e-06, | |
| "loss": 0.5618, | |
| "step": 6260 | |
| }, | |
| { | |
| "epoch": 0.31968592260235557, | |
| "grad_norm": 7.11621758234793, | |
| "learning_rate": 3.978533060026288e-06, | |
| "loss": 0.641, | |
| "step": 6270 | |
| }, | |
| { | |
| "epoch": 0.3201957885076225, | |
| "grad_norm": 16.4098953136186, | |
| "learning_rate": 3.975201985079655e-06, | |
| "loss": 0.5694, | |
| "step": 6280 | |
| }, | |
| { | |
| "epoch": 0.3207056544128894, | |
| "grad_norm": 23.527870364341037, | |
| "learning_rate": 3.971866887156851e-06, | |
| "loss": 0.5663, | |
| "step": 6290 | |
| }, | |
| { | |
| "epoch": 0.3212155203181563, | |
| "grad_norm": 13.608447802374062, | |
| "learning_rate": 3.968527775352914e-06, | |
| "loss": 0.6021, | |
| "step": 6300 | |
| }, | |
| { | |
| "epoch": 0.3217253862234232, | |
| "grad_norm": 7.505925346358185, | |
| "learning_rate": 3.965184658773828e-06, | |
| "loss": 0.6106, | |
| "step": 6310 | |
| }, | |
| { | |
| "epoch": 0.32223525212869014, | |
| "grad_norm": 35.6313686416139, | |
| "learning_rate": 3.961837546536501e-06, | |
| "loss": 0.5912, | |
| "step": 6320 | |
| }, | |
| { | |
| "epoch": 0.32274511803395706, | |
| "grad_norm": 5.634896177598433, | |
| "learning_rate": 3.958486447768736e-06, | |
| "loss": 0.5676, | |
| "step": 6330 | |
| }, | |
| { | |
| "epoch": 0.32325498393922397, | |
| "grad_norm": 6.353712880952279, | |
| "learning_rate": 3.955131371609206e-06, | |
| "loss": 0.6278, | |
| "step": 6340 | |
| }, | |
| { | |
| "epoch": 0.3237648498444909, | |
| "grad_norm": 7.2854542328463054, | |
| "learning_rate": 3.951772327207432e-06, | |
| "loss": 0.6439, | |
| "step": 6350 | |
| }, | |
| { | |
| "epoch": 0.3242747157497578, | |
| "grad_norm": 11.258387362973808, | |
| "learning_rate": 3.948409323723756e-06, | |
| "loss": 0.5684, | |
| "step": 6360 | |
| }, | |
| { | |
| "epoch": 0.3247845816550247, | |
| "grad_norm": 9.882782910086245, | |
| "learning_rate": 3.945042370329319e-06, | |
| "loss": 0.5296, | |
| "step": 6370 | |
| }, | |
| { | |
| "epoch": 0.32529444756029163, | |
| "grad_norm": 19.15860492822133, | |
| "learning_rate": 3.9416714762060325e-06, | |
| "loss": 0.6402, | |
| "step": 6380 | |
| }, | |
| { | |
| "epoch": 0.32580431346555855, | |
| "grad_norm": 16.143665735042397, | |
| "learning_rate": 3.938296650546552e-06, | |
| "loss": 0.5862, | |
| "step": 6390 | |
| }, | |
| { | |
| "epoch": 0.32631417937082546, | |
| "grad_norm": 4.106912978049323, | |
| "learning_rate": 3.934917902554257e-06, | |
| "loss": 0.5537, | |
| "step": 6400 | |
| }, | |
| { | |
| "epoch": 0.3268240452760924, | |
| "grad_norm": 17.22897555904163, | |
| "learning_rate": 3.931535241443225e-06, | |
| "loss": 0.5744, | |
| "step": 6410 | |
| }, | |
| { | |
| "epoch": 0.3273339111813593, | |
| "grad_norm": 7.739685203051684, | |
| "learning_rate": 3.9281486764382e-06, | |
| "loss": 0.5905, | |
| "step": 6420 | |
| }, | |
| { | |
| "epoch": 0.3278437770866262, | |
| "grad_norm": 5.280772793174786, | |
| "learning_rate": 3.924758216774579e-06, | |
| "loss": 0.6035, | |
| "step": 6430 | |
| }, | |
| { | |
| "epoch": 0.3283536429918931, | |
| "grad_norm": 9.56456482861663, | |
| "learning_rate": 3.921363871698372e-06, | |
| "loss": 0.6449, | |
| "step": 6440 | |
| }, | |
| { | |
| "epoch": 0.32886350889716004, | |
| "grad_norm": 19.76743516233812, | |
| "learning_rate": 3.917965650466192e-06, | |
| "loss": 0.5873, | |
| "step": 6450 | |
| }, | |
| { | |
| "epoch": 0.32937337480242695, | |
| "grad_norm": 12.805680819672576, | |
| "learning_rate": 3.914563562345218e-06, | |
| "loss": 0.5929, | |
| "step": 6460 | |
| }, | |
| { | |
| "epoch": 0.32988324070769387, | |
| "grad_norm": 7.3410821734516425, | |
| "learning_rate": 3.911157616613176e-06, | |
| "loss": 0.6349, | |
| "step": 6470 | |
| }, | |
| { | |
| "epoch": 0.3303931066129608, | |
| "grad_norm": 9.91815295194112, | |
| "learning_rate": 3.9077478225583115e-06, | |
| "loss": 0.5611, | |
| "step": 6480 | |
| }, | |
| { | |
| "epoch": 0.3309029725182277, | |
| "grad_norm": 10.58322112787284, | |
| "learning_rate": 3.904334189479366e-06, | |
| "loss": 0.6239, | |
| "step": 6490 | |
| }, | |
| { | |
| "epoch": 0.3314128384234946, | |
| "grad_norm": 5.115542327229815, | |
| "learning_rate": 3.900916726685547e-06, | |
| "loss": 0.563, | |
| "step": 6500 | |
| }, | |
| { | |
| "epoch": 0.33192270432876153, | |
| "grad_norm": 11.574317620807081, | |
| "learning_rate": 3.897495443496511e-06, | |
| "loss": 0.6234, | |
| "step": 6510 | |
| }, | |
| { | |
| "epoch": 0.33243257023402845, | |
| "grad_norm": 6.82635077786538, | |
| "learning_rate": 3.894070349242328e-06, | |
| "loss": 0.6331, | |
| "step": 6520 | |
| }, | |
| { | |
| "epoch": 0.33294243613929536, | |
| "grad_norm": 12.572812590923363, | |
| "learning_rate": 3.890641453263463e-06, | |
| "loss": 0.6089, | |
| "step": 6530 | |
| }, | |
| { | |
| "epoch": 0.3334523020445623, | |
| "grad_norm": 8.528905748581712, | |
| "learning_rate": 3.887208764910749e-06, | |
| "loss": 0.5721, | |
| "step": 6540 | |
| }, | |
| { | |
| "epoch": 0.3339621679498292, | |
| "grad_norm": 11.470816287216191, | |
| "learning_rate": 3.8837722935453615e-06, | |
| "loss": 0.6266, | |
| "step": 6550 | |
| }, | |
| { | |
| "epoch": 0.3344720338550961, | |
| "grad_norm": 25.624061552242477, | |
| "learning_rate": 3.880332048538789e-06, | |
| "loss": 0.571, | |
| "step": 6560 | |
| }, | |
| { | |
| "epoch": 0.334981899760363, | |
| "grad_norm": 6.79483649892652, | |
| "learning_rate": 3.876888039272818e-06, | |
| "loss": 0.6398, | |
| "step": 6570 | |
| }, | |
| { | |
| "epoch": 0.33549176566562994, | |
| "grad_norm": 13.785631626853508, | |
| "learning_rate": 3.8734402751394925e-06, | |
| "loss": 0.5693, | |
| "step": 6580 | |
| }, | |
| { | |
| "epoch": 0.33600163157089685, | |
| "grad_norm": 11.493447222682974, | |
| "learning_rate": 3.869988765541101e-06, | |
| "loss": 0.5496, | |
| "step": 6590 | |
| }, | |
| { | |
| "epoch": 0.33651149747616377, | |
| "grad_norm": 10.373154022440099, | |
| "learning_rate": 3.866533519890145e-06, | |
| "loss": 0.5145, | |
| "step": 6600 | |
| }, | |
| { | |
| "epoch": 0.3370213633814307, | |
| "grad_norm": 5.525677922272744, | |
| "learning_rate": 3.863074547609314e-06, | |
| "loss": 0.5442, | |
| "step": 6610 | |
| }, | |
| { | |
| "epoch": 0.3375312292866976, | |
| "grad_norm": 9.785016845624112, | |
| "learning_rate": 3.859611858131461e-06, | |
| "loss": 0.6107, | |
| "step": 6620 | |
| }, | |
| { | |
| "epoch": 0.3380410951919645, | |
| "grad_norm": 9.48430417779256, | |
| "learning_rate": 3.8561454608995765e-06, | |
| "loss": 0.6144, | |
| "step": 6630 | |
| }, | |
| { | |
| "epoch": 0.33855096109723143, | |
| "grad_norm": 40.189131615359535, | |
| "learning_rate": 3.852675365366762e-06, | |
| "loss": 0.5367, | |
| "step": 6640 | |
| }, | |
| { | |
| "epoch": 0.33906082700249834, | |
| "grad_norm": 8.524943943823194, | |
| "learning_rate": 3.849201580996201e-06, | |
| "loss": 0.5867, | |
| "step": 6650 | |
| }, | |
| { | |
| "epoch": 0.33957069290776526, | |
| "grad_norm": 13.618979522501697, | |
| "learning_rate": 3.845724117261142e-06, | |
| "loss": 0.5926, | |
| "step": 6660 | |
| }, | |
| { | |
| "epoch": 0.3400805588130322, | |
| "grad_norm": 8.693144356860516, | |
| "learning_rate": 3.8422429836448665e-06, | |
| "loss": 0.6188, | |
| "step": 6670 | |
| }, | |
| { | |
| "epoch": 0.3405904247182991, | |
| "grad_norm": 11.74695610219484, | |
| "learning_rate": 3.83875818964066e-06, | |
| "loss": 0.5656, | |
| "step": 6680 | |
| }, | |
| { | |
| "epoch": 0.341100290623566, | |
| "grad_norm": 9.198063906387935, | |
| "learning_rate": 3.835269744751796e-06, | |
| "loss": 0.5905, | |
| "step": 6690 | |
| }, | |
| { | |
| "epoch": 0.3416101565288329, | |
| "grad_norm": 5.334788699784293, | |
| "learning_rate": 3.831777658491497e-06, | |
| "loss": 0.6202, | |
| "step": 6700 | |
| }, | |
| { | |
| "epoch": 0.34212002243409984, | |
| "grad_norm": 24.177391127776264, | |
| "learning_rate": 3.828281940382923e-06, | |
| "loss": 0.6503, | |
| "step": 6710 | |
| }, | |
| { | |
| "epoch": 0.34262988833936675, | |
| "grad_norm": 23.2850171618304, | |
| "learning_rate": 3.824782599959134e-06, | |
| "loss": 0.6448, | |
| "step": 6720 | |
| }, | |
| { | |
| "epoch": 0.34313975424463367, | |
| "grad_norm": 4.4728799548336955, | |
| "learning_rate": 3.8212796467630685e-06, | |
| "loss": 0.5452, | |
| "step": 6730 | |
| }, | |
| { | |
| "epoch": 0.3436496201499006, | |
| "grad_norm": 7.442759570311029, | |
| "learning_rate": 3.8177730903475195e-06, | |
| "loss": 0.5569, | |
| "step": 6740 | |
| }, | |
| { | |
| "epoch": 0.3441594860551675, | |
| "grad_norm": 7.801573446892921, | |
| "learning_rate": 3.8142629402751047e-06, | |
| "loss": 0.589, | |
| "step": 6750 | |
| }, | |
| { | |
| "epoch": 0.3446693519604344, | |
| "grad_norm": 13.098329982961182, | |
| "learning_rate": 3.8107492061182418e-06, | |
| "loss": 0.6401, | |
| "step": 6760 | |
| }, | |
| { | |
| "epoch": 0.3451792178657013, | |
| "grad_norm": 14.400990901182896, | |
| "learning_rate": 3.8072318974591233e-06, | |
| "loss": 0.5683, | |
| "step": 6770 | |
| }, | |
| { | |
| "epoch": 0.34568908377096824, | |
| "grad_norm": 12.27633537035338, | |
| "learning_rate": 3.803711023889688e-06, | |
| "loss": 0.5933, | |
| "step": 6780 | |
| }, | |
| { | |
| "epoch": 0.34619894967623516, | |
| "grad_norm": 14.372056846033948, | |
| "learning_rate": 3.800186595011599e-06, | |
| "loss": 0.5881, | |
| "step": 6790 | |
| }, | |
| { | |
| "epoch": 0.3467088155815021, | |
| "grad_norm": 85.50410333963302, | |
| "learning_rate": 3.7966586204362137e-06, | |
| "loss": 0.5933, | |
| "step": 6800 | |
| }, | |
| { | |
| "epoch": 0.347218681486769, | |
| "grad_norm": 22.517826922116864, | |
| "learning_rate": 3.793127109784558e-06, | |
| "loss": 0.5916, | |
| "step": 6810 | |
| }, | |
| { | |
| "epoch": 0.3477285473920359, | |
| "grad_norm": 14.84798611353888, | |
| "learning_rate": 3.789592072687302e-06, | |
| "loss": 0.6776, | |
| "step": 6820 | |
| }, | |
| { | |
| "epoch": 0.3482384132973028, | |
| "grad_norm": 15.270402470251646, | |
| "learning_rate": 3.7860535187847326e-06, | |
| "loss": 0.5683, | |
| "step": 6830 | |
| }, | |
| { | |
| "epoch": 0.34874827920256973, | |
| "grad_norm": 10.286151783016678, | |
| "learning_rate": 3.782511457726725e-06, | |
| "loss": 0.6363, | |
| "step": 6840 | |
| }, | |
| { | |
| "epoch": 0.34925814510783665, | |
| "grad_norm": 11.341726028096593, | |
| "learning_rate": 3.7789658991727242e-06, | |
| "loss": 0.5448, | |
| "step": 6850 | |
| }, | |
| { | |
| "epoch": 0.34976801101310356, | |
| "grad_norm": 5.429208019721334, | |
| "learning_rate": 3.775416852791707e-06, | |
| "loss": 0.577, | |
| "step": 6860 | |
| }, | |
| { | |
| "epoch": 0.3502778769183705, | |
| "grad_norm": 17.74331297520172, | |
| "learning_rate": 3.7718643282621648e-06, | |
| "loss": 0.5379, | |
| "step": 6870 | |
| }, | |
| { | |
| "epoch": 0.3507877428236374, | |
| "grad_norm": 18.701045945994224, | |
| "learning_rate": 3.768308335272075e-06, | |
| "loss": 0.5417, | |
| "step": 6880 | |
| }, | |
| { | |
| "epoch": 0.3512976087289043, | |
| "grad_norm": 11.322040410058435, | |
| "learning_rate": 3.7647488835188705e-06, | |
| "loss": 0.5552, | |
| "step": 6890 | |
| }, | |
| { | |
| "epoch": 0.3518074746341712, | |
| "grad_norm": 9.526508052327046, | |
| "learning_rate": 3.76118598270942e-06, | |
| "loss": 0.6219, | |
| "step": 6900 | |
| }, | |
| { | |
| "epoch": 0.35231734053943814, | |
| "grad_norm": 9.618221118643495, | |
| "learning_rate": 3.7576196425599944e-06, | |
| "loss": 0.561, | |
| "step": 6910 | |
| }, | |
| { | |
| "epoch": 0.35282720644470505, | |
| "grad_norm": 5.285098061934564, | |
| "learning_rate": 3.7540498727962483e-06, | |
| "loss": 0.5897, | |
| "step": 6920 | |
| }, | |
| { | |
| "epoch": 0.35333707234997197, | |
| "grad_norm": 6.319348479139971, | |
| "learning_rate": 3.7504766831531848e-06, | |
| "loss": 0.5981, | |
| "step": 6930 | |
| }, | |
| { | |
| "epoch": 0.3538469382552389, | |
| "grad_norm": 14.26337724889404, | |
| "learning_rate": 3.746900083375137e-06, | |
| "loss": 0.5847, | |
| "step": 6940 | |
| }, | |
| { | |
| "epoch": 0.3543568041605058, | |
| "grad_norm": 8.842973608099559, | |
| "learning_rate": 3.7433200832157333e-06, | |
| "loss": 0.687, | |
| "step": 6950 | |
| }, | |
| { | |
| "epoch": 0.3548666700657727, | |
| "grad_norm": 26.378636644956504, | |
| "learning_rate": 3.7397366924378797e-06, | |
| "loss": 0.5838, | |
| "step": 6960 | |
| }, | |
| { | |
| "epoch": 0.35537653597103963, | |
| "grad_norm": 12.43417363530746, | |
| "learning_rate": 3.736149920813726e-06, | |
| "loss": 0.5844, | |
| "step": 6970 | |
| }, | |
| { | |
| "epoch": 0.35588640187630655, | |
| "grad_norm": 6.8624589370405715, | |
| "learning_rate": 3.7325597781246426e-06, | |
| "loss": 0.5796, | |
| "step": 6980 | |
| }, | |
| { | |
| "epoch": 0.35639626778157346, | |
| "grad_norm": 34.23759579395761, | |
| "learning_rate": 3.728966274161192e-06, | |
| "loss": 0.5426, | |
| "step": 6990 | |
| }, | |
| { | |
| "epoch": 0.3569061336868404, | |
| "grad_norm": 12.036373216153141, | |
| "learning_rate": 3.725369418723106e-06, | |
| "loss": 0.5407, | |
| "step": 7000 | |
| }, | |
| { | |
| "epoch": 0.3574159995921073, | |
| "grad_norm": 7.6541152081261306, | |
| "learning_rate": 3.721769221619252e-06, | |
| "loss": 0.5307, | |
| "step": 7010 | |
| }, | |
| { | |
| "epoch": 0.3579258654973742, | |
| "grad_norm": 5.221962987657324, | |
| "learning_rate": 3.718165692667613e-06, | |
| "loss": 0.5638, | |
| "step": 7020 | |
| }, | |
| { | |
| "epoch": 0.3584357314026411, | |
| "grad_norm": 9.769128260322358, | |
| "learning_rate": 3.714558841695258e-06, | |
| "loss": 0.6025, | |
| "step": 7030 | |
| }, | |
| { | |
| "epoch": 0.35894559730790804, | |
| "grad_norm": 5.4841935802743516, | |
| "learning_rate": 3.710948678538314e-06, | |
| "loss": 0.5845, | |
| "step": 7040 | |
| }, | |
| { | |
| "epoch": 0.35945546321317495, | |
| "grad_norm": 4.441919327602813, | |
| "learning_rate": 3.7073352130419436e-06, | |
| "loss": 0.6463, | |
| "step": 7050 | |
| }, | |
| { | |
| "epoch": 0.35996532911844187, | |
| "grad_norm": 10.50879690961124, | |
| "learning_rate": 3.703718455060311e-06, | |
| "loss": 0.5851, | |
| "step": 7060 | |
| }, | |
| { | |
| "epoch": 0.3604751950237088, | |
| "grad_norm": 47.663384741510505, | |
| "learning_rate": 3.7000984144565617e-06, | |
| "loss": 0.6206, | |
| "step": 7070 | |
| }, | |
| { | |
| "epoch": 0.3609850609289757, | |
| "grad_norm": 10.628757506788443, | |
| "learning_rate": 3.6964751011027933e-06, | |
| "loss": 0.5544, | |
| "step": 7080 | |
| }, | |
| { | |
| "epoch": 0.3614949268342426, | |
| "grad_norm": 7.288945575368875, | |
| "learning_rate": 3.6928485248800273e-06, | |
| "loss": 0.5495, | |
| "step": 7090 | |
| }, | |
| { | |
| "epoch": 0.36200479273950953, | |
| "grad_norm": 7.992239176835611, | |
| "learning_rate": 3.689218695678184e-06, | |
| "loss": 0.5572, | |
| "step": 7100 | |
| }, | |
| { | |
| "epoch": 0.36251465864477644, | |
| "grad_norm": 7.216006748679708, | |
| "learning_rate": 3.685585623396055e-06, | |
| "loss": 0.5966, | |
| "step": 7110 | |
| }, | |
| { | |
| "epoch": 0.36302452455004336, | |
| "grad_norm": 10.354746806457767, | |
| "learning_rate": 3.681949317941275e-06, | |
| "loss": 0.5422, | |
| "step": 7120 | |
| }, | |
| { | |
| "epoch": 0.3635343904553103, | |
| "grad_norm": 8.329338901239497, | |
| "learning_rate": 3.6783097892302967e-06, | |
| "loss": 0.6023, | |
| "step": 7130 | |
| }, | |
| { | |
| "epoch": 0.3640442563605772, | |
| "grad_norm": 10.42649234263997, | |
| "learning_rate": 3.6746670471883616e-06, | |
| "loss": 0.5793, | |
| "step": 7140 | |
| }, | |
| { | |
| "epoch": 0.3645541222658441, | |
| "grad_norm": 4.914161385304957, | |
| "learning_rate": 3.671021101749476e-06, | |
| "loss": 0.5533, | |
| "step": 7150 | |
| }, | |
| { | |
| "epoch": 0.365063988171111, | |
| "grad_norm": 6.108063070133632, | |
| "learning_rate": 3.6673719628563808e-06, | |
| "loss": 0.563, | |
| "step": 7160 | |
| }, | |
| { | |
| "epoch": 0.36557385407637794, | |
| "grad_norm": 7.615397623687595, | |
| "learning_rate": 3.6637196404605257e-06, | |
| "loss": 0.5659, | |
| "step": 7170 | |
| }, | |
| { | |
| "epoch": 0.36608371998164485, | |
| "grad_norm": 14.69964181406416, | |
| "learning_rate": 3.660064144522043e-06, | |
| "loss": 0.543, | |
| "step": 7180 | |
| }, | |
| { | |
| "epoch": 0.36659358588691177, | |
| "grad_norm": 3.3695135716741245, | |
| "learning_rate": 3.656405485009719e-06, | |
| "loss": 0.5664, | |
| "step": 7190 | |
| }, | |
| { | |
| "epoch": 0.3671034517921787, | |
| "grad_norm": 43.664567891879734, | |
| "learning_rate": 3.652743671900967e-06, | |
| "loss": 0.5566, | |
| "step": 7200 | |
| }, | |
| { | |
| "epoch": 0.3676133176974456, | |
| "grad_norm": 21.87781936577931, | |
| "learning_rate": 3.6490787151817986e-06, | |
| "loss": 0.5362, | |
| "step": 7210 | |
| }, | |
| { | |
| "epoch": 0.3681231836027125, | |
| "grad_norm": 6.9291162528960175, | |
| "learning_rate": 3.6454106248468024e-06, | |
| "loss": 0.5888, | |
| "step": 7220 | |
| }, | |
| { | |
| "epoch": 0.3686330495079794, | |
| "grad_norm": 6.711268461016556, | |
| "learning_rate": 3.6417394108991096e-06, | |
| "loss": 0.5572, | |
| "step": 7230 | |
| }, | |
| { | |
| "epoch": 0.36914291541324634, | |
| "grad_norm": 9.234151539211476, | |
| "learning_rate": 3.6380650833503705e-06, | |
| "loss": 0.6033, | |
| "step": 7240 | |
| }, | |
| { | |
| "epoch": 0.36965278131851326, | |
| "grad_norm": 72.08082771250515, | |
| "learning_rate": 3.6343876522207253e-06, | |
| "loss": 0.6168, | |
| "step": 7250 | |
| }, | |
| { | |
| "epoch": 0.3701626472237802, | |
| "grad_norm": 14.232739813895227, | |
| "learning_rate": 3.6307071275387807e-06, | |
| "loss": 0.5784, | |
| "step": 7260 | |
| }, | |
| { | |
| "epoch": 0.3706725131290471, | |
| "grad_norm": 8.312159079086417, | |
| "learning_rate": 3.6270235193415754e-06, | |
| "loss": 0.5503, | |
| "step": 7270 | |
| }, | |
| { | |
| "epoch": 0.371182379034314, | |
| "grad_norm": 5.219185861295808, | |
| "learning_rate": 3.6233368376745616e-06, | |
| "loss": 0.6168, | |
| "step": 7280 | |
| }, | |
| { | |
| "epoch": 0.37169224493958086, | |
| "grad_norm": 4.783574085733863, | |
| "learning_rate": 3.6196470925915705e-06, | |
| "loss": 0.6012, | |
| "step": 7290 | |
| }, | |
| { | |
| "epoch": 0.3722021108448478, | |
| "grad_norm": 6.14441502544535, | |
| "learning_rate": 3.6159542941547883e-06, | |
| "loss": 0.5437, | |
| "step": 7300 | |
| }, | |
| { | |
| "epoch": 0.3727119767501147, | |
| "grad_norm": 4.913727327218928, | |
| "learning_rate": 3.6122584524347267e-06, | |
| "loss": 0.6013, | |
| "step": 7310 | |
| }, | |
| { | |
| "epoch": 0.3732218426553816, | |
| "grad_norm": 4.673399055747235, | |
| "learning_rate": 3.608559577510198e-06, | |
| "loss": 0.5395, | |
| "step": 7320 | |
| }, | |
| { | |
| "epoch": 0.3737317085606485, | |
| "grad_norm": 5.707554591431282, | |
| "learning_rate": 3.604857679468285e-06, | |
| "loss": 0.5463, | |
| "step": 7330 | |
| }, | |
| { | |
| "epoch": 0.37424157446591544, | |
| "grad_norm": 10.008003378212784, | |
| "learning_rate": 3.601152768404317e-06, | |
| "loss": 0.5848, | |
| "step": 7340 | |
| }, | |
| { | |
| "epoch": 0.37475144037118235, | |
| "grad_norm": 15.561381138932529, | |
| "learning_rate": 3.597444854421837e-06, | |
| "loss": 0.5833, | |
| "step": 7350 | |
| }, | |
| { | |
| "epoch": 0.37526130627644927, | |
| "grad_norm": 23.018403169410053, | |
| "learning_rate": 3.59373394763258e-06, | |
| "loss": 0.5587, | |
| "step": 7360 | |
| }, | |
| { | |
| "epoch": 0.3757711721817162, | |
| "grad_norm": 7.150363888762786, | |
| "learning_rate": 3.5900200581564403e-06, | |
| "loss": 0.6373, | |
| "step": 7370 | |
| }, | |
| { | |
| "epoch": 0.3762810380869831, | |
| "grad_norm": 7.142174562438807, | |
| "learning_rate": 3.586303196121447e-06, | |
| "loss": 0.5655, | |
| "step": 7380 | |
| }, | |
| { | |
| "epoch": 0.37679090399225, | |
| "grad_norm": 13.760948907375717, | |
| "learning_rate": 3.5825833716637364e-06, | |
| "loss": 0.65, | |
| "step": 7390 | |
| }, | |
| { | |
| "epoch": 0.37730076989751693, | |
| "grad_norm": 9.225428328804446, | |
| "learning_rate": 3.5788605949275237e-06, | |
| "loss": 0.5367, | |
| "step": 7400 | |
| }, | |
| { | |
| "epoch": 0.37781063580278385, | |
| "grad_norm": 24.730845210760858, | |
| "learning_rate": 3.5751348760650722e-06, | |
| "loss": 0.6252, | |
| "step": 7410 | |
| }, | |
| { | |
| "epoch": 0.37832050170805076, | |
| "grad_norm": 12.795430386888327, | |
| "learning_rate": 3.5714062252366723e-06, | |
| "loss": 0.6394, | |
| "step": 7420 | |
| }, | |
| { | |
| "epoch": 0.3788303676133177, | |
| "grad_norm": 5.9277660577921765, | |
| "learning_rate": 3.5676746526106084e-06, | |
| "loss": 0.6379, | |
| "step": 7430 | |
| }, | |
| { | |
| "epoch": 0.3793402335185846, | |
| "grad_norm": 5.044275508529918, | |
| "learning_rate": 3.5639401683631314e-06, | |
| "loss": 0.6292, | |
| "step": 7440 | |
| }, | |
| { | |
| "epoch": 0.3798500994238515, | |
| "grad_norm": 9.004761341755255, | |
| "learning_rate": 3.5602027826784356e-06, | |
| "loss": 0.628, | |
| "step": 7450 | |
| }, | |
| { | |
| "epoch": 0.3803599653291184, | |
| "grad_norm": 4.017290839777303, | |
| "learning_rate": 3.556462505748625e-06, | |
| "loss": 0.5683, | |
| "step": 7460 | |
| }, | |
| { | |
| "epoch": 0.38086983123438534, | |
| "grad_norm": 6.958902888219458, | |
| "learning_rate": 3.5527193477736903e-06, | |
| "loss": 0.5694, | |
| "step": 7470 | |
| }, | |
| { | |
| "epoch": 0.38137969713965225, | |
| "grad_norm": 5.5519005203253275, | |
| "learning_rate": 3.548973318961477e-06, | |
| "loss": 0.5316, | |
| "step": 7480 | |
| }, | |
| { | |
| "epoch": 0.38188956304491917, | |
| "grad_norm": 6.163222258445389, | |
| "learning_rate": 3.5452244295276604e-06, | |
| "loss": 0.6386, | |
| "step": 7490 | |
| }, | |
| { | |
| "epoch": 0.3823994289501861, | |
| "grad_norm": 31.506161393196194, | |
| "learning_rate": 3.541472689695718e-06, | |
| "loss": 0.5672, | |
| "step": 7500 | |
| }, | |
| { | |
| "epoch": 0.382909294855453, | |
| "grad_norm": 13.297329690802785, | |
| "learning_rate": 3.537718109696899e-06, | |
| "loss": 0.5918, | |
| "step": 7510 | |
| }, | |
| { | |
| "epoch": 0.3834191607607199, | |
| "grad_norm": 6.188203092221159, | |
| "learning_rate": 3.5339606997701975e-06, | |
| "loss": 0.5354, | |
| "step": 7520 | |
| }, | |
| { | |
| "epoch": 0.38392902666598683, | |
| "grad_norm": 5.844684135462748, | |
| "learning_rate": 3.530200470162328e-06, | |
| "loss": 0.6198, | |
| "step": 7530 | |
| }, | |
| { | |
| "epoch": 0.38443889257125374, | |
| "grad_norm": 6.155255221678391, | |
| "learning_rate": 3.5264374311276904e-06, | |
| "loss": 0.4963, | |
| "step": 7540 | |
| }, | |
| { | |
| "epoch": 0.38494875847652066, | |
| "grad_norm": 44.826506547381406, | |
| "learning_rate": 3.5226715929283507e-06, | |
| "loss": 0.5906, | |
| "step": 7550 | |
| }, | |
| { | |
| "epoch": 0.3854586243817876, | |
| "grad_norm": 5.697741044215528, | |
| "learning_rate": 3.518902965834003e-06, | |
| "loss": 0.5611, | |
| "step": 7560 | |
| }, | |
| { | |
| "epoch": 0.3859684902870545, | |
| "grad_norm": 19.71448408084545, | |
| "learning_rate": 3.5151315601219517e-06, | |
| "loss": 0.536, | |
| "step": 7570 | |
| }, | |
| { | |
| "epoch": 0.3864783561923214, | |
| "grad_norm": 37.42877624913661, | |
| "learning_rate": 3.5113573860770755e-06, | |
| "loss": 0.5678, | |
| "step": 7580 | |
| }, | |
| { | |
| "epoch": 0.3869882220975883, | |
| "grad_norm": 5.897945109676117, | |
| "learning_rate": 3.5075804539918047e-06, | |
| "loss": 0.6857, | |
| "step": 7590 | |
| }, | |
| { | |
| "epoch": 0.38749808800285523, | |
| "grad_norm": 8.59608065779492, | |
| "learning_rate": 3.5038007741660895e-06, | |
| "loss": 0.5904, | |
| "step": 7600 | |
| }, | |
| { | |
| "epoch": 0.38800795390812215, | |
| "grad_norm": 8.668612428257326, | |
| "learning_rate": 3.5000183569073743e-06, | |
| "loss": 0.6183, | |
| "step": 7610 | |
| }, | |
| { | |
| "epoch": 0.38851781981338906, | |
| "grad_norm": 7.116728646113035, | |
| "learning_rate": 3.4962332125305686e-06, | |
| "loss": 0.5345, | |
| "step": 7620 | |
| }, | |
| { | |
| "epoch": 0.389027685718656, | |
| "grad_norm": 4.3910147461794775, | |
| "learning_rate": 3.492445351358018e-06, | |
| "loss": 0.5716, | |
| "step": 7630 | |
| }, | |
| { | |
| "epoch": 0.3895375516239229, | |
| "grad_norm": 6.723186930639564, | |
| "learning_rate": 3.488654783719477e-06, | |
| "loss": 0.5315, | |
| "step": 7640 | |
| }, | |
| { | |
| "epoch": 0.3900474175291898, | |
| "grad_norm": 7.9122335533789245, | |
| "learning_rate": 3.484861519952083e-06, | |
| "loss": 0.5747, | |
| "step": 7650 | |
| }, | |
| { | |
| "epoch": 0.3905572834344567, | |
| "grad_norm": 10.613463496682332, | |
| "learning_rate": 3.4810655704003237e-06, | |
| "loss": 0.5648, | |
| "step": 7660 | |
| }, | |
| { | |
| "epoch": 0.39106714933972364, | |
| "grad_norm": 18.243032282223737, | |
| "learning_rate": 3.4772669454160113e-06, | |
| "loss": 0.5482, | |
| "step": 7670 | |
| }, | |
| { | |
| "epoch": 0.39157701524499056, | |
| "grad_norm": 10.660339605638224, | |
| "learning_rate": 3.473465655358255e-06, | |
| "loss": 0.5209, | |
| "step": 7680 | |
| }, | |
| { | |
| "epoch": 0.39208688115025747, | |
| "grad_norm": 6.602625160974067, | |
| "learning_rate": 3.469661710593431e-06, | |
| "loss": 0.5632, | |
| "step": 7690 | |
| }, | |
| { | |
| "epoch": 0.3925967470555244, | |
| "grad_norm": 4.708196667408552, | |
| "learning_rate": 3.465855121495156e-06, | |
| "loss": 0.4498, | |
| "step": 7700 | |
| }, | |
| { | |
| "epoch": 0.3931066129607913, | |
| "grad_norm": 10.823694154178865, | |
| "learning_rate": 3.4620458984442564e-06, | |
| "loss": 0.5415, | |
| "step": 7710 | |
| }, | |
| { | |
| "epoch": 0.3936164788660582, | |
| "grad_norm": 7.351637451106162, | |
| "learning_rate": 3.4582340518287444e-06, | |
| "loss": 0.5646, | |
| "step": 7720 | |
| }, | |
| { | |
| "epoch": 0.39412634477132513, | |
| "grad_norm": 7.415628332609013, | |
| "learning_rate": 3.4544195920437834e-06, | |
| "loss": 0.5688, | |
| "step": 7730 | |
| }, | |
| { | |
| "epoch": 0.39463621067659205, | |
| "grad_norm": 11.07313306100438, | |
| "learning_rate": 3.4506025294916656e-06, | |
| "loss": 0.5557, | |
| "step": 7740 | |
| }, | |
| { | |
| "epoch": 0.39514607658185896, | |
| "grad_norm": 22.03291235562454, | |
| "learning_rate": 3.44678287458178e-06, | |
| "loss": 0.5875, | |
| "step": 7750 | |
| }, | |
| { | |
| "epoch": 0.3956559424871259, | |
| "grad_norm": 5.959836162345017, | |
| "learning_rate": 3.4429606377305847e-06, | |
| "loss": 0.6191, | |
| "step": 7760 | |
| }, | |
| { | |
| "epoch": 0.3961658083923928, | |
| "grad_norm": 5.5293587959812545, | |
| "learning_rate": 3.439135829361581e-06, | |
| "loss": 0.5281, | |
| "step": 7770 | |
| }, | |
| { | |
| "epoch": 0.3966756742976597, | |
| "grad_norm": 17.335795805582098, | |
| "learning_rate": 3.435308459905281e-06, | |
| "loss": 0.6464, | |
| "step": 7780 | |
| }, | |
| { | |
| "epoch": 0.3971855402029266, | |
| "grad_norm": 8.319895170860779, | |
| "learning_rate": 3.4314785397991814e-06, | |
| "loss": 0.5358, | |
| "step": 7790 | |
| }, | |
| { | |
| "epoch": 0.39769540610819354, | |
| "grad_norm": 7.611870921179878, | |
| "learning_rate": 3.4276460794877343e-06, | |
| "loss": 0.5625, | |
| "step": 7800 | |
| }, | |
| { | |
| "epoch": 0.39820527201346045, | |
| "grad_norm": 7.800640649074299, | |
| "learning_rate": 3.4238110894223205e-06, | |
| "loss": 0.6573, | |
| "step": 7810 | |
| }, | |
| { | |
| "epoch": 0.39871513791872737, | |
| "grad_norm": 5.387338032817162, | |
| "learning_rate": 3.419973580061218e-06, | |
| "loss": 0.5583, | |
| "step": 7820 | |
| }, | |
| { | |
| "epoch": 0.3992250038239943, | |
| "grad_norm": 11.84450391470328, | |
| "learning_rate": 3.4161335618695774e-06, | |
| "loss": 0.6167, | |
| "step": 7830 | |
| }, | |
| { | |
| "epoch": 0.3997348697292612, | |
| "grad_norm": 7.947927109201323, | |
| "learning_rate": 3.4122910453193885e-06, | |
| "loss": 0.5733, | |
| "step": 7840 | |
| }, | |
| { | |
| "epoch": 0.4002447356345281, | |
| "grad_norm": 18.10683231004345, | |
| "learning_rate": 3.4084460408894553e-06, | |
| "loss": 0.6145, | |
| "step": 7850 | |
| }, | |
| { | |
| "epoch": 0.40075460153979503, | |
| "grad_norm": 15.775686326362997, | |
| "learning_rate": 3.4045985590653667e-06, | |
| "loss": 0.6136, | |
| "step": 7860 | |
| }, | |
| { | |
| "epoch": 0.40126446744506195, | |
| "grad_norm": 20.43302274856715, | |
| "learning_rate": 3.4007486103394678e-06, | |
| "loss": 0.5913, | |
| "step": 7870 | |
| }, | |
| { | |
| "epoch": 0.40177433335032886, | |
| "grad_norm": 6.018831322305339, | |
| "learning_rate": 3.3968962052108288e-06, | |
| "loss": 0.5405, | |
| "step": 7880 | |
| }, | |
| { | |
| "epoch": 0.4022841992555958, | |
| "grad_norm": 13.816860148733037, | |
| "learning_rate": 3.3930413541852235e-06, | |
| "loss": 0.5905, | |
| "step": 7890 | |
| }, | |
| { | |
| "epoch": 0.4027940651608627, | |
| "grad_norm": 20.814324321343378, | |
| "learning_rate": 3.389184067775091e-06, | |
| "loss": 0.5492, | |
| "step": 7900 | |
| }, | |
| { | |
| "epoch": 0.4033039310661296, | |
| "grad_norm": 13.912098590539072, | |
| "learning_rate": 3.385324356499513e-06, | |
| "loss": 0.5587, | |
| "step": 7910 | |
| }, | |
| { | |
| "epoch": 0.4038137969713965, | |
| "grad_norm": 7.463579403533143, | |
| "learning_rate": 3.381462230884187e-06, | |
| "loss": 0.5488, | |
| "step": 7920 | |
| }, | |
| { | |
| "epoch": 0.40432366287666344, | |
| "grad_norm": 4.290865201752349, | |
| "learning_rate": 3.377597701461391e-06, | |
| "loss": 0.5305, | |
| "step": 7930 | |
| }, | |
| { | |
| "epoch": 0.40483352878193035, | |
| "grad_norm": 15.062253635702449, | |
| "learning_rate": 3.3737307787699593e-06, | |
| "loss": 0.5701, | |
| "step": 7940 | |
| }, | |
| { | |
| "epoch": 0.40534339468719727, | |
| "grad_norm": 10.230006901428354, | |
| "learning_rate": 3.3698614733552537e-06, | |
| "loss": 0.5206, | |
| "step": 7950 | |
| }, | |
| { | |
| "epoch": 0.4058532605924642, | |
| "grad_norm": 36.51788649803484, | |
| "learning_rate": 3.3659897957691334e-06, | |
| "loss": 0.6388, | |
| "step": 7960 | |
| }, | |
| { | |
| "epoch": 0.4063631264977311, | |
| "grad_norm": 12.636380989469782, | |
| "learning_rate": 3.3621157565699265e-06, | |
| "loss": 0.5556, | |
| "step": 7970 | |
| }, | |
| { | |
| "epoch": 0.406872992402998, | |
| "grad_norm": 7.421584163218057, | |
| "learning_rate": 3.3582393663224012e-06, | |
| "loss": 0.5578, | |
| "step": 7980 | |
| }, | |
| { | |
| "epoch": 0.40738285830826493, | |
| "grad_norm": 5.88874288509878, | |
| "learning_rate": 3.3543606355977377e-06, | |
| "loss": 0.5361, | |
| "step": 7990 | |
| }, | |
| { | |
| "epoch": 0.40789272421353184, | |
| "grad_norm": 8.416084341931638, | |
| "learning_rate": 3.350479574973498e-06, | |
| "loss": 0.5752, | |
| "step": 8000 | |
| }, | |
| { | |
| "epoch": 0.40840259011879876, | |
| "grad_norm": 11.531771923553494, | |
| "learning_rate": 3.3465961950335994e-06, | |
| "loss": 0.5378, | |
| "step": 8010 | |
| }, | |
| { | |
| "epoch": 0.4089124560240657, | |
| "grad_norm": 23.79130168481462, | |
| "learning_rate": 3.342710506368282e-06, | |
| "loss": 0.5193, | |
| "step": 8020 | |
| }, | |
| { | |
| "epoch": 0.4094223219293326, | |
| "grad_norm": 15.518320494738575, | |
| "learning_rate": 3.3388225195740844e-06, | |
| "loss": 0.5552, | |
| "step": 8030 | |
| }, | |
| { | |
| "epoch": 0.4099321878345995, | |
| "grad_norm": 15.601458428258836, | |
| "learning_rate": 3.33493224525381e-06, | |
| "loss": 0.5102, | |
| "step": 8040 | |
| }, | |
| { | |
| "epoch": 0.4104420537398664, | |
| "grad_norm": 11.821263743959667, | |
| "learning_rate": 3.3310396940165025e-06, | |
| "loss": 0.5871, | |
| "step": 8050 | |
| }, | |
| { | |
| "epoch": 0.41095191964513333, | |
| "grad_norm": 14.165232414945066, | |
| "learning_rate": 3.327144876477413e-06, | |
| "loss": 0.6122, | |
| "step": 8060 | |
| }, | |
| { | |
| "epoch": 0.41146178555040025, | |
| "grad_norm": 10.710392958957403, | |
| "learning_rate": 3.3232478032579746e-06, | |
| "loss": 0.5539, | |
| "step": 8070 | |
| }, | |
| { | |
| "epoch": 0.41197165145566716, | |
| "grad_norm": 14.032550679107109, | |
| "learning_rate": 3.319348484985771e-06, | |
| "loss": 0.5259, | |
| "step": 8080 | |
| }, | |
| { | |
| "epoch": 0.4124815173609341, | |
| "grad_norm": 29.595540369097325, | |
| "learning_rate": 3.3154469322945083e-06, | |
| "loss": 0.5379, | |
| "step": 8090 | |
| }, | |
| { | |
| "epoch": 0.412991383266201, | |
| "grad_norm": 5.9496106362966605, | |
| "learning_rate": 3.311543155823985e-06, | |
| "loss": 0.6027, | |
| "step": 8100 | |
| }, | |
| { | |
| "epoch": 0.4135012491714679, | |
| "grad_norm": 10.511494733407762, | |
| "learning_rate": 3.3076371662200666e-06, | |
| "loss": 0.5592, | |
| "step": 8110 | |
| }, | |
| { | |
| "epoch": 0.4140111150767348, | |
| "grad_norm": 10.936585430312546, | |
| "learning_rate": 3.303728974134653e-06, | |
| "loss": 0.5493, | |
| "step": 8120 | |
| }, | |
| { | |
| "epoch": 0.41452098098200174, | |
| "grad_norm": 5.7440596533209005, | |
| "learning_rate": 3.2998185902256475e-06, | |
| "loss": 0.5621, | |
| "step": 8130 | |
| }, | |
| { | |
| "epoch": 0.41503084688726866, | |
| "grad_norm": 10.573257615848048, | |
| "learning_rate": 3.295906025156935e-06, | |
| "loss": 0.6092, | |
| "step": 8140 | |
| }, | |
| { | |
| "epoch": 0.41554071279253557, | |
| "grad_norm": 32.59649320062619, | |
| "learning_rate": 3.291991289598347e-06, | |
| "loss": 0.5696, | |
| "step": 8150 | |
| }, | |
| { | |
| "epoch": 0.4160505786978025, | |
| "grad_norm": 24.663894982370834, | |
| "learning_rate": 3.288074394225632e-06, | |
| "loss": 0.6018, | |
| "step": 8160 | |
| }, | |
| { | |
| "epoch": 0.4165604446030694, | |
| "grad_norm": 16.106665031926827, | |
| "learning_rate": 3.284155349720431e-06, | |
| "loss": 0.584, | |
| "step": 8170 | |
| }, | |
| { | |
| "epoch": 0.4170703105083363, | |
| "grad_norm": 7.162412242443836, | |
| "learning_rate": 3.2802341667702448e-06, | |
| "loss": 0.6162, | |
| "step": 8180 | |
| }, | |
| { | |
| "epoch": 0.41758017641360323, | |
| "grad_norm": 3.99419268665801, | |
| "learning_rate": 3.276310856068406e-06, | |
| "loss": 0.624, | |
| "step": 8190 | |
| }, | |
| { | |
| "epoch": 0.41809004231887015, | |
| "grad_norm": 18.830509521933557, | |
| "learning_rate": 3.27238542831405e-06, | |
| "loss": 0.5375, | |
| "step": 8200 | |
| }, | |
| { | |
| "epoch": 0.41859990822413706, | |
| "grad_norm": 13.211612862937661, | |
| "learning_rate": 3.2684578942120853e-06, | |
| "loss": 0.5065, | |
| "step": 8210 | |
| }, | |
| { | |
| "epoch": 0.419109774129404, | |
| "grad_norm": 11.187081189820343, | |
| "learning_rate": 3.2645282644731648e-06, | |
| "loss": 0.5975, | |
| "step": 8220 | |
| }, | |
| { | |
| "epoch": 0.4196196400346709, | |
| "grad_norm": 11.56668755275729, | |
| "learning_rate": 3.2605965498136554e-06, | |
| "loss": 0.5661, | |
| "step": 8230 | |
| }, | |
| { | |
| "epoch": 0.4201295059399378, | |
| "grad_norm": 7.676472574372089, | |
| "learning_rate": 3.2566627609556117e-06, | |
| "loss": 0.5583, | |
| "step": 8240 | |
| }, | |
| { | |
| "epoch": 0.4206393718452047, | |
| "grad_norm": 12.256179639373476, | |
| "learning_rate": 3.252726908626742e-06, | |
| "loss": 0.6085, | |
| "step": 8250 | |
| }, | |
| { | |
| "epoch": 0.42114923775047164, | |
| "grad_norm": 8.779295692765299, | |
| "learning_rate": 3.248789003560385e-06, | |
| "loss": 0.4848, | |
| "step": 8260 | |
| }, | |
| { | |
| "epoch": 0.42165910365573855, | |
| "grad_norm": 8.906852808489715, | |
| "learning_rate": 3.2448490564954744e-06, | |
| "loss": 0.5238, | |
| "step": 8270 | |
| }, | |
| { | |
| "epoch": 0.42216896956100547, | |
| "grad_norm": 11.99953771698637, | |
| "learning_rate": 3.2409070781765147e-06, | |
| "loss": 0.5521, | |
| "step": 8280 | |
| }, | |
| { | |
| "epoch": 0.4226788354662724, | |
| "grad_norm": 13.350283128074937, | |
| "learning_rate": 3.236963079353548e-06, | |
| "loss": 0.5584, | |
| "step": 8290 | |
| }, | |
| { | |
| "epoch": 0.4231887013715393, | |
| "grad_norm": 6.821633592421078, | |
| "learning_rate": 3.2330170707821283e-06, | |
| "loss": 0.5664, | |
| "step": 8300 | |
| }, | |
| { | |
| "epoch": 0.4236985672768062, | |
| "grad_norm": 7.056662121640499, | |
| "learning_rate": 3.229069063223289e-06, | |
| "loss": 0.5773, | |
| "step": 8310 | |
| }, | |
| { | |
| "epoch": 0.42420843318207313, | |
| "grad_norm": 9.355395068848395, | |
| "learning_rate": 3.225119067443515e-06, | |
| "loss": 0.5793, | |
| "step": 8320 | |
| }, | |
| { | |
| "epoch": 0.42471829908734005, | |
| "grad_norm": 5.264154463454432, | |
| "learning_rate": 3.2211670942147144e-06, | |
| "loss": 0.5646, | |
| "step": 8330 | |
| }, | |
| { | |
| "epoch": 0.42522816499260696, | |
| "grad_norm": 9.501578808920266, | |
| "learning_rate": 3.2172131543141865e-06, | |
| "loss": 0.5505, | |
| "step": 8340 | |
| }, | |
| { | |
| "epoch": 0.4257380308978739, | |
| "grad_norm": 7.232532803125666, | |
| "learning_rate": 3.2132572585245946e-06, | |
| "loss": 0.6165, | |
| "step": 8350 | |
| }, | |
| { | |
| "epoch": 0.4262478968031408, | |
| "grad_norm": 9.113984571929457, | |
| "learning_rate": 3.209299417633936e-06, | |
| "loss": 0.5958, | |
| "step": 8360 | |
| }, | |
| { | |
| "epoch": 0.4267577627084077, | |
| "grad_norm": 7.578216172807771, | |
| "learning_rate": 3.2053396424355105e-06, | |
| "loss": 0.6117, | |
| "step": 8370 | |
| }, | |
| { | |
| "epoch": 0.4272676286136746, | |
| "grad_norm": 7.320511070141178, | |
| "learning_rate": 3.201377943727896e-06, | |
| "loss": 0.551, | |
| "step": 8380 | |
| }, | |
| { | |
| "epoch": 0.42777749451894154, | |
| "grad_norm": 14.982785634275919, | |
| "learning_rate": 3.197414332314914e-06, | |
| "loss": 0.4761, | |
| "step": 8390 | |
| }, | |
| { | |
| "epoch": 0.42828736042420845, | |
| "grad_norm": 7.019223513877667, | |
| "learning_rate": 3.193448819005603e-06, | |
| "loss": 0.5969, | |
| "step": 8400 | |
| }, | |
| { | |
| "epoch": 0.42879722632947537, | |
| "grad_norm": 19.840525872281795, | |
| "learning_rate": 3.189481414614186e-06, | |
| "loss": 0.5495, | |
| "step": 8410 | |
| }, | |
| { | |
| "epoch": 0.4293070922347423, | |
| "grad_norm": 22.995303030548026, | |
| "learning_rate": 3.1855121299600454e-06, | |
| "loss": 0.637, | |
| "step": 8420 | |
| }, | |
| { | |
| "epoch": 0.4298169581400092, | |
| "grad_norm": 5.869244255618574, | |
| "learning_rate": 3.1815409758676917e-06, | |
| "loss": 0.6386, | |
| "step": 8430 | |
| }, | |
| { | |
| "epoch": 0.4303268240452761, | |
| "grad_norm": 4.604957329242786, | |
| "learning_rate": 3.1775679631667306e-06, | |
| "loss": 0.5725, | |
| "step": 8440 | |
| }, | |
| { | |
| "epoch": 0.43083668995054303, | |
| "grad_norm": 8.65884034038621, | |
| "learning_rate": 3.1735931026918393e-06, | |
| "loss": 0.6049, | |
| "step": 8450 | |
| }, | |
| { | |
| "epoch": 0.43134655585580994, | |
| "grad_norm": 13.3608961962584, | |
| "learning_rate": 3.1696164052827318e-06, | |
| "loss": 0.5236, | |
| "step": 8460 | |
| }, | |
| { | |
| "epoch": 0.43185642176107686, | |
| "grad_norm": 25.089515960282526, | |
| "learning_rate": 3.165637881784133e-06, | |
| "loss": 0.4994, | |
| "step": 8470 | |
| }, | |
| { | |
| "epoch": 0.4323662876663438, | |
| "grad_norm": 8.409728068654365, | |
| "learning_rate": 3.161657543045747e-06, | |
| "loss": 0.6067, | |
| "step": 8480 | |
| }, | |
| { | |
| "epoch": 0.4328761535716107, | |
| "grad_norm": 20.97435073144917, | |
| "learning_rate": 3.1576753999222275e-06, | |
| "loss": 0.5705, | |
| "step": 8490 | |
| }, | |
| { | |
| "epoch": 0.4333860194768776, | |
| "grad_norm": 33.23045329460443, | |
| "learning_rate": 3.1536914632731512e-06, | |
| "loss": 0.5923, | |
| "step": 8500 | |
| }, | |
| { | |
| "epoch": 0.4338958853821445, | |
| "grad_norm": 6.16600806964113, | |
| "learning_rate": 3.1497057439629836e-06, | |
| "loss": 0.5394, | |
| "step": 8510 | |
| }, | |
| { | |
| "epoch": 0.43440575128741143, | |
| "grad_norm": 10.017018232729027, | |
| "learning_rate": 3.1457182528610526e-06, | |
| "loss": 0.5935, | |
| "step": 8520 | |
| }, | |
| { | |
| "epoch": 0.43491561719267835, | |
| "grad_norm": 12.120848803469393, | |
| "learning_rate": 3.1417290008415167e-06, | |
| "loss": 0.5272, | |
| "step": 8530 | |
| }, | |
| { | |
| "epoch": 0.43542548309794527, | |
| "grad_norm": 26.662754232184135, | |
| "learning_rate": 3.1377379987833395e-06, | |
| "loss": 0.5517, | |
| "step": 8540 | |
| }, | |
| { | |
| "epoch": 0.4359353490032122, | |
| "grad_norm": 23.485595791019445, | |
| "learning_rate": 3.133745257570253e-06, | |
| "loss": 0.6245, | |
| "step": 8550 | |
| }, | |
| { | |
| "epoch": 0.4364452149084791, | |
| "grad_norm": 10.087847255985833, | |
| "learning_rate": 3.1297507880907357e-06, | |
| "loss": 0.4898, | |
| "step": 8560 | |
| }, | |
| { | |
| "epoch": 0.436955080813746, | |
| "grad_norm": 7.147241397833923, | |
| "learning_rate": 3.1257546012379775e-06, | |
| "loss": 0.543, | |
| "step": 8570 | |
| }, | |
| { | |
| "epoch": 0.4374649467190129, | |
| "grad_norm": 6.6109218274816195, | |
| "learning_rate": 3.121756707909851e-06, | |
| "loss": 0.5064, | |
| "step": 8580 | |
| }, | |
| { | |
| "epoch": 0.43797481262427984, | |
| "grad_norm": 8.054310764050582, | |
| "learning_rate": 3.1177571190088833e-06, | |
| "loss": 0.6016, | |
| "step": 8590 | |
| }, | |
| { | |
| "epoch": 0.43848467852954676, | |
| "grad_norm": 10.629143445576705, | |
| "learning_rate": 3.1137558454422266e-06, | |
| "loss": 0.5692, | |
| "step": 8600 | |
| }, | |
| { | |
| "epoch": 0.43899454443481367, | |
| "grad_norm": 4.656946338708994, | |
| "learning_rate": 3.1097528981216245e-06, | |
| "loss": 0.5431, | |
| "step": 8610 | |
| }, | |
| { | |
| "epoch": 0.43950441034008053, | |
| "grad_norm": 7.686273185864347, | |
| "learning_rate": 3.105748287963388e-06, | |
| "loss": 0.5942, | |
| "step": 8620 | |
| }, | |
| { | |
| "epoch": 0.44001427624534745, | |
| "grad_norm": 6.4017419151248784, | |
| "learning_rate": 3.1017420258883607e-06, | |
| "loss": 0.5951, | |
| "step": 8630 | |
| }, | |
| { | |
| "epoch": 0.44052414215061436, | |
| "grad_norm": 7.715398459808364, | |
| "learning_rate": 3.0977341228218916e-06, | |
| "loss": 0.5274, | |
| "step": 8640 | |
| }, | |
| { | |
| "epoch": 0.4410340080558813, | |
| "grad_norm": 6.437445703730611, | |
| "learning_rate": 3.0937245896938062e-06, | |
| "loss": 0.496, | |
| "step": 8650 | |
| }, | |
| { | |
| "epoch": 0.4415438739611482, | |
| "grad_norm": 12.040187268475353, | |
| "learning_rate": 3.089713437438373e-06, | |
| "loss": 0.5444, | |
| "step": 8660 | |
| }, | |
| { | |
| "epoch": 0.4420537398664151, | |
| "grad_norm": 15.97318478204675, | |
| "learning_rate": 3.085700676994277e-06, | |
| "loss": 0.4989, | |
| "step": 8670 | |
| }, | |
| { | |
| "epoch": 0.442563605771682, | |
| "grad_norm": 5.170088323073286, | |
| "learning_rate": 3.0816863193045898e-06, | |
| "loss": 0.5174, | |
| "step": 8680 | |
| }, | |
| { | |
| "epoch": 0.44307347167694894, | |
| "grad_norm": 11.31806742780724, | |
| "learning_rate": 3.0776703753167382e-06, | |
| "loss": 0.5313, | |
| "step": 8690 | |
| }, | |
| { | |
| "epoch": 0.44358333758221585, | |
| "grad_norm": 8.897162337165318, | |
| "learning_rate": 3.0736528559824736e-06, | |
| "loss": 0.5452, | |
| "step": 8700 | |
| }, | |
| { | |
| "epoch": 0.44409320348748277, | |
| "grad_norm": 4.49436311321466, | |
| "learning_rate": 3.0696337722578444e-06, | |
| "loss": 0.5471, | |
| "step": 8710 | |
| }, | |
| { | |
| "epoch": 0.4446030693927497, | |
| "grad_norm": 42.610576484138235, | |
| "learning_rate": 3.0656131351031663e-06, | |
| "loss": 0.5498, | |
| "step": 8720 | |
| }, | |
| { | |
| "epoch": 0.4451129352980166, | |
| "grad_norm": 12.31209500660843, | |
| "learning_rate": 3.06159095548299e-06, | |
| "loss": 0.5345, | |
| "step": 8730 | |
| }, | |
| { | |
| "epoch": 0.4456228012032835, | |
| "grad_norm": 8.692810512828606, | |
| "learning_rate": 3.057567244366072e-06, | |
| "loss": 0.5101, | |
| "step": 8740 | |
| }, | |
| { | |
| "epoch": 0.44613266710855043, | |
| "grad_norm": 10.979837107264158, | |
| "learning_rate": 3.053542012725347e-06, | |
| "loss": 0.5878, | |
| "step": 8750 | |
| }, | |
| { | |
| "epoch": 0.44664253301381734, | |
| "grad_norm": 7.20795773464883, | |
| "learning_rate": 3.049515271537896e-06, | |
| "loss": 0.5345, | |
| "step": 8760 | |
| }, | |
| { | |
| "epoch": 0.44715239891908426, | |
| "grad_norm": 23.767407508839465, | |
| "learning_rate": 3.045487031784916e-06, | |
| "loss": 0.5482, | |
| "step": 8770 | |
| }, | |
| { | |
| "epoch": 0.4476622648243512, | |
| "grad_norm": 8.421964173524868, | |
| "learning_rate": 3.041457304451691e-06, | |
| "loss": 0.5528, | |
| "step": 8780 | |
| }, | |
| { | |
| "epoch": 0.4481721307296181, | |
| "grad_norm": 9.859187171670458, | |
| "learning_rate": 3.0374261005275606e-06, | |
| "loss": 0.5486, | |
| "step": 8790 | |
| }, | |
| { | |
| "epoch": 0.448681996634885, | |
| "grad_norm": 6.790273628842832, | |
| "learning_rate": 3.033393431005893e-06, | |
| "loss": 0.5345, | |
| "step": 8800 | |
| }, | |
| { | |
| "epoch": 0.4491918625401519, | |
| "grad_norm": 10.253844615931142, | |
| "learning_rate": 3.0293593068840514e-06, | |
| "loss": 0.5658, | |
| "step": 8810 | |
| }, | |
| { | |
| "epoch": 0.44970172844541884, | |
| "grad_norm": 6.617293173795455, | |
| "learning_rate": 3.0253237391633684e-06, | |
| "loss": 0.5748, | |
| "step": 8820 | |
| }, | |
| { | |
| "epoch": 0.45021159435068575, | |
| "grad_norm": 11.268408655058423, | |
| "learning_rate": 3.02128673884911e-06, | |
| "loss": 0.5413, | |
| "step": 8830 | |
| }, | |
| { | |
| "epoch": 0.45072146025595267, | |
| "grad_norm": 18.781346474180374, | |
| "learning_rate": 3.017248316950452e-06, | |
| "loss": 0.5501, | |
| "step": 8840 | |
| }, | |
| { | |
| "epoch": 0.4512313261612196, | |
| "grad_norm": 7.880045771676749, | |
| "learning_rate": 3.0132084844804444e-06, | |
| "loss": 0.5556, | |
| "step": 8850 | |
| }, | |
| { | |
| "epoch": 0.4517411920664865, | |
| "grad_norm": 22.59802767837297, | |
| "learning_rate": 3.0091672524559855e-06, | |
| "loss": 0.521, | |
| "step": 8860 | |
| }, | |
| { | |
| "epoch": 0.4522510579717534, | |
| "grad_norm": 6.6299156399345085, | |
| "learning_rate": 3.0051246318977913e-06, | |
| "loss": 0.5611, | |
| "step": 8870 | |
| }, | |
| { | |
| "epoch": 0.4527609238770203, | |
| "grad_norm": 98.67067204131763, | |
| "learning_rate": 3.0010806338303615e-06, | |
| "loss": 0.6875, | |
| "step": 8880 | |
| }, | |
| { | |
| "epoch": 0.45327078978228724, | |
| "grad_norm": 11.230266616071487, | |
| "learning_rate": 2.997035269281955e-06, | |
| "loss": 0.6012, | |
| "step": 8890 | |
| }, | |
| { | |
| "epoch": 0.45378065568755416, | |
| "grad_norm": 12.834508374143617, | |
| "learning_rate": 2.9929885492845555e-06, | |
| "loss": 0.5707, | |
| "step": 8900 | |
| }, | |
| { | |
| "epoch": 0.4542905215928211, | |
| "grad_norm": 8.358587417826085, | |
| "learning_rate": 2.9889404848738434e-06, | |
| "loss": 0.5561, | |
| "step": 8910 | |
| }, | |
| { | |
| "epoch": 0.454800387498088, | |
| "grad_norm": 8.590838262002269, | |
| "learning_rate": 2.9848910870891667e-06, | |
| "loss": 0.559, | |
| "step": 8920 | |
| }, | |
| { | |
| "epoch": 0.4553102534033549, | |
| "grad_norm": 6.4262883397092425, | |
| "learning_rate": 2.980840366973508e-06, | |
| "loss": 0.6292, | |
| "step": 8930 | |
| }, | |
| { | |
| "epoch": 0.4558201193086218, | |
| "grad_norm": 8.386109243133818, | |
| "learning_rate": 2.9767883355734567e-06, | |
| "loss": 0.5761, | |
| "step": 8940 | |
| }, | |
| { | |
| "epoch": 0.45632998521388873, | |
| "grad_norm": 8.476299499333017, | |
| "learning_rate": 2.9727350039391782e-06, | |
| "loss": 0.5891, | |
| "step": 8950 | |
| }, | |
| { | |
| "epoch": 0.45683985111915565, | |
| "grad_norm": 69.39163092666091, | |
| "learning_rate": 2.968680383124384e-06, | |
| "loss": 0.5577, | |
| "step": 8960 | |
| }, | |
| { | |
| "epoch": 0.45734971702442256, | |
| "grad_norm": 6.599382708009659, | |
| "learning_rate": 2.9646244841862996e-06, | |
| "loss": 0.5711, | |
| "step": 8970 | |
| }, | |
| { | |
| "epoch": 0.4578595829296895, | |
| "grad_norm": 16.290153939994777, | |
| "learning_rate": 2.9605673181856386e-06, | |
| "loss": 0.5651, | |
| "step": 8980 | |
| }, | |
| { | |
| "epoch": 0.4583694488349564, | |
| "grad_norm": 9.201640373968438, | |
| "learning_rate": 2.956508896186569e-06, | |
| "loss": 0.5515, | |
| "step": 8990 | |
| }, | |
| { | |
| "epoch": 0.4588793147402233, | |
| "grad_norm": 19.028256485342254, | |
| "learning_rate": 2.9524492292566824e-06, | |
| "loss": 0.5783, | |
| "step": 9000 | |
| }, | |
| { | |
| "epoch": 0.4593891806454902, | |
| "grad_norm": 4.33654654210295, | |
| "learning_rate": 2.948388328466968e-06, | |
| "loss": 0.535, | |
| "step": 9010 | |
| }, | |
| { | |
| "epoch": 0.45989904655075714, | |
| "grad_norm": 5.283839053332392, | |
| "learning_rate": 2.944326204891777e-06, | |
| "loss": 0.5128, | |
| "step": 9020 | |
| }, | |
| { | |
| "epoch": 0.46040891245602406, | |
| "grad_norm": 8.155730327753941, | |
| "learning_rate": 2.940262869608798e-06, | |
| "loss": 0.5879, | |
| "step": 9030 | |
| }, | |
| { | |
| "epoch": 0.46091877836129097, | |
| "grad_norm": 8.045593849591542, | |
| "learning_rate": 2.9361983336990217e-06, | |
| "loss": 0.5142, | |
| "step": 9040 | |
| }, | |
| { | |
| "epoch": 0.4614286442665579, | |
| "grad_norm": 7.886643075363955, | |
| "learning_rate": 2.932132608246715e-06, | |
| "loss": 0.5502, | |
| "step": 9050 | |
| }, | |
| { | |
| "epoch": 0.4619385101718248, | |
| "grad_norm": 16.63804533138256, | |
| "learning_rate": 2.928065704339388e-06, | |
| "loss": 0.5364, | |
| "step": 9060 | |
| }, | |
| { | |
| "epoch": 0.4624483760770917, | |
| "grad_norm": 12.882651747731957, | |
| "learning_rate": 2.9239976330677627e-06, | |
| "loss": 0.5808, | |
| "step": 9070 | |
| }, | |
| { | |
| "epoch": 0.46295824198235863, | |
| "grad_norm": 8.13150520823608, | |
| "learning_rate": 2.919928405525748e-06, | |
| "loss": 0.6199, | |
| "step": 9080 | |
| }, | |
| { | |
| "epoch": 0.46346810788762555, | |
| "grad_norm": 11.681625440582726, | |
| "learning_rate": 2.9158580328104036e-06, | |
| "loss": 0.5239, | |
| "step": 9090 | |
| }, | |
| { | |
| "epoch": 0.46397797379289246, | |
| "grad_norm": 6.198613990956929, | |
| "learning_rate": 2.9117865260219117e-06, | |
| "loss": 0.6272, | |
| "step": 9100 | |
| }, | |
| { | |
| "epoch": 0.4644878396981594, | |
| "grad_norm": 5.556099110950296, | |
| "learning_rate": 2.907713896263551e-06, | |
| "loss": 0.5443, | |
| "step": 9110 | |
| }, | |
| { | |
| "epoch": 0.4649977056034263, | |
| "grad_norm": 8.501028484471917, | |
| "learning_rate": 2.903640154641657e-06, | |
| "loss": 0.603, | |
| "step": 9120 | |
| }, | |
| { | |
| "epoch": 0.4655075715086932, | |
| "grad_norm": 5.631359258536221, | |
| "learning_rate": 2.899565312265602e-06, | |
| "loss": 0.5331, | |
| "step": 9130 | |
| }, | |
| { | |
| "epoch": 0.4660174374139601, | |
| "grad_norm": 9.654290186742468, | |
| "learning_rate": 2.895489380247758e-06, | |
| "loss": 0.5103, | |
| "step": 9140 | |
| }, | |
| { | |
| "epoch": 0.46652730331922704, | |
| "grad_norm": 9.735034286631029, | |
| "learning_rate": 2.891412369703469e-06, | |
| "loss": 0.5305, | |
| "step": 9150 | |
| }, | |
| { | |
| "epoch": 0.46703716922449395, | |
| "grad_norm": 7.313438134736095, | |
| "learning_rate": 2.887334291751019e-06, | |
| "loss": 0.557, | |
| "step": 9160 | |
| }, | |
| { | |
| "epoch": 0.46754703512976087, | |
| "grad_norm": 11.519400660039398, | |
| "learning_rate": 2.883255157511605e-06, | |
| "loss": 0.555, | |
| "step": 9170 | |
| }, | |
| { | |
| "epoch": 0.4680569010350278, | |
| "grad_norm": 6.659958582649854, | |
| "learning_rate": 2.8791749781093036e-06, | |
| "loss": 0.5345, | |
| "step": 9180 | |
| }, | |
| { | |
| "epoch": 0.4685667669402947, | |
| "grad_norm": 21.81366907771895, | |
| "learning_rate": 2.8750937646710416e-06, | |
| "loss": 0.5116, | |
| "step": 9190 | |
| }, | |
| { | |
| "epoch": 0.4690766328455616, | |
| "grad_norm": 36.00237424353747, | |
| "learning_rate": 2.8710115283265655e-06, | |
| "loss": 0.6041, | |
| "step": 9200 | |
| }, | |
| { | |
| "epoch": 0.46958649875082853, | |
| "grad_norm": 8.5235261068511, | |
| "learning_rate": 2.866928280208412e-06, | |
| "loss": 0.498, | |
| "step": 9210 | |
| }, | |
| { | |
| "epoch": 0.47009636465609544, | |
| "grad_norm": 7.167372979618883, | |
| "learning_rate": 2.8628440314518752e-06, | |
| "loss": 0.5538, | |
| "step": 9220 | |
| }, | |
| { | |
| "epoch": 0.47060623056136236, | |
| "grad_norm": 11.507343763740069, | |
| "learning_rate": 2.8587587931949806e-06, | |
| "loss": 0.5146, | |
| "step": 9230 | |
| }, | |
| { | |
| "epoch": 0.4711160964666293, | |
| "grad_norm": 6.250940117415326, | |
| "learning_rate": 2.854672576578451e-06, | |
| "loss": 0.5204, | |
| "step": 9240 | |
| }, | |
| { | |
| "epoch": 0.4716259623718962, | |
| "grad_norm": 7.530321442704125, | |
| "learning_rate": 2.850585392745676e-06, | |
| "loss": 0.4885, | |
| "step": 9250 | |
| }, | |
| { | |
| "epoch": 0.4721358282771631, | |
| "grad_norm": 9.163440002400211, | |
| "learning_rate": 2.8464972528426847e-06, | |
| "loss": 0.5226, | |
| "step": 9260 | |
| }, | |
| { | |
| "epoch": 0.47264569418243, | |
| "grad_norm": 16.393409728695335, | |
| "learning_rate": 2.842408168018112e-06, | |
| "loss": 0.6392, | |
| "step": 9270 | |
| }, | |
| { | |
| "epoch": 0.47315556008769694, | |
| "grad_norm": 7.851049582591664, | |
| "learning_rate": 2.8383181494231714e-06, | |
| "loss": 0.5739, | |
| "step": 9280 | |
| }, | |
| { | |
| "epoch": 0.47366542599296385, | |
| "grad_norm": 12.269773289481154, | |
| "learning_rate": 2.8342272082116214e-06, | |
| "loss": 0.5585, | |
| "step": 9290 | |
| }, | |
| { | |
| "epoch": 0.47417529189823077, | |
| "grad_norm": 7.346796842344014, | |
| "learning_rate": 2.830135355539737e-06, | |
| "loss": 0.5394, | |
| "step": 9300 | |
| }, | |
| { | |
| "epoch": 0.4746851578034977, | |
| "grad_norm": 7.796805559267158, | |
| "learning_rate": 2.826042602566279e-06, | |
| "loss": 0.5542, | |
| "step": 9310 | |
| }, | |
| { | |
| "epoch": 0.4751950237087646, | |
| "grad_norm": 11.101461591062174, | |
| "learning_rate": 2.821948960452463e-06, | |
| "loss": 0.5394, | |
| "step": 9320 | |
| }, | |
| { | |
| "epoch": 0.4757048896140315, | |
| "grad_norm": 11.397898491808107, | |
| "learning_rate": 2.817854440361929e-06, | |
| "loss": 0.5437, | |
| "step": 9330 | |
| }, | |
| { | |
| "epoch": 0.4762147555192984, | |
| "grad_norm": 8.962711749510742, | |
| "learning_rate": 2.8137590534607123e-06, | |
| "loss": 0.5814, | |
| "step": 9340 | |
| }, | |
| { | |
| "epoch": 0.47672462142456534, | |
| "grad_norm": 7.963488384641039, | |
| "learning_rate": 2.8096628109172125e-06, | |
| "loss": 0.5331, | |
| "step": 9350 | |
| }, | |
| { | |
| "epoch": 0.47723448732983226, | |
| "grad_norm": 8.368271605384814, | |
| "learning_rate": 2.8055657239021605e-06, | |
| "loss": 0.5423, | |
| "step": 9360 | |
| }, | |
| { | |
| "epoch": 0.4777443532350992, | |
| "grad_norm": 20.145593761398835, | |
| "learning_rate": 2.8014678035885913e-06, | |
| "loss": 0.5616, | |
| "step": 9370 | |
| }, | |
| { | |
| "epoch": 0.4782542191403661, | |
| "grad_norm": 10.98955410252151, | |
| "learning_rate": 2.7973690611518124e-06, | |
| "loss": 0.6045, | |
| "step": 9380 | |
| }, | |
| { | |
| "epoch": 0.478764085045633, | |
| "grad_norm": 8.260243475993803, | |
| "learning_rate": 2.7932695077693745e-06, | |
| "loss": 0.5665, | |
| "step": 9390 | |
| }, | |
| { | |
| "epoch": 0.4792739509508999, | |
| "grad_norm": 13.66264667588255, | |
| "learning_rate": 2.7891691546210374e-06, | |
| "loss": 0.4857, | |
| "step": 9400 | |
| }, | |
| { | |
| "epoch": 0.47978381685616683, | |
| "grad_norm": 8.815539885006997, | |
| "learning_rate": 2.7850680128887424e-06, | |
| "loss": 0.5462, | |
| "step": 9410 | |
| }, | |
| { | |
| "epoch": 0.48029368276143375, | |
| "grad_norm": 13.61453326007113, | |
| "learning_rate": 2.780966093756584e-06, | |
| "loss": 0.5523, | |
| "step": 9420 | |
| }, | |
| { | |
| "epoch": 0.48080354866670066, | |
| "grad_norm": 9.12709099410278, | |
| "learning_rate": 2.7768634084107736e-06, | |
| "loss": 0.5618, | |
| "step": 9430 | |
| }, | |
| { | |
| "epoch": 0.4813134145719676, | |
| "grad_norm": 6.502630279665426, | |
| "learning_rate": 2.772759968039614e-06, | |
| "loss": 0.5186, | |
| "step": 9440 | |
| }, | |
| { | |
| "epoch": 0.4818232804772345, | |
| "grad_norm": 13.15622645433065, | |
| "learning_rate": 2.7686557838334644e-06, | |
| "loss": 0.6274, | |
| "step": 9450 | |
| }, | |
| { | |
| "epoch": 0.4823331463825014, | |
| "grad_norm": 9.81593275094567, | |
| "learning_rate": 2.764550866984716e-06, | |
| "loss": 0.5152, | |
| "step": 9460 | |
| }, | |
| { | |
| "epoch": 0.4828430122877683, | |
| "grad_norm": 7.4162131472691, | |
| "learning_rate": 2.760445228687755e-06, | |
| "loss": 0.5692, | |
| "step": 9470 | |
| }, | |
| { | |
| "epoch": 0.48335287819303524, | |
| "grad_norm": 16.883412871544444, | |
| "learning_rate": 2.7563388801389386e-06, | |
| "loss": 0.5638, | |
| "step": 9480 | |
| }, | |
| { | |
| "epoch": 0.48386274409830216, | |
| "grad_norm": 7.817244543088561, | |
| "learning_rate": 2.752231832536556e-06, | |
| "loss": 0.5365, | |
| "step": 9490 | |
| }, | |
| { | |
| "epoch": 0.48437261000356907, | |
| "grad_norm": 5.344543331403643, | |
| "learning_rate": 2.7481240970808074e-06, | |
| "loss": 0.5585, | |
| "step": 9500 | |
| }, | |
| { | |
| "epoch": 0.484882475908836, | |
| "grad_norm": 9.552447608535903, | |
| "learning_rate": 2.744015684973766e-06, | |
| "loss": 0.599, | |
| "step": 9510 | |
| }, | |
| { | |
| "epoch": 0.4853923418141029, | |
| "grad_norm": 7.09922339815139, | |
| "learning_rate": 2.739906607419351e-06, | |
| "loss": 0.6551, | |
| "step": 9520 | |
| }, | |
| { | |
| "epoch": 0.4859022077193698, | |
| "grad_norm": 9.793826991717298, | |
| "learning_rate": 2.7357968756232963e-06, | |
| "loss": 0.5411, | |
| "step": 9530 | |
| }, | |
| { | |
| "epoch": 0.48641207362463673, | |
| "grad_norm": 9.05956350099835, | |
| "learning_rate": 2.7316865007931208e-06, | |
| "loss": 0.5401, | |
| "step": 9540 | |
| }, | |
| { | |
| "epoch": 0.48692193952990365, | |
| "grad_norm": 30.11492757634757, | |
| "learning_rate": 2.727575494138096e-06, | |
| "loss": 0.5806, | |
| "step": 9550 | |
| }, | |
| { | |
| "epoch": 0.48743180543517056, | |
| "grad_norm": 9.075866648406908, | |
| "learning_rate": 2.7234638668692166e-06, | |
| "loss": 0.514, | |
| "step": 9560 | |
| }, | |
| { | |
| "epoch": 0.4879416713404375, | |
| "grad_norm": 6.186508572080813, | |
| "learning_rate": 2.7193516301991703e-06, | |
| "loss": 0.5615, | |
| "step": 9570 | |
| }, | |
| { | |
| "epoch": 0.4884515372457044, | |
| "grad_norm": 10.472335454023918, | |
| "learning_rate": 2.7152387953423047e-06, | |
| "loss": 0.5532, | |
| "step": 9580 | |
| }, | |
| { | |
| "epoch": 0.4889614031509713, | |
| "grad_norm": 4.820108163742004, | |
| "learning_rate": 2.711125373514602e-06, | |
| "loss": 0.4781, | |
| "step": 9590 | |
| }, | |
| { | |
| "epoch": 0.4894712690562382, | |
| "grad_norm": 8.995560394442036, | |
| "learning_rate": 2.7070113759336424e-06, | |
| "loss": 0.5203, | |
| "step": 9600 | |
| }, | |
| { | |
| "epoch": 0.48998113496150514, | |
| "grad_norm": 17.393326675668504, | |
| "learning_rate": 2.7028968138185783e-06, | |
| "loss": 0.543, | |
| "step": 9610 | |
| }, | |
| { | |
| "epoch": 0.49049100086677205, | |
| "grad_norm": 2.8353076560498707, | |
| "learning_rate": 2.6987816983900995e-06, | |
| "loss": 0.5306, | |
| "step": 9620 | |
| }, | |
| { | |
| "epoch": 0.49100086677203897, | |
| "grad_norm": 9.503152576570983, | |
| "learning_rate": 2.6946660408704062e-06, | |
| "loss": 0.5211, | |
| "step": 9630 | |
| }, | |
| { | |
| "epoch": 0.4915107326773059, | |
| "grad_norm": 320.61604317399934, | |
| "learning_rate": 2.6905498524831763e-06, | |
| "loss": 0.6006, | |
| "step": 9640 | |
| }, | |
| { | |
| "epoch": 0.4920205985825728, | |
| "grad_norm": 8.724977975058774, | |
| "learning_rate": 2.6864331444535347e-06, | |
| "loss": 0.5338, | |
| "step": 9650 | |
| }, | |
| { | |
| "epoch": 0.4925304644878397, | |
| "grad_norm": 19.80012541814024, | |
| "learning_rate": 2.682315928008026e-06, | |
| "loss": 0.4973, | |
| "step": 9660 | |
| }, | |
| { | |
| "epoch": 0.49304033039310663, | |
| "grad_norm": 6.900731066386949, | |
| "learning_rate": 2.6781982143745776e-06, | |
| "loss": 0.5044, | |
| "step": 9670 | |
| }, | |
| { | |
| "epoch": 0.49355019629837354, | |
| "grad_norm": 17.42457089480257, | |
| "learning_rate": 2.6740800147824764e-06, | |
| "loss": 0.5013, | |
| "step": 9680 | |
| }, | |
| { | |
| "epoch": 0.49406006220364046, | |
| "grad_norm": 13.80209829216203, | |
| "learning_rate": 2.669961340462332e-06, | |
| "loss": 0.6106, | |
| "step": 9690 | |
| }, | |
| { | |
| "epoch": 0.4945699281089074, | |
| "grad_norm": 9.356816676968172, | |
| "learning_rate": 2.6658422026460493e-06, | |
| "loss": 0.5256, | |
| "step": 9700 | |
| }, | |
| { | |
| "epoch": 0.4950797940141743, | |
| "grad_norm": 9.84379922723854, | |
| "learning_rate": 2.6617226125667977e-06, | |
| "loss": 0.5762, | |
| "step": 9710 | |
| }, | |
| { | |
| "epoch": 0.4955896599194412, | |
| "grad_norm": 15.230940811889656, | |
| "learning_rate": 2.65760258145898e-06, | |
| "loss": 0.5258, | |
| "step": 9720 | |
| }, | |
| { | |
| "epoch": 0.4960995258247081, | |
| "grad_norm": 8.986235423766201, | |
| "learning_rate": 2.653482120558201e-06, | |
| "loss": 0.593, | |
| "step": 9730 | |
| }, | |
| { | |
| "epoch": 0.49660939172997504, | |
| "grad_norm": 35.48183796431665, | |
| "learning_rate": 2.6493612411012377e-06, | |
| "loss": 0.5196, | |
| "step": 9740 | |
| }, | |
| { | |
| "epoch": 0.49711925763524195, | |
| "grad_norm": 7.2752655638575, | |
| "learning_rate": 2.645239954326009e-06, | |
| "loss": 0.5706, | |
| "step": 9750 | |
| }, | |
| { | |
| "epoch": 0.49762912354050887, | |
| "grad_norm": 8.42331057856242, | |
| "learning_rate": 2.641118271471543e-06, | |
| "loss": 0.4943, | |
| "step": 9760 | |
| }, | |
| { | |
| "epoch": 0.4981389894457758, | |
| "grad_norm": 7.965377523949411, | |
| "learning_rate": 2.6369962037779513e-06, | |
| "loss": 0.5339, | |
| "step": 9770 | |
| }, | |
| { | |
| "epoch": 0.4986488553510427, | |
| "grad_norm": 10.01729377702691, | |
| "learning_rate": 2.632873762486392e-06, | |
| "loss": 0.5791, | |
| "step": 9780 | |
| }, | |
| { | |
| "epoch": 0.4991587212563096, | |
| "grad_norm": 4.511761837890751, | |
| "learning_rate": 2.6287509588390424e-06, | |
| "loss": 0.5034, | |
| "step": 9790 | |
| }, | |
| { | |
| "epoch": 0.4996685871615765, | |
| "grad_norm": 9.294212001685729, | |
| "learning_rate": 2.6246278040790696e-06, | |
| "loss": 0.5604, | |
| "step": 9800 | |
| }, | |
| { | |
| "epoch": 0.5001784530668434, | |
| "grad_norm": 18.481234335838135, | |
| "learning_rate": 2.620504309450596e-06, | |
| "loss": 0.5656, | |
| "step": 9810 | |
| }, | |
| { | |
| "epoch": 0.5006883189721103, | |
| "grad_norm": 6.461543556394008, | |
| "learning_rate": 2.6163804861986735e-06, | |
| "loss": 0.5326, | |
| "step": 9820 | |
| }, | |
| { | |
| "epoch": 0.5011981848773772, | |
| "grad_norm": 6.775693055786728, | |
| "learning_rate": 2.6122563455692467e-06, | |
| "loss": 0.5439, | |
| "step": 9830 | |
| }, | |
| { | |
| "epoch": 0.5017080507826441, | |
| "grad_norm": 10.348144692895449, | |
| "learning_rate": 2.608131898809129e-06, | |
| "loss": 0.5851, | |
| "step": 9840 | |
| }, | |
| { | |
| "epoch": 0.502217916687911, | |
| "grad_norm": 19.51021168023746, | |
| "learning_rate": 2.6040071571659676e-06, | |
| "loss": 0.519, | |
| "step": 9850 | |
| }, | |
| { | |
| "epoch": 0.502727782593178, | |
| "grad_norm": 8.087870299255922, | |
| "learning_rate": 2.5998821318882127e-06, | |
| "loss": 0.5689, | |
| "step": 9860 | |
| }, | |
| { | |
| "epoch": 0.5032376484984449, | |
| "grad_norm": 17.52411247215236, | |
| "learning_rate": 2.595756834225089e-06, | |
| "loss": 0.5387, | |
| "step": 9870 | |
| }, | |
| { | |
| "epoch": 0.5037475144037118, | |
| "grad_norm": 5.141607773429428, | |
| "learning_rate": 2.5916312754265636e-06, | |
| "loss": 0.5233, | |
| "step": 9880 | |
| }, | |
| { | |
| "epoch": 0.5042573803089787, | |
| "grad_norm": 5.366874066818395, | |
| "learning_rate": 2.587505466743317e-06, | |
| "loss": 0.5694, | |
| "step": 9890 | |
| }, | |
| { | |
| "epoch": 0.5047672462142456, | |
| "grad_norm": 9.064599177565993, | |
| "learning_rate": 2.583379419426709e-06, | |
| "loss": 0.5833, | |
| "step": 9900 | |
| }, | |
| { | |
| "epoch": 0.5052771121195125, | |
| "grad_norm": 7.81476080754217, | |
| "learning_rate": 2.5792531447287515e-06, | |
| "loss": 0.5187, | |
| "step": 9910 | |
| }, | |
| { | |
| "epoch": 0.5057869780247795, | |
| "grad_norm": 6.046482753128124, | |
| "learning_rate": 2.575126653902078e-06, | |
| "loss": 0.4487, | |
| "step": 9920 | |
| }, | |
| { | |
| "epoch": 0.5062968439300464, | |
| "grad_norm": 5.090405722171164, | |
| "learning_rate": 2.570999958199908e-06, | |
| "loss": 0.5504, | |
| "step": 9930 | |
| }, | |
| { | |
| "epoch": 0.5068067098353133, | |
| "grad_norm": 10.708861623918402, | |
| "learning_rate": 2.566873068876022e-06, | |
| "loss": 0.5766, | |
| "step": 9940 | |
| }, | |
| { | |
| "epoch": 0.5073165757405802, | |
| "grad_norm": 11.907291428749533, | |
| "learning_rate": 2.5627459971847264e-06, | |
| "loss": 0.5025, | |
| "step": 9950 | |
| }, | |
| { | |
| "epoch": 0.5078264416458471, | |
| "grad_norm": 14.482994499858956, | |
| "learning_rate": 2.558618754380829e-06, | |
| "loss": 0.5443, | |
| "step": 9960 | |
| }, | |
| { | |
| "epoch": 0.508336307551114, | |
| "grad_norm": 15.282044708608089, | |
| "learning_rate": 2.5544913517196006e-06, | |
| "loss": 0.5886, | |
| "step": 9970 | |
| }, | |
| { | |
| "epoch": 0.508846173456381, | |
| "grad_norm": 8.06318617079194, | |
| "learning_rate": 2.5503638004567487e-06, | |
| "loss": 0.5749, | |
| "step": 9980 | |
| }, | |
| { | |
| "epoch": 0.5093560393616479, | |
| "grad_norm": 4.210852657585504, | |
| "learning_rate": 2.546236111848387e-06, | |
| "loss": 0.5345, | |
| "step": 9990 | |
| }, | |
| { | |
| "epoch": 0.5098659052669148, | |
| "grad_norm": 10.272690227765827, | |
| "learning_rate": 2.5421082971510024e-06, | |
| "loss": 0.5987, | |
| "step": 10000 | |
| }, | |
| { | |
| "epoch": 0.5103757711721817, | |
| "grad_norm": 18.85294522582985, | |
| "learning_rate": 2.537980367621427e-06, | |
| "loss": 0.5536, | |
| "step": 10010 | |
| }, | |
| { | |
| "epoch": 0.5108856370774486, | |
| "grad_norm": 6.947032878897913, | |
| "learning_rate": 2.533852334516805e-06, | |
| "loss": 0.5668, | |
| "step": 10020 | |
| }, | |
| { | |
| "epoch": 0.5113955029827155, | |
| "grad_norm": 6.178991706671281, | |
| "learning_rate": 2.5297242090945638e-06, | |
| "loss": 0.547, | |
| "step": 10030 | |
| }, | |
| { | |
| "epoch": 0.5119053688879824, | |
| "grad_norm": 9.380242412444959, | |
| "learning_rate": 2.5255960026123825e-06, | |
| "loss": 0.5135, | |
| "step": 10040 | |
| }, | |
| { | |
| "epoch": 0.5124152347932494, | |
| "grad_norm": 17.20190562415558, | |
| "learning_rate": 2.5214677263281613e-06, | |
| "loss": 0.5542, | |
| "step": 10050 | |
| }, | |
| { | |
| "epoch": 0.5129251006985163, | |
| "grad_norm": 4.702811357253125, | |
| "learning_rate": 2.5173393914999894e-06, | |
| "loss": 0.5898, | |
| "step": 10060 | |
| }, | |
| { | |
| "epoch": 0.5134349666037832, | |
| "grad_norm": 5.979778153512825, | |
| "learning_rate": 2.5132110093861174e-06, | |
| "loss": 0.49, | |
| "step": 10070 | |
| }, | |
| { | |
| "epoch": 0.5139448325090501, | |
| "grad_norm": 8.444482069508595, | |
| "learning_rate": 2.5090825912449233e-06, | |
| "loss": 0.5111, | |
| "step": 10080 | |
| }, | |
| { | |
| "epoch": 0.514454698414317, | |
| "grad_norm": 4.728506915605374, | |
| "learning_rate": 2.504954148334886e-06, | |
| "loss": 0.5028, | |
| "step": 10090 | |
| }, | |
| { | |
| "epoch": 0.5149645643195839, | |
| "grad_norm": 26.205972531140272, | |
| "learning_rate": 2.500825691914549e-06, | |
| "loss": 0.5315, | |
| "step": 10100 | |
| }, | |
| { | |
| "epoch": 0.5154744302248508, | |
| "grad_norm": 17.05895922461699, | |
| "learning_rate": 2.4966972332424934e-06, | |
| "loss": 0.5929, | |
| "step": 10110 | |
| }, | |
| { | |
| "epoch": 0.5159842961301178, | |
| "grad_norm": 11.798337674324719, | |
| "learning_rate": 2.492568783577308e-06, | |
| "loss": 0.5302, | |
| "step": 10120 | |
| }, | |
| { | |
| "epoch": 0.5164941620353847, | |
| "grad_norm": 5.8570796040887245, | |
| "learning_rate": 2.4884403541775553e-06, | |
| "loss": 0.5408, | |
| "step": 10130 | |
| }, | |
| { | |
| "epoch": 0.5170040279406516, | |
| "grad_norm": 37.79091695568464, | |
| "learning_rate": 2.4843119563017426e-06, | |
| "loss": 0.5014, | |
| "step": 10140 | |
| }, | |
| { | |
| "epoch": 0.5175138938459185, | |
| "grad_norm": 30.287463725605825, | |
| "learning_rate": 2.480183601208293e-06, | |
| "loss": 0.5793, | |
| "step": 10150 | |
| }, | |
| { | |
| "epoch": 0.5180237597511854, | |
| "grad_norm": 6.331161625657528, | |
| "learning_rate": 2.4760553001555105e-06, | |
| "loss": 0.5064, | |
| "step": 10160 | |
| }, | |
| { | |
| "epoch": 0.5185336256564523, | |
| "grad_norm": 3.476045278909216, | |
| "learning_rate": 2.4719270644015533e-06, | |
| "loss": 0.5359, | |
| "step": 10170 | |
| }, | |
| { | |
| "epoch": 0.5190434915617193, | |
| "grad_norm": 22.474934468593176, | |
| "learning_rate": 2.4677989052044022e-06, | |
| "loss": 0.5924, | |
| "step": 10180 | |
| }, | |
| { | |
| "epoch": 0.5195533574669862, | |
| "grad_norm": 21.07848740636244, | |
| "learning_rate": 2.4636708338218267e-06, | |
| "loss": 0.5211, | |
| "step": 10190 | |
| }, | |
| { | |
| "epoch": 0.5200632233722531, | |
| "grad_norm": 32.20759395836686, | |
| "learning_rate": 2.4595428615113596e-06, | |
| "loss": 0.512, | |
| "step": 10200 | |
| }, | |
| { | |
| "epoch": 0.52057308927752, | |
| "grad_norm": 10.788375544192391, | |
| "learning_rate": 2.4554149995302605e-06, | |
| "loss": 0.5667, | |
| "step": 10210 | |
| }, | |
| { | |
| "epoch": 0.5210829551827869, | |
| "grad_norm": 5.973113159345188, | |
| "learning_rate": 2.451287259135491e-06, | |
| "loss": 0.5772, | |
| "step": 10220 | |
| }, | |
| { | |
| "epoch": 0.5215928210880538, | |
| "grad_norm": 5.622874034318361, | |
| "learning_rate": 2.4471596515836797e-06, | |
| "loss": 0.4957, | |
| "step": 10230 | |
| }, | |
| { | |
| "epoch": 0.5221026869933207, | |
| "grad_norm": 32.96639762729161, | |
| "learning_rate": 2.4430321881310928e-06, | |
| "loss": 0.5963, | |
| "step": 10240 | |
| }, | |
| { | |
| "epoch": 0.5226125528985877, | |
| "grad_norm": 10.127055741762746, | |
| "learning_rate": 2.4389048800336056e-06, | |
| "loss": 0.4812, | |
| "step": 10250 | |
| }, | |
| { | |
| "epoch": 0.5231224188038546, | |
| "grad_norm": 6.503535423951574, | |
| "learning_rate": 2.434777738546666e-06, | |
| "loss": 0.5536, | |
| "step": 10260 | |
| }, | |
| { | |
| "epoch": 0.5236322847091215, | |
| "grad_norm": 8.731984562436182, | |
| "learning_rate": 2.4306507749252715e-06, | |
| "loss": 0.5391, | |
| "step": 10270 | |
| }, | |
| { | |
| "epoch": 0.5241421506143884, | |
| "grad_norm": 14.54106883066779, | |
| "learning_rate": 2.426524000423931e-06, | |
| "loss": 0.5646, | |
| "step": 10280 | |
| }, | |
| { | |
| "epoch": 0.5246520165196553, | |
| "grad_norm": 8.987888618292743, | |
| "learning_rate": 2.4223974262966395e-06, | |
| "loss": 0.5405, | |
| "step": 10290 | |
| }, | |
| { | |
| "epoch": 0.5251618824249222, | |
| "grad_norm": 7.432453186270774, | |
| "learning_rate": 2.4182710637968466e-06, | |
| "loss": 0.5267, | |
| "step": 10300 | |
| }, | |
| { | |
| "epoch": 0.5256717483301891, | |
| "grad_norm": 20.051730795291874, | |
| "learning_rate": 2.414144924177422e-06, | |
| "loss": 0.4944, | |
| "step": 10310 | |
| }, | |
| { | |
| "epoch": 0.5261816142354561, | |
| "grad_norm": 9.238504273020023, | |
| "learning_rate": 2.4100190186906304e-06, | |
| "loss": 0.5266, | |
| "step": 10320 | |
| }, | |
| { | |
| "epoch": 0.526691480140723, | |
| "grad_norm": 6.3325546925437655, | |
| "learning_rate": 2.4058933585880958e-06, | |
| "loss": 0.4788, | |
| "step": 10330 | |
| }, | |
| { | |
| "epoch": 0.5272013460459899, | |
| "grad_norm": 28.46273823581247, | |
| "learning_rate": 2.401767955120776e-06, | |
| "loss": 0.5674, | |
| "step": 10340 | |
| }, | |
| { | |
| "epoch": 0.5277112119512568, | |
| "grad_norm": 11.101555994175811, | |
| "learning_rate": 2.3976428195389236e-06, | |
| "loss": 0.524, | |
| "step": 10350 | |
| }, | |
| { | |
| "epoch": 0.5282210778565237, | |
| "grad_norm": 4.922045076561477, | |
| "learning_rate": 2.393517963092066e-06, | |
| "loss": 0.5428, | |
| "step": 10360 | |
| }, | |
| { | |
| "epoch": 0.5287309437617906, | |
| "grad_norm": 6.089180197710267, | |
| "learning_rate": 2.3893933970289677e-06, | |
| "loss": 0.4983, | |
| "step": 10370 | |
| }, | |
| { | |
| "epoch": 0.5292408096670576, | |
| "grad_norm": 8.7266534647084, | |
| "learning_rate": 2.3852691325975996e-06, | |
| "loss": 0.5968, | |
| "step": 10380 | |
| }, | |
| { | |
| "epoch": 0.5297506755723245, | |
| "grad_norm": 16.173655045642207, | |
| "learning_rate": 2.3811451810451132e-06, | |
| "loss": 0.5491, | |
| "step": 10390 | |
| }, | |
| { | |
| "epoch": 0.5302605414775914, | |
| "grad_norm": 14.188690969216273, | |
| "learning_rate": 2.377021553617803e-06, | |
| "loss": 0.5229, | |
| "step": 10400 | |
| }, | |
| { | |
| "epoch": 0.5307704073828583, | |
| "grad_norm": 15.947197750694937, | |
| "learning_rate": 2.372898261561082e-06, | |
| "loss": 0.5756, | |
| "step": 10410 | |
| }, | |
| { | |
| "epoch": 0.5312802732881252, | |
| "grad_norm": 22.949283972877854, | |
| "learning_rate": 2.3687753161194498e-06, | |
| "loss": 0.5335, | |
| "step": 10420 | |
| }, | |
| { | |
| "epoch": 0.5317901391933921, | |
| "grad_norm": 52.71294742628508, | |
| "learning_rate": 2.3646527285364565e-06, | |
| "loss": 0.4808, | |
| "step": 10430 | |
| }, | |
| { | |
| "epoch": 0.532300005098659, | |
| "grad_norm": 4.074010052195157, | |
| "learning_rate": 2.3605305100546807e-06, | |
| "loss": 0.5642, | |
| "step": 10440 | |
| }, | |
| { | |
| "epoch": 0.532809871003926, | |
| "grad_norm": 9.838693123496997, | |
| "learning_rate": 2.356408671915692e-06, | |
| "loss": 0.5526, | |
| "step": 10450 | |
| }, | |
| { | |
| "epoch": 0.5333197369091929, | |
| "grad_norm": 9.174659419815153, | |
| "learning_rate": 2.352287225360024e-06, | |
| "loss": 0.5293, | |
| "step": 10460 | |
| }, | |
| { | |
| "epoch": 0.5338296028144598, | |
| "grad_norm": 4.384586430304259, | |
| "learning_rate": 2.3481661816271413e-06, | |
| "loss": 0.5477, | |
| "step": 10470 | |
| }, | |
| { | |
| "epoch": 0.5343394687197267, | |
| "grad_norm": 11.497556219158662, | |
| "learning_rate": 2.3440455519554096e-06, | |
| "loss": 0.5239, | |
| "step": 10480 | |
| }, | |
| { | |
| "epoch": 0.5348493346249936, | |
| "grad_norm": 4.9267726313927795, | |
| "learning_rate": 2.339925347582069e-06, | |
| "loss": 0.4597, | |
| "step": 10490 | |
| }, | |
| { | |
| "epoch": 0.5353592005302605, | |
| "grad_norm": 6.460685665908354, | |
| "learning_rate": 2.3358055797431945e-06, | |
| "loss": 0.5595, | |
| "step": 10500 | |
| }, | |
| { | |
| "epoch": 0.5358690664355275, | |
| "grad_norm": 8.16287039128602, | |
| "learning_rate": 2.3316862596736752e-06, | |
| "loss": 0.5578, | |
| "step": 10510 | |
| }, | |
| { | |
| "epoch": 0.5363789323407944, | |
| "grad_norm": 11.339005579992447, | |
| "learning_rate": 2.327567398607175e-06, | |
| "loss": 0.4851, | |
| "step": 10520 | |
| }, | |
| { | |
| "epoch": 0.5368887982460613, | |
| "grad_norm": 10.028313148707044, | |
| "learning_rate": 2.3234490077761097e-06, | |
| "loss": 0.5108, | |
| "step": 10530 | |
| }, | |
| { | |
| "epoch": 0.5373986641513282, | |
| "grad_norm": 4.465502751196889, | |
| "learning_rate": 2.3193310984116113e-06, | |
| "loss": 0.5397, | |
| "step": 10540 | |
| }, | |
| { | |
| "epoch": 0.5379085300565951, | |
| "grad_norm": 5.70092739153591, | |
| "learning_rate": 2.3152136817434983e-06, | |
| "loss": 0.5576, | |
| "step": 10550 | |
| }, | |
| { | |
| "epoch": 0.538418395961862, | |
| "grad_norm": 9.341114511026655, | |
| "learning_rate": 2.311096769000247e-06, | |
| "loss": 0.5845, | |
| "step": 10560 | |
| }, | |
| { | |
| "epoch": 0.5389282618671289, | |
| "grad_norm": 12.418743380860024, | |
| "learning_rate": 2.306980371408957e-06, | |
| "loss": 0.5421, | |
| "step": 10570 | |
| }, | |
| { | |
| "epoch": 0.5394381277723959, | |
| "grad_norm": 30.682568719995146, | |
| "learning_rate": 2.302864500195326e-06, | |
| "loss": 0.4822, | |
| "step": 10580 | |
| }, | |
| { | |
| "epoch": 0.5399479936776628, | |
| "grad_norm": 28.981287434258377, | |
| "learning_rate": 2.2987491665836137e-06, | |
| "loss": 0.5105, | |
| "step": 10590 | |
| }, | |
| { | |
| "epoch": 0.5404578595829297, | |
| "grad_norm": 7.180895523241582, | |
| "learning_rate": 2.2946343817966147e-06, | |
| "loss": 0.5413, | |
| "step": 10600 | |
| }, | |
| { | |
| "epoch": 0.5409677254881966, | |
| "grad_norm": 8.508579601103788, | |
| "learning_rate": 2.2905201570556294e-06, | |
| "loss": 0.5524, | |
| "step": 10610 | |
| }, | |
| { | |
| "epoch": 0.5414775913934635, | |
| "grad_norm": 5.738514236452168, | |
| "learning_rate": 2.2864065035804253e-06, | |
| "loss": 0.5221, | |
| "step": 10620 | |
| }, | |
| { | |
| "epoch": 0.5419874572987304, | |
| "grad_norm": 16.357357206923098, | |
| "learning_rate": 2.2822934325892178e-06, | |
| "loss": 0.5901, | |
| "step": 10630 | |
| }, | |
| { | |
| "epoch": 0.5424973232039974, | |
| "grad_norm": 8.393585681141637, | |
| "learning_rate": 2.2781809552986296e-06, | |
| "loss": 0.539, | |
| "step": 10640 | |
| }, | |
| { | |
| "epoch": 0.5430071891092643, | |
| "grad_norm": 9.054243646581382, | |
| "learning_rate": 2.2740690829236672e-06, | |
| "loss": 0.5317, | |
| "step": 10650 | |
| }, | |
| { | |
| "epoch": 0.5435170550145312, | |
| "grad_norm": 8.715999157087243, | |
| "learning_rate": 2.269957826677685e-06, | |
| "loss": 0.5474, | |
| "step": 10660 | |
| }, | |
| { | |
| "epoch": 0.5440269209197981, | |
| "grad_norm": 6.328287254963458, | |
| "learning_rate": 2.2658471977723593e-06, | |
| "loss": 0.5335, | |
| "step": 10670 | |
| }, | |
| { | |
| "epoch": 0.544536786825065, | |
| "grad_norm": 17.818805969409258, | |
| "learning_rate": 2.2617372074176565e-06, | |
| "loss": 0.5085, | |
| "step": 10680 | |
| }, | |
| { | |
| "epoch": 0.5450466527303319, | |
| "grad_norm": 8.44665589054514, | |
| "learning_rate": 2.2576278668217967e-06, | |
| "loss": 0.5066, | |
| "step": 10690 | |
| }, | |
| { | |
| "epoch": 0.5455565186355988, | |
| "grad_norm": 8.836352465670993, | |
| "learning_rate": 2.2535191871912337e-06, | |
| "loss": 0.5242, | |
| "step": 10700 | |
| }, | |
| { | |
| "epoch": 0.5460663845408658, | |
| "grad_norm": 6.03086653823258, | |
| "learning_rate": 2.2494111797306146e-06, | |
| "loss": 0.529, | |
| "step": 10710 | |
| }, | |
| { | |
| "epoch": 0.5465762504461327, | |
| "grad_norm": 4.725682085559597, | |
| "learning_rate": 2.2453038556427557e-06, | |
| "loss": 0.5783, | |
| "step": 10720 | |
| }, | |
| { | |
| "epoch": 0.5470861163513996, | |
| "grad_norm": 6.910137878700736, | |
| "learning_rate": 2.2411972261286107e-06, | |
| "loss": 0.505, | |
| "step": 10730 | |
| }, | |
| { | |
| "epoch": 0.5475959822566665, | |
| "grad_norm": 13.116572965114555, | |
| "learning_rate": 2.2370913023872357e-06, | |
| "loss": 0.6158, | |
| "step": 10740 | |
| }, | |
| { | |
| "epoch": 0.5481058481619334, | |
| "grad_norm": 5.016888656730006, | |
| "learning_rate": 2.2329860956157655e-06, | |
| "loss": 0.5148, | |
| "step": 10750 | |
| }, | |
| { | |
| "epoch": 0.5486157140672003, | |
| "grad_norm": 7.704134433953211, | |
| "learning_rate": 2.2288816170093767e-06, | |
| "loss": 0.5255, | |
| "step": 10760 | |
| }, | |
| { | |
| "epoch": 0.5491255799724672, | |
| "grad_norm": 6.62013774387146, | |
| "learning_rate": 2.224777877761264e-06, | |
| "loss": 0.4716, | |
| "step": 10770 | |
| }, | |
| { | |
| "epoch": 0.5496354458777342, | |
| "grad_norm": 5.166956877073506, | |
| "learning_rate": 2.2206748890626004e-06, | |
| "loss": 0.5539, | |
| "step": 10780 | |
| }, | |
| { | |
| "epoch": 0.5501453117830011, | |
| "grad_norm": 12.107412717723916, | |
| "learning_rate": 2.216572662102518e-06, | |
| "loss": 0.5941, | |
| "step": 10790 | |
| }, | |
| { | |
| "epoch": 0.550655177688268, | |
| "grad_norm": 12.185456102816504, | |
| "learning_rate": 2.212471208068068e-06, | |
| "loss": 0.4973, | |
| "step": 10800 | |
| }, | |
| { | |
| "epoch": 0.5511650435935349, | |
| "grad_norm": 12.953954914140017, | |
| "learning_rate": 2.2083705381441933e-06, | |
| "loss": 0.5258, | |
| "step": 10810 | |
| }, | |
| { | |
| "epoch": 0.5516749094988018, | |
| "grad_norm": 45.0233604442855, | |
| "learning_rate": 2.204270663513702e-06, | |
| "loss": 0.5006, | |
| "step": 10820 | |
| }, | |
| { | |
| "epoch": 0.5521847754040687, | |
| "grad_norm": 6.859535095869529, | |
| "learning_rate": 2.200171595357229e-06, | |
| "loss": 0.5132, | |
| "step": 10830 | |
| }, | |
| { | |
| "epoch": 0.5526946413093357, | |
| "grad_norm": 8.507813436801658, | |
| "learning_rate": 2.196073344853213e-06, | |
| "loss": 0.4566, | |
| "step": 10840 | |
| }, | |
| { | |
| "epoch": 0.5532045072146026, | |
| "grad_norm": 13.085163419859967, | |
| "learning_rate": 2.1919759231778616e-06, | |
| "loss": 0.577, | |
| "step": 10850 | |
| }, | |
| { | |
| "epoch": 0.5537143731198695, | |
| "grad_norm": 7.089280829159933, | |
| "learning_rate": 2.187879341505123e-06, | |
| "loss": 0.5239, | |
| "step": 10860 | |
| }, | |
| { | |
| "epoch": 0.5542242390251364, | |
| "grad_norm": 8.945899360906559, | |
| "learning_rate": 2.1837836110066544e-06, | |
| "loss": 0.5751, | |
| "step": 10870 | |
| }, | |
| { | |
| "epoch": 0.5547341049304033, | |
| "grad_norm": 13.650888729895103, | |
| "learning_rate": 2.17968874285179e-06, | |
| "loss": 0.6315, | |
| "step": 10880 | |
| }, | |
| { | |
| "epoch": 0.5552439708356702, | |
| "grad_norm": 16.259039053047864, | |
| "learning_rate": 2.175594748207516e-06, | |
| "loss": 0.5123, | |
| "step": 10890 | |
| }, | |
| { | |
| "epoch": 0.5557538367409371, | |
| "grad_norm": 37.239300000407646, | |
| "learning_rate": 2.1715016382384314e-06, | |
| "loss": 0.5585, | |
| "step": 10900 | |
| }, | |
| { | |
| "epoch": 0.5562637026462041, | |
| "grad_norm": 8.293582577335172, | |
| "learning_rate": 2.1674094241067275e-06, | |
| "loss": 0.4595, | |
| "step": 10910 | |
| }, | |
| { | |
| "epoch": 0.556773568551471, | |
| "grad_norm": 7.899576476666288, | |
| "learning_rate": 2.1633181169721518e-06, | |
| "loss": 0.5378, | |
| "step": 10920 | |
| }, | |
| { | |
| "epoch": 0.5572834344567379, | |
| "grad_norm": 4.496485111294446, | |
| "learning_rate": 2.159227727991974e-06, | |
| "loss": 0.531, | |
| "step": 10930 | |
| }, | |
| { | |
| "epoch": 0.5577933003620048, | |
| "grad_norm": 43.58374674974216, | |
| "learning_rate": 2.155138268320966e-06, | |
| "loss": 0.5692, | |
| "step": 10940 | |
| }, | |
| { | |
| "epoch": 0.5583031662672717, | |
| "grad_norm": 17.668955644082533, | |
| "learning_rate": 2.151049749111361e-06, | |
| "loss": 0.5973, | |
| "step": 10950 | |
| }, | |
| { | |
| "epoch": 0.5588130321725386, | |
| "grad_norm": 11.098601977216765, | |
| "learning_rate": 2.146962181512829e-06, | |
| "loss": 0.5913, | |
| "step": 10960 | |
| }, | |
| { | |
| "epoch": 0.5593228980778056, | |
| "grad_norm": 14.526932562809277, | |
| "learning_rate": 2.142875576672446e-06, | |
| "loss": 0.5381, | |
| "step": 10970 | |
| }, | |
| { | |
| "epoch": 0.5598327639830725, | |
| "grad_norm": 6.722695581409926, | |
| "learning_rate": 2.13878994573466e-06, | |
| "loss": 0.5268, | |
| "step": 10980 | |
| }, | |
| { | |
| "epoch": 0.5603426298883394, | |
| "grad_norm": 7.053335246073939, | |
| "learning_rate": 2.1347052998412667e-06, | |
| "loss": 0.5253, | |
| "step": 10990 | |
| }, | |
| { | |
| "epoch": 0.5608524957936063, | |
| "grad_norm": 7.331713955991623, | |
| "learning_rate": 2.1306216501313705e-06, | |
| "loss": 0.5202, | |
| "step": 11000 | |
| }, | |
| { | |
| "epoch": 0.5613623616988732, | |
| "grad_norm": 17.5984337607932, | |
| "learning_rate": 2.1265390077413643e-06, | |
| "loss": 0.458, | |
| "step": 11010 | |
| }, | |
| { | |
| "epoch": 0.5618722276041401, | |
| "grad_norm": 32.27052143652757, | |
| "learning_rate": 2.1224573838048894e-06, | |
| "loss": 0.5248, | |
| "step": 11020 | |
| }, | |
| { | |
| "epoch": 0.562382093509407, | |
| "grad_norm": 7.9197575597868966, | |
| "learning_rate": 2.1183767894528135e-06, | |
| "loss": 0.4883, | |
| "step": 11030 | |
| }, | |
| { | |
| "epoch": 0.562891959414674, | |
| "grad_norm": 5.525935578082481, | |
| "learning_rate": 2.114297235813196e-06, | |
| "loss": 0.5198, | |
| "step": 11040 | |
| }, | |
| { | |
| "epoch": 0.5634018253199409, | |
| "grad_norm": 8.462639242102355, | |
| "learning_rate": 2.110218734011255e-06, | |
| "loss": 0.4974, | |
| "step": 11050 | |
| }, | |
| { | |
| "epoch": 0.5639116912252078, | |
| "grad_norm": 7.481252936658193, | |
| "learning_rate": 2.106141295169344e-06, | |
| "loss": 0.5222, | |
| "step": 11060 | |
| }, | |
| { | |
| "epoch": 0.5644215571304747, | |
| "grad_norm": 5.849561031104596, | |
| "learning_rate": 2.1020649304069144e-06, | |
| "loss": 0.499, | |
| "step": 11070 | |
| }, | |
| { | |
| "epoch": 0.5649314230357416, | |
| "grad_norm": 5.449131688881039, | |
| "learning_rate": 2.0979896508404917e-06, | |
| "loss": 0.5126, | |
| "step": 11080 | |
| }, | |
| { | |
| "epoch": 0.5654412889410085, | |
| "grad_norm": 11.72768434163546, | |
| "learning_rate": 2.0939154675836407e-06, | |
| "loss": 0.4826, | |
| "step": 11090 | |
| }, | |
| { | |
| "epoch": 0.5659511548462755, | |
| "grad_norm": 10.72487124116107, | |
| "learning_rate": 2.0898423917469344e-06, | |
| "loss": 0.5187, | |
| "step": 11100 | |
| }, | |
| { | |
| "epoch": 0.5664610207515424, | |
| "grad_norm": 17.57913672580963, | |
| "learning_rate": 2.085770434437931e-06, | |
| "loss": 0.4703, | |
| "step": 11110 | |
| }, | |
| { | |
| "epoch": 0.5669708866568093, | |
| "grad_norm": 10.94435575365214, | |
| "learning_rate": 2.0816996067611315e-06, | |
| "loss": 0.514, | |
| "step": 11120 | |
| }, | |
| { | |
| "epoch": 0.5674807525620762, | |
| "grad_norm": 77.09310463771246, | |
| "learning_rate": 2.0776299198179624e-06, | |
| "loss": 0.6672, | |
| "step": 11130 | |
| }, | |
| { | |
| "epoch": 0.5679906184673431, | |
| "grad_norm": 6.817395796607053, | |
| "learning_rate": 2.0735613847067355e-06, | |
| "loss": 0.5581, | |
| "step": 11140 | |
| }, | |
| { | |
| "epoch": 0.56850048437261, | |
| "grad_norm": 12.77230188101944, | |
| "learning_rate": 2.0694940125226224e-06, | |
| "loss": 0.5739, | |
| "step": 11150 | |
| }, | |
| { | |
| "epoch": 0.5690103502778769, | |
| "grad_norm": 3.4770700526025147, | |
| "learning_rate": 2.0654278143576263e-06, | |
| "loss": 0.562, | |
| "step": 11160 | |
| }, | |
| { | |
| "epoch": 0.5695202161831439, | |
| "grad_norm": 7.673055088082523, | |
| "learning_rate": 2.0613628013005437e-06, | |
| "loss": 0.569, | |
| "step": 11170 | |
| }, | |
| { | |
| "epoch": 0.5700300820884108, | |
| "grad_norm": 27.69235924478429, | |
| "learning_rate": 2.0572989844369427e-06, | |
| "loss": 0.5193, | |
| "step": 11180 | |
| }, | |
| { | |
| "epoch": 0.5705399479936777, | |
| "grad_norm": 8.25982359313178, | |
| "learning_rate": 2.053236374849128e-06, | |
| "loss": 0.52, | |
| "step": 11190 | |
| }, | |
| { | |
| "epoch": 0.5710498138989446, | |
| "grad_norm": 12.446644595207585, | |
| "learning_rate": 2.049174983616113e-06, | |
| "loss": 0.5377, | |
| "step": 11200 | |
| }, | |
| { | |
| "epoch": 0.5715596798042115, | |
| "grad_norm": 7.177576833130309, | |
| "learning_rate": 2.045114821813588e-06, | |
| "loss": 0.4965, | |
| "step": 11210 | |
| }, | |
| { | |
| "epoch": 0.5720695457094784, | |
| "grad_norm": 9.94974275794222, | |
| "learning_rate": 2.0410559005138893e-06, | |
| "loss": 0.5104, | |
| "step": 11220 | |
| }, | |
| { | |
| "epoch": 0.5725794116147453, | |
| "grad_norm": 4.755356595206868, | |
| "learning_rate": 2.0369982307859728e-06, | |
| "loss": 0.5485, | |
| "step": 11230 | |
| }, | |
| { | |
| "epoch": 0.5730892775200123, | |
| "grad_norm": 5.131064490503118, | |
| "learning_rate": 2.032941823695378e-06, | |
| "loss": 0.49, | |
| "step": 11240 | |
| }, | |
| { | |
| "epoch": 0.5735991434252792, | |
| "grad_norm": 30.454278464618394, | |
| "learning_rate": 2.0288866903042055e-06, | |
| "loss": 0.5414, | |
| "step": 11250 | |
| }, | |
| { | |
| "epoch": 0.5741090093305461, | |
| "grad_norm": 9.761350727583425, | |
| "learning_rate": 2.024832841671077e-06, | |
| "loss": 0.555, | |
| "step": 11260 | |
| }, | |
| { | |
| "epoch": 0.574618875235813, | |
| "grad_norm": 9.070832317219523, | |
| "learning_rate": 2.0207802888511155e-06, | |
| "loss": 0.5431, | |
| "step": 11270 | |
| }, | |
| { | |
| "epoch": 0.5751287411410799, | |
| "grad_norm": 12.80477246245281, | |
| "learning_rate": 2.0167290428959082e-06, | |
| "loss": 0.5229, | |
| "step": 11280 | |
| }, | |
| { | |
| "epoch": 0.5756386070463468, | |
| "grad_norm": 20.566934327233376, | |
| "learning_rate": 2.0126791148534777e-06, | |
| "loss": 0.5416, | |
| "step": 11290 | |
| }, | |
| { | |
| "epoch": 0.5761484729516138, | |
| "grad_norm": 9.72319235009137, | |
| "learning_rate": 2.0086305157682546e-06, | |
| "loss": 0.4794, | |
| "step": 11300 | |
| }, | |
| { | |
| "epoch": 0.5766583388568807, | |
| "grad_norm": 17.305246483705588, | |
| "learning_rate": 2.004583256681042e-06, | |
| "loss": 0.5459, | |
| "step": 11310 | |
| }, | |
| { | |
| "epoch": 0.5771682047621476, | |
| "grad_norm": 6.1869155697423395, | |
| "learning_rate": 2.0005373486289932e-06, | |
| "loss": 0.5315, | |
| "step": 11320 | |
| }, | |
| { | |
| "epoch": 0.5776780706674145, | |
| "grad_norm": 7.02173694085358, | |
| "learning_rate": 1.9964928026455715e-06, | |
| "loss": 0.4243, | |
| "step": 11330 | |
| }, | |
| { | |
| "epoch": 0.5781879365726814, | |
| "grad_norm": 8.536789719667972, | |
| "learning_rate": 1.9924496297605315e-06, | |
| "loss": 0.5404, | |
| "step": 11340 | |
| }, | |
| { | |
| "epoch": 0.5786978024779483, | |
| "grad_norm": 6.550119622890504, | |
| "learning_rate": 1.988407840999881e-06, | |
| "loss": 0.4879, | |
| "step": 11350 | |
| }, | |
| { | |
| "epoch": 0.5792076683832152, | |
| "grad_norm": 9.137261097383348, | |
| "learning_rate": 1.984367447385851e-06, | |
| "loss": 0.5653, | |
| "step": 11360 | |
| }, | |
| { | |
| "epoch": 0.5797175342884822, | |
| "grad_norm": 7.224113742426661, | |
| "learning_rate": 1.9803284599368704e-06, | |
| "loss": 0.5484, | |
| "step": 11370 | |
| }, | |
| { | |
| "epoch": 0.5802274001937491, | |
| "grad_norm": 28.75988441418933, | |
| "learning_rate": 1.976290889667533e-06, | |
| "loss": 0.5581, | |
| "step": 11380 | |
| }, | |
| { | |
| "epoch": 0.580737266099016, | |
| "grad_norm": 17.034587606823564, | |
| "learning_rate": 1.9722547475885685e-06, | |
| "loss": 0.5403, | |
| "step": 11390 | |
| }, | |
| { | |
| "epoch": 0.5812471320042829, | |
| "grad_norm": 8.363382694116806, | |
| "learning_rate": 1.9682200447068095e-06, | |
| "loss": 0.5392, | |
| "step": 11400 | |
| }, | |
| { | |
| "epoch": 0.5817569979095498, | |
| "grad_norm": 6.142045262902703, | |
| "learning_rate": 1.9641867920251655e-06, | |
| "loss": 0.5267, | |
| "step": 11410 | |
| }, | |
| { | |
| "epoch": 0.5822668638148167, | |
| "grad_norm": 5.818725842542696, | |
| "learning_rate": 1.9601550005425925e-06, | |
| "loss": 0.4801, | |
| "step": 11420 | |
| }, | |
| { | |
| "epoch": 0.5827767297200837, | |
| "grad_norm": 6.139658242859134, | |
| "learning_rate": 1.9561246812540572e-06, | |
| "loss": 0.5558, | |
| "step": 11430 | |
| }, | |
| { | |
| "epoch": 0.5832865956253506, | |
| "grad_norm": 8.805001339523916, | |
| "learning_rate": 1.952095845150518e-06, | |
| "loss": 0.4811, | |
| "step": 11440 | |
| }, | |
| { | |
| "epoch": 0.5837964615306175, | |
| "grad_norm": 13.305203178035956, | |
| "learning_rate": 1.9480685032188816e-06, | |
| "loss": 0.5293, | |
| "step": 11450 | |
| }, | |
| { | |
| "epoch": 0.5843063274358844, | |
| "grad_norm": 10.067357266300476, | |
| "learning_rate": 1.9440426664419855e-06, | |
| "loss": 0.5679, | |
| "step": 11460 | |
| }, | |
| { | |
| "epoch": 0.5848161933411513, | |
| "grad_norm": 6.7367825841837305, | |
| "learning_rate": 1.940018345798561e-06, | |
| "loss": 0.486, | |
| "step": 11470 | |
| }, | |
| { | |
| "epoch": 0.5853260592464182, | |
| "grad_norm": 4.7734179339308636, | |
| "learning_rate": 1.935995552263202e-06, | |
| "loss": 0.5227, | |
| "step": 11480 | |
| }, | |
| { | |
| "epoch": 0.5858359251516851, | |
| "grad_norm": 6.440122499331953, | |
| "learning_rate": 1.931974296806342e-06, | |
| "loss": 0.5004, | |
| "step": 11490 | |
| }, | |
| { | |
| "epoch": 0.5863457910569521, | |
| "grad_norm": 6.5132289741934315, | |
| "learning_rate": 1.9279545903942174e-06, | |
| "loss": 0.5213, | |
| "step": 11500 | |
| }, | |
| { | |
| "epoch": 0.586855656962219, | |
| "grad_norm": 18.800925704781697, | |
| "learning_rate": 1.923936443988841e-06, | |
| "loss": 0.5312, | |
| "step": 11510 | |
| }, | |
| { | |
| "epoch": 0.5873655228674859, | |
| "grad_norm": 14.57112932904238, | |
| "learning_rate": 1.9199198685479732e-06, | |
| "loss": 0.5311, | |
| "step": 11520 | |
| }, | |
| { | |
| "epoch": 0.5878753887727528, | |
| "grad_norm": 10.180178069678226, | |
| "learning_rate": 1.9159048750250855e-06, | |
| "loss": 0.5216, | |
| "step": 11530 | |
| }, | |
| { | |
| "epoch": 0.5883852546780197, | |
| "grad_norm": 10.195783008921715, | |
| "learning_rate": 1.9118914743693407e-06, | |
| "loss": 0.597, | |
| "step": 11540 | |
| }, | |
| { | |
| "epoch": 0.5888951205832866, | |
| "grad_norm": 10.974937494383452, | |
| "learning_rate": 1.907879677525554e-06, | |
| "loss": 0.5168, | |
| "step": 11550 | |
| }, | |
| { | |
| "epoch": 0.5894049864885536, | |
| "grad_norm": 8.849536966836947, | |
| "learning_rate": 1.9038694954341697e-06, | |
| "loss": 0.4529, | |
| "step": 11560 | |
| }, | |
| { | |
| "epoch": 0.5899148523938205, | |
| "grad_norm": 3.3795489934918295, | |
| "learning_rate": 1.8998609390312251e-06, | |
| "loss": 0.4619, | |
| "step": 11570 | |
| }, | |
| { | |
| "epoch": 0.5904247182990874, | |
| "grad_norm": 6.395874103233876, | |
| "learning_rate": 1.8958540192483277e-06, | |
| "loss": 0.5475, | |
| "step": 11580 | |
| }, | |
| { | |
| "epoch": 0.5909345842043543, | |
| "grad_norm": 26.128124043964572, | |
| "learning_rate": 1.8918487470126207e-06, | |
| "loss": 0.5457, | |
| "step": 11590 | |
| }, | |
| { | |
| "epoch": 0.5914444501096212, | |
| "grad_norm": 7.414428933895366, | |
| "learning_rate": 1.8878451332467529e-06, | |
| "loss": 0.4924, | |
| "step": 11600 | |
| }, | |
| { | |
| "epoch": 0.5919543160148881, | |
| "grad_norm": 12.17596702715116, | |
| "learning_rate": 1.8838431888688528e-06, | |
| "loss": 0.5839, | |
| "step": 11610 | |
| }, | |
| { | |
| "epoch": 0.592464181920155, | |
| "grad_norm": 7.214396505430331, | |
| "learning_rate": 1.879842924792493e-06, | |
| "loss": 0.5426, | |
| "step": 11620 | |
| }, | |
| { | |
| "epoch": 0.592974047825422, | |
| "grad_norm": 3.3425281166048286, | |
| "learning_rate": 1.8758443519266667e-06, | |
| "loss": 0.498, | |
| "step": 11630 | |
| }, | |
| { | |
| "epoch": 0.5934839137306889, | |
| "grad_norm": 7.942972243447627, | |
| "learning_rate": 1.8718474811757553e-06, | |
| "loss": 0.4897, | |
| "step": 11640 | |
| }, | |
| { | |
| "epoch": 0.5939937796359558, | |
| "grad_norm": 9.82831623989867, | |
| "learning_rate": 1.8678523234394956e-06, | |
| "loss": 0.5543, | |
| "step": 11650 | |
| }, | |
| { | |
| "epoch": 0.5945036455412227, | |
| "grad_norm": 3.5276416237463493, | |
| "learning_rate": 1.863858889612956e-06, | |
| "loss": 0.4956, | |
| "step": 11660 | |
| }, | |
| { | |
| "epoch": 0.5950135114464896, | |
| "grad_norm": 5.747155748080999, | |
| "learning_rate": 1.8598671905865002e-06, | |
| "loss": 0.5495, | |
| "step": 11670 | |
| }, | |
| { | |
| "epoch": 0.5955233773517565, | |
| "grad_norm": 10.434001686139608, | |
| "learning_rate": 1.8558772372457647e-06, | |
| "loss": 0.532, | |
| "step": 11680 | |
| }, | |
| { | |
| "epoch": 0.5960332432570234, | |
| "grad_norm": 7.468458684107875, | |
| "learning_rate": 1.8518890404716227e-06, | |
| "loss": 0.4994, | |
| "step": 11690 | |
| }, | |
| { | |
| "epoch": 0.5965431091622904, | |
| "grad_norm": 5.821387671858344, | |
| "learning_rate": 1.8479026111401594e-06, | |
| "loss": 0.5085, | |
| "step": 11700 | |
| }, | |
| { | |
| "epoch": 0.5970529750675573, | |
| "grad_norm": 6.498163167113197, | |
| "learning_rate": 1.8439179601226376e-06, | |
| "loss": 0.4926, | |
| "step": 11710 | |
| }, | |
| { | |
| "epoch": 0.5975628409728242, | |
| "grad_norm": 3.903883912104386, | |
| "learning_rate": 1.8399350982854717e-06, | |
| "loss": 0.47, | |
| "step": 11720 | |
| }, | |
| { | |
| "epoch": 0.5980727068780911, | |
| "grad_norm": 17.69494525041306, | |
| "learning_rate": 1.835954036490198e-06, | |
| "loss": 0.5112, | |
| "step": 11730 | |
| }, | |
| { | |
| "epoch": 0.598582572783358, | |
| "grad_norm": 8.871473917185979, | |
| "learning_rate": 1.8319747855934416e-06, | |
| "loss": 0.5604, | |
| "step": 11740 | |
| }, | |
| { | |
| "epoch": 0.5990924386886249, | |
| "grad_norm": 6.957782916536802, | |
| "learning_rate": 1.8279973564468906e-06, | |
| "loss": 0.4845, | |
| "step": 11750 | |
| }, | |
| { | |
| "epoch": 0.5996023045938919, | |
| "grad_norm": 15.674418544975776, | |
| "learning_rate": 1.8240217598972665e-06, | |
| "loss": 0.5703, | |
| "step": 11760 | |
| }, | |
| { | |
| "epoch": 0.6001121704991588, | |
| "grad_norm": 5.457187043090276, | |
| "learning_rate": 1.8200480067862888e-06, | |
| "loss": 0.5326, | |
| "step": 11770 | |
| }, | |
| { | |
| "epoch": 0.6006220364044257, | |
| "grad_norm": 19.100409980702484, | |
| "learning_rate": 1.8160761079506553e-06, | |
| "loss": 0.5163, | |
| "step": 11780 | |
| }, | |
| { | |
| "epoch": 0.6011319023096926, | |
| "grad_norm": 12.483490528108064, | |
| "learning_rate": 1.812106074222002e-06, | |
| "loss": 0.5, | |
| "step": 11790 | |
| }, | |
| { | |
| "epoch": 0.6016417682149595, | |
| "grad_norm": 10.438949202796984, | |
| "learning_rate": 1.8081379164268826e-06, | |
| "loss": 0.543, | |
| "step": 11800 | |
| }, | |
| { | |
| "epoch": 0.6021516341202264, | |
| "grad_norm": 10.21693346478386, | |
| "learning_rate": 1.804171645386732e-06, | |
| "loss": 0.5265, | |
| "step": 11810 | |
| }, | |
| { | |
| "epoch": 0.6026615000254933, | |
| "grad_norm": 10.090100648916717, | |
| "learning_rate": 1.800207271917842e-06, | |
| "loss": 0.5225, | |
| "step": 11820 | |
| }, | |
| { | |
| "epoch": 0.6031713659307603, | |
| "grad_norm": 9.33838726133408, | |
| "learning_rate": 1.7962448068313298e-06, | |
| "loss": 0.538, | |
| "step": 11830 | |
| }, | |
| { | |
| "epoch": 0.6036812318360272, | |
| "grad_norm": 9.295998599675029, | |
| "learning_rate": 1.7922842609331053e-06, | |
| "loss": 0.504, | |
| "step": 11840 | |
| }, | |
| { | |
| "epoch": 0.6041910977412941, | |
| "grad_norm": 7.436412661247303, | |
| "learning_rate": 1.788325645023848e-06, | |
| "loss": 0.4834, | |
| "step": 11850 | |
| }, | |
| { | |
| "epoch": 0.604700963646561, | |
| "grad_norm": 11.230956866328153, | |
| "learning_rate": 1.7843689698989715e-06, | |
| "loss": 0.4796, | |
| "step": 11860 | |
| }, | |
| { | |
| "epoch": 0.6052108295518279, | |
| "grad_norm": 13.322406594718249, | |
| "learning_rate": 1.7804142463486e-06, | |
| "loss": 0.4953, | |
| "step": 11870 | |
| }, | |
| { | |
| "epoch": 0.6057206954570948, | |
| "grad_norm": 6.739605043613098, | |
| "learning_rate": 1.776461485157531e-06, | |
| "loss": 0.5836, | |
| "step": 11880 | |
| }, | |
| { | |
| "epoch": 0.6062305613623618, | |
| "grad_norm": 3.6900610967993064, | |
| "learning_rate": 1.7725106971052147e-06, | |
| "loss": 0.5154, | |
| "step": 11890 | |
| }, | |
| { | |
| "epoch": 0.6067404272676287, | |
| "grad_norm": 5.6557903709902675, | |
| "learning_rate": 1.7685618929657193e-06, | |
| "loss": 0.5439, | |
| "step": 11900 | |
| }, | |
| { | |
| "epoch": 0.6072502931728956, | |
| "grad_norm": 19.661102790117276, | |
| "learning_rate": 1.7646150835077014e-06, | |
| "loss": 0.5327, | |
| "step": 11910 | |
| }, | |
| { | |
| "epoch": 0.6077601590781625, | |
| "grad_norm": 6.91807500636854, | |
| "learning_rate": 1.7606702794943803e-06, | |
| "loss": 0.5193, | |
| "step": 11920 | |
| }, | |
| { | |
| "epoch": 0.6082700249834293, | |
| "grad_norm": 16.566079544705957, | |
| "learning_rate": 1.756727491683503e-06, | |
| "loss": 0.5436, | |
| "step": 11930 | |
| }, | |
| { | |
| "epoch": 0.6087798908886962, | |
| "grad_norm": 39.024318471176294, | |
| "learning_rate": 1.7527867308273211e-06, | |
| "loss": 0.5198, | |
| "step": 11940 | |
| }, | |
| { | |
| "epoch": 0.6092897567939631, | |
| "grad_norm": 15.767456247322672, | |
| "learning_rate": 1.7488480076725584e-06, | |
| "loss": 0.5732, | |
| "step": 11950 | |
| }, | |
| { | |
| "epoch": 0.60979962269923, | |
| "grad_norm": 6.138523190826617, | |
| "learning_rate": 1.7449113329603787e-06, | |
| "loss": 0.5496, | |
| "step": 11960 | |
| }, | |
| { | |
| "epoch": 0.610309488604497, | |
| "grad_norm": 36.330040916142245, | |
| "learning_rate": 1.7409767174263643e-06, | |
| "loss": 0.5342, | |
| "step": 11970 | |
| }, | |
| { | |
| "epoch": 0.6108193545097639, | |
| "grad_norm": 12.436489842915702, | |
| "learning_rate": 1.7370441718004771e-06, | |
| "loss": 0.6011, | |
| "step": 11980 | |
| }, | |
| { | |
| "epoch": 0.6113292204150308, | |
| "grad_norm": 5.2678899426017605, | |
| "learning_rate": 1.733113706807038e-06, | |
| "loss": 0.532, | |
| "step": 11990 | |
| }, | |
| { | |
| "epoch": 0.6118390863202977, | |
| "grad_norm": 11.889046920515222, | |
| "learning_rate": 1.7291853331646917e-06, | |
| "loss": 0.5039, | |
| "step": 12000 | |
| }, | |
| { | |
| "epoch": 0.6123489522255646, | |
| "grad_norm": 12.823042645212187, | |
| "learning_rate": 1.7252590615863809e-06, | |
| "loss": 0.5318, | |
| "step": 12010 | |
| }, | |
| { | |
| "epoch": 0.6128588181308315, | |
| "grad_norm": 14.421157906088819, | |
| "learning_rate": 1.7213349027793153e-06, | |
| "loss": 0.5196, | |
| "step": 12020 | |
| }, | |
| { | |
| "epoch": 0.6133686840360985, | |
| "grad_norm": 19.97193610453995, | |
| "learning_rate": 1.7174128674449422e-06, | |
| "loss": 0.4755, | |
| "step": 12030 | |
| }, | |
| { | |
| "epoch": 0.6138785499413654, | |
| "grad_norm": 9.066846582449964, | |
| "learning_rate": 1.7134929662789204e-06, | |
| "loss": 0.5236, | |
| "step": 12040 | |
| }, | |
| { | |
| "epoch": 0.6143884158466323, | |
| "grad_norm": 17.112162242451333, | |
| "learning_rate": 1.709575209971085e-06, | |
| "loss": 0.467, | |
| "step": 12050 | |
| }, | |
| { | |
| "epoch": 0.6148982817518992, | |
| "grad_norm": 17.40291640222636, | |
| "learning_rate": 1.7056596092054245e-06, | |
| "loss": 0.493, | |
| "step": 12060 | |
| }, | |
| { | |
| "epoch": 0.6154081476571661, | |
| "grad_norm": 4.683129228842877, | |
| "learning_rate": 1.7017461746600506e-06, | |
| "loss": 0.5323, | |
| "step": 12070 | |
| }, | |
| { | |
| "epoch": 0.615918013562433, | |
| "grad_norm": 3.7109051124059502, | |
| "learning_rate": 1.697834917007163e-06, | |
| "loss": 0.5268, | |
| "step": 12080 | |
| }, | |
| { | |
| "epoch": 0.6164278794677, | |
| "grad_norm": 9.258106753637072, | |
| "learning_rate": 1.6939258469130288e-06, | |
| "loss": 0.534, | |
| "step": 12090 | |
| }, | |
| { | |
| "epoch": 0.6169377453729669, | |
| "grad_norm": 30.938152269344794, | |
| "learning_rate": 1.6900189750379469e-06, | |
| "loss": 0.5247, | |
| "step": 12100 | |
| }, | |
| { | |
| "epoch": 0.6174476112782338, | |
| "grad_norm": 6.162255674210957, | |
| "learning_rate": 1.6861143120362239e-06, | |
| "loss": 0.5215, | |
| "step": 12110 | |
| }, | |
| { | |
| "epoch": 0.6179574771835007, | |
| "grad_norm": 9.513714545588028, | |
| "learning_rate": 1.6822118685561403e-06, | |
| "loss": 0.5121, | |
| "step": 12120 | |
| }, | |
| { | |
| "epoch": 0.6184673430887676, | |
| "grad_norm": 6.834967431212922, | |
| "learning_rate": 1.6783116552399258e-06, | |
| "loss": 0.5008, | |
| "step": 12130 | |
| }, | |
| { | |
| "epoch": 0.6189772089940345, | |
| "grad_norm": 4.829247413785687, | |
| "learning_rate": 1.6744136827237283e-06, | |
| "loss": 0.5184, | |
| "step": 12140 | |
| }, | |
| { | |
| "epoch": 0.6194870748993014, | |
| "grad_norm": 7.175050633523205, | |
| "learning_rate": 1.670517961637582e-06, | |
| "loss": 0.4854, | |
| "step": 12150 | |
| }, | |
| { | |
| "epoch": 0.6199969408045684, | |
| "grad_norm": 10.676152611521568, | |
| "learning_rate": 1.666624502605385e-06, | |
| "loss": 0.5476, | |
| "step": 12160 | |
| }, | |
| { | |
| "epoch": 0.6205068067098353, | |
| "grad_norm": 19.354648974770278, | |
| "learning_rate": 1.6627333162448638e-06, | |
| "loss": 0.5143, | |
| "step": 12170 | |
| }, | |
| { | |
| "epoch": 0.6210166726151022, | |
| "grad_norm": 9.35966299661283, | |
| "learning_rate": 1.6588444131675486e-06, | |
| "loss": 0.5187, | |
| "step": 12180 | |
| }, | |
| { | |
| "epoch": 0.6215265385203691, | |
| "grad_norm": 8.87291800305617, | |
| "learning_rate": 1.6549578039787436e-06, | |
| "loss": 0.5525, | |
| "step": 12190 | |
| }, | |
| { | |
| "epoch": 0.622036404425636, | |
| "grad_norm": 6.982194117119849, | |
| "learning_rate": 1.6510734992774953e-06, | |
| "loss": 0.4918, | |
| "step": 12200 | |
| }, | |
| { | |
| "epoch": 0.6225462703309029, | |
| "grad_norm": 8.091889155656451, | |
| "learning_rate": 1.647191509656567e-06, | |
| "loss": 0.4945, | |
| "step": 12210 | |
| }, | |
| { | |
| "epoch": 0.6230561362361698, | |
| "grad_norm": 5.905044063744679, | |
| "learning_rate": 1.6433118457024094e-06, | |
| "loss": 0.5587, | |
| "step": 12220 | |
| }, | |
| { | |
| "epoch": 0.6235660021414368, | |
| "grad_norm": 25.530868877288977, | |
| "learning_rate": 1.6394345179951293e-06, | |
| "loss": 0.4604, | |
| "step": 12230 | |
| }, | |
| { | |
| "epoch": 0.6240758680467037, | |
| "grad_norm": 5.695133583269696, | |
| "learning_rate": 1.6355595371084627e-06, | |
| "loss": 0.4881, | |
| "step": 12240 | |
| }, | |
| { | |
| "epoch": 0.6245857339519706, | |
| "grad_norm": 15.30654531084609, | |
| "learning_rate": 1.6316869136097467e-06, | |
| "loss": 0.5455, | |
| "step": 12250 | |
| }, | |
| { | |
| "epoch": 0.6250955998572375, | |
| "grad_norm": 3.749941919122588, | |
| "learning_rate": 1.6278166580598897e-06, | |
| "loss": 0.4457, | |
| "step": 12260 | |
| }, | |
| { | |
| "epoch": 0.6256054657625044, | |
| "grad_norm": 12.003227562589707, | |
| "learning_rate": 1.6239487810133404e-06, | |
| "loss": 0.5192, | |
| "step": 12270 | |
| }, | |
| { | |
| "epoch": 0.6261153316677713, | |
| "grad_norm": 4.426958208572336, | |
| "learning_rate": 1.6200832930180643e-06, | |
| "loss": 0.4482, | |
| "step": 12280 | |
| }, | |
| { | |
| "epoch": 0.6266251975730383, | |
| "grad_norm": 8.717956824567121, | |
| "learning_rate": 1.6162202046155085e-06, | |
| "loss": 0.5408, | |
| "step": 12290 | |
| }, | |
| { | |
| "epoch": 0.6271350634783052, | |
| "grad_norm": 5.105859555737084, | |
| "learning_rate": 1.6123595263405783e-06, | |
| "loss": 0.5138, | |
| "step": 12300 | |
| }, | |
| { | |
| "epoch": 0.6276449293835721, | |
| "grad_norm": 6.392138296699517, | |
| "learning_rate": 1.6085012687216078e-06, | |
| "loss": 0.4492, | |
| "step": 12310 | |
| }, | |
| { | |
| "epoch": 0.628154795288839, | |
| "grad_norm": 18.93794396910775, | |
| "learning_rate": 1.6046454422803253e-06, | |
| "loss": 0.5681, | |
| "step": 12320 | |
| }, | |
| { | |
| "epoch": 0.6286646611941059, | |
| "grad_norm": 6.3885973165769085, | |
| "learning_rate": 1.6007920575318334e-06, | |
| "loss": 0.5657, | |
| "step": 12330 | |
| }, | |
| { | |
| "epoch": 0.6291745270993728, | |
| "grad_norm": 15.232212507178879, | |
| "learning_rate": 1.5969411249845737e-06, | |
| "loss": 0.4983, | |
| "step": 12340 | |
| }, | |
| { | |
| "epoch": 0.6296843930046397, | |
| "grad_norm": 6.38450536379092, | |
| "learning_rate": 1.5930926551403025e-06, | |
| "loss": 0.4535, | |
| "step": 12350 | |
| }, | |
| { | |
| "epoch": 0.6301942589099067, | |
| "grad_norm": 6.810440003124526, | |
| "learning_rate": 1.5892466584940574e-06, | |
| "loss": 0.5246, | |
| "step": 12360 | |
| }, | |
| { | |
| "epoch": 0.6307041248151736, | |
| "grad_norm": 92.33699964113386, | |
| "learning_rate": 1.5854031455341332e-06, | |
| "loss": 0.4888, | |
| "step": 12370 | |
| }, | |
| { | |
| "epoch": 0.6312139907204405, | |
| "grad_norm": 5.952166627327409, | |
| "learning_rate": 1.5815621267420526e-06, | |
| "loss": 0.4983, | |
| "step": 12380 | |
| }, | |
| { | |
| "epoch": 0.6317238566257074, | |
| "grad_norm": 13.330565091112442, | |
| "learning_rate": 1.5777236125925333e-06, | |
| "loss": 0.4552, | |
| "step": 12390 | |
| }, | |
| { | |
| "epoch": 0.6322337225309743, | |
| "grad_norm": 13.431149694108665, | |
| "learning_rate": 1.5738876135534659e-06, | |
| "loss": 0.5294, | |
| "step": 12400 | |
| }, | |
| { | |
| "epoch": 0.6327435884362412, | |
| "grad_norm": 5.8613937499911435, | |
| "learning_rate": 1.5700541400858793e-06, | |
| "loss": 0.5536, | |
| "step": 12410 | |
| }, | |
| { | |
| "epoch": 0.6332534543415081, | |
| "grad_norm": 13.497522029092826, | |
| "learning_rate": 1.5662232026439172e-06, | |
| "loss": 0.5, | |
| "step": 12420 | |
| }, | |
| { | |
| "epoch": 0.6337633202467751, | |
| "grad_norm": 6.317092643414451, | |
| "learning_rate": 1.5623948116748074e-06, | |
| "loss": 0.4692, | |
| "step": 12430 | |
| }, | |
| { | |
| "epoch": 0.634273186152042, | |
| "grad_norm": 10.31916177700157, | |
| "learning_rate": 1.5585689776188321e-06, | |
| "loss": 0.5556, | |
| "step": 12440 | |
| }, | |
| { | |
| "epoch": 0.6347830520573089, | |
| "grad_norm": 7.051415888978958, | |
| "learning_rate": 1.5547457109093004e-06, | |
| "loss": 0.433, | |
| "step": 12450 | |
| }, | |
| { | |
| "epoch": 0.6352929179625758, | |
| "grad_norm": 13.194214008195562, | |
| "learning_rate": 1.5509250219725207e-06, | |
| "loss": 0.4866, | |
| "step": 12460 | |
| }, | |
| { | |
| "epoch": 0.6358027838678427, | |
| "grad_norm": 5.496980733703801, | |
| "learning_rate": 1.5471069212277729e-06, | |
| "loss": 0.4726, | |
| "step": 12470 | |
| }, | |
| { | |
| "epoch": 0.6363126497731096, | |
| "grad_norm": 27.394391903919008, | |
| "learning_rate": 1.5432914190872757e-06, | |
| "loss": 0.5034, | |
| "step": 12480 | |
| }, | |
| { | |
| "epoch": 0.6368225156783766, | |
| "grad_norm": 8.800161090855273, | |
| "learning_rate": 1.539478525956164e-06, | |
| "loss": 0.5441, | |
| "step": 12490 | |
| }, | |
| { | |
| "epoch": 0.6373323815836435, | |
| "grad_norm": 12.660866908159553, | |
| "learning_rate": 1.5356682522324578e-06, | |
| "loss": 0.4604, | |
| "step": 12500 | |
| }, | |
| { | |
| "epoch": 0.6378422474889104, | |
| "grad_norm": 14.972027838813567, | |
| "learning_rate": 1.5318606083070305e-06, | |
| "loss": 0.5573, | |
| "step": 12510 | |
| }, | |
| { | |
| "epoch": 0.6383521133941773, | |
| "grad_norm": 8.968890917955061, | |
| "learning_rate": 1.5280556045635881e-06, | |
| "loss": 0.51, | |
| "step": 12520 | |
| }, | |
| { | |
| "epoch": 0.6388619792994442, | |
| "grad_norm": 9.16886223869151, | |
| "learning_rate": 1.5242532513786334e-06, | |
| "loss": 0.4978, | |
| "step": 12530 | |
| }, | |
| { | |
| "epoch": 0.6393718452047111, | |
| "grad_norm": 7.865488171989786, | |
| "learning_rate": 1.5204535591214428e-06, | |
| "loss": 0.5459, | |
| "step": 12540 | |
| }, | |
| { | |
| "epoch": 0.639881711109978, | |
| "grad_norm": 7.322671706067244, | |
| "learning_rate": 1.516656538154035e-06, | |
| "loss": 0.5672, | |
| "step": 12550 | |
| }, | |
| { | |
| "epoch": 0.640391577015245, | |
| "grad_norm": 6.981912098501564, | |
| "learning_rate": 1.5128621988311448e-06, | |
| "loss": 0.4719, | |
| "step": 12560 | |
| }, | |
| { | |
| "epoch": 0.6409014429205119, | |
| "grad_norm": 13.018524518385442, | |
| "learning_rate": 1.5090705515001949e-06, | |
| "loss": 0.566, | |
| "step": 12570 | |
| }, | |
| { | |
| "epoch": 0.6414113088257788, | |
| "grad_norm": 28.22848869993069, | |
| "learning_rate": 1.5052816065012635e-06, | |
| "loss": 0.4726, | |
| "step": 12580 | |
| }, | |
| { | |
| "epoch": 0.6419211747310457, | |
| "grad_norm": 6.130618010338353, | |
| "learning_rate": 1.501495374167063e-06, | |
| "loss": 0.5574, | |
| "step": 12590 | |
| }, | |
| { | |
| "epoch": 0.6424310406363126, | |
| "grad_norm": 3.801489463317786, | |
| "learning_rate": 1.497711864822905e-06, | |
| "loss": 0.4853, | |
| "step": 12600 | |
| }, | |
| { | |
| "epoch": 0.6429409065415795, | |
| "grad_norm": 8.339613236306585, | |
| "learning_rate": 1.4939310887866775e-06, | |
| "loss": 0.5063, | |
| "step": 12610 | |
| }, | |
| { | |
| "epoch": 0.6434507724468465, | |
| "grad_norm": 6.973148186606896, | |
| "learning_rate": 1.4901530563688154e-06, | |
| "loss": 0.4856, | |
| "step": 12620 | |
| }, | |
| { | |
| "epoch": 0.6439606383521134, | |
| "grad_norm": 12.994572347779247, | |
| "learning_rate": 1.4863777778722682e-06, | |
| "loss": 0.5193, | |
| "step": 12630 | |
| }, | |
| { | |
| "epoch": 0.6444705042573803, | |
| "grad_norm": 6.558333340395644, | |
| "learning_rate": 1.482605263592478e-06, | |
| "loss": 0.4978, | |
| "step": 12640 | |
| }, | |
| { | |
| "epoch": 0.6449803701626472, | |
| "grad_norm": 5.141682909872482, | |
| "learning_rate": 1.4788355238173473e-06, | |
| "loss": 0.5004, | |
| "step": 12650 | |
| }, | |
| { | |
| "epoch": 0.6454902360679141, | |
| "grad_norm": 6.522893352262928, | |
| "learning_rate": 1.4750685688272143e-06, | |
| "loss": 0.5, | |
| "step": 12660 | |
| }, | |
| { | |
| "epoch": 0.646000101973181, | |
| "grad_norm": 6.603165179621692, | |
| "learning_rate": 1.4713044088948197e-06, | |
| "loss": 0.4654, | |
| "step": 12670 | |
| }, | |
| { | |
| "epoch": 0.6465099678784479, | |
| "grad_norm": 5.3467446412016315, | |
| "learning_rate": 1.4675430542852848e-06, | |
| "loss": 0.5501, | |
| "step": 12680 | |
| }, | |
| { | |
| "epoch": 0.6470198337837149, | |
| "grad_norm": 19.14659553255697, | |
| "learning_rate": 1.4637845152560804e-06, | |
| "loss": 0.526, | |
| "step": 12690 | |
| }, | |
| { | |
| "epoch": 0.6475296996889818, | |
| "grad_norm": 8.229901563411332, | |
| "learning_rate": 1.4600288020569959e-06, | |
| "loss": 0.5258, | |
| "step": 12700 | |
| }, | |
| { | |
| "epoch": 0.6480395655942487, | |
| "grad_norm": 9.759256471709117, | |
| "learning_rate": 1.4562759249301185e-06, | |
| "loss": 0.607, | |
| "step": 12710 | |
| }, | |
| { | |
| "epoch": 0.6485494314995156, | |
| "grad_norm": 6.935966414378126, | |
| "learning_rate": 1.4525258941097985e-06, | |
| "loss": 0.5217, | |
| "step": 12720 | |
| }, | |
| { | |
| "epoch": 0.6490592974047825, | |
| "grad_norm": 15.54602309816824, | |
| "learning_rate": 1.4487787198226244e-06, | |
| "loss": 0.5363, | |
| "step": 12730 | |
| }, | |
| { | |
| "epoch": 0.6495691633100494, | |
| "grad_norm": 4.928647102455407, | |
| "learning_rate": 1.4450344122873985e-06, | |
| "loss": 0.4742, | |
| "step": 12740 | |
| }, | |
| { | |
| "epoch": 0.6500790292153164, | |
| "grad_norm": 12.495812231264386, | |
| "learning_rate": 1.4412929817150982e-06, | |
| "loss": 0.5405, | |
| "step": 12750 | |
| }, | |
| { | |
| "epoch": 0.6505888951205833, | |
| "grad_norm": 8.425342337716248, | |
| "learning_rate": 1.437554438308863e-06, | |
| "loss": 0.4444, | |
| "step": 12760 | |
| }, | |
| { | |
| "epoch": 0.6510987610258502, | |
| "grad_norm": 9.84721696599807, | |
| "learning_rate": 1.4338187922639506e-06, | |
| "loss": 0.5144, | |
| "step": 12770 | |
| }, | |
| { | |
| "epoch": 0.6516086269311171, | |
| "grad_norm": 5.167726505845119, | |
| "learning_rate": 1.430086053767726e-06, | |
| "loss": 0.5276, | |
| "step": 12780 | |
| }, | |
| { | |
| "epoch": 0.652118492836384, | |
| "grad_norm": 12.668625938673603, | |
| "learning_rate": 1.4263562329996194e-06, | |
| "loss": 0.505, | |
| "step": 12790 | |
| }, | |
| { | |
| "epoch": 0.6526283587416509, | |
| "grad_norm": 15.763800394384761, | |
| "learning_rate": 1.422629340131106e-06, | |
| "loss": 0.5763, | |
| "step": 12800 | |
| }, | |
| { | |
| "epoch": 0.6531382246469178, | |
| "grad_norm": 8.02198435905354, | |
| "learning_rate": 1.4189053853256757e-06, | |
| "loss": 0.5103, | |
| "step": 12810 | |
| }, | |
| { | |
| "epoch": 0.6536480905521848, | |
| "grad_norm": 11.950637204603689, | |
| "learning_rate": 1.4151843787388062e-06, | |
| "loss": 0.5635, | |
| "step": 12820 | |
| }, | |
| { | |
| "epoch": 0.6541579564574517, | |
| "grad_norm": 8.293082362914374, | |
| "learning_rate": 1.4114663305179382e-06, | |
| "loss": 0.5072, | |
| "step": 12830 | |
| }, | |
| { | |
| "epoch": 0.6546678223627186, | |
| "grad_norm": 4.3737022288493765, | |
| "learning_rate": 1.4077512508024382e-06, | |
| "loss": 0.4543, | |
| "step": 12840 | |
| }, | |
| { | |
| "epoch": 0.6551776882679855, | |
| "grad_norm": 18.10698828602165, | |
| "learning_rate": 1.4040391497235845e-06, | |
| "loss": 0.5085, | |
| "step": 12850 | |
| }, | |
| { | |
| "epoch": 0.6556875541732524, | |
| "grad_norm": 7.07731221377441, | |
| "learning_rate": 1.4003300374045283e-06, | |
| "loss": 0.4776, | |
| "step": 12860 | |
| }, | |
| { | |
| "epoch": 0.6561974200785193, | |
| "grad_norm": 4.675481069069686, | |
| "learning_rate": 1.396623923960271e-06, | |
| "loss": 0.466, | |
| "step": 12870 | |
| }, | |
| { | |
| "epoch": 0.6567072859837862, | |
| "grad_norm": 6.033379297093717, | |
| "learning_rate": 1.3929208194976362e-06, | |
| "loss": 0.5344, | |
| "step": 12880 | |
| }, | |
| { | |
| "epoch": 0.6572171518890532, | |
| "grad_norm": 8.628525763507344, | |
| "learning_rate": 1.3892207341152416e-06, | |
| "loss": 0.5409, | |
| "step": 12890 | |
| }, | |
| { | |
| "epoch": 0.6577270177943201, | |
| "grad_norm": 5.409542842182235, | |
| "learning_rate": 1.385523677903472e-06, | |
| "loss": 0.4883, | |
| "step": 12900 | |
| }, | |
| { | |
| "epoch": 0.658236883699587, | |
| "grad_norm": 11.783825873969915, | |
| "learning_rate": 1.38182966094445e-06, | |
| "loss": 0.5546, | |
| "step": 12910 | |
| }, | |
| { | |
| "epoch": 0.6587467496048539, | |
| "grad_norm": 14.457282758726171, | |
| "learning_rate": 1.3781386933120133e-06, | |
| "loss": 0.4842, | |
| "step": 12920 | |
| }, | |
| { | |
| "epoch": 0.6592566155101208, | |
| "grad_norm": 7.7607092167340355, | |
| "learning_rate": 1.3744507850716804e-06, | |
| "loss": 0.5051, | |
| "step": 12930 | |
| }, | |
| { | |
| "epoch": 0.6597664814153877, | |
| "grad_norm": 7.137912593306867, | |
| "learning_rate": 1.3707659462806284e-06, | |
| "loss": 0.4834, | |
| "step": 12940 | |
| }, | |
| { | |
| "epoch": 0.6602763473206547, | |
| "grad_norm": 6.143705561272351, | |
| "learning_rate": 1.367084186987663e-06, | |
| "loss": 0.5108, | |
| "step": 12950 | |
| }, | |
| { | |
| "epoch": 0.6607862132259216, | |
| "grad_norm": 8.731079626973258, | |
| "learning_rate": 1.3634055172331926e-06, | |
| "loss": 0.4852, | |
| "step": 12960 | |
| }, | |
| { | |
| "epoch": 0.6612960791311885, | |
| "grad_norm": 5.371437695099816, | |
| "learning_rate": 1.3597299470491986e-06, | |
| "loss": 0.5675, | |
| "step": 12970 | |
| }, | |
| { | |
| "epoch": 0.6618059450364554, | |
| "grad_norm": 7.673890908642838, | |
| "learning_rate": 1.356057486459214e-06, | |
| "loss": 0.5427, | |
| "step": 12980 | |
| }, | |
| { | |
| "epoch": 0.6623158109417223, | |
| "grad_norm": 5.580626395226384, | |
| "learning_rate": 1.352388145478285e-06, | |
| "loss": 0.5475, | |
| "step": 12990 | |
| }, | |
| { | |
| "epoch": 0.6628256768469892, | |
| "grad_norm": 14.001010484519423, | |
| "learning_rate": 1.3487219341129566e-06, | |
| "loss": 0.4702, | |
| "step": 13000 | |
| }, | |
| { | |
| "epoch": 0.6633355427522561, | |
| "grad_norm": 11.362626293229546, | |
| "learning_rate": 1.3450588623612353e-06, | |
| "loss": 0.5142, | |
| "step": 13010 | |
| }, | |
| { | |
| "epoch": 0.6638454086575231, | |
| "grad_norm": 4.961569081167888, | |
| "learning_rate": 1.3413989402125682e-06, | |
| "loss": 0.5187, | |
| "step": 13020 | |
| }, | |
| { | |
| "epoch": 0.66435527456279, | |
| "grad_norm": 4.915951893745529, | |
| "learning_rate": 1.3377421776478111e-06, | |
| "loss": 0.5272, | |
| "step": 13030 | |
| }, | |
| { | |
| "epoch": 0.6648651404680569, | |
| "grad_norm": 5.045332996976456, | |
| "learning_rate": 1.3340885846392032e-06, | |
| "loss": 0.5923, | |
| "step": 13040 | |
| }, | |
| { | |
| "epoch": 0.6653750063733238, | |
| "grad_norm": 5.509953458864324, | |
| "learning_rate": 1.3304381711503444e-06, | |
| "loss": 0.49, | |
| "step": 13050 | |
| }, | |
| { | |
| "epoch": 0.6658848722785907, | |
| "grad_norm": 7.9456633485728165, | |
| "learning_rate": 1.3267909471361574e-06, | |
| "loss": 0.5562, | |
| "step": 13060 | |
| }, | |
| { | |
| "epoch": 0.6663947381838576, | |
| "grad_norm": 24.233820150621078, | |
| "learning_rate": 1.3231469225428726e-06, | |
| "loss": 0.5177, | |
| "step": 13070 | |
| }, | |
| { | |
| "epoch": 0.6669046040891246, | |
| "grad_norm": 7.9754418427625255, | |
| "learning_rate": 1.3195061073079901e-06, | |
| "loss": 0.5494, | |
| "step": 13080 | |
| }, | |
| { | |
| "epoch": 0.6674144699943915, | |
| "grad_norm": 7.739534240779945, | |
| "learning_rate": 1.3158685113602636e-06, | |
| "loss": 0.4843, | |
| "step": 13090 | |
| }, | |
| { | |
| "epoch": 0.6679243358996584, | |
| "grad_norm": 14.361556260016403, | |
| "learning_rate": 1.3122341446196636e-06, | |
| "loss": 0.5515, | |
| "step": 13100 | |
| }, | |
| { | |
| "epoch": 0.6684342018049253, | |
| "grad_norm": 4.830484020015252, | |
| "learning_rate": 1.3086030169973552e-06, | |
| "loss": 0.5049, | |
| "step": 13110 | |
| }, | |
| { | |
| "epoch": 0.6689440677101922, | |
| "grad_norm": 11.617407710197574, | |
| "learning_rate": 1.3049751383956707e-06, | |
| "loss": 0.4704, | |
| "step": 13120 | |
| }, | |
| { | |
| "epoch": 0.6694539336154591, | |
| "grad_norm": 20.61436121006635, | |
| "learning_rate": 1.301350518708081e-06, | |
| "loss": 0.5146, | |
| "step": 13130 | |
| }, | |
| { | |
| "epoch": 0.669963799520726, | |
| "grad_norm": 22.54013582179851, | |
| "learning_rate": 1.2977291678191733e-06, | |
| "loss": 0.4731, | |
| "step": 13140 | |
| }, | |
| { | |
| "epoch": 0.670473665425993, | |
| "grad_norm": 5.086290366289214, | |
| "learning_rate": 1.2941110956046142e-06, | |
| "loss": 0.5069, | |
| "step": 13150 | |
| }, | |
| { | |
| "epoch": 0.6709835313312599, | |
| "grad_norm": 7.472775727129846, | |
| "learning_rate": 1.290496311931135e-06, | |
| "loss": 0.5016, | |
| "step": 13160 | |
| }, | |
| { | |
| "epoch": 0.6714933972365268, | |
| "grad_norm": 9.605057298095543, | |
| "learning_rate": 1.2868848266564964e-06, | |
| "loss": 0.4805, | |
| "step": 13170 | |
| }, | |
| { | |
| "epoch": 0.6720032631417937, | |
| "grad_norm": 24.030989984043877, | |
| "learning_rate": 1.2832766496294647e-06, | |
| "loss": 0.4861, | |
| "step": 13180 | |
| }, | |
| { | |
| "epoch": 0.6725131290470606, | |
| "grad_norm": 7.688398644867407, | |
| "learning_rate": 1.2796717906897831e-06, | |
| "loss": 0.5021, | |
| "step": 13190 | |
| }, | |
| { | |
| "epoch": 0.6730229949523275, | |
| "grad_norm": 8.08782204632997, | |
| "learning_rate": 1.2760702596681478e-06, | |
| "loss": 0.5308, | |
| "step": 13200 | |
| }, | |
| { | |
| "epoch": 0.6735328608575945, | |
| "grad_norm": 6.724965448279945, | |
| "learning_rate": 1.2724720663861792e-06, | |
| "loss": 0.4748, | |
| "step": 13210 | |
| }, | |
| { | |
| "epoch": 0.6740427267628614, | |
| "grad_norm": 15.869715866274502, | |
| "learning_rate": 1.2688772206563938e-06, | |
| "loss": 0.5366, | |
| "step": 13220 | |
| }, | |
| { | |
| "epoch": 0.6745525926681283, | |
| "grad_norm": 17.33308634891779, | |
| "learning_rate": 1.2652857322821821e-06, | |
| "loss": 0.4798, | |
| "step": 13230 | |
| }, | |
| { | |
| "epoch": 0.6750624585733952, | |
| "grad_norm": 6.413858029938686, | |
| "learning_rate": 1.2616976110577766e-06, | |
| "loss": 0.5286, | |
| "step": 13240 | |
| }, | |
| { | |
| "epoch": 0.6755723244786621, | |
| "grad_norm": 27.500826205776104, | |
| "learning_rate": 1.2581128667682286e-06, | |
| "loss": 0.525, | |
| "step": 13250 | |
| }, | |
| { | |
| "epoch": 0.676082190383929, | |
| "grad_norm": 4.587601999321133, | |
| "learning_rate": 1.2545315091893784e-06, | |
| "loss": 0.4952, | |
| "step": 13260 | |
| }, | |
| { | |
| "epoch": 0.6765920562891959, | |
| "grad_norm": 8.682141735935351, | |
| "learning_rate": 1.2509535480878325e-06, | |
| "loss": 0.4982, | |
| "step": 13270 | |
| }, | |
| { | |
| "epoch": 0.6771019221944629, | |
| "grad_norm": 5.875702610092279, | |
| "learning_rate": 1.2473789932209331e-06, | |
| "loss": 0.4626, | |
| "step": 13280 | |
| }, | |
| { | |
| "epoch": 0.6776117880997298, | |
| "grad_norm": 11.57688664010269, | |
| "learning_rate": 1.2438078543367377e-06, | |
| "loss": 0.5797, | |
| "step": 13290 | |
| }, | |
| { | |
| "epoch": 0.6781216540049967, | |
| "grad_norm": 7.625967931326347, | |
| "learning_rate": 1.2402401411739806e-06, | |
| "loss": 0.5266, | |
| "step": 13300 | |
| }, | |
| { | |
| "epoch": 0.6786315199102636, | |
| "grad_norm": 9.280506265403062, | |
| "learning_rate": 1.2366758634620615e-06, | |
| "loss": 0.5461, | |
| "step": 13310 | |
| }, | |
| { | |
| "epoch": 0.6791413858155305, | |
| "grad_norm": 22.898801072596648, | |
| "learning_rate": 1.2331150309210075e-06, | |
| "loss": 0.5171, | |
| "step": 13320 | |
| }, | |
| { | |
| "epoch": 0.6796512517207974, | |
| "grad_norm": 2.926333613028221, | |
| "learning_rate": 1.2295576532614506e-06, | |
| "loss": 0.4778, | |
| "step": 13330 | |
| }, | |
| { | |
| "epoch": 0.6801611176260643, | |
| "grad_norm": 9.094467060554189, | |
| "learning_rate": 1.226003740184602e-06, | |
| "loss": 0.4894, | |
| "step": 13340 | |
| }, | |
| { | |
| "epoch": 0.6806709835313313, | |
| "grad_norm": 8.219844480956706, | |
| "learning_rate": 1.2224533013822237e-06, | |
| "loss": 0.5202, | |
| "step": 13350 | |
| }, | |
| { | |
| "epoch": 0.6811808494365982, | |
| "grad_norm": 10.280263133918698, | |
| "learning_rate": 1.2189063465366064e-06, | |
| "loss": 0.6116, | |
| "step": 13360 | |
| }, | |
| { | |
| "epoch": 0.6816907153418651, | |
| "grad_norm": 12.133501298347303, | |
| "learning_rate": 1.2153628853205336e-06, | |
| "loss": 0.5026, | |
| "step": 13370 | |
| }, | |
| { | |
| "epoch": 0.682200581247132, | |
| "grad_norm": 7.117093334590749, | |
| "learning_rate": 1.2118229273972684e-06, | |
| "loss": 0.5053, | |
| "step": 13380 | |
| }, | |
| { | |
| "epoch": 0.6827104471523989, | |
| "grad_norm": 9.511696988963454, | |
| "learning_rate": 1.2082864824205138e-06, | |
| "loss": 0.4878, | |
| "step": 13390 | |
| }, | |
| { | |
| "epoch": 0.6832203130576658, | |
| "grad_norm": 13.917897942567095, | |
| "learning_rate": 1.2047535600343984e-06, | |
| "loss": 0.4947, | |
| "step": 13400 | |
| }, | |
| { | |
| "epoch": 0.6837301789629328, | |
| "grad_norm": 9.625477395299297, | |
| "learning_rate": 1.2012241698734408e-06, | |
| "loss": 0.5615, | |
| "step": 13410 | |
| }, | |
| { | |
| "epoch": 0.6842400448681997, | |
| "grad_norm": 9.318591854910585, | |
| "learning_rate": 1.1976983215625285e-06, | |
| "loss": 0.5031, | |
| "step": 13420 | |
| }, | |
| { | |
| "epoch": 0.6847499107734666, | |
| "grad_norm": 10.712263091770266, | |
| "learning_rate": 1.1941760247168893e-06, | |
| "loss": 0.5237, | |
| "step": 13430 | |
| }, | |
| { | |
| "epoch": 0.6852597766787335, | |
| "grad_norm": 4.196412737538298, | |
| "learning_rate": 1.1906572889420655e-06, | |
| "loss": 0.4728, | |
| "step": 13440 | |
| }, | |
| { | |
| "epoch": 0.6857696425840004, | |
| "grad_norm": 7.784431942907173, | |
| "learning_rate": 1.1871421238338917e-06, | |
| "loss": 0.5148, | |
| "step": 13450 | |
| }, | |
| { | |
| "epoch": 0.6862795084892673, | |
| "grad_norm": 6.63260049619592, | |
| "learning_rate": 1.1836305389784588e-06, | |
| "loss": 0.5399, | |
| "step": 13460 | |
| }, | |
| { | |
| "epoch": 0.6867893743945342, | |
| "grad_norm": 4.469197479198041, | |
| "learning_rate": 1.1801225439521003e-06, | |
| "loss": 0.4436, | |
| "step": 13470 | |
| }, | |
| { | |
| "epoch": 0.6872992402998012, | |
| "grad_norm": 14.254868714509106, | |
| "learning_rate": 1.176618148321356e-06, | |
| "loss": 0.4758, | |
| "step": 13480 | |
| }, | |
| { | |
| "epoch": 0.6878091062050681, | |
| "grad_norm": 3.7814670801049695, | |
| "learning_rate": 1.1731173616429514e-06, | |
| "loss": 0.4495, | |
| "step": 13490 | |
| }, | |
| { | |
| "epoch": 0.688318972110335, | |
| "grad_norm": 5.161779138546176, | |
| "learning_rate": 1.16962019346377e-06, | |
| "loss": 0.4625, | |
| "step": 13500 | |
| }, | |
| { | |
| "epoch": 0.6888288380156019, | |
| "grad_norm": 10.993559836072276, | |
| "learning_rate": 1.1661266533208274e-06, | |
| "loss": 0.5132, | |
| "step": 13510 | |
| }, | |
| { | |
| "epoch": 0.6893387039208688, | |
| "grad_norm": 10.908435267498524, | |
| "learning_rate": 1.1626367507412443e-06, | |
| "loss": 0.4824, | |
| "step": 13520 | |
| }, | |
| { | |
| "epoch": 0.6898485698261357, | |
| "grad_norm": 6.856450877530777, | |
| "learning_rate": 1.1591504952422243e-06, | |
| "loss": 0.5084, | |
| "step": 13530 | |
| }, | |
| { | |
| "epoch": 0.6903584357314027, | |
| "grad_norm": 4.982024981770672, | |
| "learning_rate": 1.1556678963310222e-06, | |
| "loss": 0.5541, | |
| "step": 13540 | |
| }, | |
| { | |
| "epoch": 0.6908683016366696, | |
| "grad_norm": 4.906060311571326, | |
| "learning_rate": 1.152188963504922e-06, | |
| "loss": 0.4915, | |
| "step": 13550 | |
| }, | |
| { | |
| "epoch": 0.6913781675419365, | |
| "grad_norm": 29.493906037211964, | |
| "learning_rate": 1.148713706251211e-06, | |
| "loss": 0.4951, | |
| "step": 13560 | |
| }, | |
| { | |
| "epoch": 0.6918880334472034, | |
| "grad_norm": 8.97699120582468, | |
| "learning_rate": 1.1452421340471514e-06, | |
| "loss": 0.5329, | |
| "step": 13570 | |
| }, | |
| { | |
| "epoch": 0.6923978993524703, | |
| "grad_norm": 5.699186471774075, | |
| "learning_rate": 1.1417742563599568e-06, | |
| "loss": 0.5537, | |
| "step": 13580 | |
| }, | |
| { | |
| "epoch": 0.6929077652577372, | |
| "grad_norm": 6.4448339310820435, | |
| "learning_rate": 1.1383100826467653e-06, | |
| "loss": 0.5556, | |
| "step": 13590 | |
| }, | |
| { | |
| "epoch": 0.6934176311630041, | |
| "grad_norm": 4.7807112185144565, | |
| "learning_rate": 1.1348496223546162e-06, | |
| "loss": 0.498, | |
| "step": 13600 | |
| }, | |
| { | |
| "epoch": 0.6939274970682711, | |
| "grad_norm": 17.810465336560668, | |
| "learning_rate": 1.131392884920417e-06, | |
| "loss": 0.4422, | |
| "step": 13610 | |
| }, | |
| { | |
| "epoch": 0.694437362973538, | |
| "grad_norm": 119.33725015633846, | |
| "learning_rate": 1.1279398797709293e-06, | |
| "loss": 0.5334, | |
| "step": 13620 | |
| }, | |
| { | |
| "epoch": 0.6949472288788049, | |
| "grad_norm": 8.000152099817969, | |
| "learning_rate": 1.1244906163227295e-06, | |
| "loss": 0.5405, | |
| "step": 13630 | |
| }, | |
| { | |
| "epoch": 0.6954570947840718, | |
| "grad_norm": 5.822401607695464, | |
| "learning_rate": 1.1210451039821965e-06, | |
| "loss": 0.5167, | |
| "step": 13640 | |
| }, | |
| { | |
| "epoch": 0.6959669606893387, | |
| "grad_norm": 11.163768611048248, | |
| "learning_rate": 1.1176033521454758e-06, | |
| "loss": 0.4986, | |
| "step": 13650 | |
| }, | |
| { | |
| "epoch": 0.6964768265946056, | |
| "grad_norm": 6.217197235375367, | |
| "learning_rate": 1.114165370198458e-06, | |
| "loss": 0.5233, | |
| "step": 13660 | |
| }, | |
| { | |
| "epoch": 0.6969866924998726, | |
| "grad_norm": 10.532638730674618, | |
| "learning_rate": 1.1107311675167558e-06, | |
| "loss": 0.5622, | |
| "step": 13670 | |
| }, | |
| { | |
| "epoch": 0.6974965584051395, | |
| "grad_norm": 9.982847064316687, | |
| "learning_rate": 1.1073007534656712e-06, | |
| "loss": 0.5113, | |
| "step": 13680 | |
| }, | |
| { | |
| "epoch": 0.6980064243104064, | |
| "grad_norm": 11.286075006858832, | |
| "learning_rate": 1.1038741374001793e-06, | |
| "loss": 0.508, | |
| "step": 13690 | |
| }, | |
| { | |
| "epoch": 0.6985162902156733, | |
| "grad_norm": 13.847965895609494, | |
| "learning_rate": 1.1004513286648922e-06, | |
| "loss": 0.4808, | |
| "step": 13700 | |
| }, | |
| { | |
| "epoch": 0.6990261561209402, | |
| "grad_norm": 3.750806517115084, | |
| "learning_rate": 1.0970323365940443e-06, | |
| "loss": 0.4999, | |
| "step": 13710 | |
| }, | |
| { | |
| "epoch": 0.6995360220262071, | |
| "grad_norm": 4.49734953510548, | |
| "learning_rate": 1.093617170511459e-06, | |
| "loss": 0.5244, | |
| "step": 13720 | |
| }, | |
| { | |
| "epoch": 0.700045887931474, | |
| "grad_norm": 3.5635278395876604, | |
| "learning_rate": 1.0902058397305268e-06, | |
| "loss": 0.5034, | |
| "step": 13730 | |
| }, | |
| { | |
| "epoch": 0.700555753836741, | |
| "grad_norm": 7.630693193745469, | |
| "learning_rate": 1.0867983535541785e-06, | |
| "loss": 0.5485, | |
| "step": 13740 | |
| }, | |
| { | |
| "epoch": 0.7010656197420079, | |
| "grad_norm": 20.88551938761256, | |
| "learning_rate": 1.0833947212748597e-06, | |
| "loss": 0.5409, | |
| "step": 13750 | |
| }, | |
| { | |
| "epoch": 0.7015754856472748, | |
| "grad_norm": 20.23048041237047, | |
| "learning_rate": 1.07999495217451e-06, | |
| "loss": 0.5645, | |
| "step": 13760 | |
| }, | |
| { | |
| "epoch": 0.7020853515525417, | |
| "grad_norm": 4.762845462769943, | |
| "learning_rate": 1.0765990555245275e-06, | |
| "loss": 0.5196, | |
| "step": 13770 | |
| }, | |
| { | |
| "epoch": 0.7025952174578086, | |
| "grad_norm": 5.618109909368287, | |
| "learning_rate": 1.0732070405857562e-06, | |
| "loss": 0.5406, | |
| "step": 13780 | |
| }, | |
| { | |
| "epoch": 0.7031050833630755, | |
| "grad_norm": 10.640295482886632, | |
| "learning_rate": 1.0698189166084501e-06, | |
| "loss": 0.4254, | |
| "step": 13790 | |
| }, | |
| { | |
| "epoch": 0.7036149492683424, | |
| "grad_norm": 4.911022067208778, | |
| "learning_rate": 1.0664346928322547e-06, | |
| "loss": 0.5113, | |
| "step": 13800 | |
| }, | |
| { | |
| "epoch": 0.7041248151736094, | |
| "grad_norm": 9.081453850201648, | |
| "learning_rate": 1.063054378486178e-06, | |
| "loss": 0.5947, | |
| "step": 13810 | |
| }, | |
| { | |
| "epoch": 0.7046346810788763, | |
| "grad_norm": 6.77043436264162, | |
| "learning_rate": 1.059677982788567e-06, | |
| "loss": 0.4636, | |
| "step": 13820 | |
| }, | |
| { | |
| "epoch": 0.7051445469841432, | |
| "grad_norm": 7.5449704839316425, | |
| "learning_rate": 1.056305514947082e-06, | |
| "loss": 0.4751, | |
| "step": 13830 | |
| }, | |
| { | |
| "epoch": 0.7056544128894101, | |
| "grad_norm": 11.690143819464407, | |
| "learning_rate": 1.0529369841586743e-06, | |
| "loss": 0.4504, | |
| "step": 13840 | |
| }, | |
| { | |
| "epoch": 0.706164278794677, | |
| "grad_norm": 4.31175777202485, | |
| "learning_rate": 1.0495723996095533e-06, | |
| "loss": 0.4495, | |
| "step": 13850 | |
| }, | |
| { | |
| "epoch": 0.7066741446999439, | |
| "grad_norm": 13.202730632004345, | |
| "learning_rate": 1.046211770475173e-06, | |
| "loss": 0.4702, | |
| "step": 13860 | |
| }, | |
| { | |
| "epoch": 0.7071840106052109, | |
| "grad_norm": 7.249743343482479, | |
| "learning_rate": 1.0428551059201964e-06, | |
| "loss": 0.4651, | |
| "step": 13870 | |
| }, | |
| { | |
| "epoch": 0.7076938765104778, | |
| "grad_norm": 7.318651462556601, | |
| "learning_rate": 1.039502415098476e-06, | |
| "loss": 0.5248, | |
| "step": 13880 | |
| }, | |
| { | |
| "epoch": 0.7082037424157447, | |
| "grad_norm": 8.280309990686213, | |
| "learning_rate": 1.0361537071530277e-06, | |
| "loss": 0.5498, | |
| "step": 13890 | |
| }, | |
| { | |
| "epoch": 0.7087136083210116, | |
| "grad_norm": 7.704516862067239, | |
| "learning_rate": 1.0328089912160055e-06, | |
| "loss": 0.5427, | |
| "step": 13900 | |
| }, | |
| { | |
| "epoch": 0.7092234742262785, | |
| "grad_norm": 5.1790978507620276, | |
| "learning_rate": 1.0294682764086794e-06, | |
| "loss": 0.5272, | |
| "step": 13910 | |
| }, | |
| { | |
| "epoch": 0.7097333401315454, | |
| "grad_norm": 6.6383138164611974, | |
| "learning_rate": 1.0261315718414028e-06, | |
| "loss": 0.5172, | |
| "step": 13920 | |
| }, | |
| { | |
| "epoch": 0.7102432060368123, | |
| "grad_norm": 10.878766001476842, | |
| "learning_rate": 1.0227988866135995e-06, | |
| "loss": 0.4949, | |
| "step": 13930 | |
| }, | |
| { | |
| "epoch": 0.7107530719420793, | |
| "grad_norm": 10.28873182386919, | |
| "learning_rate": 1.0194702298137251e-06, | |
| "loss": 0.4777, | |
| "step": 13940 | |
| }, | |
| { | |
| "epoch": 0.7112629378473462, | |
| "grad_norm": 6.691725960477623, | |
| "learning_rate": 1.016145610519256e-06, | |
| "loss": 0.5215, | |
| "step": 13950 | |
| }, | |
| { | |
| "epoch": 0.7117728037526131, | |
| "grad_norm": 8.8432699489868, | |
| "learning_rate": 1.0128250377966545e-06, | |
| "loss": 0.5249, | |
| "step": 13960 | |
| }, | |
| { | |
| "epoch": 0.71228266965788, | |
| "grad_norm": 6.205838612221995, | |
| "learning_rate": 1.009508520701347e-06, | |
| "loss": 0.4928, | |
| "step": 13970 | |
| }, | |
| { | |
| "epoch": 0.7127925355631469, | |
| "grad_norm": 7.022108699926237, | |
| "learning_rate": 1.006196068277704e-06, | |
| "loss": 0.4703, | |
| "step": 13980 | |
| }, | |
| { | |
| "epoch": 0.7133024014684138, | |
| "grad_norm": 4.790738966564985, | |
| "learning_rate": 1.002887689559005e-06, | |
| "loss": 0.4899, | |
| "step": 13990 | |
| }, | |
| { | |
| "epoch": 0.7138122673736808, | |
| "grad_norm": 7.20639824109062, | |
| "learning_rate": 9.99583393567428e-07, | |
| "loss": 0.4402, | |
| "step": 14000 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 19613, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 1, | |
| "save_steps": 1000, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": false | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 1.0829919385288704e+16, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |