{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "global_step": 1635,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.04,
      "learning_rate": 9.877675840978595e-06,
      "loss": 2.0614,
      "step": 20
    },
    {
      "epoch": 0.07,
      "learning_rate": 9.755351681957187e-06,
      "loss": 1.6662,
      "step": 40
    },
    {
      "epoch": 0.11,
      "learning_rate": 9.633027522935781e-06,
      "loss": 1.6353,
      "step": 60
    },
    {
      "epoch": 0.15,
      "learning_rate": 9.510703363914374e-06,
      "loss": 1.6422,
      "step": 80
    },
    {
      "epoch": 0.18,
      "learning_rate": 9.388379204892966e-06,
      "loss": 1.6402,
      "step": 100
    },
    {
      "epoch": 0.22,
      "learning_rate": 9.26605504587156e-06,
      "loss": 1.5641,
      "step": 120
    },
    {
      "epoch": 0.26,
      "learning_rate": 9.143730886850154e-06,
      "loss": 1.4371,
      "step": 140
    },
    {
      "epoch": 0.29,
      "learning_rate": 9.021406727828746e-06,
      "loss": 1.3502,
      "step": 160
    },
    {
      "epoch": 0.33,
      "learning_rate": 8.89908256880734e-06,
      "loss": 1.3114,
      "step": 180
    },
    {
      "epoch": 0.37,
      "learning_rate": 8.776758409785935e-06,
      "loss": 1.2897,
      "step": 200
    },
    {
      "epoch": 0.4,
      "learning_rate": 8.654434250764527e-06,
      "loss": 1.2489,
      "step": 220
    },
    {
      "epoch": 0.44,
      "learning_rate": 8.53211009174312e-06,
      "loss": 1.2658,
      "step": 240
    },
    {
      "epoch": 0.48,
      "learning_rate": 8.409785932721713e-06,
      "loss": 1.2609,
      "step": 260
    },
    {
      "epoch": 0.51,
      "learning_rate": 8.287461773700306e-06,
      "loss": 1.2638,
      "step": 280
    },
    {
      "epoch": 0.55,
      "learning_rate": 8.1651376146789e-06,
      "loss": 1.2487,
      "step": 300
    },
    {
      "epoch": 0.59,
      "learning_rate": 8.042813455657494e-06,
      "loss": 1.1931,
      "step": 320
    },
    {
      "epoch": 0.62,
      "learning_rate": 7.920489296636086e-06,
      "loss": 1.2257,
      "step": 340
    },
    {
      "epoch": 0.66,
      "learning_rate": 7.79816513761468e-06,
      "loss": 1.1958,
      "step": 360
    },
    {
      "epoch": 0.7,
      "learning_rate": 7.675840978593273e-06,
      "loss": 1.1877,
      "step": 380
    },
    {
      "epoch": 0.73,
      "learning_rate": 7.553516819571866e-06,
      "loss": 1.1715,
      "step": 400
    },
    {
      "epoch": 0.77,
      "learning_rate": 7.431192660550459e-06,
      "loss": 1.1104,
      "step": 420
    },
    {
      "epoch": 0.81,
      "learning_rate": 7.308868501529053e-06,
      "loss": 1.1759,
      "step": 440
    },
    {
      "epoch": 0.84,
      "learning_rate": 7.186544342507645e-06,
      "loss": 1.1948,
      "step": 460
    },
    {
      "epoch": 0.88,
      "learning_rate": 7.0642201834862385e-06,
      "loss": 1.1691,
      "step": 480
    },
    {
      "epoch": 0.92,
      "learning_rate": 6.941896024464833e-06,
      "loss": 1.1641,
      "step": 500
    },
    {
      "epoch": 0.95,
      "learning_rate": 6.819571865443425e-06,
      "loss": 1.1882,
      "step": 520
    },
    {
      "epoch": 0.99,
      "learning_rate": 6.697247706422019e-06,
      "loss": 1.1756,
      "step": 540
    },
    {
      "epoch": 1.03,
      "learning_rate": 6.574923547400612e-06,
      "loss": 0.94,
      "step": 560
    },
    {
      "epoch": 1.06,
      "learning_rate": 6.452599388379206e-06,
      "loss": 0.8573,
      "step": 580
    },
    {
      "epoch": 1.1,
      "learning_rate": 6.330275229357799e-06,
      "loss": 0.7569,
      "step": 600
    },
    {
      "epoch": 1.14,
      "learning_rate": 6.207951070336392e-06,
      "loss": 0.7624,
      "step": 620
    },
    {
      "epoch": 1.17,
      "learning_rate": 6.085626911314986e-06,
      "loss": 0.8716,
      "step": 640
    },
    {
      "epoch": 1.21,
      "learning_rate": 5.963302752293578e-06,
      "loss": 0.7996,
      "step": 660
    },
    {
      "epoch": 1.25,
      "learning_rate": 5.840978593272172e-06,
      "loss": 0.829,
      "step": 680
    },
    {
      "epoch": 1.28,
      "learning_rate": 5.7186544342507654e-06,
      "loss": 0.8173,
      "step": 700
    },
    {
      "epoch": 1.32,
      "learning_rate": 5.596330275229358e-06,
      "loss": 0.7988,
      "step": 720
    },
    {
      "epoch": 1.36,
      "learning_rate": 5.474006116207952e-06,
      "loss": 0.833,
      "step": 740
    },
    {
      "epoch": 1.39,
      "learning_rate": 5.351681957186545e-06,
      "loss": 0.7905,
      "step": 760
    },
    {
      "epoch": 1.43,
      "learning_rate": 5.229357798165137e-06,
      "loss": 0.7307,
      "step": 780
    },
    {
      "epoch": 1.47,
      "learning_rate": 5.1070336391437315e-06,
      "loss": 0.7253,
      "step": 800
    },
    {
      "epoch": 1.5,
      "learning_rate": 4.984709480122325e-06,
      "loss": 0.8086,
      "step": 820
    },
    {
      "epoch": 1.54,
      "learning_rate": 4.862385321100918e-06,
      "loss": 0.7031,
      "step": 840
    },
    {
      "epoch": 1.58,
      "learning_rate": 4.740061162079511e-06,
      "loss": 0.8438,
      "step": 860
    },
    {
      "epoch": 1.61,
      "learning_rate": 4.617737003058104e-06,
      "loss": 0.8416,
      "step": 880
    },
    {
      "epoch": 1.65,
      "learning_rate": 4.4954128440366975e-06,
      "loss": 0.779,
      "step": 900
    },
    {
      "epoch": 1.69,
      "learning_rate": 4.373088685015291e-06,
      "loss": 0.7943,
      "step": 920
    },
    {
      "epoch": 1.72,
      "learning_rate": 4.250764525993884e-06,
      "loss": 0.7652,
      "step": 940
    },
    {
      "epoch": 1.76,
      "learning_rate": 4.128440366972478e-06,
      "loss": 0.8063,
      "step": 960
    },
    {
      "epoch": 1.8,
      "learning_rate": 4.00611620795107e-06,
      "loss": 0.7567,
      "step": 980
    },
    {
      "epoch": 1.83,
      "learning_rate": 3.8837920489296635e-06,
      "loss": 0.7479,
      "step": 1000
    },
    {
      "epoch": 1.87,
      "learning_rate": 3.7614678899082575e-06,
      "loss": 0.7807,
      "step": 1020
    },
    {
      "epoch": 1.91,
      "learning_rate": 3.6391437308868503e-06,
      "loss": 0.8079,
      "step": 1040
    },
    {
      "epoch": 1.94,
      "learning_rate": 3.5168195718654435e-06,
      "loss": 0.771,
      "step": 1060
    },
    {
      "epoch": 1.98,
      "learning_rate": 3.394495412844037e-06,
      "loss": 0.7851,
      "step": 1080
    },
    {
      "epoch": 2.02,
      "learning_rate": 3.2721712538226303e-06,
      "loss": 0.6836,
      "step": 1100
    },
    {
      "epoch": 2.06,
      "learning_rate": 3.149847094801223e-06,
      "loss": 0.4451,
      "step": 1120
    },
    {
      "epoch": 2.09,
      "learning_rate": 3.0275229357798168e-06,
      "loss": 0.3817,
      "step": 1140
    },
    {
      "epoch": 2.13,
      "learning_rate": 2.90519877675841e-06,
      "loss": 0.4478,
      "step": 1160
    },
    {
      "epoch": 2.17,
      "learning_rate": 2.782874617737003e-06,
      "loss": 0.4355,
      "step": 1180
    },
    {
      "epoch": 2.2,
      "learning_rate": 2.6605504587155968e-06,
      "loss": 0.3843,
      "step": 1200
    },
    {
      "epoch": 2.24,
      "learning_rate": 2.5382262996941896e-06,
      "loss": 0.4202,
      "step": 1220
    },
    {
      "epoch": 2.28,
      "learning_rate": 2.415902140672783e-06,
      "loss": 0.4912,
      "step": 1240
    },
    {
      "epoch": 2.31,
      "learning_rate": 2.2935779816513764e-06,
      "loss": 0.449,
      "step": 1260
    },
    {
      "epoch": 2.35,
      "learning_rate": 2.1712538226299696e-06,
      "loss": 0.4334,
      "step": 1280
    },
    {
      "epoch": 2.39,
      "learning_rate": 2.048929663608563e-06,
      "loss": 0.3713,
      "step": 1300
    },
    {
      "epoch": 2.42,
      "learning_rate": 1.9266055045871564e-06,
      "loss": 0.4785,
      "step": 1320
    },
    {
      "epoch": 2.46,
      "learning_rate": 1.8042813455657492e-06,
      "loss": 0.4187,
      "step": 1340
    },
    {
      "epoch": 2.5,
      "learning_rate": 1.6819571865443426e-06,
      "loss": 0.4006,
      "step": 1360
    },
    {
      "epoch": 2.53,
      "learning_rate": 1.559633027522936e-06,
      "loss": 0.448,
      "step": 1380
    },
    {
      "epoch": 2.57,
      "learning_rate": 1.437308868501529e-06,
      "loss": 0.3649,
      "step": 1400
    },
    {
      "epoch": 2.61,
      "learning_rate": 1.3149847094801224e-06,
      "loss": 0.3852,
      "step": 1420
    },
    {
      "epoch": 2.64,
      "learning_rate": 1.1926605504587159e-06,
      "loss": 0.3508,
      "step": 1440
    },
    {
      "epoch": 2.68,
      "learning_rate": 1.070336391437309e-06,
      "loss": 0.3937,
      "step": 1460
    },
    {
      "epoch": 2.72,
      "learning_rate": 9.480122324159022e-07,
      "loss": 0.4453,
      "step": 1480
    },
    {
      "epoch": 2.75,
      "learning_rate": 8.256880733944956e-07,
      "loss": 0.3777,
      "step": 1500
    },
    {
      "epoch": 2.79,
      "learning_rate": 7.033639143730888e-07,
      "loss": 0.446,
      "step": 1520
    },
    {
      "epoch": 2.83,
      "learning_rate": 5.81039755351682e-07,
      "loss": 0.3668,
      "step": 1540
    },
    {
      "epoch": 2.86,
      "learning_rate": 4.587155963302753e-07,
      "loss": 0.3983,
      "step": 1560
    },
    {
      "epoch": 2.9,
      "learning_rate": 3.363914373088685e-07,
      "loss": 0.3924,
      "step": 1580
    },
    {
      "epoch": 2.94,
      "learning_rate": 2.140672782874618e-07,
      "loss": 0.4269,
      "step": 1600
    },
    {
      "epoch": 2.97,
      "learning_rate": 9.174311926605506e-08,
      "loss": 0.4145,
      "step": 1620
    },
    {
      "epoch": 3.0,
      "step": 1635,
      "total_flos": 32093975347200.0,
      "train_loss": 0.847750219875884,
      "train_runtime": 10989.1179,
      "train_samples_per_second": 3.566,
      "train_steps_per_second": 0.149
    }
  ],
  "max_steps": 1635,
  "num_train_epochs": 3,
  "total_flos": 32093975347200.0,
  "trial_name": null,
  "trial_params": null
}