{
  "best_metric": 1.2769535779953003,
  "best_model_checkpoint": "mental-roberta-base-CD_baseline/checkpoint-1000",
  "epoch": 8.0,
  "eval_steps": 500,
  "global_step": 2000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.04,
      "grad_norm": 4.378986358642578,
      "learning_rate": 1.9900000000000003e-05,
      "loss": 2.389,
      "step": 10
    },
    {
      "epoch": 0.08,
      "grad_norm": 5.568194389343262,
      "learning_rate": 1.98e-05,
      "loss": 2.1424,
      "step": 20
    },
    {
      "epoch": 0.12,
      "grad_norm": 15.351970672607422,
      "learning_rate": 1.97e-05,
      "loss": 1.978,
      "step": 30
    },
    {
      "epoch": 0.16,
      "grad_norm": 6.168962478637695,
      "learning_rate": 1.9600000000000002e-05,
      "loss": 1.8678,
      "step": 40
    },
    {
      "epoch": 0.2,
      "grad_norm": 5.635171413421631,
      "learning_rate": 1.95e-05,
      "loss": 1.7668,
      "step": 50
    },
    {
      "epoch": 0.24,
      "grad_norm": 3.921550989151001,
      "learning_rate": 1.94e-05,
      "loss": 1.6985,
      "step": 60
    },
    {
      "epoch": 0.28,
      "grad_norm": 21.30545425415039,
      "learning_rate": 1.93e-05,
      "loss": 1.7725,
      "step": 70
    },
    {
      "epoch": 0.32,
      "grad_norm": 3.5897629261016846,
      "learning_rate": 1.9200000000000003e-05,
      "loss": 1.9746,
      "step": 80
    },
    {
      "epoch": 0.36,
      "grad_norm": 1.9138140678405762,
      "learning_rate": 1.91e-05,
      "loss": 1.4866,
      "step": 90
    },
    {
      "epoch": 0.4,
      "grad_norm": 3.1776585578918457,
      "learning_rate": 1.9e-05,
      "loss": 1.737,
      "step": 100
    },
    {
      "epoch": 0.44,
      "grad_norm": 7.035165309906006,
      "learning_rate": 1.8900000000000002e-05,
      "loss": 1.98,
      "step": 110
    },
    {
      "epoch": 0.48,
      "grad_norm": 4.098265647888184,
      "learning_rate": 1.88e-05,
      "loss": 1.6998,
      "step": 120
    },
    {
      "epoch": 0.52,
      "grad_norm": 4.009236812591553,
      "learning_rate": 1.8700000000000004e-05,
      "loss": 1.6323,
      "step": 130
    },
    {
      "epoch": 0.56,
      "grad_norm": 2.8470473289489746,
      "learning_rate": 1.86e-05,
      "loss": 1.8226,
      "step": 140
    },
    {
      "epoch": 0.6,
      "grad_norm": 49.7083740234375,
      "learning_rate": 1.8500000000000002e-05,
      "loss": 1.7309,
      "step": 150
    },
    {
      "epoch": 0.64,
      "grad_norm": 4.127583980560303,
      "learning_rate": 1.8400000000000003e-05,
      "loss": 1.7606,
      "step": 160
    },
    {
      "epoch": 0.68,
      "grad_norm": 2.7759668827056885,
      "learning_rate": 1.83e-05,
      "loss": 1.4,
      "step": 170
    },
    {
      "epoch": 0.72,
      "grad_norm": 6.541387557983398,
      "learning_rate": 1.8200000000000002e-05,
      "loss": 1.647,
      "step": 180
    },
    {
      "epoch": 0.76,
      "grad_norm": 5.6331706047058105,
      "learning_rate": 1.8100000000000003e-05,
      "loss": 1.5166,
      "step": 190
    },
    {
      "epoch": 0.8,
      "grad_norm": 4.045393466949463,
      "learning_rate": 1.8e-05,
      "loss": 1.7162,
      "step": 200
    },
    {
      "epoch": 0.84,
      "grad_norm": 6.527945041656494,
      "learning_rate": 1.79e-05,
      "loss": 1.6117,
      "step": 210
    },
    {
      "epoch": 0.88,
      "grad_norm": 3.565760374069214,
      "learning_rate": 1.7800000000000002e-05,
      "loss": 1.7464,
      "step": 220
    },
    {
      "epoch": 0.92,
      "grad_norm": 4.898312568664551,
      "learning_rate": 1.77e-05,
      "loss": 1.561,
      "step": 230
    },
    {
      "epoch": 0.96,
      "grad_norm": 5.3700103759765625,
      "learning_rate": 1.76e-05,
      "loss": 1.5109,
      "step": 240
    },
    {
      "epoch": 1.0,
      "grad_norm": 15.15703010559082,
      "learning_rate": 1.7500000000000002e-05,
      "loss": 1.7629,
      "step": 250
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.47391304347826085,
      "eval_f1": 0.38428281400288544,
      "eval_loss": 1.5328094959259033,
      "eval_precision": 0.3534519719474924,
      "eval_recall": 0.47391304347826085,
      "eval_runtime": 1.0211,
      "eval_samples_per_second": 225.243,
      "eval_steps_per_second": 28.4,
      "step": 250
    },
    {
      "epoch": 1.04,
      "grad_norm": 11.55996322631836,
      "learning_rate": 1.7400000000000003e-05,
      "loss": 1.5392,
      "step": 260
    },
    {
      "epoch": 1.08,
      "grad_norm": 9.391329765319824,
      "learning_rate": 1.73e-05,
      "loss": 1.5147,
      "step": 270
    },
    {
      "epoch": 1.12,
      "grad_norm": 7.441842079162598,
      "learning_rate": 1.72e-05,
      "loss": 1.5203,
      "step": 280
    },
    {
      "epoch": 1.16,
      "grad_norm": 9.423290252685547,
      "learning_rate": 1.7100000000000002e-05,
      "loss": 1.34,
      "step": 290
    },
    {
      "epoch": 1.2,
      "grad_norm": 6.1367573738098145,
      "learning_rate": 1.7e-05,
      "loss": 1.368,
      "step": 300
    },
    {
      "epoch": 1.24,
      "grad_norm": 8.754669189453125,
      "learning_rate": 1.69e-05,
      "loss": 1.4532,
      "step": 310
    },
    {
      "epoch": 1.28,
      "grad_norm": 12.048688888549805,
      "learning_rate": 1.6800000000000002e-05,
      "loss": 1.5626,
      "step": 320
    },
    {
      "epoch": 1.32,
      "grad_norm": 8.47139835357666,
      "learning_rate": 1.67e-05,
      "loss": 1.4029,
      "step": 330
    },
    {
      "epoch": 1.36,
      "grad_norm": 13.538434982299805,
      "learning_rate": 1.66e-05,
      "loss": 1.2561,
      "step": 340
    },
    {
      "epoch": 1.4,
      "grad_norm": 11.475522994995117,
      "learning_rate": 1.65e-05,
      "loss": 1.5971,
      "step": 350
    },
    {
      "epoch": 1.44,
      "grad_norm": 14.821061134338379,
      "learning_rate": 1.64e-05,
      "loss": 1.6198,
      "step": 360
    },
    {
      "epoch": 1.48,
      "grad_norm": 12.751998901367188,
      "learning_rate": 1.63e-05,
      "loss": 1.28,
      "step": 370
    },
    {
      "epoch": 1.52,
      "grad_norm": 10.061027526855469,
      "learning_rate": 1.62e-05,
      "loss": 1.226,
      "step": 380
    },
    {
      "epoch": 1.56,
      "grad_norm": 21.2222900390625,
      "learning_rate": 1.6100000000000002e-05,
      "loss": 1.1642,
      "step": 390
    },
    {
      "epoch": 1.6,
      "grad_norm": 15.691130638122559,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 1.4167,
      "step": 400
    },
    {
      "epoch": 1.64,
      "grad_norm": 17.03270721435547,
      "learning_rate": 1.5900000000000004e-05,
      "loss": 1.3666,
      "step": 410
    },
    {
      "epoch": 1.68,
      "grad_norm": 8.01834487915039,
      "learning_rate": 1.58e-05,
      "loss": 1.237,
      "step": 420
    },
    {
      "epoch": 1.72,
      "grad_norm": 18.645164489746094,
      "learning_rate": 1.5700000000000002e-05,
      "loss": 1.4502,
      "step": 430
    },
    {
      "epoch": 1.76,
      "grad_norm": 12.169353485107422,
      "learning_rate": 1.5600000000000003e-05,
      "loss": 1.5965,
      "step": 440
    },
    {
      "epoch": 1.8,
      "grad_norm": 13.11835765838623,
      "learning_rate": 1.55e-05,
      "loss": 1.1895,
      "step": 450
    },
    {
      "epoch": 1.84,
      "grad_norm": 29.902376174926758,
      "learning_rate": 1.54e-05,
      "loss": 1.4158,
      "step": 460
    },
    {
      "epoch": 1.88,
      "grad_norm": 8.317435264587402,
      "learning_rate": 1.5300000000000003e-05,
      "loss": 1.2013,
      "step": 470
    },
    {
      "epoch": 1.92,
      "grad_norm": 10.2247314453125,
      "learning_rate": 1.5200000000000002e-05,
      "loss": 1.3446,
      "step": 480
    },
    {
      "epoch": 1.96,
      "grad_norm": 11.59114933013916,
      "learning_rate": 1.5100000000000001e-05,
      "loss": 1.2326,
      "step": 490
    },
    {
      "epoch": 2.0,
      "grad_norm": 11.2176513671875,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 1.4033,
      "step": 500
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.5260869565217391,
      "eval_f1": 0.5009687691265158,
      "eval_loss": 1.3281586170196533,
      "eval_precision": 0.5395269845664888,
      "eval_recall": 0.5260869565217391,
      "eval_runtime": 1.0114,
      "eval_samples_per_second": 227.398,
      "eval_steps_per_second": 28.672,
      "step": 500
    },
    {
      "epoch": 2.04,
      "grad_norm": 11.444624900817871,
      "learning_rate": 1.4900000000000001e-05,
      "loss": 1.1357,
      "step": 510
    },
    {
      "epoch": 2.08,
      "grad_norm": 13.202922821044922,
      "learning_rate": 1.48e-05,
      "loss": 1.154,
      "step": 520
    },
    {
      "epoch": 2.12,
      "grad_norm": 16.034732818603516,
      "learning_rate": 1.4700000000000002e-05,
      "loss": 1.0562,
      "step": 530
    },
    {
      "epoch": 2.16,
      "grad_norm": 7.477020263671875,
      "learning_rate": 1.46e-05,
      "loss": 1.0975,
      "step": 540
    },
    {
      "epoch": 2.2,
      "grad_norm": 12.851716995239258,
      "learning_rate": 1.45e-05,
      "loss": 1.1655,
      "step": 550
    },
    {
      "epoch": 2.24,
      "grad_norm": 13.109989166259766,
      "learning_rate": 1.4400000000000001e-05,
      "loss": 1.0101,
      "step": 560
    },
    {
      "epoch": 2.28,
      "grad_norm": 14.998272895812988,
      "learning_rate": 1.43e-05,
      "loss": 0.9762,
      "step": 570
    },
    {
      "epoch": 2.32,
      "grad_norm": 22.052188873291016,
      "learning_rate": 1.4200000000000001e-05,
      "loss": 1.0931,
      "step": 580
    },
    {
      "epoch": 2.36,
      "grad_norm": 20.38056182861328,
      "learning_rate": 1.41e-05,
      "loss": 1.0295,
      "step": 590
    },
    {
      "epoch": 2.4,
      "grad_norm": 14.597001075744629,
      "learning_rate": 1.4e-05,
      "loss": 1.0001,
      "step": 600
    },
    {
      "epoch": 2.44,
      "grad_norm": 18.28936195373535,
      "learning_rate": 1.39e-05,
      "loss": 1.1457,
      "step": 610
    },
    {
      "epoch": 2.48,
      "grad_norm": 4.188849925994873,
      "learning_rate": 1.38e-05,
      "loss": 1.1505,
      "step": 620
    },
    {
      "epoch": 2.52,
      "grad_norm": 19.210851669311523,
      "learning_rate": 1.3700000000000003e-05,
      "loss": 0.9443,
      "step": 630
    },
    {
      "epoch": 2.56,
      "grad_norm": 14.057235717773438,
      "learning_rate": 1.3600000000000002e-05,
      "loss": 1.1828,
      "step": 640
    },
    {
      "epoch": 2.6,
      "grad_norm": 18.877193450927734,
      "learning_rate": 1.3500000000000001e-05,
      "loss": 1.2169,
      "step": 650
    },
    {
      "epoch": 2.64,
      "grad_norm": 11.197978019714355,
      "learning_rate": 1.3400000000000002e-05,
      "loss": 1.0629,
      "step": 660
    },
    {
      "epoch": 2.68,
      "grad_norm": 34.57655715942383,
      "learning_rate": 1.3300000000000001e-05,
      "loss": 1.183,
      "step": 670
    },
    {
      "epoch": 2.72,
      "grad_norm": 15.682902336120605,
      "learning_rate": 1.3200000000000002e-05,
      "loss": 0.9974,
      "step": 680
    },
    {
      "epoch": 2.76,
      "grad_norm": 12.469610214233398,
      "learning_rate": 1.3100000000000002e-05,
      "loss": 1.1861,
      "step": 690
    },
    {
      "epoch": 2.8,
      "grad_norm": 19.878341674804688,
      "learning_rate": 1.3000000000000001e-05,
      "loss": 1.0039,
      "step": 700
    },
    {
      "epoch": 2.84,
      "grad_norm": 18.73297691345215,
      "learning_rate": 1.2900000000000002e-05,
      "loss": 1.0106,
      "step": 710
    },
    {
      "epoch": 2.88,
      "grad_norm": 23.088468551635742,
      "learning_rate": 1.2800000000000001e-05,
      "loss": 1.0126,
      "step": 720
    },
    {
      "epoch": 2.92,
      "grad_norm": 16.777488708496094,
      "learning_rate": 1.27e-05,
      "loss": 0.8662,
      "step": 730
    },
    {
      "epoch": 2.96,
      "grad_norm": 16.595956802368164,
      "learning_rate": 1.2600000000000001e-05,
      "loss": 1.0424,
      "step": 740
    },
    {
      "epoch": 3.0,
      "grad_norm": 12.72782039642334,
      "learning_rate": 1.25e-05,
      "loss": 0.7635,
      "step": 750
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.5608695652173913,
      "eval_f1": 0.5336963494705901,
      "eval_loss": 1.3101598024368286,
      "eval_precision": 0.537211136181388,
      "eval_recall": 0.5608695652173913,
      "eval_runtime": 1.0439,
      "eval_samples_per_second": 220.326,
      "eval_steps_per_second": 27.78,
      "step": 750
    },
    {
      "epoch": 3.04,
      "grad_norm": 8.257983207702637,
      "learning_rate": 1.2400000000000002e-05,
      "loss": 0.7983,
      "step": 760
    },
    {
      "epoch": 3.08,
      "grad_norm": 17.17106056213379,
      "learning_rate": 1.23e-05,
      "loss": 0.8106,
      "step": 770
    },
    {
      "epoch": 3.12,
      "grad_norm": 6.935492515563965,
      "learning_rate": 1.22e-05,
      "loss": 0.5296,
      "step": 780
    },
    {
      "epoch": 3.16,
      "grad_norm": 26.85825538635254,
      "learning_rate": 1.2100000000000001e-05,
      "loss": 0.9861,
      "step": 790
    },
    {
      "epoch": 3.2,
      "grad_norm": 10.665637016296387,
      "learning_rate": 1.2e-05,
      "loss": 0.9043,
      "step": 800
    },
    {
      "epoch": 3.24,
      "grad_norm": 34.452388763427734,
      "learning_rate": 1.1900000000000001e-05,
      "loss": 0.8038,
      "step": 810
    },
    {
      "epoch": 3.28,
      "grad_norm": 18.837228775024414,
      "learning_rate": 1.18e-05,
      "loss": 0.8004,
      "step": 820
    },
    {
      "epoch": 3.32,
      "grad_norm": 13.866151809692383,
      "learning_rate": 1.17e-05,
      "loss": 0.6666,
      "step": 830
    },
    {
      "epoch": 3.36,
      "grad_norm": 12.578895568847656,
      "learning_rate": 1.16e-05,
      "loss": 0.6488,
      "step": 840
    },
    {
      "epoch": 3.4,
      "grad_norm": 2.9693028926849365,
      "learning_rate": 1.15e-05,
      "loss": 0.9074,
      "step": 850
    },
    {
      "epoch": 3.44,
      "grad_norm": 11.259392738342285,
      "learning_rate": 1.14e-05,
      "loss": 0.7097,
      "step": 860
    },
    {
      "epoch": 3.48,
      "grad_norm": 8.34443187713623,
      "learning_rate": 1.13e-05,
      "loss": 0.7404,
      "step": 870
    },
    {
      "epoch": 3.52,
      "grad_norm": 22.02679443359375,
      "learning_rate": 1.1200000000000001e-05,
      "loss": 0.7031,
      "step": 880
    },
    {
      "epoch": 3.56,
      "grad_norm": 18.178070068359375,
      "learning_rate": 1.1100000000000002e-05,
      "loss": 0.7763,
      "step": 890
    },
    {
      "epoch": 3.6,
      "grad_norm": 38.39835739135742,
      "learning_rate": 1.1000000000000001e-05,
      "loss": 0.8073,
      "step": 900
    },
    {
      "epoch": 3.64,
      "grad_norm": 27.14322280883789,
      "learning_rate": 1.0900000000000002e-05,
      "loss": 0.745,
      "step": 910
    },
    {
      "epoch": 3.68,
      "grad_norm": 14.671334266662598,
      "learning_rate": 1.0800000000000002e-05,
      "loss": 0.7552,
      "step": 920
    },
    {
      "epoch": 3.72,
      "grad_norm": 6.039632320404053,
      "learning_rate": 1.0700000000000001e-05,
      "loss": 0.6619,
      "step": 930
    },
    {
      "epoch": 3.76,
      "grad_norm": 20.04022979736328,
      "learning_rate": 1.0600000000000002e-05,
      "loss": 0.7781,
      "step": 940
    },
    {
      "epoch": 3.8,
      "grad_norm": 20.994312286376953,
      "learning_rate": 1.0500000000000001e-05,
      "loss": 0.8624,
      "step": 950
    },
    {
      "epoch": 3.84,
      "grad_norm": 27.07198143005371,
      "learning_rate": 1.04e-05,
      "loss": 0.728,
      "step": 960
    },
    {
      "epoch": 3.88,
      "grad_norm": 17.27962875366211,
      "learning_rate": 1.0300000000000001e-05,
      "loss": 0.6684,
      "step": 970
    },
    {
      "epoch": 3.92,
      "grad_norm": 20.02420997619629,
      "learning_rate": 1.02e-05,
      "loss": 0.6107,
      "step": 980
    },
    {
      "epoch": 3.96,
      "grad_norm": 4.354977607727051,
      "learning_rate": 1.0100000000000002e-05,
      "loss": 0.6526,
      "step": 990
    },
    {
      "epoch": 4.0,
      "grad_norm": 29.899433135986328,
      "learning_rate": 1e-05,
      "loss": 0.8688,
      "step": 1000
    },
    {
      "epoch": 4.0,
      "eval_accuracy": 0.5956521739130435,
      "eval_f1": 0.5794783047946798,
      "eval_loss": 1.2769535779953003,
      "eval_precision": 0.580110231445542,
      "eval_recall": 0.5956521739130435,
      "eval_runtime": 1.0338,
      "eval_samples_per_second": 222.477,
      "eval_steps_per_second": 28.051,
      "step": 1000
    },
    {
      "epoch": 4.04,
      "grad_norm": 13.660650253295898,
      "learning_rate": 9.9e-06,
      "loss": 0.5165,
      "step": 1010
    },
    {
      "epoch": 4.08,
      "grad_norm": 6.362195014953613,
      "learning_rate": 9.800000000000001e-06,
      "loss": 0.3606,
      "step": 1020
    },
    {
      "epoch": 4.12,
      "grad_norm": 4.183453559875488,
      "learning_rate": 9.7e-06,
      "loss": 0.4387,
      "step": 1030
    },
    {
      "epoch": 4.16,
      "grad_norm": 26.112573623657227,
      "learning_rate": 9.600000000000001e-06,
      "loss": 0.586,
      "step": 1040
    },
    {
      "epoch": 4.2,
      "grad_norm": 19.786802291870117,
      "learning_rate": 9.5e-06,
      "loss": 0.5322,
      "step": 1050
    },
    {
      "epoch": 4.24,
      "grad_norm": 8.961174011230469,
      "learning_rate": 9.4e-06,
      "loss": 0.6079,
      "step": 1060
    },
    {
      "epoch": 4.28,
      "grad_norm": 15.39157772064209,
      "learning_rate": 9.3e-06,
      "loss": 0.4597,
      "step": 1070
    },
    {
      "epoch": 4.32,
      "grad_norm": 24.373172760009766,
      "learning_rate": 9.200000000000002e-06,
      "loss": 0.6407,
      "step": 1080
    },
    {
      "epoch": 4.36,
      "grad_norm": 20.961610794067383,
      "learning_rate": 9.100000000000001e-06,
      "loss": 0.4461,
      "step": 1090
    },
    {
      "epoch": 4.4,
      "grad_norm": 16.351242065429688,
      "learning_rate": 9e-06,
      "loss": 0.5012,
      "step": 1100
    },
    {
      "epoch": 4.44,
      "grad_norm": 10.587532043457031,
      "learning_rate": 8.900000000000001e-06,
      "loss": 0.6065,
      "step": 1110
    },
    {
      "epoch": 4.48,
      "grad_norm": 4.534018516540527,
      "learning_rate": 8.8e-06,
      "loss": 0.2906,
      "step": 1120
    },
    {
      "epoch": 4.52,
      "grad_norm": 1.747666597366333,
      "learning_rate": 8.700000000000001e-06,
      "loss": 0.4286,
      "step": 1130
    },
    {
      "epoch": 4.56,
      "grad_norm": 17.067201614379883,
      "learning_rate": 8.6e-06,
      "loss": 0.4721,
      "step": 1140
    },
    {
      "epoch": 4.6,
      "grad_norm": 15.651820182800293,
      "learning_rate": 8.5e-06,
      "loss": 0.5677,
      "step": 1150
    },
    {
      "epoch": 4.64,
      "grad_norm": 20.833948135375977,
      "learning_rate": 8.400000000000001e-06,
      "loss": 0.4825,
      "step": 1160
    },
    {
      "epoch": 4.68,
      "grad_norm": 1.568106770515442,
      "learning_rate": 8.3e-06,
      "loss": 0.5056,
      "step": 1170
    },
    {
      "epoch": 4.72,
      "grad_norm": 21.512529373168945,
      "learning_rate": 8.2e-06,
      "loss": 0.4923,
      "step": 1180
    },
    {
      "epoch": 4.76,
      "grad_norm": 11.180012702941895,
      "learning_rate": 8.1e-06,
      "loss": 0.6553,
      "step": 1190
    },
    {
      "epoch": 4.8,
      "grad_norm": 17.37482452392578,
      "learning_rate": 8.000000000000001e-06,
      "loss": 0.3976,
      "step": 1200
    },
    {
      "epoch": 4.84,
      "grad_norm": 15.096935272216797,
      "learning_rate": 7.9e-06,
      "loss": 0.6105,
      "step": 1210
    },
    {
      "epoch": 4.88,
      "grad_norm": 8.122323036193848,
      "learning_rate": 7.800000000000002e-06,
      "loss": 0.4211,
      "step": 1220
    },
    {
      "epoch": 4.92,
      "grad_norm": 15.952132225036621,
      "learning_rate": 7.7e-06,
      "loss": 0.6625,
      "step": 1230
    },
    {
      "epoch": 4.96,
      "grad_norm": 18.230487823486328,
      "learning_rate": 7.600000000000001e-06,
      "loss": 0.5536,
      "step": 1240
    },
    {
      "epoch": 5.0,
      "grad_norm": 43.71910095214844,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.4432,
      "step": 1250
    },
    {
      "epoch": 5.0,
      "eval_accuracy": 0.6086956521739131,
      "eval_f1": 0.594058293120158,
      "eval_loss": 1.4374070167541504,
      "eval_precision": 0.6038115384886801,
      "eval_recall": 0.6086956521739131,
      "eval_runtime": 1.0253,
      "eval_samples_per_second": 224.32,
      "eval_steps_per_second": 28.284,
      "step": 1250
    },
    {
      "epoch": 5.04,
      "grad_norm": 16.41998863220215,
      "learning_rate": 7.4e-06,
      "loss": 0.3491,
      "step": 1260
    },
    {
      "epoch": 5.08,
      "grad_norm": 16.400732040405273,
      "learning_rate": 7.3e-06,
      "loss": 0.3177,
      "step": 1270
    },
    {
      "epoch": 5.12,
      "grad_norm": 21.35452651977539,
      "learning_rate": 7.2000000000000005e-06,
      "loss": 0.3004,
      "step": 1280
    },
    {
      "epoch": 5.16,
      "grad_norm": 9.482089042663574,
      "learning_rate": 7.100000000000001e-06,
      "loss": 0.2588,
      "step": 1290
    },
    {
      "epoch": 5.2,
      "grad_norm": 14.700976371765137,
      "learning_rate": 7e-06,
      "loss": 0.3262,
      "step": 1300
    },
    {
      "epoch": 5.24,
      "grad_norm": 11.437019348144531,
      "learning_rate": 6.9e-06,
      "loss": 0.3954,
      "step": 1310
    },
    {
      "epoch": 5.28,
      "grad_norm": 10.643627166748047,
      "learning_rate": 6.800000000000001e-06,
      "loss": 0.1852,
      "step": 1320
    },
    {
      "epoch": 5.32,
      "grad_norm": 33.29022216796875,
      "learning_rate": 6.700000000000001e-06,
      "loss": 0.5024,
      "step": 1330
    },
    {
      "epoch": 5.36,
      "grad_norm": 27.292346954345703,
      "learning_rate": 6.600000000000001e-06,
      "loss": 0.2858,
      "step": 1340
    },
    {
      "epoch": 5.4,
      "grad_norm": 34.4414176940918,
      "learning_rate": 6.5000000000000004e-06,
      "loss": 0.2465,
      "step": 1350
    },
    {
      "epoch": 5.44,
      "grad_norm": 19.40508270263672,
      "learning_rate": 6.4000000000000006e-06,
      "loss": 0.3818,
      "step": 1360
    },
    {
      "epoch": 5.48,
      "grad_norm": 30.40248680114746,
      "learning_rate": 6.300000000000001e-06,
      "loss": 0.3123,
      "step": 1370
    },
    {
      "epoch": 5.52,
      "grad_norm": 16.111865997314453,
      "learning_rate": 6.200000000000001e-06,
      "loss": 0.2661,
      "step": 1380
    },
    {
      "epoch": 5.56,
      "grad_norm": 11.861916542053223,
      "learning_rate": 6.1e-06,
      "loss": 0.4043,
      "step": 1390
    },
    {
      "epoch": 5.6,
      "grad_norm": 39.95186996459961,
      "learning_rate": 6e-06,
      "loss": 0.3573,
      "step": 1400
    },
    {
      "epoch": 5.64,
      "grad_norm": 12.199712753295898,
      "learning_rate": 5.9e-06,
      "loss": 0.2876,
      "step": 1410
    },
    {
      "epoch": 5.68,
      "grad_norm": 37.9615364074707,
      "learning_rate": 5.8e-06,
      "loss": 0.3462,
      "step": 1420
    },
    {
      "epoch": 5.72,
      "grad_norm": 60.29270553588867,
      "learning_rate": 5.7e-06,
      "loss": 0.4482,
      "step": 1430
    },
    {
      "epoch": 5.76,
      "grad_norm": 10.729787826538086,
      "learning_rate": 5.600000000000001e-06,
      "loss": 0.3709,
      "step": 1440
    },
    {
      "epoch": 5.8,
      "grad_norm": 28.756450653076172,
      "learning_rate": 5.500000000000001e-06,
      "loss": 0.4185,
      "step": 1450
    },
    {
      "epoch": 5.84,
      "grad_norm": 12.605400085449219,
      "learning_rate": 5.400000000000001e-06,
      "loss": 0.3089,
      "step": 1460
    },
    {
      "epoch": 5.88,
      "grad_norm": 1.9020392894744873,
      "learning_rate": 5.300000000000001e-06,
      "loss": 0.1827,
      "step": 1470
    },
    {
      "epoch": 5.92,
      "grad_norm": 4.700870513916016,
      "learning_rate": 5.2e-06,
      "loss": 0.3849,
      "step": 1480
    },
    {
      "epoch": 5.96,
      "grad_norm": 8.93205451965332,
      "learning_rate": 5.1e-06,
      "loss": 0.3077,
      "step": 1490
    },
    {
      "epoch": 6.0,
      "grad_norm": 32.244083404541016,
      "learning_rate": 5e-06,
      "loss": 0.3489,
      "step": 1500
    },
    {
      "epoch": 6.0,
      "eval_accuracy": 0.5869565217391305,
      "eval_f1": 0.5901467318357858,
      "eval_loss": 1.481338620185852,
      "eval_precision": 0.6123818572731616,
      "eval_recall": 0.5869565217391305,
      "eval_runtime": 1.0437,
      "eval_samples_per_second": 220.36,
      "eval_steps_per_second": 27.784,
      "step": 1500
    },
    {
      "epoch": 6.04,
      "grad_norm": 10.3549165725708,
      "learning_rate": 4.9000000000000005e-06,
      "loss": 0.2758,
      "step": 1510
    },
    {
      "epoch": 6.08,
      "grad_norm": 12.63352108001709,
      "learning_rate": 4.800000000000001e-06,
      "loss": 0.205,
      "step": 1520
    },
    {
      "epoch": 6.12,
      "grad_norm": 9.606993675231934,
      "learning_rate": 4.7e-06,
      "loss": 0.2557,
      "step": 1530
    },
    {
      "epoch": 6.16,
      "grad_norm": 1.3631489276885986,
      "learning_rate": 4.600000000000001e-06,
      "loss": 0.255,
      "step": 1540
    },
    {
      "epoch": 6.2,
      "grad_norm": 22.932092666625977,
      "learning_rate": 4.5e-06,
      "loss": 0.3251,
      "step": 1550
    },
    {
      "epoch": 6.24,
      "grad_norm": 15.687180519104004,
      "learning_rate": 4.4e-06,
      "loss": 0.1323,
      "step": 1560
    },
    {
      "epoch": 6.28,
      "grad_norm": 0.9915614128112793,
      "learning_rate": 4.3e-06,
      "loss": 0.1807,
      "step": 1570
    },
    {
      "epoch": 6.32,
      "grad_norm": 1.9621161222457886,
      "learning_rate": 4.2000000000000004e-06,
      "loss": 0.2397,
      "step": 1580
    },
    {
      "epoch": 6.36,
      "grad_norm": 1.7371257543563843,
      "learning_rate": 4.1e-06,
      "loss": 0.2391,
      "step": 1590
    },
    {
      "epoch": 6.4,
      "grad_norm": 3.6397323608398438,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.2326,
      "step": 1600
    },
    {
      "epoch": 6.44,
      "grad_norm": 19.90898323059082,
      "learning_rate": 3.900000000000001e-06,
      "loss": 0.1249,
      "step": 1610
    },
    {
      "epoch": 6.48,
      "grad_norm": 24.969144821166992,
      "learning_rate": 3.8000000000000005e-06,
      "loss": 0.2336,
      "step": 1620
    },
    {
      "epoch": 6.52,
      "grad_norm": 8.904400825500488,
      "learning_rate": 3.7e-06,
      "loss": 0.244,
      "step": 1630
    },
    {
      "epoch": 6.56,
      "grad_norm": 18.82139778137207,
      "learning_rate": 3.6000000000000003e-06,
      "loss": 0.2858,
      "step": 1640
    },
    {
      "epoch": 6.6,
      "grad_norm": 24.00533676147461,
      "learning_rate": 3.5e-06,
      "loss": 0.3073,
      "step": 1650
    },
    {
      "epoch": 6.64,
      "grad_norm": 7.122366905212402,
      "learning_rate": 3.4000000000000005e-06,
      "loss": 0.1949,
      "step": 1660
    },
    {
      "epoch": 6.68,
      "grad_norm": 3.693237781524658,
      "learning_rate": 3.3000000000000006e-06,
      "loss": 0.1558,
      "step": 1670
    },
    {
      "epoch": 6.72,
      "grad_norm": 16.943511962890625,
      "learning_rate": 3.2000000000000003e-06,
      "loss": 0.3307,
      "step": 1680
    },
    {
      "epoch": 6.76,
      "grad_norm": 1.202619194984436,
      "learning_rate": 3.1000000000000004e-06,
      "loss": 0.1836,
      "step": 1690
    },
    {
      "epoch": 6.8,
      "grad_norm": 2.2889859676361084,
      "learning_rate": 3e-06,
      "loss": 0.1612,
      "step": 1700
    },
    {
      "epoch": 6.84,
      "grad_norm": 24.393550872802734,
      "learning_rate": 2.9e-06,
      "loss": 0.2287,
      "step": 1710
    },
    {
      "epoch": 6.88,
      "grad_norm": 15.723957061767578,
      "learning_rate": 2.8000000000000003e-06,
      "loss": 0.1942,
      "step": 1720
    },
    {
      "epoch": 6.92,
      "grad_norm": 11.214823722839355,
      "learning_rate": 2.7000000000000004e-06,
      "loss": 0.2522,
      "step": 1730
    },
    {
      "epoch": 6.96,
      "grad_norm": 37.70159149169922,
      "learning_rate": 2.6e-06,
      "loss": 0.1987,
      "step": 1740
    },
    {
      "epoch": 7.0,
      "grad_norm": 19.29395866394043,
      "learning_rate": 2.5e-06,
      "loss": 0.3154,
      "step": 1750
    },
    {
      "epoch": 7.0,
      "eval_accuracy": 0.591304347826087,
      "eval_f1": 0.59225954110776,
      "eval_loss": 1.561277151107788,
      "eval_precision": 0.6051404726650386,
      "eval_recall": 0.591304347826087,
      "eval_runtime": 1.0235,
      "eval_samples_per_second": 224.722,
      "eval_steps_per_second": 28.334,
      "step": 1750
    },
    {
      "epoch": 7.04,
      "grad_norm": 8.514004707336426,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 0.141,
      "step": 1760
    },
    {
      "epoch": 7.08,
      "grad_norm": 14.699041366577148,
      "learning_rate": 2.3000000000000004e-06,
      "loss": 0.1434,
      "step": 1770
    },
    {
      "epoch": 7.12,
      "grad_norm": 3.5710041522979736,
      "learning_rate": 2.2e-06,
      "loss": 0.111,
      "step": 1780
    },
    {
      "epoch": 7.16,
      "grad_norm": 6.2862043380737305,
      "learning_rate": 2.1000000000000002e-06,
      "loss": 0.1849,
      "step": 1790
    },
    {
      "epoch": 7.2,
      "grad_norm": 0.646734356880188,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.1165,
      "step": 1800
    },
    {
      "epoch": 7.24,
      "grad_norm": 11.804097175598145,
      "learning_rate": 1.9000000000000002e-06,
      "loss": 0.1244,
      "step": 1810
    },
    {
      "epoch": 7.28,
      "grad_norm": 0.8081532716751099,
      "learning_rate": 1.8000000000000001e-06,
      "loss": 0.1073,
      "step": 1820
    },
    {
      "epoch": 7.32,
      "grad_norm": 11.27471923828125,
      "learning_rate": 1.7000000000000002e-06,
      "loss": 0.1358,
      "step": 1830
    },
    {
      "epoch": 7.36,
      "grad_norm": 5.462678909301758,
      "learning_rate": 1.6000000000000001e-06,
      "loss": 0.2306,
      "step": 1840
    },
    {
      "epoch": 7.4,
      "grad_norm": 12.020944595336914,
      "learning_rate": 1.5e-06,
      "loss": 0.1748,
      "step": 1850
    },
    {
      "epoch": 7.44,
      "grad_norm": 1.7447506189346313,
      "learning_rate": 1.4000000000000001e-06,
      "loss": 0.1637,
      "step": 1860
    },
    {
      "epoch": 7.48,
      "grad_norm": 0.5202816724777222,
      "learning_rate": 1.3e-06,
      "loss": 0.2389,
      "step": 1870
    },
    {
      "epoch": 7.52,
      "grad_norm": 2.9551496505737305,
      "learning_rate": 1.2000000000000002e-06,
      "loss": 0.1258,
      "step": 1880
    },
    {
      "epoch": 7.56,
      "grad_norm": 23.559301376342773,
      "learning_rate": 1.1e-06,
      "loss": 0.2493,
      "step": 1890
    },
    {
      "epoch": 7.6,
      "grad_norm": 41.33919906616211,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 0.3827,
      "step": 1900
    },
    {
      "epoch": 7.64,
      "grad_norm": 0.6776648759841919,
      "learning_rate": 9.000000000000001e-07,
      "loss": 0.2181,
      "step": 1910
    },
    {
      "epoch": 7.68,
      "grad_norm": 22.915781021118164,
      "learning_rate": 8.000000000000001e-07,
      "loss": 0.1071,
      "step": 1920
    },
    {
      "epoch": 7.72,
      "grad_norm": 2.265955686569214,
      "learning_rate": 7.000000000000001e-07,
      "loss": 0.1141,
      "step": 1930
    },
    {
      "epoch": 7.76,
      "grad_norm": 12.892213821411133,
      "learning_rate": 6.000000000000001e-07,
      "loss": 0.2067,
      "step": 1940
    },
    {
      "epoch": 7.8,
      "grad_norm": 9.663905143737793,
      "learning_rate": 5.000000000000001e-07,
      "loss": 0.1923,
      "step": 1950
    },
    {
      "epoch": 7.84,
      "grad_norm": 1.7676267623901367,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 0.1594,
      "step": 1960
    },
    {
      "epoch": 7.88,
      "grad_norm": 10.767372131347656,
      "learning_rate": 3.0000000000000004e-07,
      "loss": 0.2527,
      "step": 1970
    },
    {
      "epoch": 7.92,
      "grad_norm": 0.9327512979507446,
      "learning_rate": 2.0000000000000002e-07,
      "loss": 0.1834,
      "step": 1980
    },
    {
      "epoch": 7.96,
      "grad_norm": 5.60221004486084,
      "learning_rate": 1.0000000000000001e-07,
      "loss": 0.1309,
      "step": 1990
    },
    {
      "epoch": 8.0,
      "grad_norm": 5.293955326080322,
      "learning_rate": 0.0,
      "loss": 0.1553,
      "step": 2000
    },
    {
      "epoch": 8.0,
      "eval_accuracy": 0.5956521739130435,
      "eval_f1": 0.5930584359493084,
      "eval_loss": 1.61259126663208,
      "eval_precision": 0.5985692492099816,
      "eval_recall": 0.5956521739130435,
      "eval_runtime": 1.0349,
      "eval_samples_per_second": 222.237,
      "eval_steps_per_second": 28.021,
      "step": 2000
    },
    {
      "epoch": 8.0,
      "step": 2000,
      "total_flos": 1768238889429720.0,
      "train_loss": 0.7754807794690132,
      "train_runtime": 287.3312,
      "train_samples_per_second": 55.573,
      "train_steps_per_second": 6.961
    }
  ],
  "logging_steps": 10,
  "max_steps": 2000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 8,
  "save_steps": 500,
  "total_flos": 1768238889429720.0,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}