{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.0,
  "eval_steps": 500,
  "global_step": 625,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.08,
      "grad_norm": 15.03039869641152,
      "learning_rate": 1.5873015873015873e-06,
      "loss": 1.8679,
      "step": 10
    },
    {
      "epoch": 0.16,
      "grad_norm": 8.34761758965131,
      "learning_rate": 3.1746031746031746e-06,
      "loss": 1.6424,
      "step": 20
    },
    {
      "epoch": 0.24,
      "grad_norm": 7.899297623511452,
      "learning_rate": 4.761904761904762e-06,
      "loss": 1.3325,
      "step": 30
    },
    {
      "epoch": 0.32,
      "grad_norm": 6.940007282917506,
      "learning_rate": 6.349206349206349e-06,
      "loss": 1.2171,
      "step": 40
    },
    {
      "epoch": 0.4,
      "grad_norm": 7.127061848980475,
      "learning_rate": 7.936507936507936e-06,
      "loss": 1.1657,
      "step": 50
    },
    {
      "epoch": 0.48,
      "grad_norm": 6.383989972682173,
      "learning_rate": 9.523809523809525e-06,
      "loss": 1.1137,
      "step": 60
    },
    {
      "epoch": 0.56,
      "grad_norm": 6.840866307970571,
      "learning_rate": 9.996172565322375e-06,
      "loss": 1.1119,
      "step": 70
    },
    {
      "epoch": 0.64,
      "grad_norm": 7.0228583831508535,
      "learning_rate": 9.97744005136599e-06,
      "loss": 1.1425,
      "step": 80
    },
    {
      "epoch": 0.72,
      "grad_norm": 6.036501026822485,
      "learning_rate": 9.943157907471825e-06,
      "loss": 1.1189,
      "step": 90
    },
    {
      "epoch": 0.8,
      "grad_norm": 6.1766677083533885,
      "learning_rate": 9.893433231795864e-06,
      "loss": 1.1229,
      "step": 100
    },
    {
      "epoch": 0.88,
      "grad_norm": 6.418808106696129,
      "learning_rate": 9.828421365296023e-06,
      "loss": 1.107,
      "step": 110
    },
    {
      "epoch": 0.96,
      "grad_norm": 6.962653276823899,
      "learning_rate": 9.748325406443647e-06,
      "loss": 1.1024,
      "step": 120
    },
    {
      "epoch": 1.04,
      "grad_norm": 4.841205898363944,
      "learning_rate": 9.653395576739504e-06,
      "loss": 0.9217,
      "step": 130
    },
    {
      "epoch": 1.12,
      "grad_norm": 5.321762138665447,
      "learning_rate": 9.543928439016445e-06,
      "loss": 0.6809,
      "step": 140
    },
    {
      "epoch": 1.2,
      "grad_norm": 6.422514398304575,
      "learning_rate": 9.42026597097071e-06,
      "loss": 0.7087,
      "step": 150
    },
    {
      "epoch": 1.28,
      "grad_norm": 4.679106489068451,
      "learning_rate": 9.282794496816244e-06,
      "loss": 0.7217,
      "step": 160
    },
    {
      "epoch": 1.3599999999999999,
      "grad_norm": 5.422510609716275,
      "learning_rate": 9.131943480399531e-06,
      "loss": 0.7567,
      "step": 170
    },
    {
      "epoch": 1.44,
      "grad_norm": 4.8817497021591,
      "learning_rate": 8.968184183545285e-06,
      "loss": 0.7154,
      "step": 180
    },
    {
      "epoch": 1.52,
      "grad_norm": 5.344993652088741,
      "learning_rate": 8.792028193824364e-06,
      "loss": 0.7325,
      "step": 190
    },
    {
      "epoch": 1.6,
      "grad_norm": 5.885034195284592,
      "learning_rate": 8.604025826343167e-06,
      "loss": 0.7385,
      "step": 200
    },
    {
      "epoch": 1.6800000000000002,
      "grad_norm": 5.602877653556014,
      "learning_rate": 8.404764404547404e-06,
      "loss": 0.7504,
      "step": 210
    },
    {
      "epoch": 1.76,
      "grad_norm": 5.288051996241332,
      "learning_rate": 8.194866425410984e-06,
      "loss": 0.7257,
      "step": 220
    },
    {
      "epoch": 1.8399999999999999,
      "grad_norm": 6.146382669563802,
      "learning_rate": 7.974987614742066e-06,
      "loss": 0.7239,
      "step": 230
    },
    {
      "epoch": 1.92,
      "grad_norm": 5.404345327961935,
      "learning_rate": 7.745814878681516e-06,
      "loss": 0.7369,
      "step": 240
    },
    {
      "epoch": 2.0,
      "grad_norm": 5.789625889061605,
      "learning_rate": 7.50806415779332e-06,
      "loss": 0.7162,
      "step": 250
    },
    {
      "epoch": 2.08,
      "grad_norm": 4.381011441257434,
      "learning_rate": 7.262478190450834e-06,
      "loss": 0.2779,
      "step": 260
    },
    {
      "epoch": 2.16,
      "grad_norm": 3.9113444849282537,
      "learning_rate": 7.0098241925061215e-06,
      "loss": 0.2615,
      "step": 270
    },
    {
      "epoch": 2.24,
      "grad_norm": 3.763625408769967,
      "learning_rate": 6.750891460491093e-06,
      "loss": 0.2667,
      "step": 280
    },
    {
      "epoch": 2.32,
      "grad_norm": 3.9835139188072644,
      "learning_rate": 6.486488905838143e-06,
      "loss": 0.2786,
      "step": 290
    },
    {
      "epoch": 2.4,
      "grad_norm": 4.41365324655939,
      "learning_rate": 6.2174425278234115e-06,
      "loss": 0.2695,
      "step": 300
    },
    {
      "epoch": 2.48,
      "grad_norm": 3.868085785594723,
      "learning_rate": 5.944592833127253e-06,
      "loss": 0.2647,
      "step": 310
    },
    {
      "epoch": 2.56,
      "grad_norm": 3.922599746763818,
      "learning_rate": 5.668792210073255e-06,
      "loss": 0.2784,
      "step": 320
    },
    {
      "epoch": 2.64,
      "grad_norm": 4.077382724425529,
      "learning_rate": 5.39090226574877e-06,
      "loss": 0.2814,
      "step": 330
    },
    {
      "epoch": 2.7199999999999998,
      "grad_norm": 3.647866852866338,
      "learning_rate": 5.111791134325793e-06,
      "loss": 0.2603,
      "step": 340
    },
    {
      "epoch": 2.8,
      "grad_norm": 3.9427050456877497,
      "learning_rate": 4.832330764991131e-06,
      "loss": 0.2709,
      "step": 350
    },
    {
      "epoch": 2.88,
      "grad_norm": 3.7980075000487874,
      "learning_rate": 4.553394197958339e-06,
      "loss": 0.2711,
      "step": 360
    },
    {
      "epoch": 2.96,
      "grad_norm": 4.435470823777743,
      "learning_rate": 4.275852837071309e-06,
      "loss": 0.2669,
      "step": 370
    },
    {
      "epoch": 3.04,
      "grad_norm": 2.0816448514818457,
      "learning_rate": 4.000573727519868e-06,
      "loss": 0.1745,
      "step": 380
    },
    {
      "epoch": 3.12,
      "grad_norm": 2.4217507805753624,
      "learning_rate": 3.7284168471719527e-06,
      "loss": 0.0652,
      "step": 390
    },
    {
      "epoch": 3.2,
      "grad_norm": 2.8401214654437164,
      "learning_rate": 3.4602324199842026e-06,
      "loss": 0.0673,
      "step": 400
    },
    {
      "epoch": 3.2800000000000002,
      "grad_norm": 2.6760620467462157,
      "learning_rate": 3.1968582598840234e-06,
      "loss": 0.0685,
      "step": 410
    },
    {
      "epoch": 3.36,
      "grad_norm": 2.1425362543433937,
      "learning_rate": 2.9391171534208185e-06,
      "loss": 0.0609,
      "step": 420
    },
    {
      "epoch": 3.44,
      "grad_norm": 2.030540419923713,
      "learning_rate": 2.6878142893630904e-06,
      "loss": 0.062,
      "step": 430
    },
    {
      "epoch": 3.52,
      "grad_norm": 1.984598490381961,
      "learning_rate": 2.4437347432713838e-06,
      "loss": 0.0672,
      "step": 440
    },
    {
      "epoch": 3.6,
      "grad_norm": 2.3256779456514973,
      "learning_rate": 2.207641024905322e-06,
      "loss": 0.0531,
      "step": 450
    },
    {
      "epoch": 3.68,
      "grad_norm": 2.702472865256339,
      "learning_rate": 1.9802706961266936e-06,
      "loss": 0.0651,
      "step": 460
    },
    {
      "epoch": 3.76,
      "grad_norm": 2.1547962960900087,
      "learning_rate": 1.7623340667403089e-06,
      "loss": 0.0593,
      "step": 470
    },
    {
      "epoch": 3.84,
      "grad_norm": 2.3691282553726714,
      "learning_rate": 1.5545119754708682e-06,
      "loss": 0.0473,
      "step": 480
    },
    {
      "epoch": 3.92,
      "grad_norm": 2.2286247738184746,
      "learning_rate": 1.3574536630081208e-06,
      "loss": 0.0559,
      "step": 490
    },
    {
      "epoch": 4.0,
      "grad_norm": 2.388028641223682,
      "learning_rate": 1.1717747437649657e-06,
      "loss": 0.057,
      "step": 500
    },
    {
      "epoch": 4.08,
      "grad_norm": 0.7037974392373446,
      "learning_rate": 9.980552826847635e-07,
      "loss": 0.0141,
      "step": 510
    },
    {
      "epoch": 4.16,
      "grad_norm": 1.067919434862055,
      "learning_rate": 8.368379831059592e-07,
      "loss": 0.0121,
      "step": 520
    },
    {
      "epoch": 4.24,
      "grad_norm": 1.0103052364255096,
      "learning_rate": 6.886264913451635e-07,
      "loss": 0.0111,
      "step": 530
    },
    {
      "epoch": 4.32,
      "grad_norm": 1.5619285730336336,
      "learning_rate": 5.538838232952104e-07,
      "loss": 0.0114,
      "step": 540
    },
    {
      "epoch": 4.4,
      "grad_norm": 0.8776634711386034,
      "learning_rate": 4.3303091795353024e-07,
      "loss": 0.0119,
      "step": 550
    },
    {
      "epoch": 4.48,
      "grad_norm": 0.5346038765999809,
      "learning_rate": 3.2644532239966444e-07,
      "loss": 0.0104,
      "step": 560
    },
    {
      "epoch": 4.5600000000000005,
      "grad_norm": 1.1807971364317145,
      "learning_rate": 2.3446001233004333e-07,
      "loss": 0.0087,
      "step": 570
    },
    {
      "epoch": 4.64,
      "grad_norm": 1.2465130552788162,
      "learning_rate": 1.573623518347517e-07,
      "loss": 0.0081,
      "step": 580
    },
    {
      "epoch": 4.72,
      "grad_norm": 0.7152846447151024,
      "learning_rate": 9.539319566590766e-08,
      "loss": 0.0092,
      "step": 590
    },
    {
      "epoch": 4.8,
      "grad_norm": 0.4607875138185059,
      "learning_rate": 4.8746136802240716e-08,
      "loss": 0.0088,
      "step": 600
    },
    {
      "epoch": 4.88,
      "grad_norm": 0.9827771211863391,
      "learning_rate": 1.75669016604485e-08,
      "loss": 0.0081,
      "step": 610
    },
    {
      "epoch": 4.96,
      "grad_norm": 1.3414674305437924,
      "learning_rate": 1.952894842735531e-09,
      "loss": 0.0082,
      "step": 620
    },
    {
      "epoch": 5.0,
      "step": 625,
      "total_flos": 10563498835968.0,
      "train_loss": 0.46323055886030196,
      "train_runtime": 10955.0584,
      "train_samples_per_second": 0.456,
      "train_steps_per_second": 0.057
    }
  ],
  "logging_steps": 10,
  "max_steps": 625,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 10563498835968.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}
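
The object above is the trainer_state.json that the Hugging Face transformers Trainer writes when it checkpoints: log_history holds one record per logging step (every 10 steps here, per logging_steps) plus a final run summary with train_loss, train_runtime, and throughput. A minimal sketch of how this log could be inspected follows; it assumes the JSON is saved as trainer_state.json next to the script and that matplotlib is installed, neither of which is stated in the file itself.

import json

import matplotlib.pyplot as plt

# Load the serialized TrainerState shown above.
with open("trainer_state.json") as f:
    state = json.load(f)

# Per-step records carry a "loss" key; the final epoch-5.0 summary entry
# does not (it reports "train_loss" etc.), so this filter skips it.
records = [e for e in state["log_history"] if "loss" in e]
steps = [e["step"] for e in records]
losses = [e["loss"] for e in records]

plt.plot(steps, losses, marker="o")
plt.xlabel("global step")
plt.ylabel("training loss")
plt.title(f"{state['num_train_epochs']} epochs, {state['max_steps']} steps")
plt.savefig("loss_curve.png")

For this run the plot would show the characteristic staircase of a multi-epoch fine-tune: the loss drops sharply at each epoch boundary (roughly 1.1 to 0.72 to 0.27 to 0.06 to 0.01) as the model memorizes the training set, while the learning rate follows a warmup then decay toward zero.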