Diffstat (limited to 'models/trpg-final/checkpoint-380/trainer_state.json')
-rw-r--r--  models/trpg-final/checkpoint-380/trainer_state.json | 566
1 file changed, 0 insertions(+), 566 deletions(-)
diff --git a/models/trpg-final/checkpoint-380/trainer_state.json b/models/trpg-final/checkpoint-380/trainer_state.json
deleted file mode 100644
index a90f665..0000000
--- a/models/trpg-final/checkpoint-380/trainer_state.json
+++ /dev/null
@@ -1,566 +0,0 @@
-{
- "best_global_step": null,
- "best_metric": null,
- "best_model_checkpoint": null,
- "epoch": 20.0,
- "eval_steps": 500,
- "global_step": 380,
- "is_hyper_param_search": false,
- "is_local_process_zero": true,
- "is_world_process_zero": true,
- "log_history": [
- {
- "epoch": 0.2631578947368421,
- "grad_norm": Infinity,
- "learning_rate": 4.9473684210526315e-05,
- "loss": 2.4394,
- "step": 5
- },
- {
- "epoch": 0.5263157894736842,
- "grad_norm": 6.091742515563965,
- "learning_rate": 4.881578947368421e-05,
- "loss": 1.776,
- "step": 10
- },
- {
- "epoch": 0.7894736842105263,
- "grad_norm": 6.011572360992432,
- "learning_rate": 4.8157894736842105e-05,
- "loss": 1.6479,
- "step": 15
- },
- {
- "epoch": 1.0526315789473684,
- "grad_norm": 5.232929706573486,
- "learning_rate": 4.75e-05,
- "loss": 1.3513,
- "step": 20
- },
- {
- "epoch": 1.3157894736842106,
- "grad_norm": 3.361309289932251,
- "learning_rate": 4.68421052631579e-05,
- "loss": 1.2262,
- "step": 25
- },
- {
- "epoch": 1.5789473684210527,
- "grad_norm": 3.8729989528656006,
- "learning_rate": 4.618421052631579e-05,
- "loss": 1.1481,
- "step": 30
- },
- {
- "epoch": 1.8421052631578947,
- "grad_norm": 8.15366268157959,
- "learning_rate": 4.552631578947369e-05,
- "loss": 0.9278,
- "step": 35
- },
- {
- "epoch": 2.1052631578947367,
- "grad_norm": 5.728829383850098,
- "learning_rate": 4.486842105263158e-05,
- "loss": 0.8307,
- "step": 40
- },
- {
- "epoch": 2.3684210526315788,
- "grad_norm": 4.391445159912109,
- "learning_rate": 4.421052631578947e-05,
- "loss": 0.8832,
- "step": 45
- },
- {
- "epoch": 2.6315789473684212,
- "grad_norm": 5.178050518035889,
- "learning_rate": 4.355263157894737e-05,
- "loss": 0.626,
- "step": 50
- },
- {
- "epoch": 2.8947368421052633,
- "grad_norm": 4.203180313110352,
- "learning_rate": 4.289473684210527e-05,
- "loss": 0.6467,
- "step": 55
- },
- {
- "epoch": 3.1578947368421053,
- "grad_norm": 2.902172327041626,
- "learning_rate": 4.223684210526316e-05,
- "loss": 0.3657,
- "step": 60
- },
- {
- "epoch": 3.4210526315789473,
- "grad_norm": 1.7815818786621094,
- "learning_rate": 4.157894736842106e-05,
- "loss": 0.3913,
- "step": 65
- },
- {
- "epoch": 3.6842105263157894,
- "grad_norm": 5.220995903015137,
- "learning_rate": 4.092105263157895e-05,
- "loss": 0.6332,
- "step": 70
- },
- {
- "epoch": 3.9473684210526314,
- "grad_norm": 2.012242555618286,
- "learning_rate": 4.026315789473684e-05,
- "loss": 0.4408,
- "step": 75
- },
- {
- "epoch": 4.2105263157894735,
- "grad_norm": 1.9606434106826782,
- "learning_rate": 3.960526315789474e-05,
- "loss": 0.5089,
- "step": 80
- },
- {
- "epoch": 4.473684210526316,
- "grad_norm": 1.584269404411316,
- "learning_rate": 3.894736842105263e-05,
- "loss": 0.297,
- "step": 85
- },
- {
- "epoch": 4.7368421052631575,
- "grad_norm": 2.2993006706237793,
- "learning_rate": 3.828947368421053e-05,
- "loss": 0.2525,
- "step": 90
- },
- {
- "epoch": 5.0,
- "grad_norm": 1.7839508056640625,
- "learning_rate": 3.7631578947368425e-05,
- "loss": 0.3642,
- "step": 95
- },
- {
- "epoch": 5.2631578947368425,
- "grad_norm": 2.162219285964966,
- "learning_rate": 3.6973684210526316e-05,
- "loss": 0.2416,
- "step": 100
- },
- {
- "epoch": 5.526315789473684,
- "grad_norm": 2.49100399017334,
- "learning_rate": 3.6315789473684214e-05,
- "loss": 0.2607,
- "step": 105
- },
- {
- "epoch": 5.7894736842105265,
- "grad_norm": 6.302850723266602,
- "learning_rate": 3.5657894736842106e-05,
- "loss": 0.3316,
- "step": 110
- },
- {
- "epoch": 6.052631578947368,
- "grad_norm": 1.1700443029403687,
- "learning_rate": 3.5e-05,
- "loss": 0.2009,
- "step": 115
- },
- {
- "epoch": 6.315789473684211,
- "grad_norm": 1.686787724494934,
- "learning_rate": 3.4342105263157895e-05,
- "loss": 0.2794,
- "step": 120
- },
- {
- "epoch": 6.578947368421053,
- "grad_norm": 6.972183704376221,
- "learning_rate": 3.368421052631579e-05,
- "loss": 0.3693,
- "step": 125
- },
- {
- "epoch": 6.842105263157895,
- "grad_norm": 3.670428991317749,
- "learning_rate": 3.302631578947369e-05,
- "loss": 0.2268,
- "step": 130
- },
- {
- "epoch": 7.105263157894737,
- "grad_norm": 0.7313272953033447,
- "learning_rate": 3.236842105263158e-05,
- "loss": 0.1025,
- "step": 135
- },
- {
- "epoch": 7.368421052631579,
- "grad_norm": 2.2111823558807373,
- "learning_rate": 3.1710526315789473e-05,
- "loss": 0.2386,
- "step": 140
- },
- {
- "epoch": 7.631578947368421,
- "grad_norm": 0.6066373586654663,
- "learning_rate": 3.105263157894737e-05,
- "loss": 0.2176,
- "step": 145
- },
- {
- "epoch": 7.894736842105263,
- "grad_norm": 1.489353060722351,
- "learning_rate": 3.0394736842105266e-05,
- "loss": 0.1689,
- "step": 150
- },
- {
- "epoch": 8.157894736842104,
- "grad_norm": 0.5530461668968201,
- "learning_rate": 2.9736842105263157e-05,
- "loss": 0.1457,
- "step": 155
- },
- {
- "epoch": 8.421052631578947,
- "grad_norm": 2.413187026977539,
- "learning_rate": 2.9078947368421055e-05,
- "loss": 0.2149,
- "step": 160
- },
- {
- "epoch": 8.68421052631579,
- "grad_norm": 0.7150534987449646,
- "learning_rate": 2.842105263157895e-05,
- "loss": 0.1202,
- "step": 165
- },
- {
- "epoch": 8.947368421052632,
- "grad_norm": 5.491703510284424,
- "learning_rate": 2.776315789473684e-05,
- "loss": 0.2105,
- "step": 170
- },
- {
- "epoch": 9.210526315789474,
- "grad_norm": 0.81364506483078,
- "learning_rate": 2.710526315789474e-05,
- "loss": 0.0898,
- "step": 175
- },
- {
- "epoch": 9.473684210526315,
- "grad_norm": 0.8343147039413452,
- "learning_rate": 2.644736842105263e-05,
- "loss": 0.1286,
- "step": 180
- },
- {
- "epoch": 9.736842105263158,
- "grad_norm": 0.5138881206512451,
- "learning_rate": 2.578947368421053e-05,
- "loss": 0.1681,
- "step": 185
- },
- {
- "epoch": 10.0,
- "grad_norm": 0.5581791400909424,
- "learning_rate": 2.5131578947368423e-05,
- "loss": 0.1773,
- "step": 190
- },
- {
- "epoch": 10.263157894736842,
- "grad_norm": 0.6555180549621582,
- "learning_rate": 2.4473684210526318e-05,
- "loss": 0.0637,
- "step": 195
- },
- {
- "epoch": 10.526315789473685,
- "grad_norm": 1.0848801136016846,
- "learning_rate": 2.3815789473684212e-05,
- "loss": 0.1363,
- "step": 200
- },
- {
- "epoch": 10.789473684210526,
- "grad_norm": 0.3970225751399994,
- "learning_rate": 2.3157894736842107e-05,
- "loss": 0.0784,
- "step": 205
- },
- {
- "epoch": 11.052631578947368,
- "grad_norm": 2.0135111808776855,
- "learning_rate": 2.25e-05,
- "loss": 0.3391,
- "step": 210
- },
- {
- "epoch": 11.31578947368421,
- "grad_norm": 2.3919425010681152,
- "learning_rate": 2.1842105263157896e-05,
- "loss": 0.0865,
- "step": 215
- },
- {
- "epoch": 11.578947368421053,
- "grad_norm": 1.5701793432235718,
- "learning_rate": 2.118421052631579e-05,
- "loss": 0.2226,
- "step": 220
- },
- {
- "epoch": 11.842105263157894,
- "grad_norm": 0.6800135374069214,
- "learning_rate": 2.0526315789473685e-05,
- "loss": 0.0702,
- "step": 225
- },
- {
- "epoch": 12.105263157894736,
- "grad_norm": 6.435481071472168,
- "learning_rate": 1.986842105263158e-05,
- "loss": 0.1457,
- "step": 230
- },
- {
- "epoch": 12.368421052631579,
- "grad_norm": 0.4558697044849396,
- "learning_rate": 1.9210526315789474e-05,
- "loss": 0.07,
- "step": 235
- },
- {
- "epoch": 12.631578947368421,
- "grad_norm": 0.35098525881767273,
- "learning_rate": 1.855263157894737e-05,
- "loss": 0.0722,
- "step": 240
- },
- {
- "epoch": 12.894736842105264,
- "grad_norm": 13.637933731079102,
- "learning_rate": 1.7894736842105264e-05,
- "loss": 0.1701,
- "step": 245
- },
- {
- "epoch": 13.157894736842104,
- "grad_norm": 30.400209426879883,
- "learning_rate": 1.723684210526316e-05,
- "loss": 0.0991,
- "step": 250
- },
- {
- "epoch": 13.421052631578947,
- "grad_norm": 0.5243188738822937,
- "learning_rate": 1.6578947368421053e-05,
- "loss": 0.3336,
- "step": 255
- },
- {
- "epoch": 13.68421052631579,
- "grad_norm": 0.6692495346069336,
- "learning_rate": 1.5921052631578948e-05,
- "loss": 0.0951,
- "step": 260
- },
- {
- "epoch": 13.947368421052632,
- "grad_norm": 0.45694392919540405,
- "learning_rate": 1.5263157894736842e-05,
- "loss": 0.0598,
- "step": 265
- },
- {
- "epoch": 14.210526315789474,
- "grad_norm": 0.9331479668617249,
- "learning_rate": 1.4605263157894739e-05,
- "loss": 0.265,
- "step": 270
- },
- {
- "epoch": 14.473684210526315,
- "grad_norm": 0.40160542726516724,
- "learning_rate": 1.3947368421052631e-05,
- "loss": 0.1644,
- "step": 275
- },
- {
- "epoch": 14.736842105263158,
- "grad_norm": 0.38934916257858276,
- "learning_rate": 1.3289473684210526e-05,
- "loss": 0.0577,
- "step": 280
- },
- {
- "epoch": 15.0,
- "grad_norm": 0.3913317322731018,
- "learning_rate": 1.2631578947368422e-05,
- "loss": 0.0656,
- "step": 285
- },
- {
- "epoch": 15.263157894736842,
- "grad_norm": 0.317749947309494,
- "learning_rate": 1.1973684210526315e-05,
- "loss": 0.0441,
- "step": 290
- },
- {
- "epoch": 15.526315789473685,
- "grad_norm": 2.1456081867218018,
- "learning_rate": 1.1315789473684212e-05,
- "loss": 0.0586,
- "step": 295
- },
- {
- "epoch": 15.789473684210526,
- "grad_norm": 0.7734329700469971,
- "learning_rate": 1.0657894736842106e-05,
- "loss": 0.2439,
- "step": 300
- },
- {
- "epoch": 16.05263157894737,
- "grad_norm": 0.4043017029762268,
- "learning_rate": 1e-05,
- "loss": 0.1657,
- "step": 305
- },
- {
- "epoch": 16.31578947368421,
- "grad_norm": 0.3189968466758728,
- "learning_rate": 9.342105263157895e-06,
- "loss": 0.0942,
- "step": 310
- },
- {
- "epoch": 16.57894736842105,
- "grad_norm": 2.8793745040893555,
- "learning_rate": 8.68421052631579e-06,
- "loss": 0.1211,
- "step": 315
- },
- {
- "epoch": 16.842105263157894,
- "grad_norm": 3.2684664726257324,
- "learning_rate": 8.026315789473685e-06,
- "loss": 0.0432,
- "step": 320
- },
- {
- "epoch": 17.105263157894736,
- "grad_norm": 0.29015296697616577,
- "learning_rate": 7.3684210526315784e-06,
- "loss": 0.1026,
- "step": 325
- },
- {
- "epoch": 17.36842105263158,
- "grad_norm": 9.337630271911621,
- "learning_rate": 6.710526315789474e-06,
- "loss": 0.0841,
- "step": 330
- },
- {
- "epoch": 17.63157894736842,
- "grad_norm": 0.33837392926216125,
- "learning_rate": 6.0526315789473685e-06,
- "loss": 0.08,
- "step": 335
- },
- {
- "epoch": 17.894736842105264,
- "grad_norm": 8.001954078674316,
- "learning_rate": 5.394736842105263e-06,
- "loss": 0.1835,
- "step": 340
- },
- {
- "epoch": 18.157894736842106,
- "grad_norm": 5.349979400634766,
- "learning_rate": 4.736842105263159e-06,
- "loss": 0.1063,
- "step": 345
- },
- {
- "epoch": 18.42105263157895,
- "grad_norm": 6.384610652923584,
- "learning_rate": 4.078947368421053e-06,
- "loss": 0.1241,
- "step": 350
- },
- {
- "epoch": 18.68421052631579,
- "grad_norm": 0.4049034118652344,
- "learning_rate": 3.421052631578948e-06,
- "loss": 0.0441,
- "step": 355
- },
- {
- "epoch": 18.94736842105263,
- "grad_norm": 0.38923588395118713,
- "learning_rate": 2.763157894736842e-06,
- "loss": 0.1969,
- "step": 360
- },
- {
- "epoch": 19.210526315789473,
- "grad_norm": 1.6115776300430298,
- "learning_rate": 2.105263157894737e-06,
- "loss": 0.2516,
- "step": 365
- },
- {
- "epoch": 19.473684210526315,
- "grad_norm": 1.166119933128357,
- "learning_rate": 1.4473684210526317e-06,
- "loss": 0.0834,
- "step": 370
- },
- {
- "epoch": 19.736842105263158,
- "grad_norm": 0.624567449092865,
- "learning_rate": 7.894736842105264e-07,
- "loss": 0.0486,
- "step": 375
- },
- {
- "epoch": 20.0,
- "grad_norm": 0.30260634422302246,
- "learning_rate": 1.3157894736842107e-07,
- "loss": 0.038,
- "step": 380
- }
- ],
- "logging_steps": 5,
- "max_steps": 380,
- "num_input_tokens_seen": 0,
- "num_train_epochs": 20,
- "save_steps": 200,
- "stateful_callbacks": {
- "TrainerControl": {
- "args": {
- "should_epoch_stop": false,
- "should_evaluate": false,
- "should_log": false,
- "should_save": true,
- "should_training_stop": true
- },
- "attributes": {}
- }
- },
- "total_flos": 5389832079360.0,
- "train_batch_size": 4,
- "trial_name": null,
- "trial_params": null
-}
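
The deleted file follows the state layout written by the Hugging Face transformers Trainer: run-level fields (global_step, num_train_epochs, total_flos) plus a log_history list of per-logging-step entries. As a minimal sketch of how such a file can be inspected (assumed usage, not part of this repository; the filename and keys are taken from the diff above), the logged loss curve can be printed like this:

import json

# Load a trainer_state.json with the layout shown above and print the
# training-loss trajectory. Python's json module accepts the bare
# "Infinity" grad_norm value that appears in the first log entry.
with open("trainer_state.json") as f:
    state = json.load(f)

for entry in state["log_history"]:
    # Each entry carries epoch, step, loss, learning_rate and grad_norm.
    print(f"step {entry['step']:>4}  epoch {entry['epoch']:6.2f}  "
          f"loss {entry['loss']:.4f}  lr {entry['learning_rate']:.2e}")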