{
  "best_metric": 0.9786,
  "best_model_checkpoint": "swin-tiny-patch4-window7-224-finetuned-eurosat/checkpoint-1053",
  "epoch": 2.9936034115138592,
  "eval_steps": 500,
  "global_step": 1053,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03,
      "grad_norm": 10.769173622131348,
      "learning_rate": 4.716981132075472e-06,
      "loss": 0.359,
      "step": 10
    },
    {
      "epoch": 0.06,
      "grad_norm": 10.249465942382812,
      "learning_rate": 9.433962264150944e-06,
      "loss": 0.3279,
      "step": 20
    },
    {
      "epoch": 0.09,
      "grad_norm": 8.692254066467285,
      "learning_rate": 1.4150943396226415e-05,
      "loss": 0.3266,
      "step": 30
    },
    {
      "epoch": 0.11,
      "grad_norm": 9.361005783081055,
      "learning_rate": 1.8867924528301888e-05,
      "loss": 0.3898,
      "step": 40
    },
    {
      "epoch": 0.14,
      "grad_norm": 9.085017204284668,
      "learning_rate": 2.358490566037736e-05,
      "loss": 0.3335,
      "step": 50
    },
    {
      "epoch": 0.17,
      "grad_norm": 10.954299926757812,
      "learning_rate": 2.830188679245283e-05,
      "loss": 0.3713,
      "step": 60
    },
    {
      "epoch": 0.2,
      "grad_norm": 8.1724853515625,
      "learning_rate": 3.30188679245283e-05,
      "loss": 0.3949,
      "step": 70
    },
    {
      "epoch": 0.23,
      "grad_norm": 12.637982368469238,
      "learning_rate": 3.7735849056603776e-05,
      "loss": 0.3814,
      "step": 80
    },
    {
      "epoch": 0.26,
      "grad_norm": 10.450471878051758,
      "learning_rate": 4.245283018867925e-05,
      "loss": 0.3647,
      "step": 90
    },
    {
      "epoch": 0.28,
      "grad_norm": 15.103857040405273,
      "learning_rate": 4.716981132075472e-05,
      "loss": 0.4129,
      "step": 100
    },
    {
      "epoch": 0.31,
      "grad_norm": 9.379721641540527,
      "learning_rate": 4.978880675818374e-05,
      "loss": 0.3911,
      "step": 110
    },
    {
      "epoch": 0.34,
      "grad_norm": 9.111693382263184,
      "learning_rate": 4.9260823653643085e-05,
      "loss": 0.3898,
      "step": 120
    },
    {
      "epoch": 0.37,
      "grad_norm": 10.918614387512207,
      "learning_rate": 4.8732840549102435e-05,
      "loss": 0.3858,
      "step": 130
    },
    {
      "epoch": 0.4,
      "grad_norm": 10.90676498413086,
      "learning_rate": 4.820485744456177e-05,
      "loss": 0.4037,
      "step": 140
    },
    {
      "epoch": 0.43,
      "grad_norm": 5.954671859741211,
      "learning_rate": 4.767687434002112e-05,
      "loss": 0.3892,
      "step": 150
    },
    {
      "epoch": 0.45,
      "grad_norm": 15.418940544128418,
      "learning_rate": 4.7148891235480466e-05,
      "loss": 0.4286,
      "step": 160
    },
    {
      "epoch": 0.48,
      "grad_norm": 9.472236633300781,
      "learning_rate": 4.662090813093981e-05,
      "loss": 0.4043,
      "step": 170
    },
    {
      "epoch": 0.51,
      "grad_norm": 9.019627571105957,
      "learning_rate": 4.609292502639916e-05,
      "loss": 0.4145,
      "step": 180
    },
    {
      "epoch": 0.54,
      "grad_norm": 11.91484260559082,
      "learning_rate": 4.55649419218585e-05,
      "loss": 0.4325,
      "step": 190
    },
    {
      "epoch": 0.57,
      "grad_norm": 10.02440071105957,
      "learning_rate": 4.503695881731785e-05,
      "loss": 0.3681,
      "step": 200
    },
    {
      "epoch": 0.6,
      "grad_norm": 11.77423095703125,
      "learning_rate": 4.45089757127772e-05,
      "loss": 0.4297,
      "step": 210
    },
    {
      "epoch": 0.63,
      "grad_norm": 8.672981262207031,
      "learning_rate": 4.398099260823654e-05,
      "loss": 0.4146,
      "step": 220
    },
    {
      "epoch": 0.65,
      "grad_norm": 7.705791473388672,
      "learning_rate": 4.3453009503695884e-05,
      "loss": 0.3831,
      "step": 230
    },
    {
      "epoch": 0.68,
      "grad_norm": 10.497554779052734,
      "learning_rate": 4.292502639915523e-05,
      "loss": 0.4295,
      "step": 240
    },
    {
      "epoch": 0.71,
      "grad_norm": 11.265559196472168,
      "learning_rate": 4.239704329461457e-05,
      "loss": 0.399,
      "step": 250
    },
    {
      "epoch": 0.74,
      "grad_norm": 8.27481460571289,
      "learning_rate": 4.186906019007392e-05,
      "loss": 0.4026,
      "step": 260
    },
    {
      "epoch": 0.77,
      "grad_norm": 9.239062309265137,
      "learning_rate": 4.1341077085533265e-05,
      "loss": 0.4072,
      "step": 270
    },
    {
      "epoch": 0.8,
      "grad_norm": 5.953380584716797,
      "learning_rate": 4.081309398099261e-05,
      "loss": 0.3824,
      "step": 280
    },
    {
      "epoch": 0.82,
      "grad_norm": 9.537753105163574,
      "learning_rate": 4.028511087645195e-05,
      "loss": 0.3596,
      "step": 290
    },
    {
      "epoch": 0.85,
      "grad_norm": 8.818833351135254,
      "learning_rate": 3.97571277719113e-05,
      "loss": 0.3585,
      "step": 300
    },
    {
      "epoch": 0.88,
      "grad_norm": 9.671758651733398,
      "learning_rate": 3.9229144667370646e-05,
      "loss": 0.3727,
      "step": 310
    },
    {
      "epoch": 0.91,
      "grad_norm": 8.81728744506836,
      "learning_rate": 3.870116156282999e-05,
      "loss": 0.3746,
      "step": 320
    },
    {
      "epoch": 0.94,
      "grad_norm": 8.520674705505371,
      "learning_rate": 3.817317845828934e-05,
      "loss": 0.4116,
      "step": 330
    },
    {
      "epoch": 0.97,
      "grad_norm": 6.638798713684082,
      "learning_rate": 3.764519535374868e-05,
      "loss": 0.367,
      "step": 340
    },
    {
      "epoch": 1.0,
      "grad_norm": 9.20146656036377,
      "learning_rate": 3.711721224920803e-05,
      "loss": 0.3729,
      "step": 350
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.97,
      "eval_loss": 0.08550076186656952,
      "eval_runtime": 62.7041,
      "eval_samples_per_second": 79.74,
      "eval_steps_per_second": 2.504,
      "step": 351
    },
    {
      "epoch": 1.02,
      "grad_norm": 7.533067226409912,
      "learning_rate": 3.658922914466738e-05,
      "loss": 0.3848,
      "step": 360
    },
    {
      "epoch": 1.05,
      "grad_norm": 6.615562915802002,
      "learning_rate": 3.6061246040126714e-05,
      "loss": 0.3507,
      "step": 370
    },
    {
      "epoch": 1.08,
      "grad_norm": 6.549566268920898,
      "learning_rate": 3.5533262935586064e-05,
      "loss": 0.365,
      "step": 380
    },
    {
      "epoch": 1.11,
      "grad_norm": 11.576876640319824,
      "learning_rate": 3.500527983104541e-05,
      "loss": 0.3678,
      "step": 390
    },
    {
      "epoch": 1.14,
      "grad_norm": 5.605098724365234,
      "learning_rate": 3.447729672650475e-05,
      "loss": 0.3361,
      "step": 400
    },
    {
      "epoch": 1.17,
      "grad_norm": 8.614058494567871,
      "learning_rate": 3.3949313621964095e-05,
      "loss": 0.3001,
      "step": 410
    },
    {
      "epoch": 1.19,
      "grad_norm": 8.19360637664795,
      "learning_rate": 3.3421330517423445e-05,
      "loss": 0.33,
      "step": 420
    },
    {
      "epoch": 1.22,
      "grad_norm": 8.590721130371094,
      "learning_rate": 3.289334741288279e-05,
      "loss": 0.383,
      "step": 430
    },
    {
      "epoch": 1.25,
      "grad_norm": 10.264902114868164,
      "learning_rate": 3.236536430834213e-05,
      "loss": 0.3727,
      "step": 440
    },
    {
      "epoch": 1.28,
      "grad_norm": 10.737936973571777,
      "learning_rate": 3.183738120380148e-05,
      "loss": 0.3563,
      "step": 450
    },
    {
      "epoch": 1.31,
      "grad_norm": 7.508800983428955,
      "learning_rate": 3.130939809926082e-05,
      "loss": 0.3686,
      "step": 460
    },
    {
      "epoch": 1.34,
      "grad_norm": 8.50932502746582,
      "learning_rate": 3.078141499472017e-05,
      "loss": 0.4022,
      "step": 470
    },
    {
      "epoch": 1.36,
      "grad_norm": 6.809201717376709,
      "learning_rate": 3.0253431890179517e-05,
      "loss": 0.3758,
      "step": 480
    },
    {
      "epoch": 1.39,
      "grad_norm": 11.068066596984863,
      "learning_rate": 2.972544878563886e-05,
      "loss": 0.3995,
      "step": 490
    },
    {
      "epoch": 1.42,
      "grad_norm": 6.781606674194336,
      "learning_rate": 2.9197465681098207e-05,
      "loss": 0.3837,
      "step": 500
    },
    {
      "epoch": 1.45,
      "grad_norm": 9.607550621032715,
      "learning_rate": 2.8669482576557548e-05,
      "loss": 0.3967,
      "step": 510
    },
    {
      "epoch": 1.48,
      "grad_norm": 9.486953735351562,
      "learning_rate": 2.8141499472016898e-05,
      "loss": 0.3244,
      "step": 520
    },
    {
      "epoch": 1.51,
      "grad_norm": 9.246254920959473,
      "learning_rate": 2.7613516367476245e-05,
      "loss": 0.3958,
      "step": 530
    },
    {
      "epoch": 1.54,
      "grad_norm": 11.376776695251465,
      "learning_rate": 2.7085533262935585e-05,
      "loss": 0.3647,
      "step": 540
    },
    {
      "epoch": 1.56,
      "grad_norm": 14.191611289978027,
      "learning_rate": 2.6557550158394935e-05,
      "loss": 0.4012,
      "step": 550
    },
    {
      "epoch": 1.59,
      "grad_norm": 8.423450469970703,
      "learning_rate": 2.6029567053854276e-05,
      "loss": 0.3034,
      "step": 560
    },
    {
      "epoch": 1.62,
      "grad_norm": 8.304765701293945,
      "learning_rate": 2.5501583949313622e-05,
      "loss": 0.3679,
      "step": 570
    },
    {
      "epoch": 1.65,
      "grad_norm": 8.273816108703613,
      "learning_rate": 2.497360084477297e-05,
      "loss": 0.3266,
      "step": 580
    },
    {
      "epoch": 1.68,
      "grad_norm": 7.429447650909424,
      "learning_rate": 2.4445617740232313e-05,
      "loss": 0.3279,
      "step": 590
    },
    {
      "epoch": 1.71,
      "grad_norm": 7.221258640289307,
      "learning_rate": 2.391763463569166e-05,
      "loss": 0.3295,
      "step": 600
    },
    {
      "epoch": 1.73,
      "grad_norm": 9.304533958435059,
      "learning_rate": 2.3389651531151003e-05,
      "loss": 0.3773,
      "step": 610
    },
    {
      "epoch": 1.76,
      "grad_norm": 6.450652599334717,
      "learning_rate": 2.286166842661035e-05,
      "loss": 0.3361,
      "step": 620
    },
    {
      "epoch": 1.79,
      "grad_norm": 9.466483116149902,
      "learning_rate": 2.2333685322069694e-05,
      "loss": 0.3088,
      "step": 630
    },
    {
      "epoch": 1.82,
      "grad_norm": 8.043838500976562,
      "learning_rate": 2.180570221752904e-05,
      "loss": 0.3567,
      "step": 640
    },
    {
      "epoch": 1.85,
      "grad_norm": 8.639891624450684,
      "learning_rate": 2.1277719112988384e-05,
      "loss": 0.3387,
      "step": 650
    },
    {
      "epoch": 1.88,
      "grad_norm": 5.995266437530518,
      "learning_rate": 2.074973600844773e-05,
      "loss": 0.3102,
      "step": 660
    },
    {
      "epoch": 1.9,
      "grad_norm": 8.48534870147705,
      "learning_rate": 2.0221752903907075e-05,
      "loss": 0.2951,
      "step": 670
    },
    {
      "epoch": 1.93,
      "grad_norm": 12.092377662658691,
      "learning_rate": 1.9693769799366422e-05,
      "loss": 0.3628,
      "step": 680
    },
    {
      "epoch": 1.96,
      "grad_norm": 6.4258036613464355,
      "learning_rate": 1.9165786694825765e-05,
      "loss": 0.3122,
      "step": 690
    },
    {
      "epoch": 1.99,
      "grad_norm": 10.837594032287598,
      "learning_rate": 1.863780359028511e-05,
      "loss": 0.3739,
      "step": 700
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.9752,
      "eval_loss": 0.07546680420637131,
      "eval_runtime": 62.6469,
      "eval_samples_per_second": 79.812,
      "eval_steps_per_second": 2.506,
      "step": 703
    },
    {
      "epoch": 2.02,
      "grad_norm": 7.845410346984863,
      "learning_rate": 1.810982048574446e-05,
      "loss": 0.3308,
      "step": 710
    },
    {
      "epoch": 2.05,
      "grad_norm": 7.4693827629089355,
      "learning_rate": 1.7581837381203803e-05,
      "loss": 0.3026,
      "step": 720
    },
    {
      "epoch": 2.08,
      "grad_norm": 8.682798385620117,
      "learning_rate": 1.7053854276663146e-05,
      "loss": 0.3112,
      "step": 730
    },
    {
      "epoch": 2.1,
      "grad_norm": 7.5129594802856445,
      "learning_rate": 1.6525871172122493e-05,
      "loss": 0.2891,
      "step": 740
    },
    {
      "epoch": 2.13,
      "grad_norm": 9.642603874206543,
      "learning_rate": 1.5997888067581837e-05,
      "loss": 0.307,
      "step": 750
    },
    {
      "epoch": 2.16,
      "grad_norm": 6.948006629943848,
      "learning_rate": 1.5469904963041184e-05,
      "loss": 0.2985,
      "step": 760
    },
    {
      "epoch": 2.19,
      "grad_norm": 7.5828752517700195,
      "learning_rate": 1.4941921858500529e-05,
      "loss": 0.3052,
      "step": 770
    },
    {
      "epoch": 2.22,
      "grad_norm": 10.263833999633789,
      "learning_rate": 1.4413938753959874e-05,
      "loss": 0.3424,
      "step": 780
    },
    {
      "epoch": 2.25,
      "grad_norm": 13.665331840515137,
      "learning_rate": 1.388595564941922e-05,
      "loss": 0.3069,
      "step": 790
    },
    {
      "epoch": 2.27,
      "grad_norm": 9.523612022399902,
      "learning_rate": 1.3357972544878563e-05,
      "loss": 0.2856,
      "step": 800
    },
    {
      "epoch": 2.3,
      "grad_norm": 7.971409797668457,
      "learning_rate": 1.2829989440337912e-05,
      "loss": 0.295,
      "step": 810
    },
    {
      "epoch": 2.33,
      "grad_norm": 8.445518493652344,
      "learning_rate": 1.2302006335797255e-05,
      "loss": 0.3238,
      "step": 820
    },
    {
      "epoch": 2.36,
      "grad_norm": 6.57492733001709,
      "learning_rate": 1.17740232312566e-05,
      "loss": 0.3009,
      "step": 830
    },
    {
      "epoch": 2.39,
      "grad_norm": 7.864781856536865,
      "learning_rate": 1.1246040126715946e-05,
      "loss": 0.3064,
      "step": 840
    },
    {
      "epoch": 2.42,
      "grad_norm": 7.406197547912598,
      "learning_rate": 1.0718057022175291e-05,
      "loss": 0.3518,
      "step": 850
    },
    {
      "epoch": 2.44,
      "grad_norm": 6.5516157150268555,
      "learning_rate": 1.0190073917634636e-05,
      "loss": 0.3405,
      "step": 860
    },
    {
      "epoch": 2.47,
      "grad_norm": 8.429482460021973,
      "learning_rate": 9.662090813093982e-06,
      "loss": 0.2913,
      "step": 870
    },
    {
      "epoch": 2.5,
      "grad_norm": 10.97824478149414,
      "learning_rate": 9.134107708553327e-06,
      "loss": 0.3262,
      "step": 880
    },
    {
      "epoch": 2.53,
      "grad_norm": 6.202483654022217,
      "learning_rate": 8.606124604012672e-06,
      "loss": 0.3222,
      "step": 890
    },
    {
      "epoch": 2.56,
      "grad_norm": 8.449475288391113,
      "learning_rate": 8.078141499472017e-06,
      "loss": 0.3015,
      "step": 900
    },
    {
      "epoch": 2.59,
      "grad_norm": 7.686133861541748,
      "learning_rate": 7.5501583949313625e-06,
      "loss": 0.3116,
      "step": 910
    },
    {
      "epoch": 2.62,
      "grad_norm": 8.766165733337402,
      "learning_rate": 7.022175290390708e-06,
      "loss": 0.3147,
      "step": 920
    },
    {
      "epoch": 2.64,
      "grad_norm": 9.25505542755127,
      "learning_rate": 6.494192185850054e-06,
      "loss": 0.2885,
      "step": 930
    },
    {
      "epoch": 2.67,
      "grad_norm": 8.573286056518555,
      "learning_rate": 5.966209081309398e-06,
      "loss": 0.2585,
      "step": 940
    },
    {
      "epoch": 2.7,
      "grad_norm": 6.591097354888916,
      "learning_rate": 5.438225976768744e-06,
      "loss": 0.3104,
      "step": 950
    },
    {
      "epoch": 2.73,
      "grad_norm": 7.301019191741943,
      "learning_rate": 4.910242872228089e-06,
      "loss": 0.2786,
      "step": 960
    },
    {
      "epoch": 2.76,
      "grad_norm": 9.411577224731445,
      "learning_rate": 4.382259767687434e-06,
      "loss": 0.2982,
      "step": 970
    },
    {
      "epoch": 2.79,
      "grad_norm": 8.191555976867676,
      "learning_rate": 3.854276663146779e-06,
      "loss": 0.3043,
      "step": 980
    },
    {
      "epoch": 2.81,
      "grad_norm": 8.216190338134766,
      "learning_rate": 3.326293558606125e-06,
      "loss": 0.3089,
      "step": 990
    },
    {
      "epoch": 2.84,
      "grad_norm": 14.009899139404297,
      "learning_rate": 2.79831045406547e-06,
      "loss": 0.3294,
      "step": 1000
    },
    {
      "epoch": 2.87,
      "grad_norm": 7.6990132331848145,
      "learning_rate": 2.2703273495248154e-06,
      "loss": 0.3346,
      "step": 1010
    },
    {
      "epoch": 2.9,
      "grad_norm": 7.637197494506836,
      "learning_rate": 1.7423442449841606e-06,
      "loss": 0.2865,
      "step": 1020
    },
    {
      "epoch": 2.93,
      "grad_norm": 8.707183837890625,
      "learning_rate": 1.2143611404435059e-06,
      "loss": 0.3253,
      "step": 1030
    },
    {
      "epoch": 2.96,
      "grad_norm": 8.528026580810547,
      "learning_rate": 6.863780359028511e-07,
      "loss": 0.3151,
      "step": 1040
    },
    {
      "epoch": 2.99,
      "grad_norm": 7.991356372833252,
      "learning_rate": 1.5839493136219642e-07,
      "loss": 0.2845,
      "step": 1050
    },
    {
      "epoch": 2.99,
      "eval_accuracy": 0.9786,
      "eval_loss": 0.06669362634420395,
      "eval_runtime": 62.6981,
      "eval_samples_per_second": 79.747,
      "eval_steps_per_second": 2.504,
      "step": 1053
    },
    {
      "epoch": 2.99,
      "step": 1053,
      "total_flos": 3.3497451642252165e+18,
      "train_loss": 0.34938513287338663,
      "train_runtime": 2166.1495,
      "train_samples_per_second": 62.323,
      "train_steps_per_second": 0.486
    }
  ],
  "logging_steps": 10,
  "max_steps": 1053,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "total_flos": 3.3497451642252165e+18,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}
|