{
  "best_metric": 0.9788888888888889,
  "best_model_checkpoint": "swin-tiny-patch4-window7-224-finetuned-eurosat/checkpoint-570",
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 570,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.05,
      "grad_norm": 7.109246253967285,
      "learning_rate": 8.771929824561403e-06,
      "loss": 2.2922,
      "step": 10
    },
    {
      "epoch": 0.11,
      "grad_norm": 6.89300012588501,
      "learning_rate": 1.7543859649122806e-05,
      "loss": 2.0928,
      "step": 20
    },
    {
      "epoch": 0.16,
      "grad_norm": 6.730234622955322,
      "learning_rate": 2.6315789473684212e-05,
      "loss": 1.6756,
      "step": 30
    },
    {
      "epoch": 0.21,
      "grad_norm": 9.772377014160156,
      "learning_rate": 3.508771929824561e-05,
      "loss": 1.0287,
      "step": 40
    },
    {
      "epoch": 0.26,
      "grad_norm": 17.15283966064453,
      "learning_rate": 4.3859649122807014e-05,
      "loss": 0.5986,
      "step": 50
    },
    {
      "epoch": 0.32,
      "grad_norm": 27.78217315673828,
      "learning_rate": 4.970760233918128e-05,
      "loss": 0.4796,
      "step": 60
    },
    {
      "epoch": 0.37,
      "grad_norm": 11.25429630279541,
      "learning_rate": 4.8732943469785574e-05,
      "loss": 0.4245,
      "step": 70
    },
    {
      "epoch": 0.42,
      "grad_norm": 29.021835327148438,
      "learning_rate": 4.7758284600389865e-05,
      "loss": 0.3994,
      "step": 80
    },
    {
      "epoch": 0.47,
      "grad_norm": 15.250253677368164,
      "learning_rate": 4.678362573099415e-05,
      "loss": 0.3424,
      "step": 90
    },
    {
      "epoch": 0.53,
      "grad_norm": 9.626118659973145,
      "learning_rate": 4.580896686159844e-05,
      "loss": 0.3437,
      "step": 100
    },
    {
      "epoch": 0.58,
      "grad_norm": 14.916412353515625,
      "learning_rate": 4.483430799220273e-05,
      "loss": 0.3188,
      "step": 110
    },
    {
      "epoch": 0.63,
      "grad_norm": 11.76590633392334,
      "learning_rate": 4.3859649122807014e-05,
      "loss": 0.3324,
      "step": 120
    },
    {
      "epoch": 0.68,
      "grad_norm": 8.164874076843262,
      "learning_rate": 4.2884990253411305e-05,
      "loss": 0.3169,
      "step": 130
    },
    {
      "epoch": 0.74,
      "grad_norm": 10.07582950592041,
      "learning_rate": 4.1910331384015596e-05,
      "loss": 0.3342,
      "step": 140
    },
    {
      "epoch": 0.79,
      "grad_norm": 7.93110466003418,
      "learning_rate": 4.093567251461988e-05,
      "loss": 0.2696,
      "step": 150
    },
    {
      "epoch": 0.84,
      "grad_norm": 12.225959777832031,
      "learning_rate": 3.996101364522417e-05,
      "loss": 0.2655,
      "step": 160
    },
    {
      "epoch": 0.89,
      "grad_norm": 9.060185432434082,
      "learning_rate": 3.898635477582846e-05,
      "loss": 0.2531,
      "step": 170
    },
    {
      "epoch": 0.95,
      "grad_norm": 11.284368515014648,
      "learning_rate": 3.8011695906432746e-05,
      "loss": 0.244,
      "step": 180
    },
    {
      "epoch": 1.0,
      "grad_norm": 12.08557415008545,
      "learning_rate": 3.7037037037037037e-05,
      "loss": 0.2825,
      "step": 190
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.957037037037037,
      "eval_loss": 0.1273288130760193,
      "eval_runtime": 27.9178,
      "eval_samples_per_second": 96.713,
      "eval_steps_per_second": 3.045,
      "step": 190
    },
    {
      "epoch": 1.05,
      "grad_norm": 9.616697311401367,
      "learning_rate": 3.606237816764133e-05,
      "loss": 0.2376,
      "step": 200
    },
    {
      "epoch": 1.11,
      "grad_norm": 7.073214530944824,
      "learning_rate": 3.508771929824561e-05,
      "loss": 0.2612,
      "step": 210
    },
    {
      "epoch": 1.16,
      "grad_norm": 9.791173934936523,
      "learning_rate": 3.41130604288499e-05,
      "loss": 0.2563,
      "step": 220
    },
    {
      "epoch": 1.21,
      "grad_norm": 8.431086540222168,
      "learning_rate": 3.313840155945419e-05,
      "loss": 0.2412,
      "step": 230
    },
    {
      "epoch": 1.26,
      "grad_norm": 8.49256706237793,
      "learning_rate": 3.216374269005848e-05,
      "loss": 0.2159,
      "step": 240
    },
    {
      "epoch": 1.32,
      "grad_norm": 18.413179397583008,
      "learning_rate": 3.118908382066277e-05,
      "loss": 0.2413,
      "step": 250
    },
    {
      "epoch": 1.37,
      "grad_norm": 10.551178932189941,
      "learning_rate": 3.0214424951267055e-05,
      "loss": 0.2605,
      "step": 260
    },
    {
      "epoch": 1.42,
      "grad_norm": 10.21691608428955,
      "learning_rate": 2.9239766081871346e-05,
      "loss": 0.2021,
      "step": 270
    },
    {
      "epoch": 1.47,
      "grad_norm": 5.306635856628418,
      "learning_rate": 2.8265107212475634e-05,
      "loss": 0.2129,
      "step": 280
    },
    {
      "epoch": 1.53,
      "grad_norm": 5.005842685699463,
      "learning_rate": 2.729044834307992e-05,
      "loss": 0.1824,
      "step": 290
    },
    {
      "epoch": 1.58,
      "grad_norm": 8.03073501586914,
      "learning_rate": 2.6315789473684212e-05,
      "loss": 0.197,
      "step": 300
    },
    {
      "epoch": 1.63,
      "grad_norm": 9.381324768066406,
      "learning_rate": 2.53411306042885e-05,
      "loss": 0.2008,
      "step": 310
    },
    {
      "epoch": 1.68,
      "grad_norm": 9.063702583312988,
      "learning_rate": 2.4366471734892787e-05,
      "loss": 0.2149,
      "step": 320
    },
    {
      "epoch": 1.74,
      "grad_norm": 7.741683006286621,
      "learning_rate": 2.3391812865497074e-05,
      "loss": 0.1696,
      "step": 330
    },
    {
      "epoch": 1.79,
      "grad_norm": 13.849589347839355,
      "learning_rate": 2.2417153996101365e-05,
      "loss": 0.1918,
      "step": 340
    },
    {
      "epoch": 1.84,
      "grad_norm": 9.223557472229004,
      "learning_rate": 2.1442495126705653e-05,
      "loss": 0.173,
      "step": 350
    },
    {
      "epoch": 1.89,
      "grad_norm": 6.596388339996338,
      "learning_rate": 2.046783625730994e-05,
      "loss": 0.1916,
      "step": 360
    },
    {
      "epoch": 1.95,
      "grad_norm": 7.863681316375732,
      "learning_rate": 1.949317738791423e-05,
      "loss": 0.1772,
      "step": 370
    },
    {
      "epoch": 2.0,
      "grad_norm": 15.154926300048828,
      "learning_rate": 1.8518518518518518e-05,
      "loss": 0.1404,
      "step": 380
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.9733333333333334,
      "eval_loss": 0.07973578572273254,
      "eval_runtime": 32.4882,
      "eval_samples_per_second": 83.107,
      "eval_steps_per_second": 2.616,
      "step": 380
    },
    {
      "epoch": 2.05,
      "grad_norm": 8.267267227172852,
      "learning_rate": 1.7543859649122806e-05,
      "loss": 0.1721,
      "step": 390
    },
    {
      "epoch": 2.11,
      "grad_norm": 6.031956195831299,
      "learning_rate": 1.6569200779727097e-05,
      "loss": 0.1819,
      "step": 400
    },
    {
      "epoch": 2.16,
      "grad_norm": 4.921173095703125,
      "learning_rate": 1.5594541910331384e-05,
      "loss": 0.1735,
      "step": 410
    },
    {
      "epoch": 2.21,
      "grad_norm": 6.759762287139893,
      "learning_rate": 1.4619883040935673e-05,
      "loss": 0.1523,
      "step": 420
    },
    {
      "epoch": 2.26,
      "grad_norm": 8.071491241455078,
      "learning_rate": 1.364522417153996e-05,
      "loss": 0.1574,
      "step": 430
    },
    {
      "epoch": 2.32,
      "grad_norm": 7.740592956542969,
      "learning_rate": 1.267056530214425e-05,
      "loss": 0.1413,
      "step": 440
    },
    {
      "epoch": 2.37,
      "grad_norm": 10.867220878601074,
      "learning_rate": 1.1695906432748537e-05,
      "loss": 0.1445,
      "step": 450
    },
    {
      "epoch": 2.42,
      "grad_norm": 6.91710901260376,
      "learning_rate": 1.0721247563352826e-05,
      "loss": 0.15,
      "step": 460
    },
    {
      "epoch": 2.47,
      "grad_norm": 5.530294418334961,
      "learning_rate": 9.746588693957115e-06,
      "loss": 0.1222,
      "step": 470
    },
    {
      "epoch": 2.53,
      "grad_norm": 7.548226833343506,
      "learning_rate": 8.771929824561403e-06,
      "loss": 0.1442,
      "step": 480
    },
    {
      "epoch": 2.58,
      "grad_norm": 8.537381172180176,
      "learning_rate": 7.797270955165692e-06,
      "loss": 0.1485,
      "step": 490
    },
    {
      "epoch": 2.63,
      "grad_norm": 10.676304817199707,
      "learning_rate": 6.82261208576998e-06,
      "loss": 0.1551,
      "step": 500
    },
    {
      "epoch": 2.68,
      "grad_norm": 7.077390670776367,
      "learning_rate": 5.8479532163742686e-06,
      "loss": 0.1716,
      "step": 510
    },
    {
      "epoch": 2.74,
      "grad_norm": 8.090137481689453,
      "learning_rate": 4.873294346978558e-06,
      "loss": 0.1413,
      "step": 520
    },
    {
      "epoch": 2.79,
      "grad_norm": 9.676934242248535,
      "learning_rate": 3.898635477582846e-06,
      "loss": 0.164,
      "step": 530
    },
    {
      "epoch": 2.84,
      "grad_norm": 18.995750427246094,
      "learning_rate": 2.9239766081871343e-06,
      "loss": 0.1703,
      "step": 540
    },
    {
      "epoch": 2.89,
      "grad_norm": 5.193699359893799,
      "learning_rate": 1.949317738791423e-06,
      "loss": 0.1412,
      "step": 550
    },
    {
      "epoch": 2.95,
      "grad_norm": 18.007177352905273,
      "learning_rate": 9.746588693957115e-07,
      "loss": 0.1192,
      "step": 560
    },
    {
      "epoch": 3.0,
      "grad_norm": 15.536856651306152,
      "learning_rate": 0.0,
      "loss": 0.1394,
      "step": 570
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.9788888888888889,
      "eval_loss": 0.061217330396175385,
      "eval_runtime": 43.6421,
      "eval_samples_per_second": 61.867,
      "eval_steps_per_second": 1.948,
      "step": 570
    },
    {
      "epoch": 3.0,
      "step": 570,
      "total_flos": 1.8124066505760768e+18,
      "train_loss": 0.33600869262427613,
      "train_runtime": 3352.6448,
      "train_samples_per_second": 21.744,
      "train_steps_per_second": 0.17
    }
  ],
  "logging_steps": 10,
  "max_steps": 570,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "total_flos": 1.8124066505760768e+18,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}