loucad committed on
Commit
23d732d
1 Parent(s): 0befbf0

Training in progress, epoch 0

Browse files
Files changed (5) hide show
  1. .gitignore +1 -0
  2. config.json +457 -0
  3. preprocessor_config.json +22 -0
  4. pytorch_model.bin +3 -0
  5. training_args.bin +3 -0
.gitignore ADDED
@@ -0,0 +1 @@
 
 
1
+ checkpoint-*/
config.json ADDED
@@ -0,0 +1,457 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "_name_or_path": "microsoft/swin-tiny-patch4-window7-224",
3
+ "architectures": [
4
+ "SwinForImageClassification"
5
+ ],
6
+ "attention_probs_dropout_prob": 0.0,
7
+ "depths": [
8
+ 2,
9
+ 2,
10
+ 6,
11
+ 2
12
+ ],
13
+ "drop_path_rate": 0.1,
14
+ "embed_dim": 96,
15
+ "encoder_stride": 32,
16
+ "hidden_act": "gelu",
17
+ "hidden_dropout_prob": 0.0,
18
+ "hidden_size": 768,
19
+ "id2label": {
20
+ "0": "n01443537",
21
+ "1": "n01629819",
22
+ "2": "n01641577",
23
+ "3": "n01644900",
24
+ "4": "n01698640",
25
+ "5": "n01742172",
26
+ "6": "n01768244",
27
+ "7": "n01770393",
28
+ "8": "n01774384",
29
+ "9": "n01774750",
30
+ "10": "n01784675",
31
+ "11": "n01882714",
32
+ "12": "n01910747",
33
+ "13": "n01917289",
34
+ "14": "n01944390",
35
+ "15": "n01950731",
36
+ "16": "n01983481",
37
+ "17": "n01984695",
38
+ "18": "n02002724",
39
+ "19": "n02056570",
40
+ "20": "n02058221",
41
+ "21": "n02074367",
42
+ "22": "n02094433",
43
+ "23": "n02099601",
44
+ "24": "n02099712",
45
+ "25": "n02106662",
46
+ "26": "n02113799",
47
+ "27": "n02123045",
48
+ "28": "n02123394",
49
+ "29": "n02124075",
50
+ "30": "n02125311",
51
+ "31": "n02129165",
52
+ "32": "n02132136",
53
+ "33": "n02165456",
54
+ "34": "n02226429",
55
+ "35": "n02231487",
56
+ "36": "n02233338",
57
+ "37": "n02236044",
58
+ "38": "n02268443",
59
+ "39": "n02279972",
60
+ "40": "n02281406",
61
+ "41": "n02321529",
62
+ "42": "n02364673",
63
+ "43": "n02395406",
64
+ "44": "n02403003",
65
+ "45": "n02410509",
66
+ "46": "n02415577",
67
+ "47": "n02423022",
68
+ "48": "n02437312",
69
+ "49": "n02480495",
70
+ "50": "n02481823",
71
+ "51": "n02486410",
72
+ "52": "n02504458",
73
+ "53": "n02509815",
74
+ "54": "n02666347",
75
+ "55": "n02669723",
76
+ "56": "n02699494",
77
+ "57": "n02769748",
78
+ "58": "n02788148",
79
+ "59": "n02791270",
80
+ "60": "n02793495",
81
+ "61": "n02795169",
82
+ "62": "n02802426",
83
+ "63": "n02808440",
84
+ "64": "n02814533",
85
+ "65": "n02814860",
86
+ "66": "n02815834",
87
+ "67": "n02823428",
88
+ "68": "n02837789",
89
+ "69": "n02841315",
90
+ "70": "n02843684",
91
+ "71": "n02883205",
92
+ "72": "n02892201",
93
+ "73": "n02909870",
94
+ "74": "n02917067",
95
+ "75": "n02927161",
96
+ "76": "n02948072",
97
+ "77": "n02950826",
98
+ "78": "n02963159",
99
+ "79": "n02977058",
100
+ "80": "n02988304",
101
+ "81": "n03014705",
102
+ "82": "n03026506",
103
+ "83": "n03042490",
104
+ "84": "n03085013",
105
+ "85": "n03089624",
106
+ "86": "n03100240",
107
+ "87": "n03126707",
108
+ "88": "n03160309",
109
+ "89": "n03179701",
110
+ "90": "n03201208",
111
+ "91": "n03255030",
112
+ "92": "n03355925",
113
+ "93": "n03373237",
114
+ "94": "n03388043",
115
+ "95": "n03393912",
116
+ "96": "n03400231",
117
+ "97": "n03404251",
118
+ "98": "n03424325",
119
+ "99": "n03444034",
120
+ "100": "n03447447",
121
+ "101": "n03544143",
122
+ "102": "n03584254",
123
+ "103": "n03599486",
124
+ "104": "n03617480",
125
+ "105": "n03637318",
126
+ "106": "n03649909",
127
+ "107": "n03662601",
128
+ "108": "n03670208",
129
+ "109": "n03706229",
130
+ "110": "n03733131",
131
+ "111": "n03763968",
132
+ "112": "n03770439",
133
+ "113": "n03796401",
134
+ "114": "n03814639",
135
+ "115": "n03837869",
136
+ "116": "n03838899",
137
+ "117": "n03854065",
138
+ "118": "n03891332",
139
+ "119": "n03902125",
140
+ "120": "n03930313",
141
+ "121": "n03937543",
142
+ "122": "n03970156",
143
+ "123": "n03977966",
144
+ "124": "n03980874",
145
+ "125": "n03983396",
146
+ "126": "n03992509",
147
+ "127": "n04008634",
148
+ "128": "n04023962",
149
+ "129": "n04070727",
150
+ "130": "n04074963",
151
+ "131": "n04099969",
152
+ "132": "n04118538",
153
+ "133": "n04133789",
154
+ "134": "n04146614",
155
+ "135": "n04149813",
156
+ "136": "n04179913",
157
+ "137": "n04251144",
158
+ "138": "n04254777",
159
+ "139": "n04259630",
160
+ "140": "n04265275",
161
+ "141": "n04275548",
162
+ "142": "n04285008",
163
+ "143": "n04311004",
164
+ "144": "n04328186",
165
+ "145": "n04356056",
166
+ "146": "n04366367",
167
+ "147": "n04371430",
168
+ "148": "n04376876",
169
+ "149": "n04398044",
170
+ "150": "n04399382",
171
+ "151": "n04417672",
172
+ "152": "n04456115",
173
+ "153": "n04465666",
174
+ "154": "n04486054",
175
+ "155": "n04487081",
176
+ "156": "n04501370",
177
+ "157": "n04507155",
178
+ "158": "n04532106",
179
+ "159": "n04532670",
180
+ "160": "n04540053",
181
+ "161": "n04560804",
182
+ "162": "n04562935",
183
+ "163": "n04596742",
184
+ "164": "n04598010",
185
+ "165": "n06596364",
186
+ "166": "n07056680",
187
+ "167": "n07583066",
188
+ "168": "n07614500",
189
+ "169": "n07615774",
190
+ "170": "n07646821",
191
+ "171": "n07647870",
192
+ "172": "n07657664",
193
+ "173": "n07695742",
194
+ "174": "n07711569",
195
+ "175": "n07715103",
196
+ "176": "n07720875",
197
+ "177": "n07749582",
198
+ "178": "n07753592",
199
+ "179": "n07768694",
200
+ "180": "n07871810",
201
+ "181": "n07873807",
202
+ "182": "n07875152",
203
+ "183": "n07920052",
204
+ "184": "n07975909",
205
+ "185": "n08496334",
206
+ "186": "n08620881",
207
+ "187": "n08742578",
208
+ "188": "n09193705",
209
+ "189": "n09246464",
210
+ "190": "n09256479",
211
+ "191": "n09332890",
212
+ "192": "n09428293",
213
+ "193": "n12267677",
214
+ "194": "n12520864",
215
+ "195": "n13001041",
216
+ "196": "n13652335",
217
+ "197": "n13652994",
218
+ "198": "n13719102",
219
+ "199": "n14991210"
220
+ },
221
+ "image_size": 224,
222
+ "initializer_range": 0.02,
223
+ "label2id": {
224
+ "n01443537": 0,
225
+ "n01629819": 1,
226
+ "n01641577": 2,
227
+ "n01644900": 3,
228
+ "n01698640": 4,
229
+ "n01742172": 5,
230
+ "n01768244": 6,
231
+ "n01770393": 7,
232
+ "n01774384": 8,
233
+ "n01774750": 9,
234
+ "n01784675": 10,
235
+ "n01882714": 11,
236
+ "n01910747": 12,
237
+ "n01917289": 13,
238
+ "n01944390": 14,
239
+ "n01950731": 15,
240
+ "n01983481": 16,
241
+ "n01984695": 17,
242
+ "n02002724": 18,
243
+ "n02056570": 19,
244
+ "n02058221": 20,
245
+ "n02074367": 21,
246
+ "n02094433": 22,
247
+ "n02099601": 23,
248
+ "n02099712": 24,
249
+ "n02106662": 25,
250
+ "n02113799": 26,
251
+ "n02123045": 27,
252
+ "n02123394": 28,
253
+ "n02124075": 29,
254
+ "n02125311": 30,
255
+ "n02129165": 31,
256
+ "n02132136": 32,
257
+ "n02165456": 33,
258
+ "n02226429": 34,
259
+ "n02231487": 35,
260
+ "n02233338": 36,
261
+ "n02236044": 37,
262
+ "n02268443": 38,
263
+ "n02279972": 39,
264
+ "n02281406": 40,
265
+ "n02321529": 41,
266
+ "n02364673": 42,
267
+ "n02395406": 43,
268
+ "n02403003": 44,
269
+ "n02410509": 45,
270
+ "n02415577": 46,
271
+ "n02423022": 47,
272
+ "n02437312": 48,
273
+ "n02480495": 49,
274
+ "n02481823": 50,
275
+ "n02486410": 51,
276
+ "n02504458": 52,
277
+ "n02509815": 53,
278
+ "n02666347": 54,
279
+ "n02669723": 55,
280
+ "n02699494": 56,
281
+ "n02769748": 57,
282
+ "n02788148": 58,
283
+ "n02791270": 59,
284
+ "n02793495": 60,
285
+ "n02795169": 61,
286
+ "n02802426": 62,
287
+ "n02808440": 63,
288
+ "n02814533": 64,
289
+ "n02814860": 65,
290
+ "n02815834": 66,
291
+ "n02823428": 67,
292
+ "n02837789": 68,
293
+ "n02841315": 69,
294
+ "n02843684": 70,
295
+ "n02883205": 71,
296
+ "n02892201": 72,
297
+ "n02909870": 73,
298
+ "n02917067": 74,
299
+ "n02927161": 75,
300
+ "n02948072": 76,
301
+ "n02950826": 77,
302
+ "n02963159": 78,
303
+ "n02977058": 79,
304
+ "n02988304": 80,
305
+ "n03014705": 81,
306
+ "n03026506": 82,
307
+ "n03042490": 83,
308
+ "n03085013": 84,
309
+ "n03089624": 85,
310
+ "n03100240": 86,
311
+ "n03126707": 87,
312
+ "n03160309": 88,
313
+ "n03179701": 89,
314
+ "n03201208": 90,
315
+ "n03255030": 91,
316
+ "n03355925": 92,
317
+ "n03373237": 93,
318
+ "n03388043": 94,
319
+ "n03393912": 95,
320
+ "n03400231": 96,
321
+ "n03404251": 97,
322
+ "n03424325": 98,
323
+ "n03444034": 99,
324
+ "n03447447": 100,
325
+ "n03544143": 101,
326
+ "n03584254": 102,
327
+ "n03599486": 103,
328
+ "n03617480": 104,
329
+ "n03637318": 105,
330
+ "n03649909": 106,
331
+ "n03662601": 107,
332
+ "n03670208": 108,
333
+ "n03706229": 109,
334
+ "n03733131": 110,
335
+ "n03763968": 111,
336
+ "n03770439": 112,
337
+ "n03796401": 113,
338
+ "n03814639": 114,
339
+ "n03837869": 115,
340
+ "n03838899": 116,
341
+ "n03854065": 117,
342
+ "n03891332": 118,
343
+ "n03902125": 119,
344
+ "n03930313": 120,
345
+ "n03937543": 121,
346
+ "n03970156": 122,
347
+ "n03977966": 123,
348
+ "n03980874": 124,
349
+ "n03983396": 125,
350
+ "n03992509": 126,
351
+ "n04008634": 127,
352
+ "n04023962": 128,
353
+ "n04070727": 129,
354
+ "n04074963": 130,
355
+ "n04099969": 131,
356
+ "n04118538": 132,
357
+ "n04133789": 133,
358
+ "n04146614": 134,
359
+ "n04149813": 135,
360
+ "n04179913": 136,
361
+ "n04251144": 137,
362
+ "n04254777": 138,
363
+ "n04259630": 139,
364
+ "n04265275": 140,
365
+ "n04275548": 141,
366
+ "n04285008": 142,
367
+ "n04311004": 143,
368
+ "n04328186": 144,
369
+ "n04356056": 145,
370
+ "n04366367": 146,
371
+ "n04371430": 147,
372
+ "n04376876": 148,
373
+ "n04398044": 149,
374
+ "n04399382": 150,
375
+ "n04417672": 151,
376
+ "n04456115": 152,
377
+ "n04465666": 153,
378
+ "n04486054": 154,
379
+ "n04487081": 155,
380
+ "n04501370": 156,
381
+ "n04507155": 157,
382
+ "n04532106": 158,
383
+ "n04532670": 159,
384
+ "n04540053": 160,
385
+ "n04560804": 161,
386
+ "n04562935": 162,
387
+ "n04596742": 163,
388
+ "n04598010": 164,
389
+ "n06596364": 165,
390
+ "n07056680": 166,
391
+ "n07583066": 167,
392
+ "n07614500": 168,
393
+ "n07615774": 169,
394
+ "n07646821": 170,
395
+ "n07647870": 171,
396
+ "n07657664": 172,
397
+ "n07695742": 173,
398
+ "n07711569": 174,
399
+ "n07715103": 175,
400
+ "n07720875": 176,
401
+ "n07749582": 177,
402
+ "n07753592": 178,
403
+ "n07768694": 179,
404
+ "n07871810": 180,
405
+ "n07873807": 181,
406
+ "n07875152": 182,
407
+ "n07920052": 183,
408
+ "n07975909": 184,
409
+ "n08496334": 185,
410
+ "n08620881": 186,
411
+ "n08742578": 187,
412
+ "n09193705": 188,
413
+ "n09246464": 189,
414
+ "n09256479": 190,
415
+ "n09332890": 191,
416
+ "n09428293": 192,
417
+ "n12267677": 193,
418
+ "n12520864": 194,
419
+ "n13001041": 195,
420
+ "n13652335": 196,
421
+ "n13652994": 197,
422
+ "n13719102": 198,
423
+ "n14991210": 199
424
+ },
425
+ "layer_norm_eps": 1e-05,
426
+ "mlp_ratio": 4.0,
427
+ "model_type": "swin",
428
+ "num_channels": 3,
429
+ "num_heads": [
430
+ 3,
431
+ 6,
432
+ 12,
433
+ 24
434
+ ],
435
+ "num_layers": 4,
436
+ "out_features": [
437
+ "stage4"
438
+ ],
439
+ "out_indices": [
440
+ 4
441
+ ],
442
+ "patch_size": 4,
443
+ "path_norm": true,
444
+ "problem_type": "single_label_classification",
445
+ "qkv_bias": true,
446
+ "stage_names": [
447
+ "stem",
448
+ "stage1",
449
+ "stage2",
450
+ "stage3",
451
+ "stage4"
452
+ ],
453
+ "torch_dtype": "float32",
454
+ "transformers_version": "4.28.0",
455
+ "use_absolute_embeddings": false,
456
+ "window_size": 7
457
+ }
preprocessor_config.json ADDED
@@ -0,0 +1,22 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "do_normalize": true,
3
+ "do_rescale": true,
4
+ "do_resize": true,
5
+ "image_mean": [
6
+ 0.485,
7
+ 0.456,
8
+ 0.406
9
+ ],
10
+ "image_processor_type": "ViTImageProcessor",
11
+ "image_std": [
12
+ 0.229,
13
+ 0.224,
14
+ 0.225
15
+ ],
16
+ "resample": 3,
17
+ "rescale_factor": 0.00392156862745098,
18
+ "size": {
19
+ "height": 224,
20
+ "width": 224
21
+ }
22
+ }
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4dbd39de2c26e05660263166f5809cb20b8af0992b599b114b32d14e1b23747f
3
+ size 111003889
training_args.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2a30e523bdf50d0e8a0885a0132660bca332f7f176d6f698d32864b2233f7480
3
+ size 3643