{
"_name_or_path": "microsoft/swin-tiny-patch4-window7-224",
"architectures": [
"SwinForImageClassification"
],
"attention_probs_dropout_prob": 0.0,
"depths": [
2,
2,
6,
2
],
"drop_path_rate": 0.1,
"embed_dim": 96,
"encoder_stride": 32,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.0,
"hidden_size": 768,
"id2label": {
"0": "n01443537",
"1": "n01629819",
"2": "n01641577",
"3": "n01644900",
"4": "n01698640",
"5": "n01742172",
"6": "n01768244",
"7": "n01770393",
"8": "n01774384",
"9": "n01774750",
"10": "n01784675",
"11": "n01882714",
"12": "n01910747",
"13": "n01917289",
"14": "n01944390",
"15": "n01950731",
"16": "n01983481",
"17": "n01984695",
"18": "n02002724",
"19": "n02056570",
"20": "n02058221",
"21": "n02074367",
"22": "n02094433",
"23": "n02099601",
"24": "n02099712",
"25": "n02106662",
"26": "n02113799",
"27": "n02123045",
"28": "n02123394",
"29": "n02124075",
"30": "n02125311",
"31": "n02129165",
"32": "n02132136",
"33": "n02165456",
"34": "n02226429",
"35": "n02231487",
"36": "n02233338",
"37": "n02236044",
"38": "n02268443",
"39": "n02279972",
"40": "n02281406",
"41": "n02321529",
"42": "n02364673",
"43": "n02395406",
"44": "n02403003",
"45": "n02410509",
"46": "n02415577",
"47": "n02423022",
"48": "n02437312",
"49": "n02480495",
"50": "n02481823",
"51": "n02486410",
"52": "n02504458",
"53": "n02509815",
"54": "n02666347",
"55": "n02669723",
"56": "n02699494",
"57": "n02769748",
"58": "n02788148",
"59": "n02791270",
"60": "n02793495",
"61": "n02795169",
"62": "n02802426",
"63": "n02808440",
"64": "n02814533",
"65": "n02814860",
"66": "n02815834",
"67": "n02823428",
"68": "n02837789",
"69": "n02841315",
"70": "n02843684",
"71": "n02883205",
"72": "n02892201",
"73": "n02909870",
"74": "n02917067",
"75": "n02927161",
"76": "n02948072",
"77": "n02950826",
"78": "n02963159",
"79": "n02977058",
"80": "n02988304",
"81": "n03014705",
"82": "n03026506",
"83": "n03042490",
"84": "n03085013",
"85": "n03089624",
"86": "n03100240",
"87": "n03126707",
"88": "n03160309",
"89": "n03179701",
"90": "n03201208",
"91": "n03255030",
"92": "n03355925",
"93": "n03373237",
"94": "n03388043",
"95": "n03393912",
"96": "n03400231",
"97": "n03404251",
"98": "n03424325",
"99": "n03444034",
"100": "n03447447",
"101": "n03544143",
"102": "n03584254",
"103": "n03599486",
"104": "n03617480",
"105": "n03637318",
"106": "n03649909",
"107": "n03662601",
"108": "n03670208",
"109": "n03706229",
"110": "n03733131",
"111": "n03763968",
"112": "n03770439",
"113": "n03796401",
"114": "n03814639",
"115": "n03837869",
"116": "n03838899",
"117": "n03854065",
"118": "n03891332",
"119": "n03902125",
"120": "n03930313",
"121": "n03937543",
"122": "n03970156",
"123": "n03977966",
"124": "n03980874",
"125": "n03983396",
"126": "n03992509",
"127": "n04008634",
"128": "n04023962",
"129": "n04070727",
"130": "n04074963",
"131": "n04099969",
"132": "n04118538",
"133": "n04133789",
"134": "n04146614",
"135": "n04149813",
"136": "n04179913",
"137": "n04251144",
"138": "n04254777",
"139": "n04259630",
"140": "n04265275",
"141": "n04275548",
"142": "n04285008",
"143": "n04311004",
"144": "n04328186",
"145": "n04356056",
"146": "n04366367",
"147": "n04371430",
"148": "n04376876",
"149": "n04398044",
"150": "n04399382",
"151": "n04417672",
"152": "n04456115",
"153": "n04465666",
"154": "n04486054",
"155": "n04487081",
"156": "n04501370",
"157": "n04507155",
"158": "n04532106",
"159": "n04532670",
"160": "n04540053",
"161": "n04560804",
"162": "n04562935",
"163": "n04596742",
"164": "n04598010",
"165": "n06596364",
"166": "n07056680",
"167": "n07583066",
"168": "n07614500",
"169": "n07615774",
"170": "n07646821",
"171": "n07647870",
"172": "n07657664",
"173": "n07695742",
"174": "n07711569",
"175": "n07715103",
"176": "n07720875",
"177": "n07749582",
"178": "n07753592",
"179": "n07768694",
"180": "n07871810",
"181": "n07873807",
"182": "n07875152",
"183": "n07920052",
"184": "n07975909",
"185": "n08496334",
"186": "n08620881",
"187": "n08742578",
"188": "n09193705",
"189": "n09246464",
"190": "n09256479",
"191": "n09332890",
"192": "n09428293",
"193": "n12267677",
"194": "n12520864",
"195": "n13001041",
"196": "n13652335",
"197": "n13652994",
"198": "n13719102",
"199": "n14991210"
},
"image_size": 224,
"initializer_range": 0.02,
"label2id": {
"n01443537": 0,
"n01629819": 1,
"n01641577": 2,
"n01644900": 3,
"n01698640": 4,
"n01742172": 5,
"n01768244": 6,
"n01770393": 7,
"n01774384": 8,
"n01774750": 9,
"n01784675": 10,
"n01882714": 11,
"n01910747": 12,
"n01917289": 13,
"n01944390": 14,
"n01950731": 15,
"n01983481": 16,
"n01984695": 17,
"n02002724": 18,
"n02056570": 19,
"n02058221": 20,
"n02074367": 21,
"n02094433": 22,
"n02099601": 23,
"n02099712": 24,
"n02106662": 25,
"n02113799": 26,
"n02123045": 27,
"n02123394": 28,
"n02124075": 29,
"n02125311": 30,
"n02129165": 31,
"n02132136": 32,
"n02165456": 33,
"n02226429": 34,
"n02231487": 35,
"n02233338": 36,
"n02236044": 37,
"n02268443": 38,
"n02279972": 39,
"n02281406": 40,
"n02321529": 41,
"n02364673": 42,
"n02395406": 43,
"n02403003": 44,
"n02410509": 45,
"n02415577": 46,
"n02423022": 47,
"n02437312": 48,
"n02480495": 49,
"n02481823": 50,
"n02486410": 51,
"n02504458": 52,
"n02509815": 53,
"n02666347": 54,
"n02669723": 55,
"n02699494": 56,
"n02769748": 57,
"n02788148": 58,
"n02791270": 59,
"n02793495": 60,
"n02795169": 61,
"n02802426": 62,
"n02808440": 63,
"n02814533": 64,
"n02814860": 65,
"n02815834": 66,
"n02823428": 67,
"n02837789": 68,
"n02841315": 69,
"n02843684": 70,
"n02883205": 71,
"n02892201": 72,
"n02909870": 73,
"n02917067": 74,
"n02927161": 75,
"n02948072": 76,
"n02950826": 77,
"n02963159": 78,
"n02977058": 79,
"n02988304": 80,
"n03014705": 81,
"n03026506": 82,
"n03042490": 83,
"n03085013": 84,
"n03089624": 85,
"n03100240": 86,
"n03126707": 87,
"n03160309": 88,
"n03179701": 89,
"n03201208": 90,
"n03255030": 91,
"n03355925": 92,
"n03373237": 93,
"n03388043": 94,
"n03393912": 95,
"n03400231": 96,
"n03404251": 97,
"n03424325": 98,
"n03444034": 99,
"n03447447": 100,
"n03544143": 101,
"n03584254": 102,
"n03599486": 103,
"n03617480": 104,
"n03637318": 105,
"n03649909": 106,
"n03662601": 107,
"n03670208": 108,
"n03706229": 109,
"n03733131": 110,
"n03763968": 111,
"n03770439": 112,
"n03796401": 113,
"n03814639": 114,
"n03837869": 115,
"n03838899": 116,
"n03854065": 117,
"n03891332": 118,
"n03902125": 119,
"n03930313": 120,
"n03937543": 121,
"n03970156": 122,
"n03977966": 123,
"n03980874": 124,
"n03983396": 125,
"n03992509": 126,
"n04008634": 127,
"n04023962": 128,
"n04070727": 129,
"n04074963": 130,
"n04099969": 131,
"n04118538": 132,
"n04133789": 133,
"n04146614": 134,
"n04149813": 135,
"n04179913": 136,
"n04251144": 137,
"n04254777": 138,
"n04259630": 139,
"n04265275": 140,
"n04275548": 141,
"n04285008": 142,
"n04311004": 143,
"n04328186": 144,
"n04356056": 145,
"n04366367": 146,
"n04371430": 147,
"n04376876": 148,
"n04398044": 149,
"n04399382": 150,
"n04417672": 151,
"n04456115": 152,
"n04465666": 153,
"n04486054": 154,
"n04487081": 155,
"n04501370": 156,
"n04507155": 157,
"n04532106": 158,
"n04532670": 159,
"n04540053": 160,
"n04560804": 161,
"n04562935": 162,
"n04596742": 163,
"n04598010": 164,
"n06596364": 165,
"n07056680": 166,
"n07583066": 167,
"n07614500": 168,
"n07615774": 169,
"n07646821": 170,
"n07647870": 171,
"n07657664": 172,
"n07695742": 173,
"n07711569": 174,
"n07715103": 175,
"n07720875": 176,
"n07749582": 177,
"n07753592": 178,
"n07768694": 179,
"n07871810": 180,
"n07873807": 181,
"n07875152": 182,
"n07920052": 183,
"n07975909": 184,
"n08496334": 185,
"n08620881": 186,
"n08742578": 187,
"n09193705": 188,
"n09246464": 189,
"n09256479": 190,
"n09332890": 191,
"n09428293": 192,
"n12267677": 193,
"n12520864": 194,
"n13001041": 195,
"n13652335": 196,
"n13652994": 197,
"n13719102": 198,
"n14991210": 199
},
"layer_norm_eps": 1e-05,
"mlp_ratio": 4.0,
"model_type": "swin",
"num_channels": 3,
"num_heads": [
3,
6,
12,
24
],
"num_layers": 4,
"out_features": [
"stage4"
],
"out_indices": [
4
],
"patch_size": 4,
"path_norm": true,
"problem_type": "single_label_classification",
"qkv_bias": true,
"stage_names": [
"stem",
"stage1",
"stage2",
"stage3",
"stage4"
],
"torch_dtype": "float32",
"transformers_version": "4.28.0",
"use_absolute_embeddings": false,
"window_size": 7
}