babylm committed on
Commit eccedac
1 parent: c475184

Upload config.json

Files changed (1):
  config.json (+3 -3)
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "ltg/ltg-bert-babylm",
+  "_name_or_path": "babylm/ltgbert-100m-2024",
   "architectures": [
     "LtgBertForMaskedLM"
   ],
@@ -7,7 +7,7 @@
   "auto_map": {
     "AutoConfig": "ltg/ltg-bert-babylm--configuration_ltgbert.LtgBertConfig",
     "AutoModelForMaskedLM": "ltg/ltg-bert-babylm--modeling_ltgbert.LtgBertForMaskedLM",
-    "AutoModelForSequenceClassification": "ltg/ltg-bert-babylm--modeling_ltgbert.LtgBertForSequenceClassification"
+    "AutoModelForSequenceClassification": "modeling_ltgbert.LtgBertForSequenceClassification"
   },
   "classifier_dropout": 0.2,
   "hidden_dropout_prob": 0.1,
@@ -22,6 +22,6 @@
   "pad_token_id": 4,
   "position_bucket_size": 32,
   "torch_dtype": "float32",
-  "transformers_version": "4.40.2",
+  "transformers_version": "4.43.3",
   "vocab_size": 16384
 }
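
For reference, a minimal loading sketch (assumed usage, not part of this commit): because auto_map routes the Auto* classes to custom code files (configuration_ltgbert.py, modeling_ltgbert.py), loading through transformers requires trust_remote_code=True. The repo id below is taken from the updated "_name_or_path" field.

from transformers import AutoConfig, AutoModelForMaskedLM

# Assumed repo id, copied from the updated "_name_or_path" in this commit.
repo_id = "babylm/ltgbert-100m-2024"

# auto_map points these Auto* classes at the custom LtgBert code shipped
# with the model, so trust_remote_code=True is needed to resolve them.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
model = AutoModelForMaskedLM.from_pretrained(repo_id, trust_remote_code=True)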