{
"alignments": "linear",
"architectures": [
"LangBridgeModel"
],
"dim_enc": 1024,
"dim_lm": 4096,
"enc": "castorini/afriteva_v2_large",
"freeze_encoder": true,
"freeze_language_model": true,
"lm": "llama-lang-adapt/MetaMath-Mistral-LayerSpecific",
"torch_dtype": "bfloat16",
"transformers_version": "4.37.2"
}