# Anthesis_7B / mergekit_config.yml
models:
  - model: ResplendentAI/Paradigm_Shift_7B
    # no parameters necessary for base model
  - model: rmdhirr/Foxglove_7B
    parameters:
      density: 0.60
      weight: 0.40
merge_method: dare_ties
base_model: ResplendentAI/Paradigm_Shift_7B
parameters:
  int8_mask: true
dtype: bfloat16
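
# dare_ties merges Foxglove_7B onto the Paradigm_Shift_7B base: DARE randomly
# drops roughly 40% of Foxglove's delta parameters (density: 0.60) and rescales
# the rest, TIES-style sign election resolves conflicts, and the surviving
# deltas are added at weight 0.40. int8_mask stores intermediate masks in int8
# to reduce memory use. One way to run this config, assuming the mergekit CLI
# is installed and the output path is a placeholder:
#   mergekit-yaml mergekit_config.yml ./output-model-directory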