models:
  - model: alpindale/Mistral-7B-v0.2-hf # No parameters necessary for base model
  - model: mistralai/Mistral-7B-Instruct-v0.2
    parameters:
      density: 0.53
      weight: 0.4
  - model: cognitivecomputations/dolphin-2.8-mistral-7b-v02
    parameters:
      density: 0.53
      weight: 0.3
  - model: Locutusque/Hercules-4.0-Mistral-v0.2-7B
    parameters:
      density: 0.53
      weight: 0.3
merge_method: dare_ties
base_model: alpindale/Mistral-7B-v0.2-hf
parameters:
  int8_mask: true
dtype: bfloat16
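
This config merges three Mistral-7B-v0.2 fine-tunes into the base model with DARE-TIES: each fine-tune's delta from the base is randomly pruned to roughly 53% of its parameters (`density: 0.53`), rescaled, and combined using the listed weights (0.4 / 0.3 / 0.3) with TIES-style sign resolution, computing in `bfloat16` with `int8_mask` enabled. The merge is typically produced with the mergekit CLI (e.g. `mergekit-yaml config.yaml ./merged-mistral-7b-v0.2`; exact flags depend on your setup). Below is a minimal sketch of loading the resulting checkpoint for a quick sanity check; the local path `./merged-mistral-7b-v0.2` is an assumed output directory, not something defined by this config.

```python
# Minimal sketch: load the merged checkpoint and run a short generation.
# "./merged-mistral-7b-v0.2" is an assumed local output directory from the
# mergekit run, not a path specified by the config above.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

merged_path = "./merged-mistral-7b-v0.2"  # assumption: local merge output dir

tokenizer = AutoTokenizer.from_pretrained(merged_path)
model = AutoModelForCausalLM.from_pretrained(
    merged_path,
    torch_dtype=torch.bfloat16,  # matches the dtype used for the merge
    device_map="auto",
)

prompt = "Explain what a model merge is in one sentence."
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
outputs = model.generate(**inputs, max_new_tokens=64, do_sample=False)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```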