---
# mergekit merge configuration.
# merge_method: passthrough with multiple slices — layers from each listed
# model are stacked in order rather than averaged (a "frankenmerge").
# NOTE(review): all source models appear to be Mistral-7B derivatives
# (32-layer base); layer_range values are [start, end) layer indices per
# mergekit convention — confirm against the mergekit version in use.
slices:
  # Full 32-layer backbone from the Saiga fine-tune.
  - sources:
      - model: Gaivoronsky/Mistral-7B-Saiga
        layer_range: [0, 32]
  # Upper layers appended from additional fine-tunes.
  - sources:
      - model: HuggingFaceH4/mistral-7b-grok
        layer_range: [24, 32]
  - sources:
      - model: NousResearch/Yarn-Mistral-7b-128k
        layer_range: [26, 32]
  - sources:
      - model: OpenBuddy/openbuddy-mistral2-7b-v20.3-32k
        layer_range: [26, 32]
  - sources:
      - model: ajibawa-2023/Code-Mistral-7B
        layer_range: [28, 32]

# Concatenate slices verbatim; no weight interpolation.
merge_method: passthrough
# Output tensor dtype for the merged checkpoint.
dtype: bfloat16