merge_method: sce
models:
  - model: sam-paech/Llama-3.3-70B-Instruct-ftpo_1k
  - model: HPAI-BSC/Llama3.1-Aloe-Beta-70B
  - model: Writer/Palmyra-Med-70B-32K
  - model: schonsense/IPOplectic
base_model: sam-paech/Llama-3.3-70B-Instruct-ftpo_1k
parameters:
  select_topk: 0.25
  normalize: false
  int8_mask: true
dtype: float32
out_dtype: bfloat16
tokenizer:
  source: base
  pad_to_multiple_of: 8
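
A minimal sketch of running a config like this through mergekit's Python API (equivalent to invoking `mergekit-yaml` on the file). The config filename `sce_merge.yml` and the output directory are assumptions for illustration, not part of the original card:

```python
# Sketch only: assumes `pip install mergekit` and that the YAML above
# has been saved as sce_merge.yml; the output path is illustrative.
import yaml
import torch

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

# Parse the YAML merge definition into mergekit's config object.
with open("sce_merge.yml", "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Execute the SCE merge and write the merged model to the output directory.
run_merge(
    merge_config,
    "./llama-3.3-70b-sce-merge",          # output directory (example name)
    options=MergeOptions(
        cuda=torch.cuda.is_available(),    # use a GPU for the merge if present
        copy_tokenizer=True,               # write tokenizer files to the output
        lazy_unpickle=True,                # reduce peak RAM while loading shards
        low_cpu_memory=False,
    ),
)
```

The same run can be done from the shell with `mergekit-yaml sce_merge.yml ./llama-3.3-70b-sce-merge --cuda`.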