```yaml
merge_method: breadcrumbs_ties
base_model: meta-llama/Llama-3.2-3B
tokenizer_source: PJMixers-Dev/LLaMa-3.2-Instruct-JankMix-v0.1-SFT-3B
dtype: bfloat16
parameters:
  normalize: true
models:
  - model: meta-llama/Llama-3.2-3B-Instruct
    parameters:
      weight: 1
      density: 0.9
      gamma: 0.01
  - model: PJMixers-Dev/LLaMa-3.2-Instruct-JankMix-v0.1-SFT-3B
    parameters:
      weight: 1
      density: 0.9
      gamma: 0.01
```
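
A merge like this is normally produced with mergekit, either through the `mergekit-yaml` CLI or its Python entry points. The snippet below is a minimal sketch of the Python route, assuming the config above is saved to a local file; the file name `breadcrumbs_ties.yaml`, the output directory, and the specific `MergeOptions` flags shown are illustrative placeholders rather than settings taken from this model card.

```python
# Sketch: run the breadcrumbs_ties merge defined above with mergekit's Python API.
# Paths are placeholders; MergeOptions fields follow mergekit's documented usage
# and may vary between versions.
import yaml
import torch

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

CONFIG_YML = "breadcrumbs_ties.yaml"   # the YAML config shown above (placeholder path)
OUTPUT_PATH = "./merged-llama-3.2-3b"  # placeholder output directory

with open(CONFIG_YML, "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

run_merge(
    merge_config,
    out_path=OUTPUT_PATH,
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # use a GPU if one is available
        copy_tokenizer=True,             # carry the tokenizer_source into the output
    ),
)
```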