# orthogonal-2x7B-base / mergekit_moe_config.yml
# Provenance: Hugging Face upload ("Upload 10 files") by LoSboccacc,
# commit bb41882 (verified), 567 bytes.
---
# mergekit Mixture-of-Experts merge: two Mistral-7B experts routed per-prompt.
base_model: mistralai/Mistral-7B-Instruct-v0.2

# Router gating strategy — one of "hidden", "cheap_embed", or "random".
gate_mode: hidden

# Output dtype of the merged model (float32, float16, or bfloat16).
dtype: bfloat16

experts:
  # Roleplay-focused expert.
  - source_model: SanjiWatsuki/Silicon-Maid-7B
    positive_prompts:
      - 'roleplay'
  # General chat expert (same checkpoint as base_model).
  - source_model: mistralai/Mistral-7B-Instruct-v0.2
    positive_prompts:
      - 'chat'

# ChatML prompt template, kept for reference (not part of the mergekit schema):
#"{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}"