Mamba2InLlama_0_50:
  prompt_template: "templates/llama3.txt"
  fn_completions: "huggingface_local_completions"
  completions_kwargs:
    model_name: "JunxiongWang/Mamba2InLlama_0_50"
    model_kwargs:
      torch_dtype: 'bfloat16'
    max_new_tokens: 2048
    temperature: 0.7
    top_p: 1.0
    do_sample: True
  pretty_name: "Mamba2 0.5 From meta-llama/Meta-Llama-3-8B-Instruct"
  link: "https://huggingface.co/JunxiongWang/Mamba2InLlama_0_50"
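# Usage sketch (an assumption: these keys follow AlpacaEval's model_configs format,
# with completions generated locally via `huggingface_local_completions` using the
# decoding parameters above). If this entry is registered under that name, the model
# could be evaluated with the AlpacaEval CLI, for example:
#   alpaca_eval evaluate_from_model --model_configs 'Mamba2InLlama_0_50'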