import torch
from .arch.modeling_mmamba import MeteorMambaForCausalLM

def load_mmamba(link):
    """Load the Meteor-Mamba causal LM from a Hugging Face hub id or local path."""

    # keyword arguments forwarded to Hugging Face `from_pretrained`
    huggingface_config = dict(
        ignore_mismatched_sizes=True,  # tolerate checkpoint/model shape mismatches
        torch_dtype=torch.float32,     # load weights in fp32
        low_cpu_mem_usage=True,        # stream weights to reduce peak CPU memory
    )

    # instantiate the Meteor Mamba model with the fp32 config above
    mmamba = MeteorMambaForCausalLM.from_pretrained(link, **huggingface_config)

    return mmamba
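
# Usage sketch, kept as comments because this module uses a relative import and
# is meant to be imported from its parent package. The package path and hub id
# below are assumptions, not confirmed names; substitute any valid repo id or
# local checkpoint directory:
#
#   from meteor.load_mmamba import load_mmamba  # assumed package layout
#
#   mmamba = load_mmamba("BK-Lee/Meteor-Mamba")  # assumed hub id or local path
#   mmamba = mmamba.eval()                       # switch to inference mode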