Upload folder using huggingface_hub
- README.md +0 -3
- model.safetensors +2 -2
README.md CHANGED
@@ -81,9 +81,6 @@ automap = config_json['auto_map']
 torch.set_default_dtype(torch.bfloat16)
 model = AutoModelForCausalLM.from_config(config, trust_remote_code=True)
 torch.set_default_dtype(torch.float32)
-# according to source model, gat is in FP32
-for i in range(config.num_hidden_layers):
-    model.model.layers[i].block_sparse_moe.gate.float()
 if file_exists(filename="generation_config.json", repo_id=source_model_id, repo_type='model'):
     model.generation_config = GenerationConfig.from_pretrained(
         source_model_id, trust_remote_code=True,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:9144a495919a7e8bac13b7687422122068d55741598d5ff13648df632158603a
+size 5016928