Spaces:
Running
on
Zero
Commit
·
bc93ed3
1
Parent(s):
116a714
try remote_code=False and device_map=auto
Browse files
- utils/models.py: +4 -4
utils/models.py
CHANGED
@@ -159,16 +159,16 @@ def run_inference(model_name, context, question):
|
|
159 |
if "bitnet" in model_name.lower():
|
160 |
bitnet_model = BitNetForCausalLM.from_pretrained(
|
161 |
model_name,
|
162 |
-
device_map="
|
163 |
torch_dtype=torch.bfloat16,
|
164 |
-
trust_remote_code=True,
|
165 |
)
|
166 |
pipe = pipeline(
|
167 |
"text-generation",
|
168 |
model=bitnet_model,
|
169 |
tokenizer=tokenizer,
|
170 |
-
device_map="
|
171 |
-
trust_remote_code=True,
|
172 |
torch_dtype=torch.bfloat16,
|
173 |
model_kwargs={
|
174 |
"attn_implementation": "eager",
|
|
|
159 |
if "bitnet" in model_name.lower():
|
160 |
bitnet_model = BitNetForCausalLM.from_pretrained(
|
161 |
model_name,
|
162 |
+
device_map="auto",
|
163 |
torch_dtype=torch.bfloat16,
|
164 |
+
#trust_remote_code=True,
|
165 |
)
|
166 |
pipe = pipeline(
|
167 |
"text-generation",
|
168 |
model=bitnet_model,
|
169 |
tokenizer=tokenizer,
|
170 |
+
device_map="auto",
|
171 |
+
#trust_remote_code=True,
|
172 |
torch_dtype=torch.bfloat16,
|
173 |
model_kwargs={
|
174 |
"attn_implementation": "eager",
|