rodrigomasini committed on
Commit
3c27a6e
·
verified ·
1 Parent(s): 30cb4c4

Update helper.py

Browse files
Files changed (1) hide show
  1. helper.py +1 -1
helper.py CHANGED
@@ -39,7 +39,7 @@ def get_fn(model_path: str, **model_kwargs):
39
  # Fallback to default attention implementation
40
  model = AutoModelForCausalLM.from_pretrained(
41
  model_path,
42
- torch_dtype= torch.bfloat16
43
  device_map="auto",
44
  quantization_config=quantization_config,
45
  )
 
39
  # Fallback to default attention implementation
40
  model = AutoModelForCausalLM.from_pretrained(
41
  model_path,
42
+ torch_dtype= torch.bfloat16,
43
  device_map="auto",
44
  quantization_config=quantization_config,
45
  )