FantasticGNU committed on
Commit
5b27a77
·
1 Parent(s): 2f64416

Update model/openllama.py

Browse files
Files changed (1) hide show
  1. model/openllama.py +5 -3
model/openllama.py CHANGED
@@ -210,10 +210,12 @@ class OpenLLAMAPEFTModel(nn.Module):
210
  self.llama_model = AutoModelForCausalLM.from_config(config)
211
 
212
  device_map = infer_auto_device_map(self.llama_model, no_split_module_classes=["OPTDecoderLayer"], dtype="float16")
213
- self.llama_model = load_checkpoint_and_dispatch(self.llama_model, vicuna_ckpt_path, device_map=device_map, offload_folder="offload", offload_state_dict = True)
214
- self.llama_model.to(torch.float16)
 
 
215
  # try:
216
- # self.llama_model = AutoModelForCausalLM.from_pretrained(vicuna_ckpt_path, torch_dtype=torch.float16, device_map='auto', offload_folder="offload", offload_state_dict = True)
217
  # except:
218
  # pass
219
  # finally:
 
210
  self.llama_model = AutoModelForCausalLM.from_config(config)
211
 
212
  device_map = infer_auto_device_map(self.llama_model, no_split_module_classes=["OPTDecoderLayer"], dtype="float16")
213
+ print(device_map)
214
+
215
+ # self.llama_model = load_checkpoint_and_dispatch(self.llama_model, vicuna_ckpt_path, device_map=device_map, offload_folder="offload", offload_state_dict = True)
216
+ # self.llama_model.to(torch.float16)
217
  # try:
218
+ self.llama_model = AutoModelForCausalLM.from_pretrained(vicuna_ckpt_path, torch_dtype=torch.float16, device_map=device_map, offload_folder="offload", offload_state_dict = True)
219
  # except:
220
  # pass
221
  # finally: