raccoote committed (verified) · Commit 1d49bb9 · Parent(s): 05948b3

Update app.py
Files changed (1): app.py (+3 -4)
app.py CHANGED
@@ -14,13 +14,12 @@ if hasattr(config, 'rope_scaling'):
         'factor': 8.0  # Adjust this value based on your needs
     }
 
-# Step 3: Load the tokenizer and base model using the modified configuration
 tokenizer = AutoTokenizer.from_pretrained(base_model_name)
 base_model = AutoModelForCausalLM.from_pretrained(
-    base_model_name,
+    base_model_name,
     config=config,
-    load_in_4bit=False,  # Ensure quantization is not attempted
-    load_in_8bit=False,  # Ensure quantization is not attempted
+    torch_dtype=torch.float32,  # Ensure it loads in full precision
+    device_map="auto"  # This ensures it loads correctly on CPU if GPU is not available
 )
 
 # Step 4: Load the LoRA adapter from the local files or Hugging Face repository
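For context, a minimal sketch of the loading flow in app.py after this change, assuming the usual transformers/peft imports. The model id, the rope_scaling 'type' key, and the adapter path below are placeholders, not values taken from the repository; only the lines shown in the diff above are confirmed.

import torch
from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

base_model_name = "your-org/your-base-model"  # placeholder; the actual model id is not shown in the diff
adapter_path = "path/to/lora_adapter"         # placeholder for the Step 4 adapter location

# Earlier steps (not shown in the diff): load and patch the config's rope scaling
config = AutoConfig.from_pretrained(base_model_name)
if hasattr(config, 'rope_scaling'):
    config.rope_scaling = {
        'type': 'linear',  # assumption; only the 'factor' key is visible in the diff
        'factor': 8.0      # Adjust this value based on your needs
    }

# Load the tokenizer and base model in full precision (the change made in this commit)
tokenizer = AutoTokenizer.from_pretrained(base_model_name)
base_model = AutoModelForCausalLM.from_pretrained(
    base_model_name,
    config=config,
    torch_dtype=torch.float32,  # full precision, no 4/8-bit quantization
    device_map="auto",          # place weights on GPU if available, otherwise CPU
)

# Step 4: attach the LoRA adapter on top of the base model
model = PeftModel.from_pretrained(base_model, adapter_path)

Replacing the load_in_4bit/load_in_8bit flags with torch_dtype=torch.float32 avoids any bitsandbytes quantization path, while device_map="auto" lets accelerate dispatch the weights and fall back to CPU when no GPU is present.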