Spaces: Runtime error
FlawedLLM committed: Update app.py
app.py
CHANGED
@@ -25,9 +25,12 @@ model_id = "FlawedLLM/BhashiniLLM"
 
 # Load the base model (the one you fine-tuned with LoRA)
 base_model = AutoModelForCausalLM.from_pretrained(model_id, device_map='auto') # Load in 8-bit for efficiency
+for param in base_model.parameters():
+    param.data = param.data.to(torch.float16) # or torch.float32
 
 # Load the LoRA adapter weights
 model = PeftModel.from_pretrained(base_model, model_id)
+
 tokenizer = AutoTokenizer.from_pretrained(model_id)
 
 
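For reference, here is a minimal self-contained sketch of the loading code as it stands after this commit. The imports and the placement of model_id are assumptions, since only the hunk above is visible; the in-place cast to torch.float16 appears intended to keep the base weights in a consistent dtype before the LoRA adapter is attached.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

model_id = "FlawedLLM/BhashiniLLM"

# Load the base model (the one fine-tuned with LoRA); device_map='auto'
# spreads the weights across the available devices
base_model = AutoModelForCausalLM.from_pretrained(model_id, device_map='auto')

# Cast the base weights to half precision before attaching the adapter
# (assumed intent of the added loop; float32 is the alternative noted in the diff)
for param in base_model.parameters():
    param.data = param.data.to(torch.float16)  # or torch.float32

# Load the LoRA adapter weights on top of the base model
model = PeftModel.from_pretrained(base_model, model_id)

tokenizer = AutoTokenizer.from_pretrained(model_id)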