Update app.py
app.py CHANGED
@@ -21,19 +21,27 @@ class Query(BaseModel):
 
 app = FastAPI(title="Financial Chatbot API")
 
-# Load base model
-base_model_name = "meta-llama/Llama-3.2-3B" # Update
+# Load the base model
+base_model_name = "meta-llama/Llama-3.2-3B" # Update if using a different base model
 model = AutoModelForCausalLM.from_pretrained(
     base_model_name,
     device_map="auto",
     trust_remote_code=True
 )
 
-# Load adapter from your checkpoint
+# Load adapter from your checkpoint with a workaround for 'eva_config'
 peft_model_id = "Phoenix21/llama-3-2-3b-finetuned-finance_checkpoint2"
-model = PeftModel.from_pretrained(model, peft_model_id)
+# Load the PEFT configuration first
+peft_config = PeftConfig.from_pretrained(peft_model_id)
+# Remove 'eva_config' if it exists in the configuration
+peft_config_dict = peft_config.to_dict()
+if "eva_config" in peft_config_dict:
+    peft_config_dict.pop("eva_config")
+peft_config = PeftConfig.from_dict(peft_config_dict)
+# Load the adapter using the filtered configuration
+model = PeftModel.from_pretrained(model, peft_model_id, config=peft_config)
 
-# Load tokenizer from base model
+# Load tokenizer from the base model
 tokenizer = AutoTokenizer.from_pretrained(base_model_name, trust_remote_code=True)
 tokenizer.pad_token = tokenizer.eos_token
 
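For comparison, below is a minimal alternative sketch of the same 'eva_config' workaround, assuming the checkpoint is a plain LoRA adapter. Instead of rebuilding a PeftConfig from its dict, it downloads adapter_config.json directly, keeps only the keys the locally installed LoraConfig dataclass accepts, and passes the resulting config to PeftModel.from_pretrained. The hf_hub_download and field-filtering steps are illustrative and are not part of the commit above.

# Alternative sketch of the 'eva_config' workaround (assumes a LoRA adapter).
import json
from dataclasses import fields

from huggingface_hub import hf_hub_download
from peft import LoraConfig, PeftModel
from transformers import AutoModelForCausalLM, AutoTokenizer

base_model_name = "meta-llama/Llama-3.2-3B"
peft_model_id = "Phoenix21/llama-3-2-3b-finetuned-finance_checkpoint2"

# Load the base model as in the commit.
model = AutoModelForCausalLM.from_pretrained(
    base_model_name,
    device_map="auto",
    trust_remote_code=True,
)

# Fetch the raw adapter config and keep only the fields the installed
# LoraConfig dataclass defines; this drops 'eva_config' and any other
# keys written by a newer peft release.
config_path = hf_hub_download(repo_id=peft_model_id, filename="adapter_config.json")
with open(config_path) as f:
    raw_config = json.load(f)
known_fields = {f.name for f in fields(LoraConfig)}
lora_config = LoraConfig(**{k: v for k, v in raw_config.items() if k in known_fields})

# Pass the pre-built config so peft does not parse the original JSON itself,
# which is typically where the unexpected-keyword error is raised.
model = PeftModel.from_pretrained(model, peft_model_id, config=lora_config)

tokenizer = AutoTokenizer.from_pretrained(base_model_name, trust_remote_code=True)
tokenizer.pad_token = tokenizer.eos_token

Filtering against the local dataclass fields also discards any other unknown keys a newer peft release may have saved, not just 'eva_config'; the simpler long-term fix is usually to upgrade the pinned peft version so the field is recognized natively.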