Update app.py
Serve customer chatbot stevugnin/llama-2-7b-bics-multi_woz_v22
This model is based on meta-llama/Llama-2-7b-chat-hf and was fine-tuned on MultiWOZ 2.2.
app.py
CHANGED
@@ -34,7 +34,7 @@ if not torch.cuda.is_available():
 
 
 if torch.cuda.is_available():
-    model_id = "
+    model_id = "stevugnin/llama-2-7b-bics-multi_woz_v22"
     model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype=torch.float16, device_map="auto")
     tokenizer = AutoTokenizer.from_pretrained(model_id)
     tokenizer.use_default_system_prompt = False
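
For context, a minimal sketch (not part of app.py) of loading the fine-tuned checkpoint and generating one reply with the transformers API. The model_id and loading calls mirror the diff above; the prompt text and generation settings (max_new_tokens, do_sample) are assumptions for illustration, not taken from the Space.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "stevugnin/llama-2-7b-bics-multi_woz_v22"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id, torch_dtype=torch.float16, device_map="auto"
)

# Hypothetical single-turn prompt; the Space's actual chat template is not shown in this diff.
prompt = "I need to book a table for two at an Italian restaurant tonight."
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
outputs = model.generate(**inputs, max_new_tokens=128, do_sample=False)
# Print only the newly generated tokens, skipping the echoed prompt.
print(tokenizer.decode(outputs[0][inputs["input_ids"].shape[-1]:], skip_special_tokens=True))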