Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -1,15 +1,17 @@
|
|
1 |
import gradio as gr
|
2 |
import transformers
|
3 |
import torch
|
|
|
4 |
|
5 |
model_id = "JerniganLab/interviews-and-qa"
|
6 |
|
7 |
pipeline = transformers.pipeline(
|
8 |
"text-generation",
|
9 |
-
model=
|
10 |
model_kwargs={"torch_dtype": torch.bfloat16},
|
11 |
device="cuda",
|
12 |
)
|
|
|
13 |
|
14 |
def chat_function(message, history, system_prompt, max_new_tokens, temperature):
|
15 |
messages = [{"role":"system","content":system_prompt},
|
|
|
import gradio as gr
import transformers
import torch
from peft import PeftModel

# Hub ID of the fine-tuned PEFT/LoRA adapter to apply on top of the base model.
model_id = "JerniganLab/interviews-and-qa"

# Load the base instruct model as a text-generation pipeline.
# NOTE(review): device="cuda" assumes a GPU runtime — confirm the Space hardware.
pipeline = transformers.pipeline(
    "text-generation",
    model="meta-llama/Meta-Llama-3-8B-Instruct",
    model_kwargs={"torch_dtype": torch.bfloat16},
    device="cuda",
)

# FIX: the original line was
#     pipeline.model = PeftModel.from_pretrained(model=base_model, model_id)
# which is a SyntaxError (positional argument follows keyword argument) and
# references `base_model`, a name that is never defined in this file.
# The base model the adapter must wrap is the one the pipeline just loaded,
# so wrap `pipeline.model` with the adapter weights identified by `model_id`.
pipeline.model = PeftModel.from_pretrained(pipeline.model, model_id)
|
15 |
|
16 |
def chat_function(message, history, system_prompt, max_new_tokens, temperature):
|
17 |
messages = [{"role":"system","content":system_prompt},
|