Update app.py
app.py CHANGED
@@ -52,9 +52,11 @@ def format_prompt(prompt, retrieved_documents, k):
     return PROMPT
 
 def generate(formatted_prompt):
-
-
-
+    # Combine the system prompt and the formatted prompt into a single string
+    prompt_text = f"{SYS_PROMPT} {formatted_prompt}"
+    # Tokenize the prompt
+    input_ids = tokenizer(prompt_text, return_tensors="pt", padding=True).input_ids.to(accelerator.device)
+    # Generate a response
     outputs = model.generate(
         input_ids,
         max_new_tokens=1024,
@@ -63,7 +65,10 @@ def generate(formatted_prompt):
         temperature=0.6,
         top_p=0.9
     )
-
+    # Decode the generated tokens into the response text
+    response = tokenizer.decode(outputs[0][input_ids.shape[-1]:], skip_special_tokens=True)
+    return response
+
 
 def rag_chatbot_interface(prompt: str, k: int = 2):
     scores, retrieved_documents = search(prompt, k)
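
For context, here is a minimal, self-contained sketch of the updated `generate()` path with the surrounding setup wired in. `MODEL_ID`, `SYS_PROMPT`, and the pad-token fallback are illustrative assumptions; the Space's actual model, tokenizer, and `accelerator` are created elsewhere in `app.py` (outside this diff), and `do_sample=True` is added here so that `temperature` and `top_p` actually take effect.

```python
# Sketch only: placeholder model id and system prompt; the Space's own
# setup lives elsewhere in app.py and is not shown in this diff.
import torch
from accelerate import Accelerator
from transformers import AutoModelForCausalLM, AutoTokenizer

MODEL_ID = "your-org/your-causal-lm"  # placeholder: any causal LM id
SYS_PROMPT = "Answer the question using only the retrieved documents."  # placeholder

accelerator = Accelerator()
tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
if tokenizer.pad_token is None:
    # padding=True below requires a pad token; fall back to EOS
    tokenizer.pad_token = tokenizer.eos_token
model = AutoModelForCausalLM.from_pretrained(MODEL_ID, torch_dtype=torch.bfloat16)
model = accelerator.prepare(model)

def generate(formatted_prompt: str) -> str:
    # Combine the system prompt and the formatted prompt into a single string
    prompt_text = f"{SYS_PROMPT} {formatted_prompt}"
    # Tokenize and move the input ids to the accelerator's device
    input_ids = tokenizer(prompt_text, return_tensors="pt", padding=True).input_ids.to(accelerator.device)
    # Sample a response
    outputs = model.generate(
        input_ids,
        max_new_tokens=1024,
        do_sample=True,  # assumption: sampling enabled so temperature/top_p apply
        temperature=0.6,
        top_p=0.9,
    )
    # Keep only the newly generated tokens and decode them into text
    return tokenizer.decode(outputs[0][input_ids.shape[-1]:], skip_special_tokens=True)
```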