Commit 26be916
1 Parent(s): a07e832
Update app.py
app.py CHANGED
@@ -3,7 +3,7 @@ import gradio as gr
 
 client = InferenceClient("mistralai/Mistral-7B-Instruct-v0.1")
 
-def format_prompt(message
+def format_prompt(message):
     prompt = "<s>"
     for user_prompt, bot_response in history:
         prompt += f"[INST] {user_prompt} [/INST]"
@@ -28,7 +28,7 @@ def generate(
         seed=42,
     )
 
-    formatted_prompt = format_prompt(prompt
+    formatted_prompt = format_prompt(prompt)
 
     stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
     output = ""
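For context, a minimal sketch of how the repaired calls could fit together, assuming the standard huggingface_hub InferenceClient streaming API and the Mistral [INST] prompt template. The history parameter, the extra generation settings, and the streaming loop are assumptions for illustration and are not part of this commit; the committed format_prompt(message) signature leaves history to be supplied elsewhere.

from huggingface_hub import InferenceClient

client = InferenceClient("mistralai/Mistral-7B-Instruct-v0.1")

# Assumed signature: the diff only shows the closing "):" being restored; passing
# history explicitly here is a guess based on the loop body visible in the hunk.
def format_prompt(message, history=()):
    prompt = "<s>"
    for user_prompt, bot_response in history:
        prompt += f"[INST] {user_prompt} [/INST]"
        prompt += f" {bot_response}</s> "
    prompt += f"[INST] {message} [/INST]"
    return prompt

def generate(prompt, history=(), max_new_tokens=256, temperature=0.7, seed=42):
    # Hypothetical generation settings; only seed=42 appears in the diff.
    generate_kwargs = dict(
        max_new_tokens=max_new_tokens,
        temperature=temperature,
        do_sample=True,
        seed=seed,
    )

    formatted_prompt = format_prompt(prompt, history)

    # Stream tokens as they are produced, matching the text_generation call in the diff.
    stream = client.text_generation(
        formatted_prompt, **generate_kwargs,
        stream=True, details=True, return_full_text=False,
    )
    output = ""
    for response in stream:
        output += response.token.text
        yield output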