Update app.py
app.py CHANGED
@@ -12,7 +12,7 @@ def generate_text(input_text):
     output = llm(f"Q: {input_text} A:", max_tokens=521, stop=["Q:", "\n"], echo=True)
     return output['choices'][0]['text']

-input_text =
+input_text = user_message
 output_text = gr.outputs.Textbox(label="Output text")

 description = "bro neil it currently dosent work two people sending it request at the same time so going to fix that but currently running ggml models with llama.cpp implementation in python [https://github.com/abetlen/llama-cpp-python]"
@@ -32,7 +32,7 @@ with gr.Blocks() as demo:
         return "", history + [[user_message, None]]

     def bot(history):
-        bot_message =
+        bot_message = gr.outputs.Textbox(label="Output text")
         history[-1][1] = ""
         for character in bot_message:
             history[-1][1] += character
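Note that the new line stores a gr.outputs.Textbox component in bot_message, so the loop below it would iterate over a Gradio component object rather than model text. A minimal sketch of what the streaming callback presumably intends, assuming the module-level llm object from generate_text and Gradio's generator-based chatbot update pattern (an illustration, not the Space's actual code):

def bot(history):
    # Ask the shared llama-cpp-python model about the latest user turn.
    user_message = history[-1][0]
    output = llm(f"Q: {user_message} A:", max_tokens=521, stop=["Q:", "\n"])
    bot_message = output['choices'][0]['text']
    # Stream the reply into the chat one character at a time; yielding
    # history each step is Gradio's standard streaming-chatbot pattern.
    history[-1][1] = ""
    for character in bot_message:
        history[-1][1] += character
        yield history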
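On the description's point that the Space breaks when two people send requests at the same time: a single llama-cpp-python Llama instance is not safe to call from concurrent requests. One common workaround, sketched here as an assumption rather than the author's planned fix, is to serialize calls with a lock:

import threading

llm_lock = threading.Lock()  # one shared model, one caller at a time

def generate_text(input_text):
    # Hold the lock for the whole inference call so overlapping
    # requests cannot interleave inside the shared Llama instance.
    with llm_lock:
        output = llm(f"Q: {input_text} A:", max_tokens=521, stop=["Q:", "\n"], echo=True)
    return output['choices'][0]['text']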