Spaces:
Runtime error
Update app.py
app.py
CHANGED
@@ -1,20 +1,20 @@
 import gradio as gr
-import ctypes
+import ctypes  # to run on the C API directly
 import llama_cpp
 from llama_cpp import Llama
-from huggingface_hub import hf_hub_download
+from huggingface_hub import hf_hub_download  # load the model file from the Hugging Face Hub


 llm = Llama(model_path= hf_hub_download(repo_id="TheBloke/StableBeluga-7B-GGML", filename="stablebeluga-7b.ggmlv3.q6_K.bin"))

 def generate_text(input_text):
-    output = llm(f"Q: {input_text}
+    output = llm(f"Q: {input_text} A:", max_tokens=521, stop=["Q:", "\n"], echo=True)
     return output['choices'][0]['text']

 input_text = gr.inputs.Textbox(lines= 10, label="Enter your input text")
 output_text = gr.outputs.Textbox(label="Output text")

-description = "llama.cpp implementation in python [https://github.com/abetlen/llama-cpp-python]"
+description = "Note: the Space currently breaks when two people send requests at the same time (fix in progress); it runs GGML models with the llama.cpp implementation in Python [https://github.com/abetlen/llama-cpp-python]"

 examples = [
     ["What is the capital of France? ", "The capital of France is Paris."],
@@ -23,6 +23,6 @@ examples = [
 ]


-demo = gr.Interface(fn=generate_text, inputs=input_text, outputs=output_text, title="Llama Language Model", description=description, examples=examples).queue
+demo = gr.Interface(fn=generate_text, inputs=input_text, outputs=output_text, title="Llama Language Model", description=description, examples=examples).queue
 demo.queue(concurrency_count=3)
 demo.launch()
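The Space header still reports a runtime error after this commit, and a few things in the file look like likely causes: `demo` ends up bound to the method `gr.Interface(...).queue` (the trailing `.queue` is never called, so the following `demo.queue(concurrency_count=3)` fails with an AttributeError), the two-element examples don't match the single input component, and `gr.inputs`/`gr.outputs` are deprecated in current Gradio. Below is a minimal corrected sketch of app.py; it assumes Gradio 3.x (for the `concurrency_count` argument to `queue()`) and a llama-cpp-python release old enough to still load GGML files, both of which are assumptions rather than anything stated in the commit.

import gradio as gr
from llama_cpp import Llama
from huggingface_hub import hf_hub_download  # fetch the model file from the Hub

# Repo and filename are taken from the diff above; recent llama-cpp-python
# releases expect GGUF files, so an older GGML-capable release is assumed here.
llm = Llama(
    model_path=hf_hub_download(
        repo_id="TheBloke/StableBeluga-7B-GGML",
        filename="stablebeluga-7b.ggmlv3.q6_K.bin",
    )
)

def generate_text(input_text):
    # Same prompt format and sampling settings as the commit.
    output = llm(f"Q: {input_text} A:", max_tokens=521, stop=["Q:", "\n"], echo=True)
    return output["choices"][0]["text"]

description = (
    "Running GGML models with the llama.cpp Python bindings "
    "[https://github.com/abetlen/llama-cpp-python]"
)

# One value per example, because the interface has a single input component.
examples = [["What is the capital of France? "]]

demo = gr.Interface(
    fn=generate_text,
    inputs=gr.Textbox(lines=10, label="Enter your input text"),
    outputs=gr.Textbox(label="Output text"),
    title="Llama Language Model",
    description=description,
    examples=examples,
)

demo.queue(concurrency_count=3)  # enable the request queue (Gradio 3.x signature)
demo.launch()

Calling queue() properly, instead of the stray `.queue` attribute access, is also what lets Gradio handle simultaneous requests, which is the concurrency problem the updated description mentions.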