Update main.py
main.py CHANGED
@@ -1,7 +1,7 @@
 from ctransformers import AutoModelForCausalLM
 from fastapi import FastAPI
 from pydantic import BaseModel
-
+
 
 llm = AutoModelForCausalLM.from_pretrained("zephyr-7b-beta.Q8_0.gguf",
                                            model_type='mistral',
@@ -15,19 +15,10 @@ class validation(BaseModel):
 #Fast API
 app = FastAPI()
 
-@app.post("/
+@app.post("/bpandey23_llm")
 async def stream(item: validation):
     system_prompt = 'Below is an instruction that describes a task. Write a response that appropriately completes the request.'
     E_INST = "</s>"
     user, assistant = "<|user|>", "<|assistant|>"
     prompt = f"{system_prompt}{E_INST}\n{user}\n{item.prompt}{E_INST}\n{assistant}\n"
     return llm(prompt)
-
-
-demo = gradio.Interface(
-    fn=stream,
-    inputs=["text", "slider"],
-    outputs=["text"],
-)
-
-demo.launch()
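For reference, here is a minimal sketch of calling the renamed route once the Space's FastAPI app is running. It assumes the app is served locally (e.g. with uvicorn) and that the validation request model exposes a prompt field, as implied by item.prompt in the handler; the base URL and the example prompt are placeholders, not part of this commit.

# Minimal client sketch (assumptions: local uvicorn server, `prompt` field on `validation`).
import requests

BASE_URL = "http://127.0.0.1:8000"  # hypothetical host; a deployed Space would use its own URL

resp = requests.post(
    f"{BASE_URL}/bpandey23_llm",  # route added in this commit
    json={"prompt": "Explain what a GGUF-quantized model is."},
)
resp.raise_for_status()
print(resp.json())  # the handler returns llm(prompt), i.e. the raw completion string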