sha1779 committed
Commit 4bdffdd · verified · 1 Parent(s): cdc156c

Create main.py

Files changed (1)
  1. main.py +25 -0
main.py ADDED
@@ -0,0 +1,25 @@
+ from ctransformers import AutoModelForCausalLM
+ from fastapi import FastAPI, Form
+ from pydantic import BaseModel
+
+ # Model loading
+ llm = AutoModelForCausalLM.from_pretrained("pt_merge_model_v3.Q4_K_M.gguf",
+     model_type='llama',
+     max_new_tokens=512,
+     threads=3,
+ )
+
+
+ # Pydantic request object
+ class validation(BaseModel):
+     prompt: str
+
+ # FastAPI app
+ app = FastAPI()
+
+ # Llama completion endpoint
+ @app.post("/llama_on_cpu")
+ async def stream(item: validation):
+     system_prompt = 'Below is an instruction that describes a task. Write a response that appropriately completes the request.'
+     prompt = f"<s>[INST] <<SYS>>\n{system_prompt}\n<</SYS>>\n\n{item.prompt.strip()} [/INST]"
+     return llm(prompt)
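
A minimal way to exercise this endpoint, assuming the file is served as main.py with uvicorn on localhost:8000 (neither the server command nor the port is part of this commit), is to POST a JSON body matching the validation model:

# Assumed usage sketch, not part of the commit:
#   uvicorn main:app --host 0.0.0.0 --port 8000
import requests  # assumed client dependency

resp = requests.post(
    "http://localhost:8000/llama_on_cpu",            # assumed host and port
    json={"prompt": "Summarize what FastAPI does."}, # field name comes from the validation model
)
print(resp.json())  # llm(prompt) returns the generated text as a string

The endpoint runs the full generation before responding (it is not a streaming response despite the function name), so the client simply receives the completed text.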