import os

from ctransformers import AutoModelForCausalLM
from fastapi import FastAPI
from huggingface_hub import hf_hub_download
from pydantic import BaseModel

# Download the quantized GGUF weights from the Hugging Face Hub on first run.
file_name = "zephyr-7b-beta.Q4_K_S.gguf"
if not os.path.exists(file_name):
    hf_hub_download("TheBloke/zephyr-7B-beta-GGUF", filename=file_name, local_dir="./")

# Load the model for CPU inference via ctransformers (llama.cpp backend).
llm = AutoModelForCausalLM.from_pretrained(
    file_name,
    model_type="mistral",
    max_new_tokens=1096,
    threads=3,
)

# Pydantic request body schema
class validation(BaseModel):
    prompt: str


# FastAPI application
app = FastAPI()

@app.post("/llm_on_cpu")
async def stream(item: validation):
    # Wrap the user's input in a Zephyr-style chat prompt.
    system_prompt = "Below is an instruction that describes a task. Write a response that appropriately completes the request."
    E_INST = "</s>"
    user, assistant = "<|user|>", "<|assistant|>"
    prompt = f"{system_prompt}{E_INST}\n{user}\n{item.prompt}{E_INST}\n{assistant}\n"
    # Run generation on CPU and return the completion text.
    return llm(prompt)
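
# Usage sketch (assumptions not in the original: the file is saved as main.py
# and served with uvicorn on the default port 8000; adjust to your setup):
#
#   uvicorn main:app --host 0.0.0.0 --port 8000
#
# Example request against the /llm_on_cpu endpoint:
#
#   curl -X POST http://localhost:8000/llm_on_cpu \
#        -H "Content-Type: application/json" \
#        -d '{"prompt": "Summarize what a GGUF model file is."}'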