phuongnv committed
Commit 27d36d2 · verified · 1 Parent(s): ee1824b

Update main.py

Files changed (1)
  1. main.py +4 -6
main.py CHANGED
@@ -3,10 +3,9 @@ from fastapi import FastAPI, Form
 from pydantic import BaseModel
 
 #Model loading
-llm = AutoModelForCausalLM.from_pretrained("zephyr-7b-beta.Q4_K_S.gguf",
-model_type='mistral',
-max_new_tokens = 1096,
-threads = 3,
+llm = AutoModelForCausalLM.from_pretrained("model.gguf",
+model_type='internLM2',
+max_new_tokens = 512
 )
 
 
@@ -20,8 +19,7 @@ app = FastAPI()
 #Zephyr completion
 @app.post("/llm_on_cpu")
 async def stream(item: validation):
-    system_prompt = 'Below is an instruction that describes a task. Write a response that appropriately completes the request.'
     E_INST = "</s>"
     user, assistant = "<|user|>", "<|assistant|>"
-    prompt = f"{system_prompt}{E_INST}\n{user}\n{item.prompt.strip()}{E_INST}\n{assistant}\n"
+    prompt = f"{E_INST}\n{user}\n{item.prompt.strip()}{E_INST}\n{assistant}\n"
     return llm(prompt)
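
For reference, a minimal sketch of how main.py plausibly fits together after this commit. The from_pretrained(..., model_type=..., max_new_tokens=...) call matches the ctransformers AutoModelForCausalLM API, so that import is assumed rather than shown in the diff; likewise the `validation` request model is outside the diff, so its single `prompt` field is an assumption.

# Sketch of main.py after this commit; the ctransformers import and the
# shape of the `validation` model are assumptions, not part of the diff.
from fastapi import FastAPI, Form
from pydantic import BaseModel
from ctransformers import AutoModelForCausalLM  # assumed backend for loading the GGUF file

#Model loading
llm = AutoModelForCausalLM.from_pretrained("model.gguf",
                                           model_type='internLM2',
                                           max_new_tokens=512,
                                           )

class validation(BaseModel):
    prompt: str  # assumed: the handler only reads item.prompt

app = FastAPI()

#Zephyr completion
@app.post("/llm_on_cpu")
async def stream(item: validation):
    E_INST = "</s>"
    user, assistant = "<|user|>", "<|assistant|>"
    prompt = f"{E_INST}\n{user}\n{item.prompt.strip()}{E_INST}\n{assistant}\n"
    return llm(prompt)

Under those assumptions, POSTing a JSON body such as {"prompt": "Hello"} to /llm_on_cpu would return the raw completion string produced by llm(prompt).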