Do0rMaMu committed on
Commit
0f93b24
·
verified ·
1 Parent(s): 706b2e4

Update main.py

Browse files
Files changed (1) hide show
  1. main.py +2 -6
main.py CHANGED
@@ -23,14 +23,10 @@ app = FastAPI()
23
@app.post("/generate_response")
async def generate_response(item: Validation):
    """Generate a chat completion for the given system and user prompts.

    NOTE(review): relies on the module-level `llm` (a llama-cpp model
    callable) and the `Validation` request schema defined elsewhere in
    this file — neither is visible in this chunk.
    """
    # Construct the complete prompt using the given system and user prompts.
    # Wraps both prompts in the Llama-3 chat special tokens
    # (<|begin_of_text|>, <|start_header_id|>, <|eot_id|>) and ends at the
    # assistant header so the model continues as the assistant.
    prompt = f"""<|begin_of_text|><|start_header_id|>system<|end_header_id|> \n

 { item.system_prompt }<|eot_id|> \n <|start_header_id|>user<|end_header_id|>

 { item.user_prompt }<|eot_id|> \n <|start_header_id|>assistant<|end_header_id|>"""

    # Call the Llama model to generate a response
    # echo=True makes the model return the prompt together with the
    # completion — presumably intentional here; confirm the client expects it.
    output = llm(prompt, max_tokens=1024, echo=True) # Update parameters as needed

    # Extract and return the text from the response
    return output['choices'][0]['text']
 
@app.post("/generate_response")
async def generate_response(item: Validation):
    """Generate a chat completion for the given system and user prompts.

    Args:
        item: Request body carrying ``system_prompt`` and ``user_prompt``
              (schema declared by the project's ``Validation`` model,
              defined elsewhere in this file).

    Returns:
        The generated text of the first completion choice.
    """
    # Construct the complete prompt using the given system and user prompts
    prompt = f"""system \n{item.system_prompt}\nuser \n{item.user_prompt}\n assistant"""

    # Call the Llama model to generate a response.
    # max_tokens is set explicitly: llama-cpp-python's Llama.__call__
    # defaults max_tokens to 16, which would truncate the reply after a
    # handful of tokens.
    # NOTE(review): echo=True returns the prompt embedded in the output
    # text — confirm the API client expects that before keeping it.
    output = llm(prompt, max_tokens=1024, echo=True)

    # Extract and return the text from the response
    return output['choices'][0]['text']