Do0rMaMu committed on
Commit
dce1999
·
verified ·
1 Parent(s): 73bce0b

Update main.py

Browse files
Files changed (1) hide show
  1. main.py +2 -1
main.py CHANGED
@@ -33,4 +33,5 @@ async def generate_response(item: Validation):
33
  output = llm(prompt, max_tokens = item.max_tokens,temperature = item.temperature, echo=True)
34
 
35
  # Extract and return the text from the response
36
- return output['choices'][0]['text']
 
 
33
  output = llm(prompt, max_tokens = item.max_tokens,temperature = item.temperature, echo=True)
34
 
35
  # Extract and return the text from the response
36
+ return output['choices'][0]['text']
37
+