Do0rMaMu committed
Commit 54e9d6a · verified
1 Parent(s): b6d68ce

Update main.py

Files changed (1)
  1. main.py +1 -1
main.py CHANGED
@@ -33,4 +33,4 @@ async def generate_response(item: Validation):
     output = llm(prompt, max_tokens=1024, stop=["Q:", "\n"], echo=True)  # Update parameters as needed
 
     # Extract and return the text from the response
-    return output['choices'][0]['text']
+    return output
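
For context, a minimal sketch of how the endpoint around this hunk might read after the change, assuming FastAPI plus llama-cpp-python and a pydantic Validation model with a prompt field (the model definition, route path, and llm initialization are not part of this diff and are illustrative only):

from fastapi import FastAPI
from pydantic import BaseModel
from llama_cpp import Llama

app = FastAPI()
llm = Llama(model_path="model.gguf")  # hypothetical model path, not shown in the diff

class Validation(BaseModel):
    prompt: str  # assumed field name; the real schema is not in this diff

@app.post("/generate")  # assumed route path
async def generate_response(item: Validation):
    prompt = item.prompt
    output = llm(prompt, max_tokens=1024, stop=["Q:", "\n"], echo=True)  # Update parameters as needed

    # Before this commit only the generated text was returned:
    #     return output['choices'][0]['text']
    # After it, the whole llama-cpp-python completion dict is returned, which
    # also carries 'id', 'model', 'usage', and per-choice 'finish_reason'.
    return output

The practical effect of the one-line change: clients now receive the full completion payload (choices, token usage, finish reason) as JSON instead of a bare text string, so any consumer that expected plain text must now read output['choices'][0]['text'] on its own side.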