NCTCMumbai committed 1bcb5ec (verified)
Parent(s): 72e1224

Update backend/query_llm.py

Files changed (1): backend/query_llm.py (+2, -2)
backend/query_llm.py CHANGED
@@ -162,8 +162,8 @@ def generate_openai(prompt: str, history: str, temperature: float = 0.9, max_new
         output = ""
         for chunk in stream:
             output += chunk.choices[0].delta.get("content", "")
-            yield output
-
+            #yield output
+            return output
     except Exception as e:
         if "Too Many Requests" in str(e):
             print("ERROR: Too many requests on OpenAI client")