om4r932 committed
Commit b2fc61a · 1 Parent(s): 343c758

Change response

Files changed (1):
  1. app.py (+10 −10)
app.py CHANGED
@@ -47,7 +47,7 @@ class ChatResponse(BaseModel):
 def main_page():
     return {"status": "ok"}
 
-@app.post("/chat", response_model=ChatResponse)
+@app.post("/chat")
 def ask_groq_llm(req: ChatRequest):
     models = req.models
     query = req.query
@@ -59,22 +59,22 @@ def ask_groq_llm(req: ChatRequest):
                 resp = requests.post("https://api.groq.com/openai/v1/chat/completions", verify=False, headers={"Content-Type": "application/json", "Authorization": f"Bearer {key}"}, data=json.dumps({"model": model, "messages": [{"role": "user", "content": query}]}))
                 if resp.status_code == 200:
                     respJson = resp.json()
-                    print("Asked to", model, "with the key ID", str(api_keys.index(key)), ":", query)
-                    return ChatResponse(output=respJson["choices"][0]["message"]["content"])
+                    print("Asked to", model, "with the key ID", str(api_keys.index(key)+1), ":", query)
+                    return {"error": False, "content": respJson["choices"]}
                 print(resp.status_code, resp.text)
             looping = False
-        return ChatResponse(output="ERROR !")
+        return {"error": True, "content": "Aucun des modèles, ni des clés ne fonctionne, patientez ...."}
     elif len(models) == 1:
         while looping:
             for key in api_keys:
                 resp = requests.post("https://api.groq.com/openai/v1/chat/completions", verify=False, headers={"Content-Type": "application/json", "Authorization": f"Bearer {key}"}, data=json.dumps({"model": models[0], "messages": [{"role": "user", "content": query}]}))
                 if resp.status_code == 200:
                     respJson = resp.json()
-                    print("Asked to", model, "with the key ID", str(api_keys.index(key)), ":", query)
-                    return ChatResponse(output=respJson["choices"][0]["message"]["content"])
+                    print("Asked to", model, "with the key ID", str(api_keys.index(key)+1), ":", query)
+                    return {"error": False, "content": respJson["choices"]}
                 print(resp.status_code, resp.text)
             looping = False
-        return ChatResponse(output="ERROR !")
+        return {"error": True, "content": "Aucun des modèles, ni des clés ne fonctionne, patientez ...."}
     else:
         while looping:
             order = {val: ind for ind, val in enumerate(ranked_models)}
@@ -84,8 +84,8 @@ def ask_groq_llm(req: ChatRequest):
                 resp = requests.post("https://api.groq.com/openai/v1/chat/completions", verify=False, headers={"Content-Type": "application/json", "Authorization": f"Bearer {key}"}, data=json.dumps({"model": model, "messages": [{"role": "user", "content": query}]}))
                 if resp.status_code == 200:
                     respJson = resp.json()
-                    print("Asked to", model, "with the key ID", str(api_keys.index(key)), ":", query)
-                    return ChatResponse(output=respJson["choices"][0]["message"]["content"])
+                    print("Asked to", model, "with the key ID", str(api_keys.index(key)+1), ":", query)
+                    return {"error": False, "content": respJson["choices"]}
                 print(resp.status_code, resp.text)
             looping = False
-        return ChatResponse(output="ERROR !")
+        return {"error": True, "content": "Aucun des modèles, ni des clés ne fonctionne, patientez ...."}