hadadrjt committed on
Commit
6785ddc
·
1 Parent(s): 13c3084

ai: Enhance API and Server randomization with fault tolerance.

Browse files
Files changed (1) hide show
  1. jarvis.py +15 -9
jarvis.py CHANGED
@@ -18,6 +18,7 @@ import fitz
18
  import io
19
  import uuid
20
  import concurrent.futures
 
21
 
22
  from openai import OpenAI
23
 
@@ -134,15 +135,20 @@ def chat_with_model(history, user_input, selected_model_display, sess):
134
  messages = [{"role": "user", "content": user} for user, _ in history]
135
  messages += [{"role": "assistant", "content": assistant} for _, assistant in history if assistant]
136
  messages.append({"role": "user", "content": user_input})
137
- futures = []
138
- with concurrent.futures.ThreadPoolExecutor(max_workers=len(LINUX_SERVER_HOSTS)) as executor:
139
- for host, key in zip(LINUX_SERVER_HOSTS, LINUX_SERVER_PROVIDER_KEYS):
140
- futures.append(executor.submit(fetch_response, host, key, selected_model, messages, model_config, sess.session_id))
141
- done, not_done = concurrent.futures.wait(futures, return_when=concurrent.futures.FIRST_COMPLETED)
142
- for future in not_done:
143
- future.cancel()
144
- result = list(done)[0].result() if done else RESPONSES["RESPONSE_2"]
145
- return result
 
 
 
 
 
146
 
147
  def respond(multi_input, history, selected_model_display, sess):
148
  message = {"text": multi_input.get("text", "").strip(), "files": multi_input.get("files", [])}
 
18
  import io
19
  import uuid
20
  import concurrent.futures
21
+ import itertools
22
 
23
  from openai import OpenAI
24
 
 
135
  messages = [{"role": "user", "content": user} for user, _ in history]
136
  messages += [{"role": "assistant", "content": assistant} for _, assistant in history if assistant]
137
  messages.append({"role": "user", "content": user_input})
138
+ candidates = [(host, key) for host in LINUX_SERVER_HOSTS for key in LINUX_SERVER_PROVIDER_KEYS]
139
+ random.shuffle(candidates)
140
+ with concurrent.futures.ThreadPoolExecutor(max_workers=len(candidates)) as executor:
141
+ futures = {executor.submit(fetch_response, host, key, selected_model, messages, model_config, sess.session_id): (host, key) for (host, key) in candidates}
142
+ for future in concurrent.futures.as_completed(futures):
143
+ try:
144
+ result = future.result()
145
+ for f in futures:
146
+ if f is not future:
147
+ f.cancel()
148
+ return result
149
+ except Exception:
150
+ continue
151
+ return RESPONSES["RESPONSE_2"]
152
 
153
  def respond(multi_input, history, selected_model_display, sess):
154
  message = {"text": multi_input.get("text", "").strip(), "files": multi_input.get("files", [])}