Uhhy committed
Commit 5847cfe · verified · 1 Parent(s): 3964343

Update app.py

Files changed (1)
  1. app.py +3 -6
app.py CHANGED
@@ -73,9 +73,6 @@ global_data['models'] = model_manager.load_all_models()
 
 class ChatRequest(BaseModel):
     message: str
-    top_k: int = 50
-    top_p: float = 0.95
-    temperature: float = 0.7
 
 def normalize_input(input_text):
     return input_text.strip()
@@ -94,9 +91,9 @@ def remove_duplicates(text):
     return '\n'.join(unique_lines)
 
 @GPU(duration=0)
-def generate_model_response(model, inputs, top_k, top_p, temperature):
+def generate_model_response(model, inputs):
     try:
-        response = model(inputs, top_k=top_k, top_p=top_p, temperature=temperature)
+        response = model(inputs)
         return remove_duplicates(response['choices'][0]['text'])
     except Exception as e:
         print(f"Error generating model response: {e}")
@@ -108,7 +105,7 @@ async def generate(request: ChatRequest):
     inputs = normalize_input(request.message)
     with ThreadPoolExecutor() as executor:
         futures = [
-            executor.submit(generate_model_response, model, inputs, request.top_k, request.top_p, request.temperature)
+            executor.submit(generate_model_response, model, inputs)
             for model in global_data['models'].values()
         ]
         responses = [{'model': model_name, 'response': future.result()} for model_name, future in zip(global_data['models'].keys(), as_completed(futures))]
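
With the sampling fields removed from ChatRequest, a request body now only carries "message"; top_k, top_p and temperature are no longer part of the schema, so each model runs with its own defaults. A minimal client sketch against the updated endpoint (the /generate route path, the local port, and the endpoint returning the responses list directly are assumptions, not shown in this diff):

import requests

# Hypothetical endpoint; the route path and port are assumptions not shown in this diff.
URL = "http://localhost:7860/generate"

# After this commit the request schema only defines "message";
# top_k, top_p and temperature are no longer schema fields.
payload = {"message": "Hello, which models are answering?"}

resp = requests.post(URL, json=payload)
resp.raise_for_status()

# Assuming the endpoint returns the responses list as JSON, each entry
# maps a loaded model name to that model's generated text.
for item in resp.json():
    print(item["model"], "->", item["response"])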