Nils Durner committed on
Commit
a16f62d
·
2 Parent(s): 808df82 830e496

Merge branch 'main' of https://huggingface.co/spaces/ndurner/amz_bedrock_chat into main

Browse files
Files changed (3) hide show
  1. README.md +1 -1
  2. app.py +43 -1
  3. llm.py +7 -5
README.md CHANGED
@@ -4,7 +4,7 @@ emoji: 📈
4
  colorFrom: yellow
5
  colorTo: gray
6
  sdk: gradio
7
- sdk_version: 5.4.0
8
  app_file: app.py
9
  pinned: false
10
  license: mit
 
4
  colorFrom: yellow
5
  colorTo: gray
6
  sdk: gradio
7
+ sdk_version: 5.7.1
8
  app_file: app.py
9
  pinned: false
10
  license: mit
app.py CHANGED
@@ -167,6 +167,8 @@ def export_history(h, s):
167
  pass
168
 
169
  with gr.Blocks(delete_cache=(86400, 86400)) as demo:
 
 
170
  gr.Markdown("# Amazon™️ Bedrock™️ Chat™️ (Nils' Version™️) feat. Mistral™️ AI & Anthropic™️ Claude™️")
171
 
172
  with gr.Accordion("Startup"):
@@ -180,7 +182,7 @@ with gr.Blocks(delete_cache=(86400, 86400)) as demo:
180
  aws_token = gr.Textbox(label="AWS Session Token", elem_id="aws_token")
181
  model = gr.Dropdown(label="Model", value="anthropic.claude-3-5-sonnet-20241022-v2:0", allow_custom_value=True, elem_id="model",
182
  choices=["anthropic.claude-3-5-sonnet-20240620-v1:0", "anthropic.claude-3-opus-20240229-v1:0", "meta.llama3-1-405b-instruct-v1:0", "anthropic.claude-3-sonnet-20240229-v1:0", "anthropic.claude-3-haiku-20240307-v1:0", "anthropic.claude-v2:1", "anthropic.claude-v2",
183
- "mistral.mistral-7b-instruct-v0:2", "mistral.mixtral-8x7b-instruct-v0:1", "mistral.mistral-large-2407-v1:0", "anthropic.claude-3-5-sonnet-20241022-v2:0"])
184
  system_prompt = gr.TextArea("You are a helpful yet diligent AI assistant. Answer faithfully and factually correct. Respond with 'I do not know' if uncertain.", label="System Prompt", lines=3, max_lines=250, elem_id="system_prompt")
185
  region = gr.Dropdown(label="Region", value="us-west-2", allow_custom_value=True, elem_id="region",
186
  choices=["eu-central-1", "eu-west-3", "us-east-1", "us-west-1", "us-west-2"])
@@ -192,6 +194,46 @@ with gr.Blocks(delete_cache=(86400, 86400)) as demo:
192
  dl_settings_button = gr.Button("Download Settings")
193
  ul_settings_button = gr.Button("Upload Settings")
194
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
195
  load_button.click(load_settings, js="""
196
  () => {
197
  let elems = ['#aws_access textarea', '#aws_secret textarea', '#aws_token textarea', '#system_prompt textarea', '#temp input', '#max_tokens input', '#model', '#region'];
 
167
  pass
168
 
169
  with gr.Blocks(delete_cache=(86400, 86400)) as demo:
170
+ settings_state = gr.BrowserState({})
171
+
172
  gr.Markdown("# Amazon™️ Bedrock™️ Chat™️ (Nils' Version™️) feat. Mistral™️ AI & Anthropic™️ Claude™️")
173
 
174
  with gr.Accordion("Startup"):
 
182
  aws_token = gr.Textbox(label="AWS Session Token", elem_id="aws_token")
183
  model = gr.Dropdown(label="Model", value="anthropic.claude-3-5-sonnet-20241022-v2:0", allow_custom_value=True, elem_id="model",
184
  choices=["anthropic.claude-3-5-sonnet-20240620-v1:0", "anthropic.claude-3-opus-20240229-v1:0", "meta.llama3-1-405b-instruct-v1:0", "anthropic.claude-3-sonnet-20240229-v1:0", "anthropic.claude-3-haiku-20240307-v1:0", "anthropic.claude-v2:1", "anthropic.claude-v2",
185
+ "mistral.mistral-7b-instruct-v0:2", "mistral.mixtral-8x7b-instruct-v0:1", "mistral.mistral-large-2407-v1:0", "anthropic.claude-3-5-sonnet-20241022-v2:0", "us.amazon.nova-pro-v1:0", "us.amazon.nova-lite-v1:0", "us.amazon.nova-micro-v1:0"])
186
  system_prompt = gr.TextArea("You are a helpful yet diligent AI assistant. Answer faithfully and factually correct. Respond with 'I do not know' if uncertain.", label="System Prompt", lines=3, max_lines=250, elem_id="system_prompt")
187
  region = gr.Dropdown(label="Region", value="us-west-2", allow_custom_value=True, elem_id="region",
188
  choices=["eu-central-1", "eu-west-3", "us-east-1", "us-west-1", "us-west-2"])
 
194
  dl_settings_button = gr.Button("Download Settings")
195
  ul_settings_button = gr.Button("Upload Settings")
196
 
197
+ @demo.load(inputs=[settings_state],
198
+ outputs=[aws_access, aws_secret, aws_token, system_prompt,
199
+ temp, max_tokens, model, region, python_use])
200
+ def load_from_browser_storage(saved_values):
201
+ if not saved_values:
202
+ return (aws_access.value, aws_secret.value, aws_token.value,
203
+ system_prompt.value, temp.value, max_tokens.value,
204
+ model.value, region.value, python_use.value)
205
+ return (saved_values.get('aws_access', aws_access.value),
206
+ saved_values.get('aws_secret', aws_secret.value),
207
+ saved_values.get('aws_token', aws_token.value),
208
+ saved_values.get('system_prompt', system_prompt.value),
209
+ saved_values.get('temp', temp.value),
210
+ saved_values.get('max_tokens', max_tokens.value),
211
+ saved_values.get('model', model.value),
212
+ saved_values.get('region', region.value),
213
+ saved_values.get('python_use', python_use.value))
214
+
215
+ @gr.on(
216
+ [aws_access.change, aws_secret.change, aws_token.change,
217
+ system_prompt.change, temp.change, max_tokens.change,
218
+ model.change, region.change, python_use.change],
219
+ inputs=[aws_access, aws_secret, aws_token, system_prompt,
220
+ temp, max_tokens, model, region, python_use],
221
+ outputs=[settings_state]
222
+ )
223
+ def save_to_browser_storage(acc, sec, tok, prompt, temperature,
224
+ tokens, mdl, reg, py_use):
225
+ return {
226
+ 'aws_access': acc,
227
+ 'aws_secret': sec,
228
+ 'aws_token': tok,
229
+ 'system_prompt': prompt,
230
+ 'temp': temperature,
231
+ 'max_tokens': tokens,
232
+ 'model': mdl,
233
+ 'region': reg,
234
+ 'python_use': py_use
235
+ }
236
+
237
  load_button.click(load_settings, js="""
238
  () => {
239
  let elems = ['#aws_access textarea', '#aws_secret textarea', '#aws_token textarea', '#system_prompt textarea', '#temp input', '#max_tokens input', '#model', '#region'];
llm.py CHANGED
@@ -62,11 +62,13 @@ class LLM:
62
  user_msg_parts = last_msg["content"]
63
  else:
64
  user_msg_parts = []
65
- if message["text"]:
66
- user_msg_parts.append({"text": message["text"]})
67
- if message["files"]:
68
- for file in message["files"]:
69
- user_msg_parts.extend(self._process_file(file))
 
 
70
 
71
  if user_msg_parts:
72
  messages.append({"role": "user", "content": user_msg_parts})
 
62
  user_msg_parts = last_msg["content"]
63
  else:
64
  user_msg_parts = []
65
+
66
+ if message:
67
+ if message["text"]:
68
+ user_msg_parts.append({"text": message["text"]})
69
+ if message["files"]:
70
+ for file in message["files"]:
71
+ user_msg_parts.extend(self._process_file(file))
72
 
73
  if user_msg_parts:
74
  messages.append({"role": "user", "content": user_msg_parts})