barunsaha committed on
Commit 89c5253
2 Parent(s): 729565d 2551512

Merge pull request #61 from barun-saha/byok

Files changed (3)
  1. README.md +0 -10
  2. app.py +12 -1
  3. helpers/llm_helper.py +10 -3
README.md CHANGED
@@ -55,16 +55,6 @@ SlideDeck AI uses a subset of icons from [bootstrap-icons-1.11.3](https://github
 (CC0, MIT, and Apache licenses) are also used.
 
 
-# Known Issues
-
-- **Model unavailable**: Mistral Nemo currently appears to be unavailable. See this [issue](https://huggingface.co/mistralai/Mistral-Nemo-Instruct-2407/discussions/83).
-- **Connection timeout**: Requests sent to the Hugging Face Inference endpoint might time out. If it still does not work, wait for a while and try again.
-
-The following is not an issue but might appear as a strange behavior:
-- **Cannot paste text in the input box**: If the length of the copied text is greater than the maximum
-number of allowed characters in the textbox, pasting would not work.
-
-
 # Local Development
 
 SlideDeck AI uses LLMs via different providers, such as Hugging Face, Google, and Gemini.

app.py CHANGED
@@ -82,7 +82,8 @@ def are_all_inputs_valid(
     if not llm_helper.is_valid_llm_provider_model(selected_provider, selected_model, user_key):
         handle_error(
             'The LLM settings do not look correct. Make sure that an API key/access token'
-            ' is provided if the selected LLM requires it.',
+            ' is provided if the selected LLM requires it. An API key should be 6-64 characters'
+            ' long, only containing alphanumeric characters, hyphens, and underscores.',
             False
         )
         return False
@@ -104,6 +105,14 @@ def handle_error(error_msg: str, should_log: bool):
     st.error(error_msg)
 
 
+def reset_api_key():
+    """
+    Clear API key input when a different LLM is selected from the dropdown list.
+    """
+
+    st.session_state.api_key_input = ''
+
+
 APP_TEXT = _load_strings()
 
 # Session variables
@@ -132,6 +141,7 @@ with st.sidebar:
         options=[f'{k} ({v["description"]})' for k, v in GlobalConfig.VALID_MODELS.items()],
         index=GlobalConfig.DEFAULT_MODEL_INDEX,
         help=GlobalConfig.LLM_PROVIDER_HELP,
+        on_change=reset_api_key
     ).split(' ')[0]
 
     # The API key/access token
@@ -142,6 +152,7 @@ with st.sidebar:
             ' *Optional* for HF Mistral LLMs but still encouraged.\n\n'
         ),
         type='password',
+        key='api_key_input'
     )
 
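The app.py change ties the provider selector to the API key field through Streamlit's session state: the text input is registered under the session-state key `api_key_input`, and the selectbox's `on_change` callback clears that entry, so a key entered for one provider is not silently reused for another. A minimal, self-contained sketch of the pattern (the labels and provider list below are illustrative, not the app's actual values):

```python
import streamlit as st

# Illustrative provider list; the app builds its own from GlobalConfig.VALID_MODELS.
PROVIDERS = ['hf', 'gg', 'co']


def reset_api_key():
    # Callbacks run before the rerun triggered by changing the selectbox,
    # so the stale key never reaches the newly selected provider.
    st.session_state.api_key_input = ''


provider = st.selectbox(
    'Select an LLM provider:',
    options=PROVIDERS,
    on_change=reset_api_key,
)
api_key = st.text_input(
    'API key/access token:',
    type='password',
    key='api_key_input',  # binds this widget to st.session_state.api_key_input
)
```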
 
helpers/llm_helper.py CHANGED
@@ -17,6 +17,8 @@ from global_config import GlobalConfig
 
 
 LLM_PROVIDER_MODEL_REGEX = re.compile(r'\[(.*?)\](.*)')
+# 6-64 characters long, only containing alphanumeric characters, hyphens, and underscores
+API_KEY_REGEX = re.compile(r'^[a-zA-Z0-9\-_]{6,64}$')
 HF_API_HEADERS = {'Authorization': f'Bearer {GlobalConfig.HUGGINGFACEHUB_API_TOKEN}'}
 REQUEST_TIMEOUT = 35
 
@@ -70,9 +72,14 @@ def is_valid_llm_provider_model(provider: str, model: str, api_key: str) -> bool
     if not provider or not model or provider not in GlobalConfig.VALID_PROVIDERS:
         return False
 
-    if provider in [GlobalConfig.PROVIDER_GOOGLE_GEMINI, GlobalConfig.PROVIDER_COHERE,]:
-        if not api_key or len(api_key) < 5:
-            return False
+    if provider in [
+        GlobalConfig.PROVIDER_GOOGLE_GEMINI,
+        GlobalConfig.PROVIDER_COHERE,
+    ] and not api_key:
+        return False
+
+    if api_key:
+        return API_KEY_REGEX.match(api_key) is not None
 
     return True
 
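The validation in helpers/llm_helper.py now has two parts: Gemini and Cohere reject an empty key outright, and any key that is supplied, for any provider, must match `API_KEY_REGEX` (6 to 64 characters drawn from letters, digits, hyphens, and underscores). A quick sketch of how that pattern behaves on made-up inputs (none of these are real keys):

```python
import re

# Same pattern as the one added in helpers/llm_helper.py.
API_KEY_REGEX = re.compile(r'^[a-zA-Z0-9\-_]{6,64}$')

print(bool(API_KEY_REGEX.match('hf_abc123XYZ')))     # True: allowed characters, 12 chars long
print(bool(API_KEY_REGEX.match('abc')))              # False: shorter than 6 characters
print(bool(API_KEY_REGEX.match('key with spaces')))  # False: spaces are outside the allowed set
print(bool(API_KEY_REGEX.match('sk.123456')))        # False: '.' is outside the allowed set
```

Note that a key containing any other punctuation (a dot, a colon, a plus sign) is rejected even if the provider would accept it, so the pattern may need revisiting if a provider changes its key format.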