Broomva committed
Commit b8d1c52 · 1 Parent(s): eea0255

added auth back

Files changed (4)
  1. README.md +1 -1
  2. __pycache__/app.cpython-311.pyc +0 -0
  3. app.py +91 -126
  4. chainlit.md +1 -1
README.md CHANGED
@@ -26,7 +26,7 @@ This chat application leverages the power of Langchain QA retriever to access an
 4. Set up environment variables for Langchain credentials.
 
 ## Usage
-- Start the app: `python app.py`
+- Start the app: `chainlit run app.py --watch`
 - Use the chat interface to ask questions like "What is MLFlow in Databricks?"
 - Receive concise, accurate answers sourced from Broomva's Tech Book.
 
__pycache__/app.cpython-311.pyc DELETED
Binary file (3.66 kB)
 
app.py CHANGED
@@ -21,91 +21,91 @@ embeddings = OpenAIEmbeddings()
 vector_store = FAISS.load_local("docs.faiss", embeddings)
 
 
-# @cl.oauth_callback
-# def oauth_callback(
-#     provider_id: str,
-#     token: str,
-#     raw_user_data: Dict[str, str],
-#     default_app_user: cl.AppUser,
-# ) -> Optional[cl.AppUser]:
-#     # set AppUser tags as regular_user
-#     match default_app_user.username:
-#         case "Broomva":
-#             default_app_user.tags = ["admin_user"]
-#             default_app_user.role = "ADMIN"
-#         case _:
-#             default_app_user.tags = ["regular_user"]
-#             default_app_user.role = "USER"
-#     print(default_app_user)
-#     return default_app_user
-
-
-# @cl.header_auth_callback
-# def header_auth_callback(headers) -> Optional[cl.AppUser]:
-#     # Verify the signature of a token in the header (ex: jwt token)
-#     # or check that the value is matching a row from your database
-#     print(headers)
-#     if (
-#         headers.get("cookie")
-#         == "ajs_user_id=5011e946-0d0d-5bd4-a293-65742db98d3d; ajs_anonymous_id=67d2569d-3f50-48f3-beaf-b756286276d9"
-#     ):
-#         return cl.AppUser(username="Broomva", role="ADMIN", provider="header")
-#     else:
-#         return None
-
-
-# @cl.password_auth_callback
-# def auth_callback(
-#     username: str = "guest", password: str = "guest"
-# ) -> Optional[cl.AppUser]:
-#     # Fetch the user matching username from your database
-#     # and compare the hashed password with the value stored in the database
-#     import hashlib
-
-#     # Create a new sha256 hash object
-#     hash_object = hashlib.sha256()
-
-#     # Hash the password
-#     hash_object.update(password.encode())
-
-#     # Get the hexadecimal representation of the hash
-#     hashed_password = hash_object.hexdigest()
-
-#     if (username, hashed_password) == (
-#         "broomva",
-#         "b68cacbadaee450b8a8ce2dd44842f1de03ee9993ad97b5e99dea64ef93960ba",
-#     ):
-#         return cl.AppUser(username="Broomva", role="ADMIN", provider="credentials")
-#     elif (username, password) == ("guest", "guest"):
-#         return cl.AppUser(username="Guest", role="USER", provider="credentials")
-#     else:
-#         return None
-
-
-# @cl.set_chat_profiles
-# async def chat_profile(current_user: cl.AppUser):
-#     if "ADMIN" not in current_user.role:
-#         # Default to 3.5 when not admin
-#         return [
-#             cl.ChatProfile(
-#                 name="Broomva Book Agent",
-#                 markdown_description="The underlying LLM model is **GPT-3.5**.",
-#                 # icon="https://picsum.photos/200",
-#             ),
-#         ]
-
-#     return [
-#         cl.ChatProfile(
-#             name="Broomva Book Agent Lite",
-#             markdown_description="The underlying LLM model is **GPT-3.5**.",
-#             # icon="https://picsum.photos/200",
-#         ),
-#         cl.ChatProfile(
-#             name="Broomva Book Agent Turbo",
-#             markdown_description="The underlying LLM model is **GPT-4 Turbo**.",
-#             # icon="https://picsum.photos/250",
-#         ),
-#     ]
+@cl.oauth_callback
+def oauth_callback(
+    provider_id: str,
+    token: str,
+    raw_user_data: Dict[str, str],
+    default_app_user: cl.AppUser,
+) -> Optional[cl.AppUser]:
+    # set AppUser tags as regular_user
+    match default_app_user.username:
+        case "Broomva":
+            default_app_user.tags = ["admin_user"]
+            default_app_user.role = "ADMIN"
+        case _:
+            default_app_user.tags = ["regular_user"]
+            default_app_user.role = "USER"
+    # print(default_app_user)
+    return default_app_user
+
+
+@cl.header_auth_callback
+def header_auth_callback(headers) -> Optional[cl.AppUser]:
+    # Verify the signature of a token in the header (ex: jwt token)
+    # or check that the value is matching a row from your database
+    # print(headers)
+    if (
+        headers.get("cookie")
+        == "ajs_user_id=5011e946-0d0d-5bd4-a293-65742db98d3d; ajs_anonymous_id=67d2569d-3f50-48f3-beaf-b756286276d9"
+    ):
+        return cl.AppUser(username="Broomva", role="ADMIN", provider="header")
+    else:
+        return None
+
+
+@cl.password_auth_callback
+def auth_callback(
+    username: str = "guest", password: str = "guest"
+) -> Optional[cl.AppUser]:
+    # Fetch the user matching username from your database
+    # and compare the hashed password with the value stored in the database
+    import hashlib
+
+    # Create a new sha256 hash object
+    hash_object = hashlib.sha256()
+
+    # Hash the password
+    hash_object.update(password.encode())
+
+    # Get the hexadecimal representation of the hash
+    hashed_password = hash_object.hexdigest()
+
+    if (username, hashed_password) == (
+        "broomva",
+        "b68cacbadaee450b8a8ce2dd44842f1de03ee9993ad97b5e99dea64ef93960ba",
+    ):
+        return cl.AppUser(username="Broomva", role="ADMIN", provider="credentials")
+    elif (username, password) == ("guest", "guest"):
+        return cl.AppUser(username="Guest", role="USER", provider="credentials")
+    else:
+        return None
+
+
+@cl.set_chat_profiles
+async def chat_profile(current_user: cl.AppUser):
+    if "ADMIN" not in current_user.role:
+        # Default to 3.5 when not admin
+        return [
+            cl.ChatProfile(
+                name="Broomva Book Agent",
+                markdown_description="The underlying LLM model is **GPT-3.5**.",
+                # icon="https://picsum.photos/200",
+            ),
+        ]
+
+    return [
+        cl.ChatProfile(
+            name="Broomva Book Agent Lite",
+            markdown_description="The underlying LLM model is **GPT-3.5**.",
+            # icon="https://picsum.photos/200",
+        ),
+        cl.ChatProfile(
+            name="Broomva Book Agent Turbo",
+            markdown_description="The underlying LLM model is **GPT-4 Turbo**.",
+            # icon="https://picsum.photos/250",
+        ),
+    ]
 
 
 @cl.on_settings_update
@@ -149,12 +149,12 @@ async def init():
         ]
     ).send()
 
-    # chat_profile = cl.user_session.get("chat_profile")
+    chat_profile = cl.user_session.get("chat_profile")
 
-    # if chat_profile == "Broomva Book Agent Lite":
-    #     settings["model"] = "gpt-3.5-turbo"
-    # elif chat_profile == "Broomva Book Agent Turbo":
-    #     settings["model"] = "gpt-4-1106-preview"
+    if chat_profile == "Broomva Book Agent Lite":
+        settings["model"] = "gpt-3.5-turbo"
+    elif chat_profile == "Broomva Book Agent Turbo":
+        settings["model"] = "gpt-4-1106-preview"
 
     chain = RetrievalQAWithSourcesChain.from_chain_type(
         ChatOpenAI(
@@ -174,39 +174,4 @@ async def init():
 async def main(message):
     chain = cl.user_session.get("chain") # type: RetrievalQAWithSourcesChain
 
-    cb = cl.AsyncLangchainCallbackHandler(
-        stream_final_answer=True, # answer_prefix_tokens=["FINAL", "ANSWER"]
-    )
-    cb.answer_reached = True
-
-    await chain.acall(message.content, callbacks=[cb])
-
-    # if cb.has_streamed_final_answer:
-    #     await cb.final_stream.update()
-    # else:
-    #
-    # answer = res["answer"]
-    # await cl.Message(
-    #     content=answer,
-    # ).send()
-
-
-    # # Instantiate the LLM
-    # llm = HuggingFaceHub(
-    #     model_kwargs={"max_length": 500},
-    #     repo_id="Broomva/bart-large-translation-spa-guc",
-    # )
-
-    # # Add the LLM provider
-    # add_llm_provider(
-    #     LangchainGenericProvider(
-    #         # It is important that the id of the provider matches the _llm_type
-    #         id=llm._llm_type,
-    #         # The name is not important. It will be displayed in the UI.
-    #         name="Spa - Guc Translation",
-    #         # This should always be a Langchain llm instance (correctly configured)
-    #         llm=llm,
-    #         # If the LLM works with messages, set this to True
-    #         is_chat=True
-    #     )
-    # )
+    await chain.acall(message.content, callbacks=[cl.AsyncLangchainCallbackHandler()])
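
Note: the restored `@cl.password_auth_callback` admits the admin user by comparing a sha256 hex digest of the submitted password against a hard-coded digest. A minimal standalone sketch of that check with Python's standard `hashlib` follows; the `verify_password` helper, the `STORED_DIGEST` constant name, and the `hmac.compare_digest` constant-time comparison are illustrative additions, not part of app.py, while the digest string is the one that appears in the diff above.

```python
import hashlib
import hmac

# sha256 hex digest hard-coded in app.py's password callback (copied from the diff above)
STORED_DIGEST = "b68cacbadaee450b8a8ce2dd44842f1de03ee9993ad97b5e99dea64ef93960ba"


def verify_password(password: str, stored_digest: str = STORED_DIGEST) -> bool:
    """Return True when sha256(password) matches the stored hex digest."""
    candidate = hashlib.sha256(password.encode()).hexdigest()
    # hmac.compare_digest performs a constant-time comparison of the two hex strings
    return hmac.compare_digest(candidate, stored_digest)


if __name__ == "__main__":
    # The guest/guest pair in the callback is checked as plain text, not via this hash,
    # so only the admin credential path goes through a digest comparison like this one.
    print(verify_password("guest"))
```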
chainlit.md CHANGED
@@ -5,4 +5,4 @@ answer questions leveraging information found in the book. Go ahead and ask thin
 
 `What is machine learning and deep learning?`
 
-`what is quantum computing?`
+`what is quantum computing?`