Files changed:
- __pycache__/app.cpython-311.pyc  +0 -0
- app.py  +61 -42

__pycache__/app.cpython-311.pyc  (ADDED)
Binary file (7.15 kB).

app.py  (CHANGED)
@@ -3,16 +3,16 @@ from typing import Dict, Optional

 import chainlit as cl
 from chainlit.input_widget import Select, Slider, Switch
-from chainlit.playground.config import add_llm_provider
-from chainlit.playground.providers.langchain import LangchainGenericProvider
+# from chainlit.playground.config import add_llm_provider
+# from chainlit.playground.providers.langchain import LangchainGenericProvider
 # from chainlit import user_session
 from langchain.chains import RetrievalQAWithSourcesChain
 from langchain.chat_models import ChatOpenAI
 from langchain.embeddings.openai import OpenAIEmbeddings
-from langchain.llms import HuggingFaceHub
-from langchain.prompts.chat import (AIMessagePromptTemplate,
-                                    ChatPromptTemplate,
-                                    HumanMessagePromptTemplate)
+# from langchain.llms import HuggingFaceHub
+# from langchain.prompts.chat import (AIMessagePromptTemplate,
+#                                     ChatPromptTemplate,
+#                                     HumanMessagePromptTemplate)
 from langchain.text_splitter import RecursiveCharacterTextSplitter
 from langchain.vectorstores import FAISS

@@ -39,19 +39,25 @@ def oauth_callback(
     print(default_app_user)
     return default_app_user

+
 @cl.header_auth_callback
 def header_auth_callback(headers) -> Optional[cl.AppUser]:
     # Verify the signature of a token in the header (ex: jwt token)
     # or check that the value is matching a row from your database
     print(headers)
-    if headers.get("cookie") == "ajs_user_id=5011e946-0d0d-5bd4-a293-65742db98d3d; ajs_anonymous_id=67d2569d-3f50-48f3-beaf-b756286276d9":
+    if (
+        headers.get("cookie")
+        == "ajs_user_id=5011e946-0d0d-5bd4-a293-65742db98d3d; ajs_anonymous_id=67d2569d-3f50-48f3-beaf-b756286276d9"
+    ):
         return cl.AppUser(username="Broomva", role="ADMIN", provider="header")
     else:
         return None


 @cl.password_auth_callback
-def auth_callback(username: str = 'guest', password: str = 'guest') -> Optional[cl.AppUser]:
+def auth_callback(
+    username: str = "guest", password: str = "guest"
+) -> Optional[cl.AppUser]:
     # Fetch the user matching username from your database
     # and compare the hashed password with the value stored in the database
     import hashlib
@@ -64,14 +70,18 @@ def auth_callback(username: str = 'guest', password: str = 'guest') -> Optional[cl.AppUser]:

     # Get the hexadecimal representation of the hash
     hashed_password = hash_object.hexdigest()
-
-    if (username, hashed_password) == ("broomva", "b68cacbadaee450b8a8ce2dd44842f1de03ee9993ad97b5e99dea64ef93960ba"):
+
+    if (username, hashed_password) == (
+        "broomva",
+        "b68cacbadaee450b8a8ce2dd44842f1de03ee9993ad97b5e99dea64ef93960ba",
+    ):
         return cl.AppUser(username="Broomva", role="ADMIN", provider="credentials")
     elif (username, password) == ("guest", "guest"):
         return cl.AppUser(username="Guest", role="USER", provider="credentials")
     else:
         return None

+
 @cl.set_chat_profiles
 async def chat_profile(current_user: cl.AppUser):
     if "ADMIN" not in current_user.role:
@@ -96,21 +106,26 @@ async def chat_profile(current_user: cl.AppUser):
             # icon="https://picsum.photos/250",
         ),
     ]
-
+

 @cl.on_settings_update
 async def setup_agent(settings):
     print("on_settings_update", settings)

+
 @cl.on_chat_start
 async def init():
-
     settings = await cl.ChatSettings(
         [
             Select(
                 id="model",
                 label="OpenAI - Model",
-                values=["gpt-3.5-turbo", "gpt-3.5-turbo-1106", "gpt-4", "gpt-4-1106-preview"],
+                values=[
+                    "gpt-3.5-turbo",
+                    "gpt-3.5-turbo-1106",
+                    "gpt-4",
+                    "gpt-4-1106-preview",
+                ],
                 initial_index=0,
             ),
             Switch(id="streaming", label="OpenAI - Stream Tokens", initial=True),
@@ -132,23 +147,26 @@ async def init():
             ),
         ]
     ).send()
-
+
     chat_profile = cl.user_session.get("chat_profile")
-
+
     if chat_profile == "Broomva Book Agent Lite":
-        settings['model'] = 'gpt-3.5-turbo'
+        settings["model"] = "gpt-3.5-turbo"
     elif chat_profile == "Broomva Book Agent Turbo":
-        settings['model'] = 'gpt-4-1106-preview'
+        settings["model"] = "gpt-4-1106-preview"

     chain = RetrievalQAWithSourcesChain.from_chain_type(
-        ChatOpenAI(temperature=settings["temperature"], streaming=settings["streaming"], model=settings["model"]),
+        ChatOpenAI(
+            temperature=settings["temperature"],
+            streaming=settings["streaming"],
+            model=settings["model"],
+        ),
         chain_type="stuff",
-        retriever=vector_store.as_retriever(search_kwargs={"k": int(settings[
-            "k"])}),
+        retriever=vector_store.as_retriever(search_kwargs={"k": int(settings["k"])}),
     )

     cl.user_session.set("settings", settings)
-
+
     print(settings)
     cl.user_session.set("chain", chain)

@@ -156,9 +174,9 @@ async def init():
 @cl.on_message
 async def main(message):
     chain = cl.user_session.get("chain")  # type: RetrievalQAWithSourcesChain
-
+
     cb = cl.AsyncLangchainCallbackHandler(
-        stream_final_answer=True,
+        stream_final_answer=True,  # answer_prefix_tokens=["FINAL", "ANSWER"]
     )
     cb.answer_reached = True

@@ -172,22 +190,23 @@ async def main(message):
         content=answer,
     ).send()

-# Instantiate the LLM
-llm = HuggingFaceHub(
-    model_kwargs={"max_length": 500},
-    repo_id="Broomva/bart-large-translation-spa-guc",
-)
-
-# Add the LLM provider
-add_llm_provider(
-    LangchainGenericProvider(
-        # It is important that the id of the provider matches the _llm_type
-        id=llm._llm_type,
-        # The name is not important. It will be displayed in the UI.
-        name="Spa - Guc Translation",
-        # This should always be a Langchain llm instance (correctly configured)
-        llm=llm,
-        # If the LLM works with messages, set this to True
-        is_chat=True
-    )
-)
+
+# # Instantiate the LLM
+# llm = HuggingFaceHub(
+#     model_kwargs={"max_length": 500},
+#     repo_id="Broomva/bart-large-translation-spa-guc",
+# )
+
+# # Add the LLM provider
+# add_llm_provider(
+#     LangchainGenericProvider(
+#         # It is important that the id of the provider matches the _llm_type
+#         id=llm._llm_type,
+#         # The name is not important. It will be displayed in the UI.
+#         name="Spa - Guc Translation",
+#         # This should always be a Langchain llm instance (correctly configured)
+#         llm=llm,
+#         # If the LLM works with messages, set this to True
+#         is_chat=True
+#     )
+# )