gerasdf committed
Commit · 5ec174c
1 Parent(s): cf5e123
enable auth, enable AI
query.py CHANGED

@@ -8,13 +8,14 @@ from langchain_core.runnables import RunnablePassthrough, RunnableLambda
 from langchain_core.messages import SystemMessage, AIMessage, HumanMessage
 from langchain_openai import OpenAIEmbeddings, ChatOpenAI
 
+from json import loads as json_loads
 import os
 
 prompt_template = os.environ.get("PROMPT_TEMPLATE")
 
 prompt = ChatPromptTemplate.from_messages([('system', prompt_template)])
 
-AI =
+AI = True
 
 def ai_setup():
     global llm, prompt_chain
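For context on the lines above: PROMPT_TEMPLATE is read from the environment and compiled into a prompt that holds a single system message. A minimal sketch (not part of this commit) of what such a prompt yields when invoked; the template text and the {context} placeholder are made up, the real template comes from whatever PROMPT_TEMPLATE is set to:

from langchain_core.prompts import ChatPromptTemplate

tmpl = ChatPromptTemplate.from_messages([('system', "You answer as the host. Context: {context}")])
value = tmpl.invoke({"context": "some retrieved text"})   # a ChatPromptValue
print(type(value.messages[0]).__name__)                   # SystemMessage

Invoking the compiled prompt returns a ChatPromptValue, which is why chat() further down can take system_prompt.messages[0] from the result of prompt_chain.invoke(message).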
@@ -91,27 +92,50 @@ def just_read(pipeline_state):
 
 def new_state():
     return gr.State({
+        "user": None,
         "system": None,
     })
 
-        state["system"] = system_prompt
+def auth(token, state):
+    tokens=os.environ.get("APP_TOKENS", None)
+    if tokens is None:
+        state["user"] = "anonymous"
     else:
+        tokens=json_loads(tokens)
+        state["user"] = tokens.get(token, None)
+    return "", state
+
+AUTH_JS = """function auth_js(token, state) {
+    if (!!document.location.hash) {
+        token = document.location.hash
+        document.location.hash=""
+    }
+    return [token, state]
+}
+"""
+
+def chat(message, history, state):
+    if (state is None) or (not state['user']):
+        gr.Warning("You need to authenticate first")
+        yield "You need to authenticate first"
+    else:
+        if not history:
+            system_prompt = prompt_chain.invoke(message)
+            system_prompt = system_prompt.messages[0]
+            state["system"] = system_prompt
+        else:
+            system_prompt = state["system"]
+
+        messages = [system_prompt]
+        for human, ai in history:
+            messages.append(HumanMessage(human))
+            messages.append(AIMessage(ai))
+        messages.append(HumanMessage(message))
 
-    messages.append(HumanMessage(message))
-    all = ''
-    for response in llm.stream(messages):
-        all += response.content
-        yield all
+        all = ''
+        for response in llm.stream(messages):
+            all += response.content
+            yield all
 
 def gr_main():
     theme = gr.Theme.from_hub("freddyaboulton/[email protected]")
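The new auth() above treats APP_TOKENS as a JSON object mapping access tokens to user names: when the variable is unset every visitor becomes "anonymous", and an unknown token leaves state["user"] as None, which chat() then rejects with the "You need to authenticate first" warning. A small sketch of the expected shape, with made-up tokens and names:

import json, os

os.environ["APP_TOKENS"] = json.dumps({"tok-alice": "alice", "tok-bob": "bob"})  # hypothetical values

tokens = json.loads(os.environ["APP_TOKENS"])
print(tokens.get("tok-alice"))  # "alice" -- what auth() would store in state["user"]
print(tokens.get("unknown"))    # None    -- chat() keeps asking for authentication

chat() itself streams by accumulating the llm.stream() chunks into all and yielding the progressively longer string, which is the incremental-update form gr.ChatInterface expects from a generator callback.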
@@ -137,8 +161,15 @@ def gr_main():
             "and I just received a letter threatening me to make public some things"
             "of my past I'd rather keep quiet, unless I don't marry"],
         ],
+        additional_inputs=[state])
+    token = gr.Textbox(visible=False)
+    app.load(auth,
+             [token,state],
+             [token,state],
+             js=AUTH_JS)
+
     app.launch(show_api=False)
+
 if __name__ == "__main__":
     ai_setup()
     gr_main()
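Taken together, the wiring above gives a lightweight token login: on page load AUTH_JS copies any URL fragment into the hidden token textbox and clears it from the address bar, app.load() passes that value to auth(), and the resolved user name rides along in the same gr.State that the chat callback receives through additional_inputs. A visitor would therefore open the app through a link of the form https://<space-url>/#<token> (both parts placeholders, not a real URL). One detail worth noting: document.location.hash includes the leading '#', so as written the keys in APP_TOKENS would have to carry that prefix too, unless the hash is stripped before the lookup.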