Commit 3a798e9
Parent(s): b5cd9e9
fix: bug
app.py CHANGED
@@ -29,14 +29,13 @@ def create_assistant(client, vector_store_id):
         temperature=0
     )
 
-def create_thread(client):
+def create_thread():
     """Create a new thread."""
     return client.beta.threads.create()
 
-def clear_thread(_):
-    """Clear the chat history and reset the thread."""
-    global thread
-    thread = create_thread(client)
+def clear_thread(state):
+    """Reset the session and clear the chat history."""
+    state = initialize_session()  # create a new thread
     return [], ""
 
 def get_annotations(msg):
@@ -113,6 +112,7 @@ def chatbot_response(history, message):
 
     return history, ""
 
+
 # Load environment variables
 load_environment()
 
@@ -124,103 +124,24 @@ vector_store_id = os.getenv("AZURE_OPENAI_VECTOR_STORE_ID")
 
 # Create assistant and thread
 assistant = create_assistant(client, vector_store_id)
-thread = create_thread(client)
-
-with gr.Blocks() as demo:
-import gradio as gr
-from openai import AzureOpenAI
-import os
-from dotenv import load_dotenv
-import time
-
-def load_environment():
-    """Load environment variables."""
-    load_dotenv(override=True)
-
-def initialize_openai_client():
-    """Initialize the Azure OpenAI client."""
-    return AzureOpenAI(
-        azure_endpoint=os.getenv("AZURE_OPENAI_ENDPOINT"),
-        api_key=os.getenv("AZURE_OPENAI_API_KEY"),
-        api_version="2024-10-01-preview"
-    )
-
-def create_assistant(client, vector_store_id):
-    """Create an assistant with specified configuration."""
-    return client.beta.assistants.create(
-        model="gpt-4o",
-        instructions="",
-        tools=[{
-            "type": "file_search",
-            "file_search": {"ranking_options": {"ranker": "default_2024_08_21", "score_threshold": 0}}
-        }],
-        tool_resources={"file_search": {"vector_store_ids": [vector_store_id]}},
-        temperature=0
-    )
-
-def create_thread(client):
-    """Create a new thread."""
-    return client.beta.threads.create()
-
-def clear_thread(_):
-    """Clear the chat history and reset the thread."""
-    global thread
-    thread = create_thread(client)
-    return [], ""
-
-def get_chatbot_response(client, thread_id, assistant_id, message):
-    """Get chatbot response for a given message."""
-    client.beta.threads.messages.create(
-        thread_id=thread_id,
-        role="user",
-        content=message  # Ensure the content is an object with a `text` key
-    )
-
-    run = client.beta.threads.runs.create(
-        thread_id=thread_id,
-        assistant_id=assistant_id
-    )
-
-    while run.status in ["queued", "in_progress", "cancelling"]:
-        time.sleep(1)
-        run = client.beta.threads.runs.retrieve(
-            thread_id=thread_id,
-            run_id=run.id
-        )
-
-    if run.status == "completed":
-        messages = client.beta.threads.messages.list(thread_id=thread_id)
-
-        for msg in messages:
-            main_text = msg.content[0].text.value
-            return main_text
-
-    return "Unable to retrieve a response."  # Fallback response
 
-def chatbot_response(history, message):
-    """Wrapper function to generate chatbot response."""
-    global thread
-    # Get response from the API
-    assistant_response = get_chatbot_response(client, thread.id, assistant.id, message)
+def respond(message, chat_history, state):
 
-    # Update chat history
-    history.append({"role": "user", "content": message})
-    history.append({"role": "assistant", "content": assistant_response})
 
-    return history, ""
 
-
-
+    """Update the chat history and state."""
+    thread_id = state["thread_id"]
 
-
-client = initialize_openai_client()
+    bot_message = get_chatbot_response(client, thread_id, assistant.id, message)
 
-
-
+    chat_history.append({"role": "user", "content": message})
+    chat_history.append({"role": "assistant", "content": bot_message})
+    return "", chat_history
 
-
-
-thread = create_thread(client)
+def initialize_session():
+    """Initialize an independent thread for each session."""
+    thread = create_thread()
+    return {"thread_id": thread.id}
 
 with gr.Blocks() as demo:
     gr.Markdown("""
@@ -229,20 +150,12 @@ with gr.Blocks() as demo:
     """)
 
     chatbot = gr.Chatbot(type="messages")
-    msg = gr.Textbox()
-
-
-    def respond(message, chat_history):
-
-        bot_message = get_chatbot_response(client, thread.id, assistant.id, message)
-
-        chat_history.append({"role": "user", "content": message})
-        chat_history.append({"role": "assistant", "content": bot_message})
-        return "", chat_history
-
-    msg.submit(respond, [msg, chatbot], [msg, chatbot])
+    msg = gr.Textbox(placeholder="Enter your message here...")
+    state = gr.State(initialize_session)  # initialize per-session state
+    clear = gr.Button("Clear")
 
-
+    msg.submit(respond, [msg, chatbot, state], [msg, chatbot])
+    clear.click(clear_thread, inputs=[state], outputs=[chatbot, msg])
 
 if __name__ == "__main__":
     demo.launch()