Update app.py
app.py CHANGED
@@ -2,56 +2,49 @@ import gradio as gr
 import openai
 
 
-
-
+title="Wecome to Tonic's🌱🪴Bulbi🌴🎋Plant👨🏻⚕️🌿Doctor🍂🍃",
+description="Introduce your🌴plant below👇🏻. Respond with additional📊information when prompted. Save your plants with 🌱🪴Bulbi🌴🎋Plant👨🏻⚕️🌿Doctor🍂🍃. "
 
-
+load_dotenv()
 openai.api_key = os.getenv('OPENAI_API_KEY')
-
 assistant_id=os.getenv('ASSISTANT_ID')
-
-# Initialize OpenAI client
 client = openai.OpenAI(api_key=openai.api_key)
 
-# Function to interact with OpenAI Assistant
 def ask_openai(question):
-    # Step 2: Create a Thread
     thread = client.beta.threads.create()
-
-    # Step 3: Add a Message to a Thread
     client.beta.threads.messages.create(
         thread_id=thread.id,
         role="user",
         content=question
     )
-
-    # Step 4: Run the Assistant
     run = client.beta.threads.runs.create(
         thread_id=thread.id,
     )
-
-    # Step 5: Check the Run status
     run = client.beta.threads.runs.retrieve(
         thread_id=thread.id,
         run_id=run.id
     )
 
-    # Step 6: Display the Assistant's Response
     messages = client.beta.threads.messages.list(
         thread_id=thread.id
     )
 
-    # Extracting the assistant's response
     response = next((msg for msg in messages['data'] if msg['role'] == 'assistant'), None)
     return response['content'][0]['text']['value'] if response else "No response."
 
-#
+# Examples for the interface
+examples = [
+    ["My Eucalyptus tree is struggling outside in the cold weather in europe"],
+    ["My callatea house plant is yellowing."],
+    ["We have a catcus as work that suddently started yellowing and wilting."]
+]
+
 iface = gr.Interface(
     fn=ask_openai,
-    inputs=gr.inputs.Textbox(lines=2, placeholder="
-    outputs=
-    title=
-    description=
+    inputs=gr.inputs.Textbox(lines=2, placeholder="Hi there, I have a plant that's..."),
+    outputs=gr.outputs.Markdown(),
+    title=title,
+    description=description
 )
 
 # Run the Gradio app
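A few things in the committed version are worth flagging for anyone adapting it: the hunk shows no import for os or load_dotenv, runs.create is called without the assistant_id the Assistants endpoint requires, the run is retrieved once rather than polled to completion, the v1 Python SDK returns model objects rather than dicts (so messages['data'] and msg['role'] will not subscript), and gr.inputs / gr.outputs are deprecated in recent Gradio releases. The sketch below is one way the same flow could be wired up; it assumes the OpenAI Python SDK v1.x Assistants beta API, a recent Gradio release, and python-dotenv, and it is an illustration rather than the Space's actual code.

# Minimal sketch, not the committed app.py: assumes openai>=1.x (Assistants beta),
# a recent gradio release, and python-dotenv; names mirror the commit where possible.
import os
import time

import gradio as gr
import openai
from dotenv import load_dotenv

load_dotenv()
client = openai.OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
assistant_id = os.getenv("ASSISTANT_ID")

# Plain strings (no trailing comma, which would turn title into a 1-tuple).
title = "Wecome to Tonic's🌱🪴Bulbi🌴🎋Plant👨🏻⚕️🌿Doctor🍂🍃"
description = "Introduce your🌴plant below👇🏻. Respond with additional📊information when prompted."

examples = [
    ["My Eucalyptus tree is struggling outside in the cold weather in europe"],
    ["My callatea house plant is yellowing."],
    ["We have a catcus as work that suddently started yellowing and wilting."],
]

def ask_openai(question):
    # Create a thread and attach the user's question to it.
    thread = client.beta.threads.create()
    client.beta.threads.messages.create(
        thread_id=thread.id,
        role="user",
        content=question,
    )

    # Start a run; the endpoint requires assistant_id.
    run = client.beta.threads.runs.create(
        thread_id=thread.id,
        assistant_id=assistant_id,
    )

    # Poll until the run finishes instead of retrieving it only once.
    while run.status in ("queued", "in_progress"):
        time.sleep(1)
        run = client.beta.threads.runs.retrieve(thread_id=thread.id, run_id=run.id)
    if run.status != "completed":
        return f"Run ended with status: {run.status}"

    # Messages come back newest-first as model objects (attribute access, not dicts).
    messages = client.beta.threads.messages.list(thread_id=thread.id)
    for msg in messages.data:
        if msg.role == "assistant":
            return msg.content[0].text.value
    return "No response."

# gr.inputs / gr.outputs were removed in newer Gradio; use the components directly.
iface = gr.Interface(
    fn=ask_openai,
    inputs=gr.Textbox(lines=2, placeholder="Hi there, I have a plant that's..."),
    outputs=gr.Markdown(),
    title=title,
    description=description,
    examples=examples,
)

# Run the Gradio app
if __name__ == "__main__":
    iface.launch()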