Tonic committed
Commit 7af0ebc · Parent: c98a916

Update app.py

Files changed (1)
  1. app.py +50 -21
app.py CHANGED
@@ -1,34 +1,54 @@
 import gradio as gr
 import openai
 from dotenv import load_dotenv
-from gradio_client import Client
 import os
 import time
 
+current_thread_id = None
+
+title = "# Welcome to 🙋🏻‍♂️Tonic's🕵🏻‍♂️Bulbi🪴Plant👩🏻‍⚕️Doctor!"
+description = """Here you can use Bulbi - an OpenAI agent that helps you save your plants!
+OpenAI doesnt let you use Agents without paying for it, so I made you an interface you can use for free !
+### How to use:
+- Introduce your🌵plant below.
+- Be as🌿descriptive as possible.
+- **Respond with additional🗣️information when prompted.**
+- Save your plants with👨🏻‍⚕️Bulbi Plant Doctor!
+### Join us:
+[Join my active builders' server on discord](https://discord.gg/VqTxc76K3u). Let's build together!
+Big thanks to 🤗Huggingface Organisation for the🫂Community Grant"""
+
 examples = [
-    ["My Eucalyptus tree is struggling outside in the cold weather in europe"],
-    ["My callatea house plant is yellowing."],
-    ["We have a catcus as work that suddently started yellowing and wilting."]
+    ["My Eucalyptus tree is struggling outside in the cold weather in Europe",True, None],
+    ["My calathea house plant is yellowing.",True, None],
+    ["We have a cactus at work that suddenly started yellowing and wilting.",True, None]
 ]
 
 load_dotenv()
 openai.api_key = os.getenv('OPENAI_API_KEY')
-assistant_id=os.getenv('ASSISTANT_ID')
+assistant_id = os.getenv('ASSISTANT_ID')
 client = openai.OpenAI(api_key=openai.api_key)
+thread_ids = {}
 
+def ask_openai(question, start_new_thread=True, selected_thread_id=None):
+    global thread_ids
 
-def ask_openai(question):
     try:
-        thread = client.beta.threads.create()
+        if start_new_thread or selected_thread_id not in thread_ids:
+            thread = client.beta.threads.create()
+            current_thread_id = thread.id
+            thread_ids[current_thread_id] = thread.id
+        else:
+            current_thread_id = thread_ids[selected_thread_id]
 
         client.beta.threads.messages.create(
-            thread_id=thread.id,
+            thread_id=current_thread_id,
             role="user",
             content=question,
         )
 
         run = client.beta.threads.runs.create(
-            thread_id=thread.id,
+            thread_id=current_thread_id,
             assistant_id=assistant_id
         )
 
@@ -38,36 +58,41 @@ def ask_openai(question):
 
         while not response_received and time.time() - start_time < timeout:
             run_status = client.beta.threads.runs.retrieve(
-                thread_id=thread.id,
+                thread_id=current_thread_id,
                 run_id=run.id,
             )
             if run_status.status == 'completed':
                 response_received = True
             else:
-                time.sleep(4)
+                time.sleep(4)
 
         if not response_received:
             return "Response timed out."
 
-        # Ensure steps is defined here
         steps = client.beta.threads.runs.steps.list(
-            thread_id=thread.id,
+            thread_id=current_thread_id,
             run_id=run.id
         )
 
-        response_text = "No response."
         if steps.data:
             last_step = steps.data[-1]
            if last_step.type == 'message_creation':
                 message_id = last_step.step_details.message_creation.message_id
                 message = client.beta.threads.messages.retrieve(
-                    thread_id=thread.id,
+                    thread_id=current_thread_id,
                     message_id=message_id
                 )
                 if message.content and message.content[0].type == 'text':
                     response_text = message.content[0].text.value
+                else:
+                    return "No response."
+        else:
+            return "No response."
+
+        # Initialize Gradio client
+        gradio_client = Client("https://tonic1-tulu.hf.space/--replicas/t5vxm/")
 
-        gradio_client = Client("https://tonic1-tulu.hf.space/--replicas/gzzjr/")
+        # Pass the response text to the Gradio client and get the prediction
         final_result = gradio_client.predict(
             response_text,
             "I am Tulu, an Expert Plant Doctor, I will exactly summarize the information you provide to me.",
@@ -80,12 +105,16 @@ def ask_openai(question):
         return f"An error occurred: {str(e)}"
 
 iface = gr.Interface(
-    fn=ask_openai,
-    inputs=gr.Textbox(lines=5, placeholder="Hi there, I have a plant that's..."),
+    title=title,
+    description=description,
+    fn=ask_openai,
+    inputs=[
+        gr.Textbox(lines=5, placeholder="Hi there, I have a plant that's..."),
+        gr.Checkbox(label="Start a new conversation thread"),
+        gr.Dropdown(label="Select previous thread", choices=list(thread_ids.keys()))
+    ],
     outputs=gr.Markdown(),
-    title="Wecome to Tonic's Bulbi-Tulu Plant Doctor",
-    description="""Welcome to Bulbi using [Tulu](https://huggingface.co/allenai/tulu-2-dpo-70b). Introduce your plant below. Be as descriptive as possible. Respond with additional information when prompted. Save your plants with Tulu Plant Doctor""",
     examples=examples
 )
 
-iface.launch()
+iface.launch()
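
For reference, the core behavioral change is the thread-reuse branch at the top of `ask_openai`. The sketch below isolates that logic so it can be run on its own; `resolve_thread` and `fake_create_thread` are illustrative stand-ins invented for this note and are not part of app.py (the committed code calls `client.beta.threads.create()` and keys the module-level `thread_ids` dict by thread id).

```python
# Minimal, self-contained sketch of the thread-reuse branching added in this commit.
# fake_create_thread() stands in for client.beta.threads.create(); not part of app.py.
import itertools

_counter = itertools.count(1)
thread_ids = {}  # mirrors the module-level dict added in app.py


def fake_create_thread():
    """Stand-in for the OpenAI call; returns a new fake thread id."""
    return f"thread_{next(_counter)}"


def resolve_thread(start_new_thread=True, selected_thread_id=None):
    # Same decision as in the updated ask_openai(): create a thread unless the
    # caller asked to continue an existing one that is still tracked.
    if start_new_thread or selected_thread_id not in thread_ids:
        thread_id = fake_create_thread()
        thread_ids[thread_id] = thread_id
        return thread_id
    return thread_ids[selected_thread_id]


first = resolve_thread(start_new_thread=True)              # "thread_1"
again = resolve_thread(False, selected_thread_id=first)    # "thread_1" (reused)
fresh = resolve_thread(False, selected_thread_id="stale")  # "thread_2" (unknown id -> new thread)
print(first, again, fresh)
```

In the updated Gradio interface, the Checkbox maps to `start_new_thread` and the Dropdown to `selected_thread_id`, so unchecking the box and picking a known thread id keeps follow-up questions on the same Assistant thread.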