lgaleana committed
Commit af799b3
1 Parent(s): ef06a8c

Added layout for code task

Files changed (2)
  1. ai/llm.py +1 -1
  2. components.py +125 -23
ai/llm.py CHANGED
@@ -20,7 +20,7 @@ def call(
 ) -> Dict[str, Any]:
     if not model:
         model = MODEL
-    if not temperature:
+    if temperature is None:
         temperature = TEMPERATURE
 
     return openai.ChatCompletion.create(  # type: ignore
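
The one-line change above matters because the new CodeTask (added in components.py below) calls ai.llm.next(..., temperature=0). With the old falsy check, an explicit temperature of 0 would be discarded and replaced by the module default. A minimal sketch of the difference, using an illustrative default value rather than the one actually defined in ai/llm.py:

TEMPERATURE = 1.0  # illustrative default, not the repo's actual value

def old_check(temperature=None):
    if not temperature:  # 0 and 0.0 are falsy, so a caller's 0 is overwritten
        temperature = TEMPERATURE
    return temperature

def new_check(temperature=None):
    if temperature is None:  # only a truly missing value falls back to the default
        temperature = TEMPERATURE
    return temperature

assert old_check(0) == 1.0  # the deterministic-sampling request is silently lost
assert new_check(0) == 0    # temperature=0 is preserved
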
components.py CHANGED
@@ -1,5 +1,6 @@
 from abc import ABC, abstractmethod
-from typing import List, Union
+from concurrent.futures import ThreadPoolExecutor
+from typing import Dict, List, Union
 
 import gradio as gr
 import requests
@@ -8,7 +9,7 @@ import ai
 
 
 class Component(ABC):
-    def __init__(self, id_: int, visible: bool = False):
+    def __init__(self, id_: int):
         # Internal state
         self._id = id_
         self._source = self.__class__.__name__
@@ -81,7 +82,7 @@ class AITask(TaskComponent):
                 self.output = gr.Textbox(
                     label=f"Output: {{{self.vname}{id_}}}",
                     lines=10,
-                    interactive=False,
+                    interactive=True,
                 )
         return gr_component
 
@@ -89,42 +90,143 @@ class AITask(TaskComponent):
         return ai.llm.next([{"role": "user", "content": prompt}])
 
 
-class VisitURL(TaskComponent):
-    name = "Visit URL"
+class CodeTask(TaskComponent):
+    name = "Code Task"
 
-    def _render(self, id_: int) -> gr.Box:
-        with gr.Box(visible=False) as gr_component:
-            gr.Markdown("Get the content from an URL.")
+    def _render(self, id_: int) -> gr.Column:
+        with gr.Column(visible=False) as gr_component:
+            code_prompt = gr.Textbox(
+                label="What would you like to do?",
+                interactive=True,
+            )
             with gr.Row():
-                self.input = gr.Textbox(
-                    interactive=True,
-                    placeholder="URL",
-                    show_label=False,
-                )
-                self.output = gr.Textbox(
-                    label=f"Output: {{{self.vname}{id_}}}",
-                    lines=10,
-                    interactive=False,
-                )
+                generate_code = gr.Button("Generate code")
+                save_code = gr.Button("Save code")
+            with gr.Row():
+                with gr.Column():
+                    with gr.Accordion(label="Generated code") as accordion:
+                        raw_prompt_output = gr.Textbox(
+                            label="Raw output",
+                            lines=5,
+                            interactive=True,
+                        )
+                        packages = gr.Textbox(
+                            label="The following packages will be installed",
+                            interactive=True,
+                        )
+                        function = gr.Textbox(
+                            label="Function to be executed",
+                            lines=10,
+                            interactive=True,
+                        )
+                        error_message = gr.HighlightedText(value=None, visible=False)
+
+                    self.input = gr.Textbox(
+                        interactive=True,
+                        placeholder="Input to the function",
+                        show_label=False,
+                    )
+                with gr.Column():
+                    self.output = gr.Textbox(
+                        label=f"Output: {{{self.vname}{id_}}}",
+                        lines=10,
+                        interactive=True,
+                    )
+
+            generate_code.click(
+                self.generate_code,
+                inputs=[code_prompt],
+                outputs=[raw_prompt_output, packages, function, error_message],
+            )
+            save_code.click(
+                lambda: gr.Accordion.update(open=False),
+                inputs=[],
+                outputs=[accordion],
+            )
+
         return gr_component
 
+    @staticmethod
+    def generate_code(code_prompt: str):
+        try:
+            raw_prompt_output = ai.llm.next(
+                [
+                    {
+                        "role": "user",
+                        "content": f"""
+                        Write a python function for the following request:
+                        {code_prompt}
+
+                        Don't save anything to disk. Instead, the function should return the necessary data.
+                        Include all the necessary imports but put them inside the function itself.
+                        """,
+                    }
+                ],
+                temperature=0,
+            )
+
+            def llm_call(prompt):
+                return ai.llm.next([{"role": "user", "content": prompt}], temperature=0)
+
+            with ThreadPoolExecutor(max_workers=2) as executor:
+                packages, function = tuple(
+                    executor.map(
+                        llm_call,
+                        [
+                            f"""
+                            The following text should have a python function with some imports that might need to be installed:
+                            {raw_prompt_output}
+
+                            Extract all the python packages, nothing else. Print them in a single python list that can be used with eval().
+                            """,
+                            f"""
+                            The following text should have a python function:
+                            {raw_prompt_output}
+
+                            Exclusively extract the function, nothing else.
+                            """,
+                        ],
+                    )
+                )
+        except Exception as e:
+            return (
+                "",
+                "",
+                "",
+                gr.HighlightedText.update(
+                    value=[
+                        (
+                            f"The following variables are being used before being defined :: {str(e)}. Please check your tasks.",
+                            "ERROR",
+                        )
+                    ],
+                    visible=True,
+                ),
+            )
+        return (
+            raw_prompt_output,
+            packages,
+            function,
+            gr.HighlightedText.update(value=None, visible=False),
+        )
+
     def execute(self, url: str) -> str:
-        return requests.get(url).text
+        ...
 
 
 class Task(Component):
-    available_tasks = [AITask, VisitURL]
+    available_tasks = [AITask, CodeTask]
     vname = "t"
 
     def __init__(self, id_: int, visible: bool = False):
-        super().__init__(id_, visible)
+        super().__init__(id_)
         self._inner_tasks = [t() for t in self.available_tasks]
         self.gr_component: gr.Box
 
     def _render(self, id_: int) -> gr.Box:
         with gr.Box(visible=False) as gr_component:
             self.active_index = gr.Dropdown(
-                [AITask.name, VisitURL.name],
+                [AITask.name, CodeTask.name],
                 label="Pick a new Task",
                 type="index",
             )
@@ -139,7 +241,7 @@ class Task(Component):
         return gr_component
 
     @staticmethod
-    def pick_task(idx):
+    def pick_task(idx: int) -> List[Dict]:
         update = [gr.Box.update(visible=False)] * len(Task.available_tasks)
         update[idx] = gr.Box.update(visible=True)
         return update
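
Note that CodeTask.execute is left as a bare `...` in this commit; only the layout and the generate/save wiring exist so far. As a purely illustrative sketch (not part of the commit, and not necessarily how the author intends to finish it), the two strings produced by generate_code could eventually be consumed along these lines, assuming the packages textbox holds a Python list literal (as the extraction prompt requests) and the function textbox holds a single top-level def:

import ast
import subprocess
import sys


def run_generated_function(packages: str, function: str, input_: str) -> str:
    # Hypothetical helper, not in the repository.
    # Parse the "['requests', 'beautifulsoup4']"-style string; ast.literal_eval is
    # a safer stand-in for the eval() the extraction prompt mentions.
    for package in ast.literal_eval(packages):
        subprocess.check_call([sys.executable, "-m", "pip", "install", package])

    # Define the generated function in an isolated namespace and call it with the
    # user-supplied input. Running LLM-generated code this way is inherently unsafe.
    namespace: dict = {}
    exec(function, namespace)
    func = next(v for k, v in namespace.items() if callable(v) and not k.startswith("__"))
    return str(func(input_))

Since generate_code already returns the raw output, the package list, and the function as separate textbox values, a later commit only has to decide where this execution step lives and how to sandbox it.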