yaleh committed on
Commit
6eaed59
·
1 Parent(s): bae938f

Added buttons for individual steps.

Browse files
Files changed (1) hide show
  1. demo/sample_generator.ipynb +110 -2
demo/sample_generator.ipynb CHANGED
@@ -207,7 +207,35 @@
207
  " return result\n",
208
  "\n",
209
  " except Exception as e:\n",
210
- " raise RuntimeError(f\"An error occurred during processing: {str(e)}\")"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
211
  ]
212
  },
213
  {
@@ -229,6 +257,51 @@
229
  " return description, input_analysis, examples\n",
230
  " except Exception as e:\n",
231
  " raise gr.Error(f\"An error occurred: {str(e)}\")\n",
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
232
  "\n",
233
  "def format_selected_example(evt: gr.SelectData, examples):\n",
234
  " if evt.index[0] < len(examples):\n",
@@ -251,20 +324,55 @@
251
  " )\n",
252
  " temperature = gr.Slider(label=\"Temperature\", value=1.0, minimum=0.0, maximum=1.0, step=0.1)\n",
253
  " generating_batch_size = gr.Slider(label=\"Generating Batch Size\", value=3, minimum=1, maximum=10, step=1)\n",
254
- " submit_button = gr.Button(\"Generate\", variant=\"primary\")\n",
 
 
255
  "\n",
256
  " with gr.Column(scale=1): # Outputs column\n",
257
  " description_output = gr.Textbox(label=\"Description\", lines=5, show_copy_button=True)\n",
 
258
  " input_analysis_output = gr.Textbox(label=\"Input Analysis\", lines=5, show_copy_button=True)\n",
 
 
 
 
259
  " examples_output = gr.DataFrame(label=\"Examples\", headers=[\"Input\", \"Output\"], interactive=False)\n",
260
  " new_example_json = gr.Textbox(label=\"New Example JSON\", lines=5, show_copy_button=True)\n",
261
  "\n",
 
 
 
 
262
  " submit_button.click(\n",
263
  " fn=process_json,\n",
264
  " inputs=[input_json, model_name, generating_batch_size, temperature],\n",
265
  " outputs=[description_output, input_analysis_output, examples_output]\n",
266
  " )\n",
267
  "\n",
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
268
  " examples_output.select(\n",
269
  " fn=format_selected_example,\n",
270
  " inputs=[examples_output],\n",
 
207
  " return result\n",
208
  "\n",
209
  " except Exception as e:\n",
210
+ " raise RuntimeError(f\"An error occurred during processing: {str(e)}\")\n",
211
+ "\n",
212
+ " def generate_description(self, input_str):\n",
213
+ " return self.description_chain.invoke(input_str)\n",
214
+ "\n",
215
+ " def analyze_input(self, description):\n",
216
+ " return self.input_analysis_chain.invoke(description)\n",
217
+ "\n",
218
+ " def generate_briefs(self, description, input_analysis, generating_batch_size):\n",
219
+ " return self.briefs_chain.invoke({\n",
220
+ " \"description\": description,\n",
221
+ " \"input_analysis\": input_analysis,\n",
222
+ " \"generating_batch_size\": generating_batch_size\n",
223
+ " })\n",
224
+ "\n",
225
+ " def generate_examples_from_briefs(self, description, new_example_briefs, raw_example, generating_batch_size):\n",
226
+ " return self.examples_from_briefs_chain.invoke({\n",
227
+ " \"description\": description,\n",
228
+ " \"new_example_briefs\": new_example_briefs,\n",
229
+ " \"raw_example\": raw_example,\n",
230
+ " \"generating_batch_size\": generating_batch_size\n",
231
+ " })\n",
232
+ "\n",
233
+ " def generate_examples(self, description, raw_example, generating_batch_size):\n",
234
+ " return self.examples_chain.invoke({\n",
235
+ " \"description\": description,\n",
236
+ " \"raw_example\": raw_example,\n",
237
+ " \"generating_batch_size\": generating_batch_size\n",
238
+ " })"
239
  ]
240
  },
241
  {
 
257
  " return description, input_analysis, examples\n",
258
  " except Exception as e:\n",
259
  " raise gr.Error(f\"An error occurred: {str(e)}\")\n",
260
+ " \n",
261
+ "def generate_description_only(input_json, model_name, temperature):\n",
262
+ " try:\n",
263
+ " model = ChatOpenAI(model=model_name, temperature=temperature, max_retries=3)\n",
264
+ " generator = TaskDescriptionGenerator(model)\n",
265
+ " description = generator.generate_description(input_json)\n",
266
+ " return description\n",
267
+ " except Exception as e:\n",
268
+ " raise gr.Error(f\"An error occurred: {str(e)}\")\n",
269
+ "\n",
270
+ "def analyze_input(description, model_name, temperature):\n",
271
+ " try:\n",
272
+ " model = ChatOpenAI(model=model_name, temperature=temperature, max_retries=3)\n",
273
+ " generator = TaskDescriptionGenerator(model)\n",
274
+ " input_analysis = generator.analyze_input(description)\n",
275
+ " return input_analysis\n",
276
+ " except Exception as e:\n",
277
+ " raise gr.Error(f\"An error occurred: {str(e)}\")\n",
278
+ " \n",
279
+ "def generate_briefs(description, input_analysis, generating_batch_size, model_name, temperature):\n",
280
+ " try:\n",
281
+ " model = ChatOpenAI(model=model_name, temperature=temperature, max_retries=3)\n",
282
+ " generator = TaskDescriptionGenerator(model)\n",
283
+ " briefs = generator.generate_briefs(description, input_analysis, generating_batch_size)\n",
284
+ " return briefs\n",
285
+ " except Exception as e:\n",
286
+ " raise gr.Error(f\"An error occurred: {str(e)}\")\n",
287
+ " \n",
288
+ "def generate_examples_from_briefs(description, new_example_briefs, raw_example, generating_batch_size, model_name, temperature):\n",
289
+ " try:\n",
290
+ " model = ChatOpenAI(model=model_name, temperature=temperature, max_retries=3)\n",
291
+ " generator = TaskDescriptionGenerator(model)\n",
292
+ " examples = generator.generate_examples_from_briefs(description, new_example_briefs, raw_example, generating_batch_size)\n",
293
+ " return examples\n",
294
+ " except Exception as e:\n",
295
+ " raise gr.Error(f\"An error occurred: {str(e)}\")\n",
296
+ " \n",
297
+ "def generate_examples(description, raw_example, generating_batch_size, model_name, temperature):\n",
298
+ " try:\n",
299
+ " model = ChatOpenAI(model=model_name, temperature=temperature, max_retries=3)\n",
300
+ " generator = TaskDescriptionGenerator(model)\n",
301
+ " examples = generator.generate_examples(description, raw_example, generating_batch_size)\n",
302
+ " return examples\n",
303
+ " except Exception as e:\n",
304
+ " raise gr.Error(f\"An error occurred: {str(e)}\")\n",
305
  "\n",
306
  "def format_selected_example(evt: gr.SelectData, examples):\n",
307
  " if evt.index[0] < len(examples):\n",
 
324
  " )\n",
325
  " temperature = gr.Slider(label=\"Temperature\", value=1.0, minimum=0.0, maximum=1.0, step=0.1)\n",
326
  " generating_batch_size = gr.Slider(label=\"Generating Batch Size\", value=3, minimum=1, maximum=10, step=1)\n",
327
+ " with gr.Row():\n",
328
+ " submit_button = gr.Button(\"Generate\", variant=\"primary\")\n",
329
+ " generate_description_button = gr.Button(\"Generate Description\", variant=\"secondary\")\n",
330
  "\n",
331
  " with gr.Column(scale=1): # Outputs column\n",
332
  " description_output = gr.Textbox(label=\"Description\", lines=5, show_copy_button=True)\n",
333
+ " analyze_input_button = gr.Button(\"Analyze Input\", variant=\"secondary\")\n",
334
  " input_analysis_output = gr.Textbox(label=\"Input Analysis\", lines=5, show_copy_button=True)\n",
335
+ " generate_briefs_button = gr.Button(\"Generate Briefs\", variant=\"secondary\")\n",
336
+ " example_briefs_output = gr.Textbox(label=\"Example Briefs\", lines=5, show_copy_button=True)\n",
337
+ " generate_examples_from_briefs_button = gr.Button(\"Generate Examples from Briefs\", variant=\"secondary\")\n",
338
+ " examples_from_briefs_output = gr.DataFrame(label=\"Examples from Briefs\", headers=[\"Input\", \"Output\"], interactive=False)\n",
339
  " examples_output = gr.DataFrame(label=\"Examples\", headers=[\"Input\", \"Output\"], interactive=False)\n",
340
  " new_example_json = gr.Textbox(label=\"New Example JSON\", lines=5, show_copy_button=True)\n",
341
  "\n",
342
+ " clear_button = gr.ClearButton([input_json, description_output, input_analysis_output,\n",
343
+ " example_briefs_output, examples_from_briefs_output,\n",
344
+ " examples_output, new_example_json])\n",
345
+ "\n",
346
  " submit_button.click(\n",
347
  " fn=process_json,\n",
348
  " inputs=[input_json, model_name, generating_batch_size, temperature],\n",
349
  " outputs=[description_output, input_analysis_output, examples_output]\n",
350
  " )\n",
351
  "\n",
352
+ " analyze_input_button.click(\n",
353
+ " fn=analyze_input,\n",
354
+ " inputs=[description_output, model_name, temperature],\n",
355
+ " outputs=[input_analysis_output]\n",
356
+ " )\n",
357
+ "\n",
358
+ " generate_description_button.click(\n",
359
+ " fn=generate_description_only,\n",
360
+ " inputs=[input_json, model_name, temperature],\n",
361
+ " outputs=[description_output]\n",
362
+ " )\n",
363
+ "\n",
364
+ " generate_briefs_button.click(\n",
365
+ " fn=generate_briefs,\n",
366
+ " inputs=[description_output, input_analysis_output, generating_batch_size, model_name, temperature],\n",
367
+ " outputs=[example_briefs_output]\n",
368
+ " )\n",
369
+ "\n",
370
+ " generate_examples_from_briefs_button.click(\n",
371
+ " fn=generate_examples_from_briefs,\n",
372
+ " inputs=[description_output, example_briefs_output, input_json, generating_batch_size, model_name, temperature],\n",
373
+ " outputs=[examples_from_briefs_output]\n",
374
+ " )\n",
375
+ "\n",
376
  " examples_output.select(\n",
377
  " fn=format_selected_example,\n",
378
  " inputs=[examples_output],\n",