yaleh committed
Commit 59f8551 · 1 Parent(s): aaa4147

Gradio dataframe for sample generator inputs.
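The raw JSON textbox is replaced with a Gradio dataframe input; by default Gradio delivers a gr.DataFrame component's value to callbacks as a pandas DataFrame, and the new examples_to_json helper lowercases the column headers and serializes the rows back to the JSON records string that TaskDescriptionGenerator expects. A minimal sketch of that conversion (the sample row below is made up for illustration):

import pandas as pd

def examples_to_json(examples):
    # Gradio hands over the DataFrame component's value as a pandas DataFrame
    pd_examples = pd.DataFrame(examples)
    # "Input"/"Output" headers become lowercase "input"/"output" keys
    pd_examples.columns = pd_examples.columns.str.lower()
    return pd_examples.to_json(orient="records")

rows = pd.DataFrame([["2 + 2", "4"]], columns=["Input", "Output"])
print(examples_to_json(rows))  # [{"input":"2 + 2","output":"4"}]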

Files changed (1):
  1. app/gradio_sample_generator.py +31 -12
app/gradio_sample_generator.py CHANGED
@@ -1,10 +1,19 @@
 import json
 import gradio as gr
+import pandas as pd
 from langchain_openai import ChatOpenAI
 from meta_prompt.sample_generator import TaskDescriptionGenerator
 
-def process_json(input_json, model_name, generating_batch_size, temperature):
+def examples_to_json(examples):
+    pd_examples = pd.DataFrame(examples)
+    pd_examples.columns = pd_examples.columns.str.lower()
+    return pd_examples.to_json(orient="records")
+
+def process_json(examples, model_name, generating_batch_size, temperature):
     try:
+        # Convert the gradio dataframe into a JSON array
+        input_json = examples_to_json(examples)
+
         model = ChatOpenAI(model=model_name, temperature=temperature, max_retries=3)
         generator = TaskDescriptionGenerator(model)
         result = generator.process(input_json, generating_batch_size)
@@ -18,8 +27,10 @@ def process_json(input_json, model_name, generating_batch_size, temperature):
     except Exception as e:
         raise gr.Error(f"An error occurred: {str(e)}")
 
-def generate_description_only(input_json, model_name, temperature):
+def generate_description_only(examples, model_name, temperature):
     try:
+        input_json = examples_to_json(examples)
+
         model = ChatOpenAI(model=model_name, temperature=temperature, max_retries=3)
         generator = TaskDescriptionGenerator(model)
         description = generator.generate_description(input_json)
@@ -45,11 +56,13 @@ def generate_briefs(description, input_analysis, generating_batch_size, model_na
     except Exception as e:
         raise gr.Error(f"An error occurred: {str(e)}")
 
-def generate_examples_from_briefs(description, new_example_briefs, input_str, generating_batch_size, model_name, temperature):
+def generate_examples_from_briefs(description, new_example_briefs, examples, generating_batch_size, model_name, temperature):
     try:
+        input_json = examples_to_json(examples)
+
         model = ChatOpenAI(model=model_name, temperature=temperature, max_retries=3)
         generator = TaskDescriptionGenerator(model)
-        result = generator.generate_examples_from_briefs(description, new_example_briefs, input_str, generating_batch_size)
+        result = generator.generate_examples_from_briefs(description, new_example_briefs, input_json, generating_batch_size)
         examples = [[example["input"], example["output"]] for example in result["examples"]]
         return examples
     except Exception as e:
@@ -57,9 +70,10 @@ def generate_examples_from_briefs(description, new_example_briefs, input_str, ge
 
 def generate_examples_directly(description, raw_example, generating_batch_size, model_name, temperature):
     try:
+        input_json = examples_to_json(raw_example)
         model = ChatOpenAI(model=model_name, temperature=temperature, max_retries=3)
         generator = TaskDescriptionGenerator(model)
-        result = generator.generate_examples_directly(description, raw_example, generating_batch_size)
+        result = generator.generate_examples_directly(description, input_json, generating_batch_size)
         examples = [[example["input"], example["output"]] for example in result["examples"]]
         return examples
     except Exception as e:
@@ -78,7 +92,12 @@ with gr.Blocks(title="Task Description Generator") as demo:
 
     with gr.Row():
         with gr.Column(scale=1): # Inputs column
-            input_json = gr.Textbox(label="Input JSON", lines=10, show_copy_button=True)
+            input_df = gr.DataFrame(
+                label="Input Examples",
+                headers=["Input", "Output"],
+                datatype=["str", "str"],
+                row_count=(1, "dynamic"),
+            )
             model_name = gr.Dropdown(
                 label="Model Name",
                 choices=["llama3-70b-8192", "llama3-8b-8192", "llama-3.1-70b-versatile", "llama-3.1-8b-instant", "gemma2-9b-it"],
@@ -104,25 +123,25 @@ with gr.Blocks(title="Task Description Generator") as demo:
             examples_output = gr.DataFrame(label="Examples", headers=["Input", "Output"], interactive=False)
             new_example_json = gr.Textbox(label="New Example JSON", lines=5, show_copy_button=True)
 
-    clear_button = gr.ClearButton([input_json, description_output, input_analysis_output,
+    clear_button = gr.ClearButton([input_df, description_output, input_analysis_output,
                                    example_briefs_output, examples_from_briefs_output,
                                    examples_output, new_example_json])
 
     submit_button.click(
         fn=process_json,
-        inputs=[input_json, model_name, generating_batch_size, temperature],
+        inputs=[input_df, model_name, generating_batch_size, temperature], # Package first row
         outputs=[description_output, examples_directly_output, input_analysis_output, example_briefs_output, examples_from_briefs_output, examples_output]
     )
 
     generate_description_button.click(
         fn=generate_description_only,
-        inputs=[input_json, model_name, temperature],
+        inputs=[input_df, model_name, temperature], # Package first row
         outputs=[description_output]
     )
 
     generate_examples_directly_button.click(
         fn=generate_examples_directly,
-        inputs=[description_output, input_json, generating_batch_size, model_name, temperature],
+        inputs=[description_output, input_df, generating_batch_size, model_name, temperature], # Package first row
         outputs=[examples_directly_output]
     )
 
@@ -140,7 +159,7 @@ with gr.Blocks(title="Task Description Generator") as demo:
 
     generate_examples_from_briefs_button.click(
         fn=generate_examples_from_briefs,
-        inputs=[description_output, example_briefs_output, input_json, generating_batch_size, model_name, temperature],
+        inputs=[description_output, example_briefs_output, input_df, generating_batch_size, model_name, temperature],
        outputs=[examples_from_briefs_output]
     )
 
@@ -170,7 +189,7 @@ with gr.Blocks(title="Task Description Generator") as demo:
     flagging_callback = gr.CSVLogger()
     flag_button.click(
         lambda *args: flagging_callback.flag(args),
-        inputs=[input_json, model_name, generating_batch_size, description_output, examples_output, flag_reason],
+        inputs=[input_df, model_name, generating_batch_size, description_output, examples_output, flag_reason],
         outputs=[]
     )
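For context, a self-contained sketch of the same wiring pattern outside this repo (component and variable names here are illustrative, not taken from the codebase; assumes a Gradio 4.x-era gr.DataFrame that accepts these arguments): a dataframe input feeds a click handler, which serializes the rows the same way examples_to_json does in the diff above.

import gradio as gr
import pandas as pd

def to_records_json(examples):
    # Same conversion as examples_to_json in the commit
    df = pd.DataFrame(examples)
    df.columns = df.columns.str.lower()
    return df.to_json(orient="records")

with gr.Blocks() as demo:
    input_df = gr.DataFrame(
        label="Input Examples",
        headers=["Input", "Output"],
        datatype=["str", "str"],
        row_count=(1, "dynamic"),
    )
    json_preview = gr.Textbox(label="JSON preview")
    gr.Button("Convert").click(fn=to_records_json, inputs=[input_df], outputs=[json_preview])

demo.launch()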