mgeorgi committed
Commit bce8c3d · verified · 1 Parent(s): d4448fe

Upload folder using huggingface_hub

Files changed (1)
  1. app.py +46 -50
app.py CHANGED
@@ -72,8 +72,8 @@ pipe = pipeline(
 )
 
 
-def read_feed_data(feed_text: str) -> Dict[str, str]:
-    """Read the first row of feed data and return as dictionary.
+def read_feed_data(feed_text: str) -> List[Dict[str, str]]:
+    """Read all rows of feed data and return as list of dictionaries.
     Automatically detects the delimiter from common options (|, ,, ;, \t)."""
     feed_io = StringIO(feed_text)
     # Get first line to detect delimiter
@@ -95,8 +95,7 @@ def read_feed_data(feed_text: str) -> Dict[str, str]:
     feed_io.seek(0)
     reader = csv.reader(feed_io, delimiter=delimiter)
     headers = next(reader)  # Get header row
-    first_row = next(reader)  # Get first data row
-    return dict(zip(headers, first_row))
+    return [dict(zip(headers, row)) for row in reader]
 
 
 def overlay_text_on_image(
@@ -159,40 +158,47 @@ def generate_response(
     font_family: str = "Arial",
     max_new_tokens: int = 256,
     temperature: float = 0.7,
-) -> tuple[str, Image.Image]:
+) -> List[Image.Image]:
     # Read feed data
-    feed_data = read_feed_data(feed_text)
-
-    # Format the prompt using the chat template and feed data
-    formatted_prompt = prompt.format(**feed_data)
-    system_prompt = "You are a helpful assistant that processes Meta Product Feeds."
-
-    print(formatted_prompt)
-
-    messages = [
-        {"role": "system", "content": system_prompt},
-        {"role": "user", "content": formatted_prompt},
-    ]
-
-    # Generate response
-    outputs = pipe(
-        messages,
-        max_new_tokens=max_new_tokens,
-        temperature=temperature,
-    )
-    response = outputs[0]["generated_text"]
-
-    # Get image with text overlay
-    image_with_text = overlay_text_on_image(
-        image_url=feed_data.get("image_link", ""),
-        text=response[-1]["content"],
-        position=(text_x, text_y),
-        font_size=font_size,
-        font_color=font_color,
-        font_family=font_family,
-    )
-
-    return response[-1]["content"], image_with_text
+    feed_data_list = read_feed_data(feed_text)
+    images = []
+
+    for feed_data in feed_data_list:
+        # Format the prompt using the chat template and feed data
+        formatted_prompt = prompt.format(**feed_data)
+        system_prompt = "You are a helpful assistant that processes Meta Product Feeds."
+
+        print(formatted_prompt)
+
+        messages = [
+            {"role": "system", "content": system_prompt},
+            {"role": "user", "content": formatted_prompt},
+        ]
+
+        # Generate response
+        outputs = pipe(
+            messages,
+            max_new_tokens=max_new_tokens,
+            temperature=temperature,
+        )
+
+        response = outputs[0]["generated_text"]
+        # Extract the generated text from the pipeline output:
+        # the chat pipeline returns the full message list, so take the last (assistant) message
+        generated_text = str(response[-1]["content"]) if response else ""
+
+        # Get image with text overlay
+        image_with_text = overlay_text_on_image(
+            image_url=feed_data.get("image_link", ""),
+            text=generated_text,
+            position=(text_x, text_y),
+            font_size=font_size,
+            font_color=font_color,
+            font_family=font_family,
+        )
+        images.append(image_with_text)
+
+    return images
 
 
 # Create Gradio interface
@@ -201,18 +207,9 @@ demo = gr.Interface(
     description="Chat with Llama 3.2 model using feed data. Use {field_name} in your prompt to include feed data. The feed should be in CSV format with headers in the first row.",
     fn=generate_response,
     inputs=[
+        gr.Textbox(label="Enter your prompt (use {field_name} for feed data)", lines=3),
         gr.Textbox(
-            label="Enter your prompt (use {field_name} for feed data)",
-            lines=3,
-            value="""
-Write an English slogan for "{title}", respond with slogan only.
-""",
-        ),
-        gr.Textbox(
-            label="Feed data (CSV with auto-detected delimiter)",
-            lines=10,
-            value="""id|item_group_id|title|description|availability|condition|price|sale_price|sale_price_effective_date|link|image_link|additional_image_link|brand|google_product_category|product_type|gtin|mpn|gender|age_group|color|material|pattern|size|shipping|custom_label_0|custom_label_1|custom_label_2|custom_label_3|custom_label_4|ios_url|ios_app_store_id|ios_app_name|android_url|android_package|android_app_name|additional image 1|additional image 2
-93310981|100274271|Spangenpumps aus Leder|Klassischer Spangenpumps aus Leder|in stock|new|52,99 EUR|false|2011-03-01T13:00-0800/2030-12-31T15:30-0800|https://www.bonprix.de/produkt/spangenpumps-aus-leder-schwarz-933109/?fb_pid=93310981|https://image01.bonprix.de/assets/1400x1960/1729512044/24082077-slWAlGkv.jpg|https://image01.bonprix.de/assets/1400x1960/1729512076/24081283-ApoUcVxa.jpg,,https://image01.bonprix.de/assets/1400x1960/1729512046/24082348-PLsOBIrl.jpg|bonprix|187|Damen > Schuhe > Pumps|8964004145445|93310981|female|adult|schwarz|Leder|Einfarbig|36,37,38,42,39,40,41|DE:::4.99 EUR|nein|false|Damen Schuhe > Pumps > Pumps > Spangenpumps|raus|raus|bonprix://www.bonprix.de/produkt/spangenpumps-aus-leder-schwarz-933109/?fb_pid=93310981|1090412741|bonprix – Mode und Wohn-Trends online shoppen|bonprix://www.bonprix.de/produkt/spangenpumps-aus-leder-schwarz-933109/?fb_pid=93310981|de.bonprix|bonprix – Mode online shoppen||""",
+            label="Feed data (CSV with auto-detected delimiter)", lines=10, value=""
         ),
         gr.Number(label="Text X Position", value=10),
         gr.Number(label="Text Y Position", value=10),
@@ -227,8 +224,7 @@ demo = gr.Interface(
         gr.Slider(minimum=0.1, maximum=2.0, value=0.7, step=0.1, label="Temperature"),
     ],
     outputs=[
-        gr.Textbox(label="Response", lines=5),
-        gr.Image(label="Product Image with Text"),
+        gr.Gallery(label="Product Images with Text", columns=2),
    ],
 )
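
For context, here is a minimal, self-contained sketch of the data flow this commit introduces: read_feed_data now returns every feed row, each row yields one image, and the list of images feeds the new gr.Gallery output. This is only an illustration, not the app's code; the Llama 3.2 call and the real overlay_text_on_image are replaced with placeholders, and the count-based delimiter detection is an assumption, since the actual detection code sits outside the hunks shown above.

# Sketch only: stand-ins for the LLM pipeline and overlay_text_on_image().
import csv
from io import StringIO
from typing import Dict, List

import gradio as gr
from PIL import Image, ImageDraw


def read_feed_data(feed_text: str) -> List[Dict[str, str]]:
    """Return all feed rows as dictionaries, detecting the delimiter from (|, ,, ;, tab)."""
    first_line = feed_text.splitlines()[0]
    # Assumption: pick the candidate delimiter that occurs most often in the header line
    delimiter = max(["|", ",", ";", "\t"], key=first_line.count)
    reader = csv.reader(StringIO(feed_text), delimiter=delimiter)
    headers = next(reader)  # header row
    return [dict(zip(headers, row)) for row in reader]


def generate_response(prompt: str, feed_text: str) -> List[Image.Image]:
    images = []
    for feed_data in read_feed_data(feed_text):
        text = prompt.format(**feed_data)  # stand-in for the model's response
        img = Image.new("RGB", (400, 200), "white")  # stand-in for the downloaded product image
        ImageDraw.Draw(img).text((10, 10), text, fill="black")  # stand-in for overlay_text_on_image()
        images.append(img)
    return images


demo = gr.Interface(
    fn=generate_response,
    inputs=[
        gr.Textbox(label="Enter your prompt (use {field_name} for feed data)", lines=3),
        gr.Textbox(label="Feed data (CSV with auto-detected delimiter)", lines=10),
    ],
    outputs=[gr.Gallery(label="Product Images with Text", columns=2)],
)

if __name__ == "__main__":
    demo.launch()

A gr.Gallery output accepts the list returned by the function directly, which is why generate_response can now return List[Image.Image] instead of the previous (text, image) tuple.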