mayf committed (verified)
Commit ff06172 · Parent(s): 258bc7e

Update app.py

Files changed (1)
  1. app.py +7 -7
app.py CHANGED
@@ -30,7 +30,7 @@ def load_clients():
 caption_client, story_client = load_clients()
 
 # —––––––– Main UI
-uploaded = st.file_uploader("Upload an image:", type=["jpg","jpeg","png"])
+uploaded = st.file_uploader("Upload an image:", type=["jpg", "jpeg", "png"])
 if not uploaded:
     st.info("Please upload a JPG/PNG image to begin.")
 else:
@@ -44,7 +44,7 @@ else:
     img.save(buf, format="PNG")
     cap_out = caption_client(data=buf.getvalue())
 
-    # Correctly extract from list/dict
+    # Unwrap list/dict properly
     if isinstance(cap_out, list) and cap_out:
         cap_text = cap_out[0].get("generated_text", "").strip()
     elif isinstance(cap_out, dict):
@@ -53,12 +53,12 @@ else:
         cap_text = str(cap_out).strip()
 
     if not cap_text:
-        st.error("😕 I couldn’t generate a caption. Try uploading a different image.")
+        st.error("😕 Couldn’t generate a caption. Try another image.")
         st.stop()
 
     st.markdown(f"**Caption:** {cap_text}")
 
-    # 3) Build prompt for story
+    # 3) Build prompt
     prompt = (
         f"Here’s an image description: “{cap_text}”.\n\n"
         "Write an 80–100 word playful story for 3–10 year-old children that:\n"
@@ -68,11 +68,11 @@ else:
         "Story:"
     )
 
-    # 4) Generate story
+    # 4) Generate story via HF Inference API (use `params`)
     with st.spinner("✍️ Generating story..."):
         story_out = story_client(
             inputs=prompt,
-            parameters={ # must be `parameters`, not `params`
+            params={ # must be `params`, not `parameters`
                 "max_new_tokens": 120,
                 "do_sample": True,
                 "temperature": 0.7,
@@ -90,7 +90,7 @@ else:
         story = str(story_out).strip()
 
     if not story:
-        st.error("😕 I couldn’t generate a story. Please try again!")
+        st.error("😕 Couldn’t generate a story. Please try again!")
         st.stop()
 
     st.markdown("**Story:**")
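
The substantive fix in this commit is the keyword of the story_client call: `parameters=` becomes `params=`. The call shape used throughout the file (`data=` for the caption model, `inputs=` plus a dict of generation options for the story model) matches the calling convention of `huggingface_hub.InferenceApi`. Assuming that is what `load_clients()` returns (its body is outside these hunks), the sketch below illustrates that convention; the model ids and file name are placeholders, not the Space's actual configuration.

    from huggingface_hub import InferenceApi

    # Hypothetical clients; substitute the Space's real model ids and token.
    caption_client = InferenceApi(repo_id="Salesforce/blip-image-captioning-base")
    story_client = InferenceApi(repo_id="gpt2")

    # Image-to-text: raw bytes go in via `data=`. The response is typically a
    # list of dicts with a "generated_text" key, hence the list/dict
    # unwrapping in app.py.
    with open("example.jpg", "rb") as f:
        cap_out = caption_client(data=f.read())
    caption = (
        cap_out[0].get("generated_text", "")
        if isinstance(cap_out, list) and cap_out
        else str(cap_out)
    )

    # Text generation: the prompt goes in via `inputs=`, and generation
    # options go in a dict passed as `params=`. InferenceApi.__call__ accepts
    # `params`, not `parameters`, which is what this commit corrects.
    story_out = story_client(
        inputs=f'Here is an image description: "{caption}". Write a short, playful story.',
        params={"max_new_tokens": 120, "do_sample": True, "temperature": 0.7},
    )
    print(story_out)

Note that recent huggingface_hub releases deprecate InferenceApi in favor of InferenceClient, whose task methods (for example text_generation) take named keyword arguments rather than a params dict; the params/parameters distinction above applies only to the older client shown here.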