Update app.py
app.py CHANGED

@@ -6,14 +6,18 @@ import os
 # function part
 # img2text
 def img2text(image_path):
-    image_to_text = pipeline("image-to-text", model="
+    image_to_text = pipeline("image-to-text", model="Maciel/Muge-Image-Caption")
     text = image_to_text(image_path)[0]["generated_text"]
     return text
 
 # text2story
 def text2story(text):
+    messages = [
+        {"role": "user", "content": "Who are you?"},
+    ]
     # Using a smaller text generation model
-    generator = pipeline(
+    generator = pipeline("text-generation", model="mlx-community/Llama-3.2-1B-Instruct-4bit")
+    generator(messages)
 
     # Create a prompt for the story generation
     prompt = f"Write a fun children's story based on this: {text}. Once upon a time, "
@@ -34,8 +38,10 @@ def text2story(text):
     story_text = story_text.replace(prompt, "Once upon a time, ")
 
     # Make sure the story is at least 100 words
-
-
+    words = story_text.split()
+    if len(words) > 100:
+        # Simply truncate to 100 words
+        story_text = " ".join(words[:100])
 
     return story_text
 
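For context, a minimal usage sketch (not part of this commit) showing how the two functions touched by this diff might be chained in the app; the image path below is a hypothetical placeholder, and img2text / text2story are the functions defined in app.py above:

# Hypothetical driver code: caption an image, then turn the caption into a story.
caption = img2text("example.jpg")  # "example.jpg" is a placeholder path
story = text2story(caption)
print(story)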