Update app.py
app.py CHANGED
@@ -2,15 +2,14 @@ import gradio as gr
 from diffusers import StableDiffusionPipeline
 import torch
 
-# Load the Stable Diffusion model (use
+# Load the Stable Diffusion model (use CPU-compatible settings)
 model_id = "runwayml/stable-diffusion-v1-5"
-pipe = StableDiffusionPipeline.from_pretrained(model_id, torch_dtype=torch.
-pipe = pipe.to("cuda")  # Use GPU if available
+pipe = StableDiffusionPipeline.from_pretrained(model_id, torch_dtype=torch.float32)  # Use float32 for CPU
 
 # Function to generate image from description
 def generate_image(text_description):
-    # Generate image using Stable Diffusion
-    image = pipe(text_description, num_inference_steps=
+    # Generate image using Stable Diffusion on CPU
+    image = pipe(text_description, num_inference_steps=25, guidance_scale=7.5).images[0]  # Reduced steps for speed
 
     # Optional: Add text to the image
     from PIL import Image, ImageDraw, ImageFont
@@ -28,7 +27,7 @@ def generate_image(text_description):
 # Gradio interface
 with gr.Blocks(title="Text-to-Image Generator") as demo:
     gr.Markdown("# Text-to-Image Generator")
-    gr.Markdown("Enter a description below and generate a detailed image!")
+    gr.Markdown("Enter a description below and generate a detailed image! (Note: Running on CPU may be slow)")
 
     with gr.Row():
         with gr.Column():