LEIDIA committed
Commit c366ff1 · verified · 1 Parent(s): 8715fc2

Update app.py

Files changed (1)
  1. app.py +16 -6
app.py CHANGED
@@ -8,6 +8,7 @@ from diffusers import StableDiffusionPipeline
 from diffusers import OnnxRuntimeModel
 import torch
 
+black app.py
 
 device = "cuda" if torch.cuda.is_available() else "cpu"
 model_repo_id = "runwayml/stable-diffusion-v1-5" # Replace to the model you would like to use
@@ -109,13 +110,22 @@ css = """
 
 # Interface Gradio
 with gr.Blocks() as demo:
-    gr.Markdown("## Text-to-Image Optimized for CPU")
+    with gr.Column(elem_id="col-container"):
+        gr.Markdown("## Text-to-Image Optimized for CPU")
 
-    with gr.Row():
-        prompt = gr.Textbox(label="Prompt")
-        num_inference_steps = gr.Slider(
-            label="Inference Steps", minimum=1, maximum=50, step=1, value=15
-        )
+        with gr.Row():
+            prompt = gr.Textbox(
+                label="Prompt",
+                show_label=False,
+                max_lines=1,
+                placeholder="Enter your prompt",
+                container=False,
+            )
+            run_button = gr.Button("Run", scale=0, variant="primary")
+
+        result = gr.Image(label="Result", show_label=False)
+
+
     with gr.Row():
         generate_button = gr.Button("Generate")
         result = gr.Image(label="Generated Image")
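Two notes on the committed hunks. First, the `black app.py` line added in the first hunk is the shell command for the Black formatter, not Python, so as committed it will raise a SyntaxError when the Space imports app.py; it presumably belongs in the terminal rather than in the file. Second, the second hunk introduces `prompt`, `run_button`, and `result` while leaving the older Generate/Generated Image row in place, and the event wiring falls outside the visible context. The sketch below shows one way the new components could be hooked up. It is not part of the commit: the `pipe` and `infer` names are hypothetical, and the default of 15 steps is simply carried over from the slider this commit removes.

import torch
from diffusers import StableDiffusionPipeline

# Assumption: app.py builds its pipeline from model_repo_id; the variable name
# `pipe` and the helper `infer` do not appear in this diff.
pipe = StableDiffusionPipeline.from_pretrained(model_repo_id)
pipe = pipe.to(device)

def infer(prompt_text, num_inference_steps=15):
    # Run the text-to-image pipeline and return the first PIL image for gr.Image.
    return pipe(prompt_text, num_inference_steps=num_inference_steps).images[0]

# Inside `with gr.Blocks() as demo:`, after the components this commit adds:
run_button.click(fn=infer, inputs=prompt, outputs=result)
prompt.submit(fn=infer, inputs=prompt, outputs=result)  # Enter key in the textbox

demo.launch()

Keeping the default at 15 steps mirrors the slider value removed by this commit, which fits the "Optimized for CPU" intent of the interface.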