Yuxuan Zhang committed
Commit 14ae48c · 1 Parent(s): dabcb60

Re-add images under Git LFS control

Files changed:
- app.py +26 -11
- img/img_1.png +3 -0
- img/img_2.png +3 -0
app.py
CHANGED
@@ -13,7 +13,7 @@ from openai import OpenAI
 
 device = "cuda" if torch.cuda.is_available() else "cpu"
 
-pipe = CogView4Pipeline.from_pretrained("
+pipe = CogView4Pipeline.from_pretrained("/share/zyx/CogView4-6B-0125/", torch_dtype=torch.bfloat16).to(device)
 
 
 def clean_string(s):
@@ -96,8 +96,8 @@ def delete_old_files():
     while True:
         now = datetime.now()
         cutoff = now - timedelta(minutes=5)
+        os.makedirs("./gradio_tmp", exist_ok=True)
         directories = ["./gradio_tmp"]
-
         for directory in directories:
             for filename in os.listdir(directory):
                 file_path = os.path.join(directory, filename)
@@ -111,7 +111,7 @@ def delete_old_files():
 threading.Thread(target=delete_old_files, daemon=True).start()
 
 
-@spaces.GPU # [uncomment to use ZeroGPU]
+@spaces.GPU(duration=120) # [uncomment to use ZeroGPU]
 def infer(prompt, seed, randomize_seed, width, height, guidance_scale, num_inference_steps,
           progress=gr.Progress(track_tqdm=True)):
     if randomize_seed:
@@ -130,16 +130,16 @@ def infer(prompt, seed, randomize_seed, width, height, guidance_scale, num_infer
 
 
 examples = [
-    "A
-    "A
-    "A
+    "A surreal scene unfolds where a chestnut horse adorned with a flowing crimson cape is perched upon the back of a stoic astronaut. The astronaut, in a gleaming white space suit with reflective visor, stands against a backdrop of a swirling cosmic sky filled with distant stars and nebulae. The horse, with its mane fluttering softly in the non-existent breeze of space, seems to be in command, navigating through the galaxy. The juxtaposition of an equine figure with celestial exploration strikes a whimsical yet grand tone, a vibrant clash of eras and realms.",
+    "A charming panda, adorned with a white apron and a barista's hat, is elegantly pouring steamed milk into a ceramic cup filled with rich, dark espresso. The resulting latte art is a delicate bamboo leaf pattern, perfectly contrasting the creamy white froth and the coffee's deep color. The scene is warmly lit, emanating a cozy café atmosphere, with bamboo décor in the background and the subtle steam rising from the freshly crafted beverage.",
+    "A vintage red convertible with gleaming chrome finishes sits attractively under the golden hues of a setting sun, parked on a deserted cobblestone street in a charming old town. The car's polished body reflects the surrounding quaint buildings and the few early evening stars beginning to twinkle in the gentle gradient of the twilight sky. A light breeze teases the few fallen leaves near the car's pristine white-walled tires, which rest casually by the sidewalk, hinting at the leisurely pace of life in this serene setting."
 ]
 
 
 with gr.Blocks() as demo:
     gr.Markdown("""
 <div style="text-align: center; font-size: 32px; font-weight: bold; margin-bottom: 20px;">
-    CogView4-6B
+    CogView4-6B Hugging Face Space🤗
 </div>
 <div style="text-align: center;">
 <a href="https://huggingface.co/THUDM/CogView4-6B">🤗 Model Hub |
@@ -220,10 +220,25 @@ with gr.Blocks() as demo:
                 value=50,
             )
 
-        gr.
-
-
-
+        with gr.Column():
+            gr.Markdown("### Examples")
+            for i, ex in enumerate(examples):
+                with gr.Row():
+                    ex_btn = gr.Button(
+                        value=ex,
+                        variant="secondary",
+                        elem_id=f"ex_btn_{i}",
+                        scale=3
+                    )
+                    ex_img = gr.Image(
+                        value=f"img/img_{i + 1}.png",
+                        label="Effect",
+                        interactive=False,
+                        height=130,
+                        width=130,
+                        scale=1
+                    )
+                ex_btn.click(fn=lambda ex=ex: ex, inputs=[], outputs=prompt)
     gr.on(
         triggers=[run_button.click, prompt.submit],
         fn=infer,
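For context on the decorator change at line 114: on ZeroGPU Spaces, @spaces.GPU(duration=120) requests a GPU allocation capped at 120 seconds for each call. Below is a minimal, self-contained sketch of that pattern, not the Space's exact code: it assumes CogView4Pipeline is imported from diffusers, loads the public THUDM/CogView4-6B checkpoint linked in the header rather than the local path in the diff, and uses illustrative default generation settings.

# Minimal sketch of the @spaces.GPU(duration=120) pattern from the hunk at
# line 114. Assumptions: CogView4Pipeline comes from diffusers, and the model
# is loaded from the public Hub id rather than the local path in this commit.
import torch
import spaces
from diffusers import CogView4Pipeline

device = "cuda" if torch.cuda.is_available() else "cpu"
pipe = CogView4Pipeline.from_pretrained(
    "THUDM/CogView4-6B", torch_dtype=torch.bfloat16
).to(device)

@spaces.GPU(duration=120)  # ZeroGPU attaches a GPU only while this call runs
def infer(prompt, seed=42, width=1024, height=1024,
          guidance_scale=3.5, num_inference_steps=50):
    # Illustrative defaults; the Space exposes these as sliders instead.
    generator = torch.Generator(device=device).manual_seed(seed)
    return pipe(
        prompt=prompt,
        width=width,
        height=height,
        guidance_scale=guidance_scale,
        num_inference_steps=num_inference_steps,
        generator=generator,
    ).images[0]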
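The hunk at lines 96-103 adds an os.makedirs guard so the cleanup loop no longer fails with FileNotFoundError when ./gradio_tmp has not been created yet. A minimal sketch of the surrounding delete_old_files thread follows; everything past the context shown in the diff (the age check, the removal call, the polling interval) is an assumption rather than the commit's actual code.

# Sketch of the temp-file cleanup thread around the hunk at lines 96-103.
# Everything beyond the shown context (mtime check, os.remove, sleep interval)
# is an assumption, not taken from the commit.
import os
import threading
import time
from datetime import datetime, timedelta

def delete_old_files():
    while True:
        now = datetime.now()
        cutoff = now - timedelta(minutes=5)
        os.makedirs("./gradio_tmp", exist_ok=True)  # guard added by this commit
        directories = ["./gradio_tmp"]
        for directory in directories:
            for filename in os.listdir(directory):
                file_path = os.path.join(directory, filename)
                if os.path.isfile(file_path):
                    mtime = datetime.fromtimestamp(os.path.getmtime(file_path))
                    if mtime < cutoff:
                        os.remove(file_path)  # assumed: drop files older than 5 minutes
        time.sleep(60)  # assumed polling interval

threading.Thread(target=delete_old_files, daemon=True).start()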
img/img_1.png
ADDED (binary image, stored with Git LFS)

img/img_2.png
ADDED (binary image, stored with Git LFS)

img/img_3.png
ADDED (binary image, stored with Git LFS)
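On the UI side, each new example button pushes its prompt into the textbox via ex_btn.click(fn=lambda ex=ex: ex, inputs=[], outputs=prompt). Binding ex as a default argument freezes the value for that loop iteration; a plain closure would make every button emit the last prompt in examples. A stripped-down, standalone illustration of the same wiring (the strings and component names below are placeholders, not the Space's code):

# Standalone illustration of the default-argument lambda used for the example
# buttons; the prompts and layout here are placeholders.
import gradio as gr

examples = ["first example prompt", "second example prompt", "third example prompt"]

with gr.Blocks() as demo:
    prompt = gr.Textbox(label="Prompt")
    for ex in examples:
        btn = gr.Button(value=ex)
        # `ex=ex` captures the current loop value; without it every handler
        # would close over the same variable and return the final string.
        btn.click(fn=lambda ex=ex: ex, inputs=[], outputs=prompt)

if __name__ == "__main__":
    demo.launch()

Gradio's built-in gr.Examples component covers similar ground with less code; the hand-rolled buttons here let each prompt sit beside the fixed-size thumbnail from img/ that this commit re-adds under Git LFS.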