Update app.py
app.py CHANGED

@@ -1,5 +1,4 @@
 import random
-import os
 import gradio as gr
 import numpy as np
 import spaces
@@ -11,7 +10,6 @@ if not torch.cuda.is_available():
     DESCRIPTION += "\n<p>你现在运行在CPU上 但是此项目只支持GPU.</p>"
 
 MAX_SEED = np.iinfo(np.int32).max
-CACHE_EXAMPLES = torch.cuda.is_available() and os.getenv("CACHE_EXAMPLES", "1") == "1"
 MAX_IMAGE_SIZE = 4096
 
 if torch.cuda.is_available():
@@ -145,8 +143,7 @@ with gr.Blocks(css=css) as demo:
         examples=examples,
         inputs=prompt,
         outputs=[result, seed],
-        fn=infer
-        cache_examples=CACHE_EXAMPLES,
+        fn=infer
     )
 
     use_negative_prompt.change(
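For orientation, here is a minimal sketch of what the gr.Examples wiring looks like after this commit, with cache_examples dropped alongside the os import and the CACHE_EXAMPLES flag. Only the names prompt, result, seed, infer, examples, and MAX_SEED come from the diff; the component types, the infer body, and the example prompts are placeholders for illustration, not the Space's actual code.

# Hypothetical, minimal reconstruction -- not the Space's actual app.py.
# Only prompt/result/seed/infer/examples/MAX_SEED appear in the diff;
# everything else is a placeholder so the sketch runs on its own.
import random

import gradio as gr
import numpy as np

MAX_SEED = np.iinfo(np.int32).max

examples = ["a photo of an astronaut riding a horse"]  # placeholder prompts


def infer(prompt: str):
    # Placeholder: a real text-to-image app would run its pipeline here.
    seed = random.randint(0, MAX_SEED)
    image = np.zeros((64, 64, 3), dtype=np.uint8)
    return image, seed


with gr.Blocks() as demo:
    prompt = gr.Textbox(label="Prompt")
    result = gr.Image(label="Result")
    seed = gr.Number(label="Seed")

    gr.Examples(
        examples=examples,
        inputs=prompt,
        outputs=[result, seed],
        fn=infer,
        # cache_examples is no longer passed, so Gradio's default behavior applies.
    )

if __name__ == "__main__":
    demo.launch()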