Update
app.py CHANGED
@@ -17,7 +17,6 @@ if not torch.cuda.is_available():
     DESCRIPTION += "\n<p>Running on CPU 🥶 This demo does not work on CPU.</p>"
 
 MAX_SEED = np.iinfo(np.int32).max
-CACHE_EXAMPLES = torch.cuda.is_available() and os.getenv("CACHE_EXAMPLES") == "1"
 MAX_IMAGE_SIZE = int(os.getenv("MAX_IMAGE_SIZE", "1024"))
 USE_TORCH_COMPILE = os.getenv("USE_TORCH_COMPILE") == "1"
 ENABLE_CPU_OFFLOAD = os.getenv("ENABLE_CPU_OFFLOAD") == "1"
@@ -236,7 +235,6 @@ with gr.Blocks(css="style.css") as demo:
         inputs=prompt,
         outputs=result,
         fn=generate,
-        cache_examples=CACHE_EXAMPLES,
     )
 
     use_negative_prompt.change(
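For context, the removed cache_examples=CACHE_EXAMPLES keyword sits inside what the hunk context suggests is a gr.Examples(...) block of this Gradio app. Below is a minimal, self-contained sketch of that pattern as it looked before this commit; the stubbed generate function, the Textbox output, and the example prompt are assumptions for illustration, while the identifiers prompt, result, generate, and CACHE_EXAMPLES come from the diff itself.

```python
import os

import gradio as gr
import torch

# GPU- and env-var-gated flag, as on the old line 20 of app.py.
CACHE_EXAMPLES = torch.cuda.is_available() and os.getenv("CACHE_EXAMPLES") == "1"


def generate(prompt: str) -> str:
    # Stub standing in for the app's real image-generation function.
    return f"(image generated for: {prompt})"


with gr.Blocks() as demo:  # the real app passes css="style.css"
    prompt = gr.Text(label="Prompt")
    result = gr.Textbox(label="Result")  # hypothetical; the real app likely outputs images
    gr.Examples(
        examples=["An astronaut riding a green horse"],  # hypothetical example prompt
        inputs=prompt,
        outputs=result,
        fn=generate,
        cache_examples=CACHE_EXAMPLES,  # this keyword argument is what the commit removes
    )
```

With the argument dropped, the gr.Examples call simply falls back to Gradio's default example-caching behaviour.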