Update app.py
app.py CHANGED
@@ -4,6 +4,10 @@ import random
 from diffusers import StableDiffusionPipeline
 import torch
 import os
+import logging
+
+logging.basicConfig(level=logging.INFO)
+
 
 # Retrieve Hugging Face access token from environment variables
 access_token = os.getenv("HF_ACCESS_TOKEN")
@@ -11,6 +15,7 @@ access_token = os.getenv("HF_ACCESS_TOKEN")
 # Set device
 device = "cuda" if torch.cuda.is_available() else "cpu"
 
+
 # Load the Stable Diffusion model
 if torch.cuda.is_available():
     torch.cuda.max_memory_allocated(device=device)
@@ -28,6 +33,11 @@ else:
     )
 pipe = pipe.to(device)
 
+logging.info("Loading the model...")
+
+# Load model
+logging.info("Model loaded successfully.")
+
 MAX_SEED = np.iinfo(np.int32).max
 MAX_IMAGE_SIZE = 1024
 
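For context, a minimal sketch of how the model-loading section of app.py might read after this commit. Only the imports, the new logging lines, and the statements visible in the hunks above come from the diff; the model id, dtype, the from_pretrained arguments, and the way the access token is passed are assumptions, since the diff elides old lines 17-27.

import os
import logging
import random

import numpy as np
import torch
from diffusers import StableDiffusionPipeline

logging.basicConfig(level=logging.INFO)

# Retrieve Hugging Face access token from environment variables
access_token = os.getenv("HF_ACCESS_TOKEN")

# Set device
device = "cuda" if torch.cuda.is_available() else "cpu"

# Load the Stable Diffusion model
# (model id, dtype, and from_pretrained arguments are assumptions;
# the diff does not show old lines 17-27)
if torch.cuda.is_available():
    torch.cuda.max_memory_allocated(device=device)
    pipe = StableDiffusionPipeline.from_pretrained(
        "runwayml/stable-diffusion-v1-5",  # assumed model id
        torch_dtype=torch.float16,         # assumed dtype on GPU
        token=access_token,                # assumed token handling
    )
else:
    pipe = StableDiffusionPipeline.from_pretrained(
        "runwayml/stable-diffusion-v1-5",  # assumed model id
        token=access_token,                # assumed token handling
    )
pipe = pipe.to(device)

logging.info("Loading the model...")

# Load model
logging.info("Model loaded successfully.")

MAX_SEED = np.iinfo(np.int32).max
MAX_IMAGE_SIZE = 1024

Note that, as placed in this commit, both logging.info calls run after pipe = pipe.to(device), so the "Loading the model..." message is only emitted once loading has already finished; moving that first call above the from_pretrained branch would make the two messages actually bracket the load.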