Update app.py
app.py CHANGED
@@ -19,6 +19,7 @@ import matplotlib.pyplot as plt
 import gc # Import the garbage collector
 from audio import *
 import os
+multiprocessing.set_start_method("spawn")
 # Define a fallback for environments without GPU
 if os.environ.get("SPACES_ZERO_GPU") is not None:
     import spaces
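
Note on the new multiprocessing.set_start_method("spawn") call: it assumes multiprocessing is imported earlier in app.py (not visible in this hunk), and set_start_method raises RuntimeError if a start method has already been fixed for the process. A minimal, more defensive sketch of the same idea:

import multiprocessing

# "spawn" keeps worker processes from inheriting an already-initialized CUDA
# context via fork, which CUDA does not support. Guard the call because the
# start method can only be set once per process.
try:
    multiprocessing.set_start_method("spawn")
except RuntimeError:
    pass  # a start method was already configured elsewhere
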
@@ -29,8 +30,6 @@ else:
         def wrapper(*args, **kwargs):
             return func(*args, **kwargs)
         return wrapper
-
-
 # Download necessary NLTK data
 try:
     nltk.data.find('tokenizers/punkt')
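
The context lines above (the SPACES_ZERO_GPU check and the wrapper closure) come from the app's fallback for environments without ZeroGPU hardware: when the spaces package is not in use, spaces.GPU is replaced by a decorator that simply calls the wrapped function. The full fallback is not shown in these hunks; the following is a sketch consistent with the visible lines, with the surrounding class layout assumed rather than taken from app.py:

import os

if os.environ.get("SPACES_ZERO_GPU") is not None:
    import spaces  # real ZeroGPU decorator on Hugging Face Spaces
else:
    class spaces:  # assumed stand-in; only the inner wrapper appears in the diff
        @staticmethod
        def GPU(duration=None):
            def decorator(func):
                def wrapper(*args, **kwargs):
                    return func(*args, **kwargs)
                return wrapper
            return decorator
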
@@ -57,41 +56,31 @@ def log_gpu_memory():
     print("CUDA is not available. Cannot log GPU memory.")
 
 # --------- MinDalle Image Generation Functions ---------
-#
-
-@spaces.GPU(duration=60 * 3)
-def load_min_dalle_model(models_root: str = 'pretrained'):
-    """
-    Load the MinDalle model, automatically selecting device and precision.
-
-    Args:
-        models_root: Path to the directory containing MinDalle models.
-
-    Returns:
-        An instance of the MinDalle model.
-    """
-    print("DEBUG: Loading MinDalle model...")
-
+# Check for GPU availability
+def check_gpu_availability():
     if torch.cuda.is_available():
-
-
-        print(
+        print(f"CUDA devices: {torch.cuda.device_count()}")
+        print(f"Current device: {torch.cuda.current_device()}")
+        print(torch.cuda.get_device_properties(torch.cuda.current_device()))
     else:
-
-
-
-
-
-
-
-
-
-
-
-
+        print("CUDA is not available. Running on CPU.")
+check_gpu_availability()
+# GPU-safe model loading
+def initialize_min_dalle_with_gpu():
+    @spaces.GPU(duration=60 * 3)
+    def load_model():
+        return MinDalle(
+            is_mega=True,
+            models_root='pretrained',
+            is_reusable=False,
+            is_verbose=True,
+            dtype=torch.float16 if torch.cuda.is_available() else torch.float32,
+            device='cuda' if torch.cuda.is_available() else 'cpu'
+        )
+    return load_model()
 
-# Initialize
-min_dalle_model =
+# Initialize MinDalle model
+min_dalle_model = initialize_min_dalle_with_gpu()
 
 def generate_image_with_min_dalle(
     model: MinDalle,