Commit: load_model
app.py
CHANGED
@@ -11,13 +11,14 @@ import random
 import spaces
 
 pipe = None
+device = "cuda" if torch.cuda.is_available() else "cpu"
 
 def load_model():
     global pipe
     pipe = StableDiffusionXLControlNetPipeline.from_pretrained(
         "yeq6x/animagine_position_map",
         controlnet=ControlNetModel.from_pretrained("yeq6x/Image2PositionColor_v3"),
-    ).to(
+    ).to(device)
     pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config)
 
 load_model()
@@ -136,7 +137,6 @@ def outpaint_image(image):
 
 @spaces.GPU
 def predict_image(cond_image, prompt, negative_prompt):
-    global pipe
     generator = torch.Generator()
     generator.manual_seed(random.randint(0, 2147483647))
 
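
For reference, a minimal, self-contained sketch of the loading pattern the first hunk arrives at: choose the device once, build the pipeline, and move it there. The torch/diffusers import lines are not visible in this hunk and are assumed; only the repo IDs and the device logic come from the diff itself.

import torch
from diffusers import (
    ControlNetModel,
    EulerAncestralDiscreteScheduler,
    StableDiffusionXLControlNetPipeline,
)

pipe = None
# Choose CUDA when a GPU is visible, otherwise fall back to CPU.
device = "cuda" if torch.cuda.is_available() else "cpu"

def load_model():
    global pipe
    controlnet = ControlNetModel.from_pretrained("yeq6x/Image2PositionColor_v3")
    pipe = StableDiffusionXLControlNetPipeline.from_pretrained(
        "yeq6x/animagine_position_map",
        controlnet=controlnet,
    ).to(device)  # move the whole pipeline to the selected device
    pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config)

load_model()

Keeping the device in one module-level variable lets later code reuse it instead of hard-coding "cuda", which is what the ).to(device) change relies on.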
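The second hunk drops the "global pipe" statement from predict_image. In Python, global is only required when a function rebinds a module-level name; merely reading or calling it resolves the global automatically, so the removed line was redundant. A tiny sketch of that rule, using hypothetical stand-in values rather than the real pipeline:

pipe = None  # module-level handle, assigned by load_model()

def load_model():
    global pipe        # rebinding the module-level name requires `global`
    pipe = "loaded"    # stand-in for the real diffusers pipeline

def predict_image():
    return pipe        # a plain read finds the global without any declaration

load_model()
print(predict_image())  # -> loaded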