rnlduatm committed on
Commit dcb02b2 · 1 Parent(s): 0939247

Update space

Files changed (1)
  1. model/animagine_xl.py +29 -27
model/animagine_xl.py CHANGED
@@ -1,45 +1,47 @@
- from diffusers import StableDiffusionXLPipeline, EulerAncestralDiscreteScheduler
- import torch
-
- def generate_animagine_xl(prompt: str):
-     model_id = "Linaqruf/animagine-xl"
-     pipe = StableDiffusionXLPipeline.from_pretrained(
-         model_id,
-         torch_dtype=torch.float16,
-         use_safetensors=True,
-         variant="fp16"
-     )
-     pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config)
-     pipe = pipe.to("cuda")
-     image = pipe(prompt=prompt, width=1024, height=1024).images[0]
-     image.save("output_animagine_xl.png")
-     print("✅ Saved: output_animagine_xl.png")
-     return image
-
- if __name__ == "__main__":
-     prompt = "the ambition of a man gazing at her"
-     generate_animagine_xl(prompt)
-
-
  # from diffusers import StableDiffusionXLPipeline, EulerAncestralDiscreteScheduler
  # import torch

  # def generate_animagine_xl(prompt: str):
  #     model_id = "Linaqruf/animagine-xl"
- #     use_fp16 = torch.cuda.is_available()
  #     pipe = StableDiffusionXLPipeline.from_pretrained(
  #         model_id,
- #         torch_dtype=torch.float16 if use_fp16 else torch.float32,
+ #         torch_dtype=torch.float16,
  #         use_safetensors=True,
  #         variant="fp16"
  #     )
  #     pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config)
- #     device = "cuda" if use_fp16 else "cpu"
- #     pipe = pipe.to(device)
+ #     pipe = pipe.to("cuda")
  #     image = pipe(prompt=prompt, width=1024, height=1024).images[0]
  #     image.save("output_animagine_xl.png")
+ #     print("✅ Saved: output_animagine_xl.png")
  #     return image

  # if __name__ == "__main__":
  #     prompt = "the ambition of a man gazing at her"
  #     generate_animagine_xl(prompt)
+
+
+ from diffusers import StableDiffusionXLPipeline, EulerAncestralDiscreteScheduler
+ import torch
+
+ # (1) Initialize the model and scheduler globally, only once
+ model_id = "Linaqruf/animagine-xl"
+ pipe = StableDiffusionXLPipeline.from_pretrained(
+     model_id,
+     torch_dtype=torch.float16,
+     use_safetensors=True,
+     variant="fp16"
+ )
+ pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config)
+ pipe = pipe.to("cuda")  # can be changed to "cpu"
+
+ # (2) Image generation function
+ def generate_animagine_xl(prompt: str):
+     image = pipe(prompt=prompt, width=1024, height=1024).images[0]
+     image.save("output_animagine_xl.png")
+     print("✅ Saved: output_animagine_xl.png")
+     return image
+
+ if __name__ == "__main__":
+     prompt = "the ambition of a man gazing at her"
+     generate_animagine_xl(prompt)
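
A minimal usage sketch of the refactored module, assuming it is importable as model.animagine_xl from the Space's root and that a CUDA GPU is available (the pipeline is now moved to "cuda" at import time); the second prompt below is purely illustrative:

# Usage sketch (hypothetical): the heavy from_pretrained() call now runs once at import,
# so every call to generate_animagine_xl() reuses the same in-memory pipeline.
from model.animagine_xl import generate_animagine_xl

image_a = generate_animagine_xl("the ambition of a man gazing at her")
image_b = generate_animagine_xl("a quiet street at dawn")  # illustrative second prompt, no reload

# Note: each call overwrites output_animagine_xl.png and returns the PIL image.

If the Space has no GPU, the torch.cuda.is_available() check from the older commented-out version could be restored and the device switched to "cpu", as the inline comment in the new code suggests.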