Saad0KH committed on
Commit 87191d2 · verified · 1 Parent(s): 9dbc715

Create app-api.py

Files changed (1)
  1. app-api.py +112 -0
app-api.py ADDED
@@ -0,0 +1,112 @@
+ from flask import Flask, request, jsonify
+ from PIL import Image
+ import base64
+ import io
+ import random
+ import uuid
+ import numpy as np
+ import spaces
+ import torch
+ from diffusers import StableDiffusionXLPipeline, EulerAncestralDiscreteScheduler
+ 
+ 
+ # Create a Flask application instance
+ app = Flask(__name__)
+ 
+ def save_image(img):
+     # Persist a PIL image under a unique filename and return its path
+     unique_name = str(uuid.uuid4()) + ".png"
+     img.save(unique_name)
+     return unique_name
+ 
+ def randomize_seed_fn(seed: int, randomize_seed: bool) -> int:
+     if randomize_seed:
+         seed = random.randint(0, MAX_SEED)
+     return seed
+ 
+ MAX_SEED = np.iinfo(np.int32).max
+ 
+ DESCRIPTION = ""
+ if not torch.cuda.is_available():
+     DESCRIPTION += "\n<p>Running on CPU 🥶 This demo may not work on CPU.</p>"
+ 
+ USE_TORCH_COMPILE = 0
+ ENABLE_CPU_OFFLOAD = 0
+ 
+ 
+ if torch.cuda.is_available():
+     # Load the base SDXL pipeline and switch to the Euler Ancestral scheduler
+     pipe = StableDiffusionXLPipeline.from_pretrained(
+         "fluently/Fluently-XL-v2",
+         torch_dtype=torch.float16,
+         use_safetensors=True,
+     )
+     pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config)
+ 
+     # Attach the DALL-E 3 style LoRA weights and activate the adapter
+     pipe.load_lora_weights("ehristoforu/dalle-3-xl-v2", weight_name="dalle-3-xl-lora-v2.safetensors", adapter_name="dalle")
+     pipe.set_adapters("dalle")
+ 
+     pipe.to("cuda")
+ 
+ 
+ @spaces.GPU(enable_queue=True)
+ def generate(
+     prompt: str,
+     negative_prompt: str = "",
+     use_negative_prompt: bool = False,
+     seed: int = 0,
+     width: int = 1024,
+     height: int = 1024,
+     guidance_scale: float = 3,
+     randomize_seed: bool = False,
+ ):
+     seed = int(randomize_seed_fn(seed, randomize_seed))
+ 
+     if not use_negative_prompt:
+         negative_prompt = ""  # type: ignore
+ 
+     images = pipe(
+         prompt=prompt,
+         negative_prompt=negative_prompt,
+         width=width,
+         height=height,
+         guidance_scale=guidance_scale,
+         num_inference_steps=25,
+         num_images_per_prompt=1,
+         cross_attention_kwargs={"scale": 0.65},
+         output_type="pil",
+     ).images
+     image_paths = [save_image(img) for img in images]
+     print(image_paths)
+     return image_paths, seed
+ 
+ @app.get("/")
+ def root():
+     return "Welcome to the Fashion Outfit "
+ 
+ # REST API route
+ @app.route('/api/run', methods=['POST'])
+ def run():
+     data = request.json
+     print(data)
+     prompt = data['prompt']
+     negative_prompt = data['negative_prompt']
+     use_negative_prompt = data['use_negative_prompt']
+     guidance_scale = data['guidance_scale']
+     randomize_seed = data['randomize_seed']
+     result = generate(
+         prompt,
+         negative_prompt,
+         use_negative_prompt,
+         0,
+         1024,
+         1024,
+         guidance_scale,
+         randomize_seed)
+     return jsonify({'out': result})
+ 
+ if __name__ == "__main__":
+     app.run(host="0.0.0.0", port=7860)
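
For reference, a minimal client-side sketch of how the /api/run endpoint added in this file could be exercised once the server is running. It assumes the app listens on localhost:7860 as configured in the __main__ block; the payload keys match the fields read by run(), and the example prompt values are placeholders.

import requests

# Hypothetical client call against a locally running instance of app-api.py
payload = {
    "prompt": "a red tartan winter coat, studio photo",
    "negative_prompt": "blurry, low quality",
    "use_negative_prompt": True,
    "guidance_scale": 3,
    "randomize_seed": True,
}

resp = requests.post("http://localhost:7860/api/run", json=payload)
resp.raise_for_status()

# 'out' holds [image_paths, seed], as returned by generate()
print(resp.json()["out"])

Note that seed, width, and height are hard-coded to 0, 1024, and 1024 in run(), so they are not part of the request payload.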