Update prompt handling
handler.py  CHANGED  (+2 -2)
@@ -27,7 +27,7 @@ class EndpointHandler():
         Return:
             A :obj:`dict`:. base64 encoded image
         """
-
+        prompt = data.pop("inputs", data)
         params = data.pop("parameters", data)

         # hyperparamters
@@ -41,7 +41,7 @@ class EndpointHandler():
         generator = torch.Generator(device).manual_seed(manual_seed)

         # run inference pipeline
-        out = self.pipe(
+        out = self.pipe(prompt,
             generator=generator,
             num_inference_steps=num_inference_steps,
             guidance_scale=guidance_scale,