sl committed on
Commit
941019b
·
1 Parent(s): 184c926

Load model only once.

Files changed (1)
  1. app.py +4 -2
app.py CHANGED
@@ -11,6 +11,8 @@ image_width = 256
 image_height = 192
 samples_dir = "/tmp"
 
+pipe = DiffusionPipeline.from_pretrained(model, safety_checker=None, requires_safety_checker=False)
+
 
 # Borrowed from here: https://stackoverflow.com/a/73667318
 def quantize_to_palette(_image, _palette):
@@ -58,9 +60,9 @@ def palette_to_attr(_palette):
 
 
 def generate(prompt, seed):
-    pipe = DiffusionPipeline.from_pretrained(model, safety_checker=None, requires_safety_checker=False)
     generator = torch.Generator("cpu").manual_seed(int(seed))
-    raw_image = pipe(prompt, height=image_height, width=image_width, num_inference_steps=20, generator=generator).images[0]
+    raw_image = \
+        pipe(prompt, height=image_height, width=image_width, num_inference_steps=20, generator=generator).images[0]
     palette = np.array(
         [[0, 0, 0], [0, 0, 255], [0, 255, 0], [0, 255, 255], [255, 0, 0], [255, 0, 255], [255, 255, 0],
          [255, 255, 255]])
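For context, a minimal sketch of how the relevant part of app.py reads after this commit: the pipeline is constructed once at import time and reused across generate() calls, rather than being reloaded on every request. Only the pipeline placement and the lines shown in the hunks come from the diff; the imports and the `model` value are assumptions for illustration.

```python
# Sketch of app.py after this commit. Imports and the `model` id are
# assumptions; only the pipeline placement comes from the diff above.
import numpy as np
import torch
from diffusers import DiffusionPipeline

model = "runwayml/stable-diffusion-v1-5"  # hypothetical; defined elsewhere in app.py
image_width = 256
image_height = 192
samples_dir = "/tmp"

# Loaded once at module import, so repeated generate() calls reuse the same
# weights instead of re-reading the model on every request.
pipe = DiffusionPipeline.from_pretrained(model, safety_checker=None, requires_safety_checker=False)


def generate(prompt, seed):
    # Seed a CPU generator so output is reproducible for a given (prompt, seed) pair.
    generator = torch.Generator("cpu").manual_seed(int(seed))
    raw_image = \
        pipe(prompt, height=image_height, width=image_width, num_inference_steps=20, generator=generator).images[0]
    # 8-colour RGB palette used by the quantization step later in the file.
    palette = np.array(
        [[0, 0, 0], [0, 0, 255], [0, 255, 0], [0, 255, 255], [255, 0, 0], [255, 0, 255], [255, 255, 0],
         [255, 255, 255]])
    ...  # rest of generate() unchanged by this commit
```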