showee committed on
Commit
287c910
·
1 Parent(s): c526f0f

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +3 -4
app.py CHANGED
@@ -4,6 +4,8 @@ import torch
4
  from PIL import Image
5
  from huggingface_hub import hf_hub_download
6
  from safetensors.torch import load_file
 
 
7
 
8
 
9
  def convert_safetensors_to_bin(pipeline, state_dict, alpha = 0.4):
@@ -109,10 +111,7 @@ def error_str(error, title="Error"):
109
  {error}""" if error else ""
110
 
111
  def inference(prompt, guidance, steps, width=512, height=512, seed=0, img=None, strength=0.5, neg_prompt="", auto_prefix=False):
112
- if torch.cuda.is_available():
113
- generator = torch.Generator('cuda').manual_seed(seed) if seed != 0 else None
114
- else:
115
- generator = torch.Generator.manual_seed(seed) if seed != 0 else None
116
  prompt = f"{prefix} {prompt}" if auto_prefix else prompt
117
 
118
  try:
 
4
  from PIL import Image
5
  from huggingface_hub import hf_hub_download
6
  from safetensors.torch import load_file
7
+ import os
8
+ os.environ['CUDA_LAUNCH_BLOCKING'] = 1
9
 
10
 
11
  def convert_safetensors_to_bin(pipeline, state_dict, alpha = 0.4):
 
111
  {error}""" if error else ""
112
 
113
  def inference(prompt, guidance, steps, width=512, height=512, seed=0, img=None, strength=0.5, neg_prompt="", auto_prefix=False):
114
+ generator = torch.Generator('cuda').manual_seed(seed) if seed != 0 else None
 
 
 
115
  prompt = f"{prefix} {prompt}" if auto_prefix else prompt
116
 
117
  try: