Files changed (1) hide show
  1. app.py +28 -5
app.py CHANGED
@@ -1,21 +1,43 @@
1
  import gradio as gr
2
  import numpy as np
3
  import random
 
4
 
5
  import spaces
6
  from diffusers import DiffusionPipeline
7
  import torch
 
8
 
9
  device = "cuda" if torch.cuda.is_available() else "cpu"
10
  model_repo_id = "stabilityai/stable-diffusion-3.5-large-turbo"
11
 
 
 
 
 
 
 
12
  if torch.cuda.is_available():
13
  torch_dtype = torch.bfloat16
14
  else:
15
  torch_dtype = torch.float32
16
 
17
- pipe = DiffusionPipeline.from_pretrained(model_repo_id, torch_dtype=torch_dtype)
18
- pipe = pipe.to(device)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
19
 
20
  MAX_SEED = np.iinfo(np.int32).max
21
  MAX_IMAGE_SIZE = 1024
@@ -82,8 +104,9 @@ with gr.Blocks(css=css) as demo:
82
  negative_prompt = gr.Text(
83
  label="Negative prompt",
84
  max_lines=1,
85
- placeholder="Enter a negative prompt",
86
- visible=False,
 
87
  )
88
 
89
  seed = gr.Slider(
@@ -148,4 +171,4 @@ with gr.Blocks(css=css) as demo:
148
  )
149
 
150
  if __name__ == "__main__":
151
- demo.launch()
 
1
  import gradio as gr
2
  import numpy as np
3
  import random
4
+ import os
5
 
6
  import spaces
7
  from diffusers import DiffusionPipeline
8
  import torch
9
+ from huggingface_hub import HfFolder
10
 
11
  device = "cuda" if torch.cuda.is_available() else "cpu"
12
  model_repo_id = "stabilityai/stable-diffusion-3.5-large-turbo"
13
 
14
def get_hf_token():
    """Return a Hugging Face auth token, or None if none is configured.

    Resolution order:
      1. The ``HF_TOKEN`` environment variable (explicit config wins).
      2. The token stored by ``huggingface-cli login``
         (``~/.cache/huggingface/token`` on current huggingface_hub).

    Returns:
        str | None: the token string, or None when nothing is configured.
    """
    token = os.getenv("HF_TOKEN")  # explicit env var takes precedence
    if token:
        return token
    try:
        # Modern huggingface_hub exposes a top-level get_token();
        # HfFolder.get_token() is deprecated. Import locally so older
        # huggingface_hub versions still work via the fallback below.
        from huggingface_hub import get_token
        return get_token()
    except ImportError:
        return HfFolder.get_token()
19
+
20
# Pick the compute precision once at startup: bfloat16 on CUDA for
# speed/memory, full float32 on CPU where bfloat16 offers no benefit.
torch_dtype = torch.bfloat16 if torch.cuda.is_available() else torch.float32
24
 
25
# Initialize the pipeline with an authentication token — the SD 3.5 model
# repo is gated, so an anonymous download would fail with a 401.
token = get_hf_token()
if token:
    pipe = DiffusionPipeline.from_pretrained(
        model_repo_id,
        torch_dtype=torch_dtype,
        token=token,  # `use_auth_token` is deprecated; `token` is the current kwarg
    )
    pipe = pipe.to(device)
else:
    # Fail fast with actionable setup instructions instead of a cryptic 401 later.
    raise ValueError(
        "HF token not found. Please set your Hugging Face token either:\n"
        "1. As an environment variable: export HF_TOKEN='your_token'\n"
        "2. Using the Hugging Face CLI: huggingface-cli login\n"
        "You can get your token from: https://huggingface.co/settings/tokens"
    )
41
 
42
  MAX_SEED = np.iinfo(np.int32).max
43
  MAX_IMAGE_SIZE = 1024
 
104
  negative_prompt = gr.Text(
105
  label="Negative prompt",
106
  max_lines=1,
107
+ placeholder="Enter things you don't want in the image (optional)",
108
+ value="",
109
+ visible=True,
110
  )
111
 
112
  seed = gr.Slider(
 
171
  )
172
 
173
  if __name__ == "__main__":
174
+ demo.launch()