gokilashree committed on
Commit
04b5477
·
verified ·
1 Parent(s): a587619

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +7 -2
app.py CHANGED
@@ -6,6 +6,11 @@ import io
6
  from PIL import Image
7
  import os
8
 
 
 
 
 
 
9
  # Load the translation model and tokenizer
10
  model_name = "facebook/mbart-large-50-many-to-one-mmt"
11
  tokenizer = MBart50Tokenizer.from_pretrained(model_name)
@@ -18,8 +23,8 @@ text_model = AutoModelForCausalLM.from_pretrained(text_generation_model_name)
18
  text_generator = pipeline("text-generation", model=text_model, tokenizer=text_tokenizer)
19
 
20
  # Load the FLUX.1-dev image generation model from diffusers
21
- flux_model = StableDiffusionPipeline.from_pretrained("black-forest-labs/FLUX.1-dev")
22
- flux_model.to("cuda") # Make sure to use a GPU for faster generation
23
 
24
  # Function to generate an image using FLUX.1-dev model
25
  def generate_image_from_text(translated_text):
 
6
  from PIL import Image
7
  import os
8
 
9
+ # Load the Hugging Face API key from environment variables
10
+ hf_api_key = os.getenv("full_token")
11
+ if hf_api_key is None:
12
+ raise ValueError("Hugging Face API key not found! Please set the 'full_token' environment variable.")
13
+
14
  # Load the translation model and tokenizer
15
  model_name = "facebook/mbart-large-50-many-to-one-mmt"
16
  tokenizer = MBart50Tokenizer.from_pretrained(model_name)
 
23
  text_generator = pipeline("text-generation", model=text_model, tokenizer=text_tokenizer)
24
 
25
  # Load the FLUX.1-dev image generation model from diffusers
26
+ flux_model = StableDiffusionPipeline.from_pretrained("black-forest-labs/FLUX.1-dev", use_auth_token=hf_api_key)
27
+ flux_model.to("cuda") # Use GPU for faster generation, if available
28
 
29
  # Function to generate an image using FLUX.1-dev model
30
  def generate_image_from_text(translated_text):