CamiloVega committed
Commit 935cc40 · verified · 1 Parent(s): a1d006f

Update app.py

Files changed (1):
  1. app.py +21 -3
app.py CHANGED
@@ -1,3 +1,4 @@
+import os
 import spaces
 from transformers import pipeline, AutoModelForCausalLM, AutoTokenizer
 import gradio as gr
@@ -5,6 +6,11 @@ import torch
 import logging
 import sys
 from accelerate import infer_auto_device_map, init_empty_weights
+from huggingface_hub import login
+from dotenv import load_dotenv
+
+# Load environment variables
+load_dotenv()
 
 # Configure logging
 logging.basicConfig(
@@ -13,6 +19,20 @@ logging.basicConfig(
 )
 logger = logging.getLogger(__name__)
 
+# Get HuggingFace token from environment variable
+hf_token = os.getenv('HUGGINGFACE_TOKEN')
+if not hf_token:
+    logger.error("HUGGINGFACE_TOKEN environment variable not found")
+    raise ValueError("Please set the HUGGINGFACE_TOKEN environment variable")
+
+# Login to Hugging Face
+try:
+    login(token=hf_token)
+    logger.info("Successfully logged in to Hugging Face")
+except Exception as e:
+    logger.error(f"Failed to login to Hugging Face: {str(e)}")
+    raise
+
 # Define the model name
 model_name = "meta-llama/Llama-2-7b-hf"
 
@@ -32,8 +52,7 @@ try:
     logger.info("Loading tokenizer...")
     tokenizer = AutoTokenizer.from_pretrained(
         model_name,
-        trust_remote_code=True,
-        use_auth_token=True
+        trust_remote_code=True
     )
     logger.info("Tokenizer loaded successfully")
 
@@ -43,7 +62,6 @@ try:
         model_name,
         torch_dtype=torch.float16 if device == "cuda" else torch.float32,
         trust_remote_code=True,
-        use_auth_token=True,
         load_in_8bit=True,
         device_map="auto"
     )
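
For anyone reproducing this change outside the Space: the updated startup path expects a HUGGINGFACE_TOKEN environment variable, e.g. from a Space secret or from a local .env file that load_dotenv() reads. Below is a minimal standalone sketch of that auth flow; the .env file name and token value are placeholders, not part of the commit.

# Sketch of the token flow the updated app.py relies on (placeholder values, assumptions in comments).
# Assumes a .env file next to app.py containing, for example:
#   HUGGINGFACE_TOKEN=hf_xxxxxxxxxxxxxxxx   <- placeholder value
import os

from dotenv import load_dotenv
from huggingface_hub import login

load_dotenv()  # python-dotenv loads .env into os.environ; existing variables win by default

token = os.getenv("HUGGINGFACE_TOKEN")
if token:
    # login() stores the token for subsequent Hub requests, which is why the commit can
    # drop use_auth_token=True from the from_pretrained() calls for the gated
    # meta-llama/Llama-2-7b-hf repository.
    login(token=token)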