vaniagrawal committed on
Commit
0aaac91
·
verified ·
1 Parent(s): bb18444

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +10 -3
app.py CHANGED
@@ -6,6 +6,16 @@ model_name = "meta-llama/CodeLlama-7b-hf"
6
  model = AutoModelForCausalLM.from_pretrained(model_name)
7
  tokenizer = AutoTokenizer.from_pretrained(model_name)
8
 
 
 
 
 
 
 
 
 
 
 
9
  def generate_code(prompt):
10
  inputs = tokenizer(prompt, return_tensors="pt")
11
  outputs = model.generate(inputs["input_ids"], max_length=200)
@@ -18,6 +28,3 @@ demo = gr.Interface(fn=generate_code,
18
  outputs="text",
19
  title="CodeLlama 7B Model",
20
  description="Generate code with CodeLlama-7b-hf.").launch()
21
-
22
-
23
- demo.launch()
 
6
  model = AutoModelForCausalLM.from_pretrained(model_name)
7
  tokenizer = AutoTokenizer.from_pretrained(model_name)
8
 
9
+ import os
10
+
11
+ # Check if the token is being accessed
12
+ hf_token = os.getenv("HF_HOME")
13
+ if hf_token:
14
+ print("Successfully retrieved Hugging Face token.")
15
+ else:
16
+ print("Failed to retrieve Hugging Face token.")
17
+
18
+
19
  def generate_code(prompt):
20
  inputs = tokenizer(prompt, return_tensors="pt")
21
  outputs = model.generate(inputs["input_ids"], max_length=200)
 
28
  outputs="text",
29
  title="CodeLlama 7B Model",
30
  description="Generate code with CodeLlama-7b-hf.").launch()