Update app.py
app.py CHANGED
@@ -7,8 +7,8 @@ import os
 huggingface_token = os.getenv('LLAMA_ACCES_TOKEN')
 
 # Use the token with from_pretrained
-tokenizer = AutoTokenizer.from_pretrained("meta-llama/Llama-2-7b-chat",
-model = AutoModelForCausalLM.from_pretrained("meta-llama/Llama-2-7b-chat",
+tokenizer = AutoTokenizer.from_pretrained("meta-llama/Llama-2-7b-chat-hf", token=huggingface_token)
+model = AutoModelForCausalLM.from_pretrained("meta-llama/Llama-2-7b-chat-hf", token=huggingface_token)
 
 
 # Load a content moderation pipeline
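For context, a minimal self-contained sketch of the corrected loading code, assuming the transformers library is installed and the LLAMA_ACCES_TOKEN environment variable holds a Hugging Face access token with access to the gated meta-llama repository:

import os
from transformers import AutoTokenizer, AutoModelForCausalLM

# Read the access token from the environment (same variable name as in app.py)
huggingface_token = os.getenv('LLAMA_ACCES_TOKEN')

# Pass the token so the gated model files can be downloaded.
# Recent transformers releases accept token=; older releases used use_auth_token=.
tokenizer = AutoTokenizer.from_pretrained("meta-llama/Llama-2-7b-chat-hf", token=huggingface_token)
model = AutoModelForCausalLM.from_pretrained("meta-llama/Llama-2-7b-chat-hf", token=huggingface_token)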