Update functions_llama.py
functions_llama.py  CHANGED  +6 -0
@@ -6,6 +6,12 @@ from langchain_core.output_parsers import StrOutputParser
 from langchain_core.runnables import RunnablePassthrough
 import re
 from typing import Dict, List
+import os
+
+token = os.getenv('LLAMA_TOKEN')
+if token is None:
+    raise ValueError("LLAMA_TOKEN is not set in the environment variables")
+login(token=token)
 
 # Load model and tokenizer
 model_name = "meta-llama/Llama-3.3-70B-Instruct"