Spaces:
Sleeping
Sleeping
Commit
·
ca6b728
1
Parent(s):
caceecd
llama implementation
Browse files
app.py
CHANGED
@@ -4,8 +4,8 @@ import os
|
|
4 |
|
5 |
HF_API_KEY = os.getenv("HF_API_KEY") # Retrieve API key from environment variable
|
6 |
|
7 |
-
models = ["HuggingFaceH4/zephyr-7b-beta", "microsoft/Phi-4-mini-instruct"]
|
8 |
-
client = InferenceClient(model=models[…])  # [line truncated in extraction — original index and arguments not recoverable from this diff view]
|
9 |
|
10 |
|
11 |
def respond(
|
|
|
4 |
|
5 |
HF_API_KEY = os.getenv("HF_API_KEY") # Retrieve API key from environment variable
|
6 |
|
7 |
+
models = ["HuggingFaceH4/zephyr-7b-beta", "microsoft/Phi-4-mini-instruct", "meta-llama/Llama-3.2-3B-Instruct"]
|
8 |
+
client = InferenceClient(model=models[2], token=HF_API_KEY) # Pass API key to client
|
9 |
|
10 |
|
11 |
def respond(
|