Update appStore/rag.py
appStore/rag.py  +1 -1  CHANGED
@@ -39,7 +39,7 @@ def run_query(context, label):
     messages = [{"role": "system", "content": chatbot_role},{"role": "user", "content": get_prompt(context, label)}]
 
     # Initialize the client, pointing it to one of the available models
-    client = InferenceClient("meta-llama/Meta-Llama-3
+    client = InferenceClient("meta-llama/Meta-Llama-3-8B-Instruct", token = hf_token)
 
     # instantiate ChatCompletion as a generator object (stream is set to True)
     # response = completion_with_backoff(model=model_select, messages=[{"role": "user", "content": get_prompt(context, label)}], stream=True)
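
For context, a minimal sketch (not the repository's exact code) of how the updated client line is typically used to stream a chat completion with huggingface_hub; hf_token, chatbot_role, get_prompt, context, and label are assumed to be defined elsewhere in appStore/rag.py:

    # Minimal sketch, assuming hf_token, chatbot_role, and get_prompt exist elsewhere
    # in appStore/rag.py. Streams a chat completion from the serverless endpoint for
    # meta-llama/Meta-Llama-3-8B-Instruct via huggingface_hub's InferenceClient.
    from huggingface_hub import InferenceClient

    def run_query_sketch(context, label):
        messages = [
            {"role": "system", "content": chatbot_role},
            {"role": "user", "content": get_prompt(context, label)},
        ]

        # Initialize the client, pointing it to one of the available models
        client = InferenceClient("meta-llama/Meta-Llama-3-8B-Instruct", token=hf_token)

        # Stream the response token by token and accumulate the generated text
        answer = ""
        for chunk in client.chat_completion(messages=messages, max_tokens=512, stream=True):
            delta = chunk.choices[0].delta.content
            if delta:
                answer += delta
        return answer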