Spaces:
Runtime error
Update app.py
app.py
CHANGED
@@ -67,19 +67,19 @@ def initialize_llmchain(llm_model, temperature, max_tokens, top_k, vector_db, pr
             max_new_tokens=max_tokens,
             top_k=top_k,
         )
-
-
+    elif
+    llm_model == "mistralai/Mistral-7B-Instruct-v0.3":
         llm = HuggingFaceEndpoint(
-            repo_id=
+            repo_id=llm_model,
             huggingfacehub_api_token=api_token,
             temperature=temperature,
             max_new_tokens=max_tokens,
             top_k=top_k,
         )
     else:
-
+        llm_model == "CohereForAI/aya-23-35B":
         llm = HuggingFaceEndpoint(
-            repo_id=
+            repo_id=llm_model,
             huggingfacehub_api_token=api_token,
             temperature=temperature,
             max_new_tokens=max_tokens,
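As committed, the new hunk leaves "elif" and its condition on separate lines (new lines 70 and 71) and follows "else:" with a bare comparison ending in a colon, both of which are Python syntax errors and are consistent with the Space's runtime-error status. Below is a minimal sketch of how the model-selection branch was presumably meant to read, wrapped in a hypothetical build_llm helper so it is self-contained; the langchain_huggingface import path, the helper name, and the api_token argument are assumptions, while the class name, model ids, and keyword arguments are taken from the diff itself.

# Hedged sketch, not the author's code: the wrapper name build_llm, the
# langchain_huggingface import path, and the api_token argument are
# assumptions added so the example runs on its own; the class name, model
# ids, and keyword arguments come from the diff.
from langchain_huggingface import HuggingFaceEndpoint

def build_llm(llm_model, api_token, temperature, max_tokens, top_k):
    if llm_model == "mistralai/Mistral-7B-Instruct-v0.3":
        repo_id = llm_model
    elif llm_model == "CohereForAI/aya-23-35B":
        repo_id = llm_model
    else:
        # Fallback branch of the hunk: pass whatever model was selected.
        repo_id = llm_model
    # Every branch ends in the same HuggingFaceEndpoint call, so it is
    # hoisted out of the conditional here.
    return HuggingFaceEndpoint(
        repo_id=repo_id,
        huggingfacehub_api_token=api_token,
        temperature=temperature,
        max_new_tokens=max_tokens,
        top_k=top_k,
    )

A call such as build_llm("mistralai/Mistral-7B-Instruct-v0.3", api_token, 0.7, 1024, 3) would then return an endpoint configured the way the new hunk appears to intend.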