Update app.py
app.py CHANGED
@@ -96,9 +96,9 @@ api_response_prompt = PromptTemplate(
 def setup_multiple_chains():
 
     llm = HuggingFaceEndpoint(
-
+        #repo_id="google/gemma-2-2b-it", #"norallm/normistral-7b-warm-instruct",
         #endpoint_url="http://localhost:8010/",
-
+        model="normistral-7b-warm-instruct",
         max_new_tokens=512,
         top_k=10,
         top_p=0.95,
@@ -156,11 +156,11 @@ async def handle_message(message: cl.Message):
 
     if re.search(r'\b[A-Z]{6}\d{6}\b', user_message): # ex. "EQJLCQ362149"
 
-        response = await api_chain.
+        response = await api_chain.ainvoke(user_message,
                                            callbacks=[cl.AsyncLangchainCallbackHandler()])
 
     else:
-        response = await llm_chain.
+        response = await llm_chain.ainvoke(user_message,
                                           callbacks=[cl.AsyncLangchainCallbackHandler()])
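Taken together, the commit moves HuggingFaceEndpoint from the commented-out local endpoint_url to resolving the hosted model by name (model="normistral-7b-warm-instruct") and switches both chain calls to the async .ainvoke() API with Chainlit's callback handler. Below is a minimal sketch of how the updated pieces might fit together in the Chainlit handler; the prompt, the api_chain branch, and the token handling are not part of this diff, so the placeholder PromptTemplate, the single llm_chain, and the HUGGINGFACEHUB_API_TOKEN environment variable are assumptions.

# Minimal sketch (assumptions marked), not the full app.py.
import re

import chainlit as cl
from langchain.chains import LLMChain
from langchain_core.prompts import PromptTemplate
from langchain_huggingface import HuggingFaceEndpoint


def setup_multiple_chains():
    # Endpoint setup from this commit: resolve the model by name instead of a
    # local endpoint_url; the token is assumed to come from the
    # HUGGINGFACEHUB_API_TOKEN environment variable (not shown in the diff).
    llm = HuggingFaceEndpoint(
        model="normistral-7b-warm-instruct",
        max_new_tokens=512,
        top_k=10,
        top_p=0.95,
    )
    # Placeholder prompt; the real prompts and chains in app.py are outside this hunk.
    prompt = PromptTemplate.from_template("{question}")
    return LLMChain(llm=llm, prompt=prompt)


llm_chain = setup_multiple_chains()


@cl.on_message
async def handle_message(message: cl.Message):
    user_message = message.content
    # Updated call style from this commit: async .ainvoke() with Chainlit's
    # async LangChain callback handler (the api_chain branch is omitted here).
    response = await llm_chain.ainvoke(user_message,
                                       callbacks=[cl.AsyncLangchainCallbackHandler()])
    await cl.Message(content=str(response)).send()

With LCEL-style runnables the same handler can also be passed as config={"callbacks": [cl.AsyncLangchainCallbackHandler()]}; the sketch keeps the keyword form used in the commit.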