Update main.py
main.py
CHANGED
@@ -10,7 +10,7 @@ from pathlib import Path
 import chainlit as cl
 #from mistralai.client import MistralClient
 #from mistralai import Mistral, UserMessage, SystemMessage
-
+from deep_translator import GoogleTranslator
 from operator import itemgetter
 from langchain_huggingface import HuggingFaceEndpoint
 from langchain.schema.runnable import Runnable, RunnablePassthrough, RunnableLambda
@@ -184,15 +184,23 @@ def Generation_completion(client, data, question):
 @traceable(run_type="chain", name="Mistral Assistant des datas Gustaviz",)
 async def Affichage_reponse(response, question, data):
     memory = cl.user_session.get("memory")
-
+    result = cl.Message(author="COPILOT",content="")
 
-    async for chunk in response.astream({"question": question, "data": data},config=RunnableConfig(callbacks=[cl.AsyncLangchainCallbackHandler(stream_final_answer=True)])):
+    #async for chunk in response.astream({"question": question, "data": data},config=RunnableConfig(callbacks=[cl.AsyncLangchainCallbackHandler(stream_final_answer=True)])):
+    #    time.sleep(0.125)
+    #    await msg.stream_token(chunk.replace('Ã','é').replace('©','').replace('Ã','è').replace('¨','').replace('â','\'').replace('€','').replace('™','').replace('Å','oe').replace('“','').replace('®','î').replace('´','ô').replace('<','').replace('>','').replace('/',''))
+
+    msg = response.ainvoke({"question": question, "data": data})
+    msg = msg.replace('Ã','é').replace('©','').replace('Ã','è').replace('¨','').replace('â','\'').replace('€','').replace('™','').replace('Å','oe').replace('“','').replace('®','î').replace('´','ô').replace('<','').replace('>','').replace('/','')
+    msg = GoogleTranslator(source='auto', target='fr').translate(msg[0:4999])
+    msgList = msg.split(' ')
+    for chunk in msgList:
         time.sleep(0.125)
-        await
-
-        await
+        await result.stream_token(chunk)
+
+    await result.send()
     memory.chat_memory.add_user_message(question)
-    memory.chat_memory.add_ai_message(msg
+    memory.chat_memory.add_ai_message(msg)
 
     return msg
 
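The only import added by this commit is deep_translator's GoogleTranslator, which the reworked Affichage_reponse uses to force the model output into French. A standalone sketch of that call, outside the Chainlit app (the sample text and the helper name translate_to_french are illustrative; Google's free endpoint rejects inputs much beyond 5,000 characters, which is presumably why the diff truncates to msg[0:4999]):

# Standalone sketch of the translation step (assumes deep_translator is
# installed and network access to Google Translate is available).
from deep_translator import GoogleTranslator

def translate_to_french(text: str) -> str:
    # Truncate the way the diff does: only the first ~5,000 characters
    # are sent, since Google refuses very long requests.
    return GoogleTranslator(source='auto', target='fr').translate(text[:4999])

print(translate_to_french("The dashboard shows monthly sales by region."))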
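The rewritten Affichage_reponse drops LangChain streaming in favour of a single ainvoke call, scrubs mojibake with a chain of str.replace calls, translates the result to French, and then replays it word by word through a Chainlit message. Two details in the diff stand out: response.ainvoke(...) returns a coroutine and has to be awaited before .replace can run on the result, and time.sleep blocks Chainlit's event loop, where asyncio.sleep would not. Below is a hedged sketch of the same flow with those two points addressed, assuming the chain returns a plain string; the function name stream_translated_answer and the chain argument are illustrative, not the project's own names.

# Sketch only: names and chain wiring are assumptions, not the committed code.
import asyncio
import chainlit as cl
from deep_translator import GoogleTranslator

async def stream_translated_answer(chain, question: str, data: str) -> str:
    result = cl.Message(author="COPILOT", content="")

    # ainvoke() is a coroutine; it must be awaited to obtain the string
    # (the committed line assigns it without await).
    answer = await chain.ainvoke({"question": question, "data": data})

    # Translate to French; truncation mirrors the msg[0:4999] cap in the diff.
    answer = GoogleTranslator(source="auto", target="fr").translate(answer[:4999])

    # Replay the answer word by word. asyncio.sleep yields control back to
    # the event loop between tokens instead of blocking it like time.sleep,
    # and the trailing space restores what str.split removed.
    for word in answer.split(" "):
        await asyncio.sleep(0.125)
        await result.stream_token(word + " ")

    await result.send()
    return answer

As a side note, the long .replace('Ã','é')… chain is patching UTF-8 text that was decoded as Latin-1 ("Ã©" is the Latin-1 reading of the UTF-8 bytes for "é"); if the upstream response can be decoded as UTF-8 in the first place, that whole chain becomes unnecessary.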