Spaces: Runtime error
Update app.py
app.py CHANGED
@@ -1,4 +1,6 @@
-#
+#References :
+#https://medium.com/@tahreemrasul/building-a-chatbot-application-with-chainlit-and-langchain-3e86da0099a6
+#https://github.com/ArjunAranetaCodes/Python-DataScience-AI/blob/main/Chain-LitChat-Mistral7b/app.py
 from langchain_openai import ChatOpenAI
 from langchain.chains import LLMChain
 from prompts import maths_assistant_prompt_template
@@ -18,6 +20,10 @@ async def start_llm():
     print("Initializing llm...")
     #llm = ChatOpenAI(model='gpt-4o-mini',
     #                 temperature=0.5, api_key = api_key)
+    model_id = "mistralai/Mixtral-8x7B-Instruct-v0.1"
+    llm = HuggingFaceEndpoint(
+        repo_id=model_id, max_length=2000, temperature=0.5, token=os.getenv('HF_READ_TOKEN')
+    )
     print("llm initialized!")
     conversation_memory = ConversationBufferMemory(memory_key="chat_history",
                                                    max_len=50,
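
Taken together, the two hunks swap the commented-out ChatOpenAI call for a hosted Hugging Face inference endpoint. Below is a minimal sketch of how start_llm() might read once the change is applied. The imports for os and HuggingFaceEndpoint, the @cl.on_chat_start decorator, and everything after max_len=50 sit outside the hunks shown above, so they are assumptions here (this sketch imports HuggingFaceEndpoint from langchain_community.llms; newer LangChain releases also expose it from langchain_huggingface).

import os

import chainlit as cl
from langchain.chains import LLMChain
from langchain.memory import ConversationBufferMemory
from langchain_community.llms import HuggingFaceEndpoint  # assumed import path, not visible in the diff
from prompts import maths_assistant_prompt_template


@cl.on_chat_start  # assumption: start_llm() is Chainlit's chat-start hook, as in the referenced tutorial
async def start_llm():
    print("Initializing llm...")
    # New in this commit: call the hosted Mixtral endpoint instead of ChatOpenAI.
    model_id = "mistralai/Mixtral-8x7B-Instruct-v0.1"
    llm = HuggingFaceEndpoint(
        repo_id=model_id,
        max_length=2000,
        temperature=0.5,
        token=os.getenv("HF_READ_TOKEN"),  # read token expected as a Space secret / env var
    )
    print("llm initialized!")

    # Rolling chat history, as in the context lines after the new block
    # (arguments beyond max_len=50 are cut off in the diff; return_messages is assumed).
    conversation_memory = ConversationBufferMemory(
        memory_key="chat_history",
        max_len=50,
        return_messages=True,
    )
    # Assumed wiring: prompt, endpoint and memory combined into an LLMChain and
    # stored on the user session, following the linked Chainlit tutorial.
    llm_chain = LLMChain(
        llm=llm,
        prompt=maths_assistant_prompt_template,
        memory=conversation_memory,
    )
    cl.user_session.set("llm_chain", llm_chain)

Note that if HF_READ_TOKEN is not configured as a secret on the Space, os.getenv returns None and the endpoint call is left unauthenticated, which is one plausible cause of the "Runtime error" status shown above.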