Spaces:
Runtime error
Runtime error
Commit
·
d018b4e
1
Parent(s):
565caba
Update app.py
Browse files
app.py
CHANGED
@@ -1,39 +1,55 @@
|
|
1 |
import os
|
2 |
import gradio as gr
|
3 |
-
import
|
4 |
-
from
|
5 |
-
from langchain.
|
6 |
-
|
7 |
-
from langchain import
|
8 |
-
from langchain.
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
9 |
|
10 |
-
OPENAI_API_KEY=os.environ["OPENAI_API_KEY"]
|
11 |
-
PINECONE_API_KEY=os.environ["PINECONE_API_KEY"]
|
12 |
-
|
13 |
-
pinecone.init(api_key=PINECONE_API_KEY, environment="us-east1-gcp")
|
14 |
|
15 |
-
|
16 |
-
|
|
|
17 |
|
18 |
-
|
19 |
-
|
20 |
-
|
21 |
-
|
22 |
-
|
23 |
-
|
24 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
25 |
]
|
26 |
-
|
27 |
-
|
28 |
-
|
29 |
-
|
30 |
-
|
31 |
-
response
|
32 |
-
history = history + [(input, response)]
|
33 |
-
response = history
|
34 |
-
# response = [response]
|
35 |
-
# return response, response
|
36 |
-
return response, response
|
37 |
|
38 |
with gr.Blocks() as demo:
|
39 |
chatbot = gr.Chatbot()
|
|
|
1 |
import os
|
2 |
import gradio as gr
|
3 |
+
from langchain.vectorstores import Chroma
|
4 |
+
from langchain.chains import ConversationalRetrievalChain
|
5 |
+
from langchain.embeddings.openai import OpenAIEmbeddings
|
6 |
+
# convo chain lib
|
7 |
+
from langchain.embeddings.openai import OpenAIEmbeddings
|
8 |
+
from langchain.vectorstores import Chroma
|
9 |
+
from langchain.text_splitter import CharacterTextSplitter
|
10 |
+
from langchain.llms import OpenAI
|
11 |
+
from langchain.chains import ConversationalRetrievalChain
|
12 |
+
from langchain.chat_models import ChatOpenAI
|
13 |
+
from langchain.prompts.chat import (
|
14 |
+
ChatPromptTemplate,
|
15 |
+
SystemMessagePromptTemplate,
|
16 |
+
AIMessagePromptTemplate,
|
17 |
+
HumanMessagePromptTemplate,
|
18 |
+
)
|
19 |
+
from langchain.schema import (
|
20 |
+
AIMessage,
|
21 |
+
HumanMessage,
|
22 |
+
SystemMessage
|
23 |
+
)
|
24 |
|
|
|
|
|
|
|
|
|
25 |
|
26 |
+
OPENAI_API_KEY=os.environ["OPENAI_API_KEY"]
|
27 |
+
embedding = OpenAIEmbeddings()
|
28 |
+
vectorstore = Chroma(persist_directory='/vectorstore', embedding_function=embedding)
|
29 |
|
30 |
+
aisyah_template="""
|
31 |
+
Answer each question truthfully using the Malaysia's Form 1 History data provided. Your answers should be concise and straight to the point.
|
32 |
+
For questions that are open-ended, which require subjective judgment or opinion, you may not find a definitive answer in the textbook.
|
33 |
+
However, you should still address the question's directive based on the data's context. Ideally, your answer should provide 3 points that support your response.
|
34 |
+
You are encouraged to better provide positive suggestions for concepts that are less ethical.
|
35 |
+
Please keep in mind that the scope of the data provided is limited to the content covered in the Malaysia's Form 1 History textbook.
|
36 |
+
---------------
|
37 |
+
{context}"""
|
38 |
+
##If you don't know the answer, just say that you don't know, don't try to make up an answer.
|
39 |
+
system_template="""Use the following pieces of context to answer the users question.
|
40 |
+
----------------
|
41 |
+
{context}"""
|
42 |
+
##If you don't know the answer, just say that you don't know, don't try to make up an answer.
|
43 |
+
messages = [
|
44 |
+
SystemMessagePromptTemplate.from_template(aisyah_template),
|
45 |
+
HumanMessagePromptTemplate.from_template("{question}")
|
46 |
]
|
47 |
+
prompt = ChatPromptTemplate.from_messages(messages)
|
48 |
+
qa = ConversationalRetrievalChain.from_llm(OpenAI(temperature=0), vectorstore.as_retriever(), return_source_documents=True, qa_prompt=prompt)
|
49 |
+
|
50 |
+
def predict(input, chat_history=[]):
|
51 |
+
response = qa({"question":input, "chat_history":chat_history})
|
52 |
+
return response, chat_history
|
|
|
|
|
|
|
|
|
|
|
53 |
|
54 |
with gr.Blocks() as demo:
|
55 |
chatbot = gr.Chatbot()
|