Create app.py
app.py
ADDED
@@ -0,0 +1,111 @@
import os

import gradio as gr
from openai import AzureOpenAI

from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain_community.document_loaders import PyPDFLoader
from langchain_community.embeddings.sentence_transformer import SentenceTransformerEmbeddings
from langchain_community.vectorstores import Chroma

# Azure OpenAI client; the endpoint and key are read from the Space's environment variables.
client = AzureOpenAI(
    azure_endpoint=os.environ['AZURE_OPENAI_ENDPOINT'],
    api_key=os.environ['AZURE_OPENAI_KEY'],
    api_version="2023-05-15"
)

chat_model_deployment_name = "gpt-35-turbo"

# Embedding model used to index the report, and a token-based splitter for chunking it.
embedding_model = SentenceTransformerEmbeddings(model_name='thenlper/gte-small')

text_splitter = RecursiveCharacterTextSplitter.from_tiktoken_encoder(
    encoding_name='cl100k_base',
    chunk_size=512,
    chunk_overlap=16
)

# Load the Tesla 2022 10-K PDF, split it into chunks, and index the chunks in Chroma.
pdf_file = "tsla-20221231-gen.pdf"
pdf_loader = PyPDFLoader(pdf_file)
tesla_10k_chunks_ada = pdf_loader.load_and_split(text_splitter)

tesla_10k_collection = 'tesla-10k-2022'

vectorstore = Chroma.from_documents(
    tesla_10k_chunks_ada,
    embedding_model,
    collection_name=tesla_10k_collection
)

# Retrieve the 5 most similar chunks for each query.
retriever = vectorstore.as_retriever(
    search_type='similarity',
    search_kwargs={'k': 5}
)

qna_system_message = """
You are an assistant to a financial services firm who answers user queries on annual reports.
Users will ask questions delimited by triple backticks, that is, ```.
User input will have the context required by you to answer user questions.
This context will begin with the token: ###Context.
The context contains references to specific portions of a document relevant to the user query.
Please answer only using the context provided in the input.
If the answer is not found in the context, respond "I don't know".
"""

qna_user_message_template = """
###Context
Here are some documents that are relevant to the question.
{context}
```
{question}
```
"""

def predict(user_input):
    # Retrieve the most relevant chunks and join them into a single context string.
    relevant_document_chunks = retriever.get_relevant_documents(user_input)
    context_list = [d.page_content for d in relevant_document_chunks]
    context_for_query = ".".join(context_list)

    prompt = [
        {'role': 'system', 'content': qna_system_message},
        {'role': 'user', 'content': qna_user_message_template.format(
            context=context_for_query,
            question=user_input
        )}
    ]

    try:
        response = client.chat.completions.create(
            model=chat_model_deployment_name,
            messages=prompt,
            temperature=0
        )
        prediction = response.choices[0].message.content
    except Exception as e:
        # Return the error message as text so Gradio can display it.
        prediction = str(e)

    return prediction

textbox = gr.Textbox(placeholder="Enter your query here", lines=6)

interface = gr.Interface(
    inputs=textbox, fn=predict, outputs="text",
    title="AMA on Tesla 2022 10-K",
    description="This web API presents an interface to ask questions on the contents of the Tesla 2022 10-K report.",
    article="Note that questions that are not relevant to the Tesla 10-K report will not be answered.",
    allow_flagging="manual", flagging_options=["Useful", "Not Useful"]
)

# Render the interface inside a Blocks app, enable request queuing, and launch.
with gr.Blocks() as demo:
    interface.render()

demo.queue(concurrency_count=16)
demo.launch()
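
For reference, a minimal smoke-test sketch (not part of this commit) for the Azure OpenAI setup that app.py expects. It assumes the same AZURE_OPENAI_ENDPOINT and AZURE_OPENAI_KEY environment variables and the same gpt-35-turbo deployment name; the file name and test message are hypothetical.

# sanity_check.py -- hypothetical helper, not part of app.py
import os

from openai import AzureOpenAI

client = AzureOpenAI(
    azure_endpoint=os.environ['AZURE_OPENAI_ENDPOINT'],
    api_key=os.environ['AZURE_OPENAI_KEY'],
    api_version="2023-05-15"
)

# A trivial round trip against the deployment that app.py uses.
response = client.chat.completions.create(
    model="gpt-35-turbo",
    messages=[{'role': 'user', 'content': 'Reply with OK if you can read this.'}],
    temperature=0
)
print(response.choices[0].message.content)

If this prints a reply, the credentials and deployment that app.py reads at import time are usable, and the Space can be launched with python app.py.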