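# FastAPI service that answers questions over an existing Pinecone vector index
# using LangChain's RetrievalQA chain with OpenAI embeddings and an OpenAI LLM.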
import os
import pinecone

from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from langchain.chains import RetrievalQA
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.llms import OpenAI
from langchain.vectorstores import Pinecone

# Configuration is read from the environment; all four variables must be set before startup.
PINECONE_API_KEY = os.getenv('PINECONE_API_KEY')
PINECONE_ENV = os.getenv('PINECONE_ENV')
OPENAI_API_KEY = os.getenv('OPENAI_API_KEY')
PINECONE_INDEX_NAME = os.getenv('PINECONE_INDEX_NAME')

def parse_response(response):
    """Format a RetrievalQA response as the answer followed by a list of its sources."""
    result = response['result']
    result += '\n\nSources:  \n'
    for doc in response['source_documents']:
        result += ''.join((doc.metadata['source'],
                           ' page #:', str(doc.metadata['page']), '  \n'))
    return result

app = FastAPI()

# Allow cross-origin GET requests from any origin.
app.add_middleware(
    CORSMiddleware,
    allow_origins=['*']
)

@app.get("/")
def read_root():
    return {"message": "Hello World"}
    
@app.get("/api/python")
def hello_python():
    return {"message": "Hello Python"}

@app.get("/prompt")
def read_root(p: str='According to HQ H303140, what is "Country  of  origin"  means?'):
    pinecone.init(
        api_key=PINECONE_API_KEY,
        environment=PINECONE_ENV
    )
    index = pinecone.Index(PINECONE_INDEX_NAME)
    index.describe_index_stats()
    embeddings = OpenAIEmbeddings(openai_api_key=OPENAI_API_KEY)
    docsearch = Pinecone.from_existing_index(PINECONE_INDEX_NAME, embeddings)
    retriever = docsearch.as_retriever(
        include_metadata=True, 
        metadata_key='source'
    )
    llm = OpenAI(temperature=0, openai_api_key=OPENAI_API_KEY)
    qa_chain = RetrievalQA.from_chain_type(llm=llm,
                                       chain_type="stuff",
                                       retriever=retriever,
                                       return_source_documents=True)
    response = qa_chain(p)
    return {
        "prompt": p,
        "response": parse_response(response)
    }
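
# Run locally with uvicorn (assuming this file is saved as main.py):
#   uvicorn main:app --reload
# then query, e.g., http://localhost:8000/prompt?p=your+question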