File size: 1,210 Bytes
f332108
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
from fastapi import FastAPI, Depends, File, UploadFile, HTTPException
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel
from fastapi import Request
import typing as t
import uvicorn
import os
from llm_engine import FaissIndex
import openai


# OpenAI credentials are taken from the environment; never hard-code the key.
# NOTE(review): os.getenv returns None when the variable is unset — openai.api_key
# would then be None and downstream calls would fail at request time; consider
# failing fast at startup instead.
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")

openai.api_key = OPENAI_API_KEY

# Application object exposed to uvicorn; interactive docs served at /docs.
app = FastAPI(
    title="Portfolio LLM Backend",
    description="Backend for Portfolio LLM",
    docs_url="/docs",
)

# Allow the local frontend/backend dev servers (both hostname spellings).
allowed_origins = [
    f"http://{host}:{port}"
    for host in ("localhost", "127.0.0.1")
    for port in (8000, 3000)
]

app.add_middleware(
    CORSMiddleware,
    allow_origins=allowed_origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Single shared FAISS index; built once at import time and reused per request.
faiss_index = FaissIndex()

class UserQuery(BaseModel):
    """Request body for POST /query: the user's free-text question."""
    query: str

@app.get("/")
async def root(request: Request):
    """Liveness probe: confirm the server is reachable."""
    payload = {"Message": "Server is Up and Running"}
    return payload

@app.post("/query")
async def query(user_query: UserQuery):
    """Answer a user question via the FAISS-backed QA chain.

    Returns:
        dict: ``{"response": <answer text>}``.

    Raises:
        HTTPException: 400 if the query is empty/whitespace,
            500 if the QA chain fails.
    """
    question = user_query.query.strip()
    # Reject empty input early instead of sending a blank query to the chain.
    if not question:
        raise HTTPException(status_code=400, detail="Query must not be empty")
    try:
        response = faiss_index.qa_chain({"query": question})
    except Exception as exc:
        # Surface chain failures as a clean HTTP error rather than a raw
        # traceback leaking internals to the client.
        raise HTTPException(status_code=500, detail="Failed to answer query") from exc
    return {"response": response["result"]}

# Dev entry point: run the ASGI app directly with uvicorn on localhost:8000.
# (In production the app is typically served via `uvicorn module:app` instead.)
if __name__ == "__main__":
    uvicorn.run(app, host = "127.0.0.1", port = 8000)