Victor2323 committed
Commit a876cd3 · verified · 1 Parent(s): 3e1e39a

Update app.py

Files changed (1)
  1. app.py +76 -61
app.py CHANGED
@@ -1,61 +1,76 @@
- import os
- from fastapi import FastAPI, HTTPException
- from pydantic import BaseModel
- from typing import List
- from langchain_community.vectorstores import Chroma
- from langchain.prompts import ChatPromptTemplate
- from get_embedding_function import get_embedding_function
- from langchain_groq import ChatGroq
-
- app = FastAPI()
-
- # Set environment variables
- os.environ["OPENAI_API_BASE"] = 'https://api.groq.com/openai/v1'
- os.environ["OPENAI_MODEL_NAME"] = 'llama3-8b-8192'
- os.environ["GROQ_API_KEY"] = os.getenv("GROQ_API_KEY")
-
- CHROMA_PATH = "chroma"
-
- PROMPT_TEMPLATE = """
- You are 'Vasu', an experienced professor with extensive knowledge in Cryptocurrency, Artificial Intelligence, and related projects.
- Provide relevant 'Links' "http://", but include links only when they are particularly useful for understanding the response.
- Answer the question based solely on the following context: {context}
-
- Based on the above context, answer the question: {question}.
- """
-
- class QueryRequest(BaseModel):
-     query: str
-
- class QueryResponse(BaseModel):
-     response: str
-     sources: List[str]
-
- def query_rag(query_text: str):
-     # Configure the Groq model
-     chat_groq = ChatGroq(temperature=0, model_name="llama3-8b-8192")
-
-     # Prepare the DB
-     embedding_function = get_embedding_function()
-     db = Chroma(persist_directory=CHROMA_PATH, embedding_function=embedding_function)
-
-     # Search the DB
-     results = db.similarity_search_with_score(query_text, k=10)
-
-     context_text = "\n\n---\n\n".join([doc.page_content for doc, _score in results])
-     prompt_template = ChatPromptTemplate.from_template(PROMPT_TEMPLATE)
-     prompt = prompt_template.format(context=context_text, question=query_text)
-
-     # Get the response using Groq
-     response_text = chat_groq.invoke(prompt).content
-
-     sources = [doc.metadata.get("id", None) for doc, _score in results]
-     return response_text, sources
-
- @app.post("/query", response_model=QueryResponse)
- async def query_api(request: QueryRequest):
-     try:
-         response_text, sources = query_rag(request.query)
-         return QueryResponse(response=response_text, sources=sources)
-     except Exception as e:
-         raise HTTPException(status_code=500, detail=str(e))
+ import os
+ from fastapi import FastAPI, HTTPException
+ from pydantic import BaseModel
+ from typing import List
+ from langchain_community.vectorstores import Chroma
+ from langchain.prompts import ChatPromptTemplate
+ from get_embedding_function import get_embedding_function
+ from langchain_groq import ChatGroq
+ import chainlit as cl
+
+ app = FastAPI()
+
+ # Set environment variables
+ os.environ["OPENAI_API_BASE"] = 'https://api.groq.com/openai/v1'
+ os.environ["OPENAI_MODEL_NAME"] = 'llama3-8b-8192'
+ os.environ["GROQ_API_KEY"] = os.getenv("GROQ_API_KEY")
+
+ CHROMA_PATH = "chroma"
+
+ PROMPT_TEMPLATE = """
+ You are 'Vasu', an experienced professor with extensive knowledge in Cryptocurrency, Artificial Intelligence, and related projects.
+ Provide relevant 'Links' "http://", but include links only when they are particularly useful for understanding the response.
+ Answer the question based solely on the following context: {context}
+
+ Based on the above context, answer the question: {question}.
+ """
+
+ class QueryRequest(BaseModel):
+     query: str
+
+ class QueryResponse(BaseModel):
+     response: str
+     sources: List[str]
+
+ def query_rag(query_text: str):
+     # Configure the Groq model
+     chat_groq = ChatGroq(temperature=0, model_name="llama3-8b-8192")
+
+     # Prepare the DB
+     embedding_function = get_embedding_function()
+     db = Chroma(persist_directory=CHROMA_PATH, embedding_function=embedding_function)
+
+     # Search the DB
+     results = db.similarity_search_with_score(query_text, k=10)
+
+     context_text = "\n\n---\n\n".join([doc.page_content for doc, _score in results])
+     prompt_template = ChatPromptTemplate.from_template(PROMPT_TEMPLATE)
+     prompt = prompt_template.format(context=context_text, question=query_text)
+
+     # Get the response using Groq
+     response_text = chat_groq.invoke(prompt).content
+
+     sources = [doc.metadata.get("id", None) for doc, _score in results]
+     return response_text, sources
+
+ @app.post("/query", response_model=QueryResponse)
+ async def query_api(request: QueryRequest):
+     try:
+         response_text, sources = query_rag(request.query)
+         return QueryResponse(response=response_text, sources=sources)
+     except Exception as e:
+         raise HTTPException(status_code=500, detail=str(e))
+
+ @cl.on_message
+ async def chainlit_main(message: cl.Message):
+     query_text = message.content  # Get the user's message from Chainlit
+     # query_rag returns a (response, sources) tuple; unpack it so only the text is sent
+     response_text, _sources = query_rag(query_text)
+
+     # Send the response back to Chainlit
+     await cl.Message(
+         content=f"{response_text}",
+     ).send()
+
+ if __name__ == "__main__":
+     import uvicorn
+     uvicorn.run(app, host="0.0.0.0", port=8000)
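
For reference, a minimal client for the /query endpoint defined in this commit might look like the sketch below. It assumes the API is running locally on port 8000 (as in the __main__ block) and uses the third-party requests package; the query string is purely illustrative.

    import requests

    # Hypothetical smoke test against the running FastAPI app (assumed at localhost:8000)
    payload = {"query": "What does the indexed material say about blockchains?"}  # illustrative query
    resp = requests.post("http://localhost:8000/query", json=payload, timeout=60)
    resp.raise_for_status()
    data = resp.json()
    print(data["response"])  # the model's answer
    print(data["sources"])   # IDs of the retrieved Chroma documents

Note that the Chainlit handler is served separately (typically via "chainlit run app.py"): the @cl.on_message callback only fires inside a Chainlit process, not under the uvicorn server started in __main__.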