Commit b39c0ba
Parent(s): d57efd6
Create bot service
- alembic/versions/f00987c5ad39_describe_bot_name.py +30 -0
- api/auth.py +24 -9
- api/events.py +7 -7
- api/function.py +17 -203
- api/router/book.py +101 -177
- api/router/book_collection.py +52 -135
- api/router/bot_general.py +32 -26
- api/router/bot_one.py +47 -59
- api/router/bot_specific.py +78 -82
- api/router/category.py +1 -1
- api/router/role.py +1 -1
- api/router/user.py +21 -26
- config.py +2 -0
- core/chat/bot_service.py +163 -0
- core/chat/chatstore.py +12 -1
- core/prompt.py +1 -1
- db/database.py +23 -17
- db/fetching.py +60 -0
- db/models.py +1 -0
- db/query/__init__.py +0 -0
- db/query/base_query.py +201 -0
- db/query/query_book.py +124 -0
- db/query/query_bot.py +57 -0
- db/query/query_category.py +41 -0
- db/query/query_role.py +0 -0
- db/query/query_user_meta.py +106 -0
- script/document_uploader.py +15 -7
- script/vector_db.py +4 -4
- service/dto.py +1 -1
- utils/error_handlers.py +20 -0
alembic/versions/f00987c5ad39_describe_bot_name.py
ADDED
@@ -0,0 +1,30 @@
+"""describe bot name
+
+Revision ID: f00987c5ad39
+Revises: b1df0377cbe2
+Create Date: 2024-10-15 12:56:39.589516
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision: str = 'f00987c5ad39'
+down_revision: Union[str, None] = 'b1df0377cbe2'
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.add_column('session_publisher', sa.Column('bot_name', sa.String(length=100), nullable=True))
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_column('session_publisher', 'bot_name')
+    # ### end Alembic commands ###
api/auth.py
CHANGED
@@ -1,17 +1,19 @@
+import os
+from datetime import timedelta, datetime, timezone
+from typing import Annotated
+
+from dotenv import load_dotenv
 from fastapi import APIRouter, Depends
 from fastapi.responses import JSONResponse
 from fastapi.security import OAuth2PasswordBearer
+from jose import jwt, JWTError
+from passlib.context import CryptContext
 from sqlalchemy.orm import Session
-from db.models import User
 from starlette import status
+
+from db.models import User
 from db.database import get_db
-from typing import Annotated
-from jose import jwt, JWTError
-import os
+
 
 load_dotenv()
 
@@ -68,4 +70,17 @@ async def get_current_user(token: Annotated[str, Depends(oauth2_scheme)]):
     except JWTError:
         return JSONResponse(
             status_code=status.HTTP_401_UNAUTHORIZED, content="Could not validate user."
         )
+
+user_dependency = Annotated[dict, Depends(get_current_user)]
+def check_user_authentication(user: user_dependency):
+    """Helper function to check if the user is authenticated."""
+    if user is None:
+        return JSONResponse(status_code=401, content="Authentication Failed")
+    return None
+
+def check_admin_authentication(user: user_dependency):
+    """Helper function to check if the user is authenticated."""
+    if user is None or user.get("role_id") != 1:
+        return JSONResponse(status_code=401, content="Authentication Admin Failed")
+    return None
api/events.py
CHANGED
@@ -1,10 +1,12 @@
+import os
+
+from dotenv import load_dotenv
 from fastapi import FastAPI
+
 from db.database import engine
+from db.models import Base
 from llama_index.core import set_global_handler
-import os
+
 
 
 load_dotenv()
@@ -12,8 +14,6 @@ load_dotenv()
 
 async def startup() -> None:
     Base.metadata.create_all(engine)
-    print("table added")
-    await db_conn.connect()
     os.environ["LANGFUSE_SECRET_KEY"] = os.getenv("LANGFUSE_SECRET_KEY")
     os.environ["LANGFUSE_PUBLIC_KEY"] = os.getenv("LANGFUSE_PUBLIC_KEY")
     os.environ["LANGFUSE_HOST"] = os.getenv("LANGFUSE_HOST")
@@ -21,7 +21,7 @@ async def startup() -> None:
 
 
 async def shutdown() -> None:
+    pass
 
 def register_events(app: FastAPI) -> FastAPI:
     app.add_event_handler("startup", startup)
api/function.py
CHANGED
@@ -1,31 +1,19 @@
-from db.get_data import GetDatabase
-from db.delete_data import DeleteDatabase
-from db.update_data import UpdateDatabase
+import json
+import logging
 from typing import Any
+
+from dotenv import load_dotenv
+from fastapi import HTTPException, UploadFile
 from fastapi.responses import JSONResponse
 
-from llama_index.core.llms import MessageRole
 from core.chat.engine import Engine
+from core.parser import clean_text
+from langfuse.llama_index import LlamaIndexCallbackHandler
+from script.document_uploader import Uploader
+from script.vector_db import IndexManager
 from service.aws_loader import Loader
+from service.dto import BotResponseStreaming
 
-from pymongo.mongo_client import MongoClient
-from dotenv import load_dotenv
-
-from typing import List
-from datetime import datetime
-import redis
-import logging
-import re
-import json
-import os
 
 load_dotenv()
 
@@ -33,8 +21,14 @@ load_dotenv()
 
 logging.basicConfig(level=logging.INFO)
 
 
-async def data_ingestion(category_id, reference, file: UploadFile) -> Any:
+async def data_ingestion(reference, file: UploadFile) -> Any:
     try:
+        # Assuming you have a Langfuse callback handler
+        langfuse_callback_handler = LlamaIndexCallbackHandler()
+        langfuse_callback_handler.set_trace_params(
+            user_id="admin_book_uploaded",
+        )
+
         # Upload to AWS
         file_name = f"{reference['title']}"
         aws_loader = Loader()
@@ -62,186 +56,6 @@ async def data_ingestion(category_id, reference, file: UploadFile) -> Any:
             content="An internal server error occurred in data ingestion.",
         )
 
-
-async def get_data(db_conn, title=None, fetch_all_data=True):
-    get_database = GetDatabase(db_conn)
-    print(get_database)
-    try:
-        if fetch_all_data:
-            results = await get_database.get_all_data()
-            print(results)
-            logging.info("Database fetched all data")
-            return results
-        else:
-            results = await get_database.get_data(title)
-            logging.info("Database fetched one data")
-            return results
-
-    except Exception as e:
-        # Log the error and raise HTTPException for FastAPI
-        logging.error("An error occurred in get data: %s", e)
-        return JSONResponse(
-            status_code=500, content="An internal server error occurred in get data."
-        )
-
-
-async def update_data(id: int, reference, db_conn):
-    update_database = UpdateDatabase(db_conn)
-    try:
-        reference = reference.model_dump()
-        print(reference)
-        reference.update({"id": id})
-        print(reference)
-        await update_database.update_record(reference)
-        response = {"status": "Update Success"}
-        return response
-    except Exception as e:
-        # Log the error and raise HTTPException for FastAPI
-        logging.error("An error occurred in update data: %s", e)
-        return JSONResponse(
-            status_code=500, content="An internal server error occurred in update data."
-        )
-
-
-async def delete_data(id: int, db_conn):
-    delete_database = DeleteDatabase(db_conn)
-    try:
-        params = {"id": id}
-        await delete_database.delete_record(params)
-        response = {"status": "Delete Success"}
-        return response
-    except Exception as e:
-        # Log the error and raise HTTPException for FastAPI
-        logging.error("An error occurred in get data: %s", e)
-        return JSONResponse(
-            status_code=500, content="An internal server error occurred in delete data."
-        )
-
-
-def generate_completion_non_streaming(
-    session_id, user_request, titles: List = None, type_bot="general"
-):
-    uri = os.getenv("MONGO_URI")
-    engine = Engine()
-    index_manager = IndexManager()
-    chatstore = ChatStore()
-    client = MongoClient(uri)
-
-    try:
-        client.admin.command("ping")
-        print("Pinged your deployment. You successfully connected to MongoDB!")
-    except Exception as e:
-        return JSONResponse(status_code=500, content=f"Database Error as {e}")
-
-    try:
-        # Load existing indexes
-        index = index_manager.load_existing_indexes()
-
-        if type_bot == "general":
-            # Retrieve the chat engine with the loaded index
-            chat_engine = engine.get_chat_engine(session_id, index)
-        else:
-            # Retrieve the chat engine with the loaded index
-            chat_engine = engine.get_chat_engine(session_id, index, titles, type_bot)
-
-        # Generate completion response
-        response = chat_engine.chat(user_request)
-
-        sources = response.sources
-
-        number_reference = list(set(re.findall(r"\[(\d+)\]", str(response))))
-        number_reference_sorted = sorted(number_reference)
-
-        contents = []
-        metadata_collection = []
-        scores = []
-
-        if number_reference_sorted:
-            for number in number_reference_sorted:
-                # Konversi number ke integer untuk digunakan sebagai indeks
-                number = int(number)
-
-                # Pastikan sources tidak kosong dan memiliki elemen yang diperlukan
-                if sources and len(sources) > 0:
-                    node = dict(sources[0])["raw_output"].source_nodes
-
-                    # Pastikan number valid sebagai indeks
-                    if 0 <= number - 1 < len(node):
-                        content = clean_text(node[number - 1].node.get_text())
-                        contents.append(content)
-
-                        metadata = dict(node[number - 1].node.metadata)
-                        metadata_collection.append(metadata)
-
-                        score = node[number - 1].score
-                        scores.append(score)
-                    else:
-                        print(f"Invalid reference number: {number}")
-                else:
-                    print("No sources available")
-        else:
-            print("There are no references")
-
-        response = update_response(str(response))
-        contents = renumber_sources(contents)
-
-        # Check the lengths of content and metadata
-        num_content = len(contents)
-        num_metadata = len(metadata_collection)
-
-        # Add content to metadata
-        for i in range(min(num_content, num_metadata)):
-            metadata_collection[i]["content"] = re.sub(r"source \d+\:", "", contents[i])
-
-        message = ChatMessage(
-            role=MessageRole.ASSISTANT, content=response, metadata=metadata_collection
-        )
-
-        chatstore.delete_last_message(session_id)
-        chatstore.add_message(session_id, message)
-        chatstore.clean_message(session_id)
-    except Exception as e:
-        # Log the error and raise HTTPException for FastAPI
-        logging.error("An error occurred in generate text: %s", e)
-        return JSONResponse(
-            status_code=500,
-            content=f"An internal server error occurred in generate text as {e}.")
-
-    try:
-        chat_history_db = [
-            ChatMessage(role=MessageRole.SYSTEM,
-                content=user_request,
-                timestamp=datetime.now(),
-                payment="free" if type_bot == "general" else None
-            ),
-            ChatMessage(
-                role=MessageRole.ASSISTANT,
-                content=response,
-                metadata=metadata_collection,
-                timestamp=datetime.now(),
-                payment="free" if type_bot == "general" else None
-            )
-        ]
-
-        chat_history_json = [message.model_dump() for message in chat_history_db]
-
-        db = client["bot_database"]  # Replace with your database name
-        collection = db[session_id]  # Replace with your collection name
-
-        result = collection.insert_many(chat_history_json)
-        print("Data inserted with record ids", result.inserted_ids)
-
-        return str(response), metadata_collection, scores
-
-    except Exception as e:
-        # Log the error and raise HTTPException for FastAPI
-        logging.error("An error occurred in generate text: %s", e)
-        return JSONResponse(
-            status_code=500,
-            content=f"An internal server error occurred in generate text as {e}.")
-
 async def generate_streaming_completion(user_request, session_id):
     try:
         engine = Engine()
api/router/book.py
CHANGED
@@ -1,32 +1,53 @@
 import logging
 from typing import Annotated, Optional
+
+from api.function import data_ingestion
 from api.router.user import user_dependency
+from api.auth import check_user_authentication, check_admin_authentication
+
 from fastapi import Form, APIRouter, File, UploadFile, Depends
 from fastapi.responses import JSONResponse
+
+from db.models import Metadata
 from db.database import get_db
+from db.query.query_book import BookQuery
+from db.query.query_category import CategoryQuery
+from db.fetching import DataFetching
+
 from config import MYSQL_CONFIG
+from utils.error_handlers import handle_exception
 from script.vector_db import IndexManager
+from service.dto import MetadataResponse
 from sqlalchemy.orm import Session
 from sqlalchemy.future import select
 
 
 router = APIRouter(tags=["Book"])
 
 index_manager = IndexManager()
 db_dependency = Annotated[Session, Depends(get_db)]
 
 
+@router.get("/book")
+async def get_metadata(user: user_dependency, db: db_dependency):
+    auth_response = check_user_authentication(user)
+    if auth_response:
+        print(auth_response)
+        return auth_response
+    try:
+        # Join Metadata with Category to get the category name
+        fetching = DataFetching(user, db)
+        # print(fetching)
+        metadata_fetching = fetching.metadata_fetching()
+        # print(metadata_fetching)
+
+        # Transform results into MetadataResponse model with optional thumbnail handling
+        return metadata_fetching
+
+    except Exception as e:
+        return handle_exception(e)
+
+
 @router.post("/book")
 async def upload_file(
     user: user_dependency,
@@ -39,54 +60,21 @@ async def upload_file(
     file: UploadFile = File(...),
     thumbnail: Optional[UploadFile] = File(None),
 ):
+    auth_response = check_admin_authentication(user)
+    if auth_response:
+        return auth_response
 
     # Create a new Metadata object
+    book_query = BookQuery(user)
+    book_query.add_book(db, title, author, category_id, year, publisher)
 
+    logging.info("Database Inserted")
 
+    # Query the category based on category_id
+    category_query = CategoryQuery(user)
+    category = category_query.get_category(db, category_id)
 
     try:
         reference = {
             "title": title,
             "author": author,
@@ -96,61 +84,16 @@ async def upload_file(
         }
 
         # Process the file and handle data ingestion
+        response = await data_ingestion(reference, file)
 
+        return {
+            "filename": file.filename,
+            "response": response,
+            "info": "upload file successfully",
+        }
 
+    except Exception as e:
+        return handle_exception(e)
 
 
 @router.put("/book/{metadata_id}")
@@ -165,38 +108,29 @@ async def update_metadata(
     publisher: str = Form(...),
     thumbnail: Optional[UploadFile] = File(None),
 ):
+    auth_response = check_admin_authentication(user)
+    if auth_response:
+        return auth_response
 
     try:
+        # fetch current metadata
+        book_query = BookQuery(user)
+        current_metadata = book_query.get_metadata_books(db, metadata_id)
 
+        # Fetch current and new categories
+        category_query = CategoryQuery(user)
+        current_category = category_query.get_current_category(db, metadata_id)
+
+        new_category = category_query.get_category(db, category_id)
 
         # Prepare the references
+        current_reference = {
+            "title": current_metadata.title,
+            "author": current_metadata.author,
+            "category": current_category,
+            "year": current_metadata.year,
+            "publisher": current_metadata.publisher,
         }
 
         new_reference = {
@@ -207,72 +141,62 @@ async def update_metadata(
             "publisher": publisher,
         }
 
+        # Update vector database
+        index_manager.update_vector_database(current_reference, new_reference)
 
         # Update existing metadata entry
        metadata = db.query(Metadata).filter(Metadata.id == metadata_id).first()
+        print(metadata)
 
         if not metadata:
             return JSONResponse(status_code=404, content="Metadata not found")
 
+        updated_metadata = book_query.update_metadata_entry(
+            db, metadata_id, title, author, category_id, year, publisher
         )
+        print(updated_metadata)
+        updated_category = category_query.get_category(db, updated_metadata.category_id)
+        print(updated_category)
 
         return MetadataResponse(
             id=metadata_id,
+            title=updated_metadata.title,
+            author=updated_metadata.author,
+            category=updated_category,  # Assuming category relationship is available
+            category_id=updated_metadata.category_id,
+            year=updated_metadata.year,
+            publisher=updated_metadata.publisher,
+            thumbnail=(
+                updated_metadata.thumbnail
+                if updated_metadata.thumbnail is not None
+                else None
+            ),
         )
 
     except Exception as e:
+        return handle_exception(e)
 
 @router.delete("/book/{metadata_id}")
 async def delete_metadata(user: user_dependency, db: db_dependency, metadata_id: int):
+    auth_response = check_admin_authentication(user)
+    if auth_response:
+        return auth_response
 
     try:
+        # Check if metadata exists before deletion
+        metadata = db.query(Metadata).filter(Metadata.id == metadata_id).first()
 
+        if not metadata:
+            return JSONResponse(status_code=404, content="Metadata not found")
+
+        # Delete from the vector database and the database
+        index_manager.delete_vector_database(metadata.title)
+
+        db.delete(metadata)
+        db.commit()
 
         return {"Status": "delete successfully"}
 
     except Exception as e:
+        return handle_exception(e)
api/router/book_collection.py
CHANGED
@@ -1,188 +1,105 @@
 from typing import Annotated, List, Optional
 from api.router.user import user_dependency
+from api.auth import check_user_authentication
 from fastapi import APIRouter, Depends
 from db.database import get_db
 from sqlalchemy.orm import Session
+from db.query.query_user_meta import UserMetaQuery
+from db.fetching import DataFetching
+from utils.error_handlers import handle_exception
 
 
+router = APIRouter(tags=["Book_Collection"])
 db_dependency = Annotated[Session, Depends(get_db)]
 
 
 @router.get("/book_collection")
+async def get_book_collection(user: user_dependency, db: db_dependency):
+    """Fetch user's book collection."""
+    auth_response = check_user_authentication(user)
+    if auth_response:
+        return auth_response
 
     try:
+        fetching = DataFetching(user, db)
+        book_collection = fetching.collection_fetching()
+
         return {
             "status": "success",
+            "book_collection": book_collection,
         }
     except Exception as e:
+        return handle_exception(e)
 
 
 @router.post("/book_collection")
 async def request_book_collection(
+    user: user_dependency, db: db_dependency, metadata_ids: List[Optional[int]]
 ):
+    """Insert book collection metadata for a user."""
+    auth_response = check_user_authentication(user)
+    if auth_response:
+        return auth_response
 
+    user_meta_query = UserMetaQuery(user)
 
+    try:
+        return user_meta_query.insert_user_meta_entries(db, metadata_ids)
 
     except Exception as e:
+        return handle_exception(e)
 
 
 @router.put("/book_collection")
 async def update_book_collection(
+    user: user_dependency, db: db_dependency, metadata_ids: List[Optional[int]]
 ):
+    """Update user's book collection metadata."""
+    auth_response = check_user_authentication(user)
+    if auth_response:
+        return auth_response
 
     try:
+        user_meta_query = UserMetaQuery(user)
+        return user_meta_query.update_user_meta_entries(db, metadata_ids)
 
+    except Exception as e:
+        return handle_exception(e)
 
 
 @router.delete("/book_collection/{metadata_id}")
 async def delete_book_collection(
+    user: user_dependency, db: db_dependency, metadata_id: int
 ):
+    """Delete a specific book collection entry."""
+    auth_response = check_user_authentication(user)
+    if auth_response:
+        return auth_response
 
     try:
+        user_meta_query = UserMetaQuery(user)
+        return user_meta_query.delete_user_meta(
+            db, metadata_id=metadata_id
         )
+    except Exception as e:
+        return handle_exception(e)
 
 
 @router.delete("/all_collections")
+async def delete_all_book(user: user_dependency, db: db_dependency):
+    """Delete all book collections for the authenticated user."""
+    auth_response = check_user_authentication(user)
+    if auth_response:
+        return auth_response
 
     try:
+        user_meta_query = UserMetaQuery(user)
+        user_meta_query.delete_all_user_meta(db)
 
         return {
             "status": "success",
+            "message": f"Deleted book collection for user {user.get('id')}",
         }
     except Exception as e:
+        return handle_exception(e)
api/router/bot_general.py
CHANGED
@@ -1,19 +1,20 @@
+from typing import Annotated
+
 from fastapi import APIRouter, Depends
 from fastapi.responses import JSONResponse
+from sqlalchemy.orm import Session
+from sse_starlette.sse import EventSourceResponse
+
+from api.auth import check_user_authentication
+from api.function import generate_streaming_completion
+from api.router.user import user_dependency
+from core.chat.bot_service import ChatCompletionService
 from core.chat.chatstore import ChatStore
 from db.database import get_db
 from db.models import Session_Publisher
 from langfuse.llama_index import LlamaIndexCallbackHandler
+from service.dto import UserPromptRequest, BotResponse
+from utils.utils import generate_uuid
 
 
 router = APIRouter(tags=["Bot_General"])
@@ -37,12 +38,16 @@ async def get_session_id(
     session_id: str,
     chat_store: ChatStore = Depends(get_chat_store),
 ):
+    auth_response = check_user_authentication(user)
+    if auth_response:
+        return auth_response
+
+    # Retrieve chat history from Redis
+    chat_history = chat_store.get_messages(session_id)
 
+    # If no chat history is found in Redis, fallback to the alternative store using mongoDB
+    if chat_history is None or chat_history == []:
+        chat_history = chat_store.get_all_messages_mongodb(session_id)
 
     return chat_history
 
@@ -57,16 +62,11 @@ async def bot_generator_general(
 
     if user_prompt_request.streaming:
         return EventSourceResponse(
+            generate_streaming_completion(user_prompt_request.prompt, session_id)
         )
     else:
+        bot_service = ChatCompletionService(session_id, user_prompt_request.prompt)
+        response, metadata, scores = bot_service.generate_completion()
 
         return BotResponse(
             content=response,
@@ -76,12 +76,18 @@
 
 
 @router.delete("/bot/{session_id}")
+async def delete_bot(
+    db: db_dependency, session_id: str, chat_store: ChatStore = Depends(get_chat_store)
+):
     try:
         chat_store.delete_messages(session_id)
         # Delete session from database
+        session = (
+            db.query(Session_Publisher)
+            .filter(Session_Publisher.id == session_id)
+            .first()
+        )
+
         if session:
             db.delete(session)
             db.commit()  # Commit the transaction
api/router/bot_one.py
CHANGED
@@ -1,24 +1,29 @@
 from typing import Annotated
 from fastapi import APIRouter, Depends
 from fastapi.responses import JSONResponse
+import pytz
+
+from service.dto import UserPromptRequest, BotResponse, BotCreateRequest
+from datetime import datetime
 from core.chat.chatstore import ChatStore
+from core.chat.bot_service import ChatCompletionService
 from db.database import get_db
+from db.models import Session_Publisher
+from db.query.query_book import BookQuery
+from db.query.query_bot import BotQuery
+
 from sqlalchemy.orm import Session
 from sse_starlette.sse import EventSourceResponse
 from utils.utils import generate_uuid
+from utils.error_handlers import handle_exception
 from langfuse.llama_index import LlamaIndexCallbackHandler
 
+from api.auth import check_user_authentication
+from api.router.user import user_dependency
+from api.function import (
+    generate_streaming_completion
+)
+
 
 router = APIRouter(tags=["Bot_One"])
 
@@ -30,25 +35,20 @@ def get_chat_store():
 
 
 @router.post("/bot_one/{metadata_id}")
+async def create_bot_one(user: user_dependency, db: db_dependency, metadata_id: int, bot_name: BotCreateRequest):
+    auth_response = check_user_authentication(user)
+    if auth_response:
+        return auth_response
     # Generate a new session ID (UUID)
     try:
         session_id = generate_uuid()
+
+        bot_query = BotQuery(user)
+        bot_query.add_bot(db, session_id, bot_name.name, metadata_id)
 
         return {
+            "status": "session id created successfully",
+            "bot_name": bot_name.name,
             "session_id": session_id,
         }
 
@@ -59,15 +59,16 @@ async def create_bot_one(user: user_dependency, db: db_dependency, metadata_id:
 
 
 @router.post("/bot_one/{metadata_id}/{session_id}")
-async def generator_bot_one(
+async def generator_bot(
     user: user_dependency,
     db: db_dependency,
     metadata_id: int,
     session_id: str,
     user_prompt_request: UserPromptRequest,
 ):
+    auth_response = check_user_authentication(user)
+    if auth_response:
+        return auth_response
 
     langfuse_callback_handler = LlamaIndexCallbackHandler()
     langfuse_callback_handler.set_trace_params(
@@ -76,25 +77,13 @@ async def generator_bot_one(
 
     # Query to retrieve the titles
     try:
+        book_query = BookQuery(user)
+        output_titles = book_query.get_title_from_session(db, metadata_id, session_id)
+        titles = [item[0] for item in output_titles]
         print(titles)
 
     except Exception as e:
+        return handle_exception(e)
 
     if user_prompt_request.streaming:
         return EventSourceResponse(
@@ -104,14 +93,17 @@ async def generator_bot_one(
             )
         )
     else:
+        bot_service = ChatCompletionService(session_id, user_prompt_request.prompt, titles, type_bot="specific")
+        response, metadata, scores = bot_service.generate_completion()
+
+        # Set Jakarta timezone
+        jakarta_tz = pytz.timezone('Asia/Jakarta')
 
         existing_session = (
             db.query(Session_Publisher).filter(Session_Publisher.id == session_id).first()
         )
+
+        existing_session.updated_at = datetime.now(jakarta_tz)
         db.commit()
 
         return BotResponse(
@@ -121,24 +113,20 @@
         )
 
 
-@router.get("/bot_one{metadata_id}")
+@router.get("/bot_one/{metadata_id}")
 async def get_all_session_bot_one(
     user: user_dependency, db: db_dependency, metadata_id: int
 ):
+    auth_response = check_user_authentication(user)
+    if auth_response:
+        return auth_response
 
     try:
         # Query the session IDs based on the user ID
+        bot_query = BotQuery(user)
+        sessions = bot_query.get_session_ids_bot(db, metadata_id)
 
+        session_data = [{"id": session.id, "bot_name": session.bot_name, "updated_at": str(session.updated_at)} for session in sessions]
 
         # Convert list of tuples to a simple list
         session_sorted_data = sorted(session_data, key=lambda x: datetime.fromisoformat(x['updated_at']), reverse=True)
api/router/bot_specific.py
CHANGED
@@ -1,24 +1,25 @@
 from typing import Annotated, List, Optional
+from datetime import datetime
+
 from fastapi import APIRouter, Depends
 from fastapi.responses import JSONResponse
+from sse_starlette.sse import EventSourceResponse
+from sqlalchemy import select
+from sqlalchemy.orm import Session
+from sqlalchemy.exc import SQLAlchemyError, NoResultFound
+
 from service.dto import UserPromptRequest, BotResponse, BotCreateRequest
 from core.chat.chatstore import ChatStore
+from core.chat.bot_service import ChatCompletionService
 from db.database import get_db
 from db.models import Bot_Meta, Bot, Metadata
 from db.models import Session as SessionModel
-from sqlalchemy.exc import NoResultFound
-from sqlalchemy import select
-from api.function import (
-    generate_streaming_completion,
-    generate_completion_non_streaming,
-)
+from langfuse.llama_index import LlamaIndexCallbackHandler
+from api.auth import check_user_authentication
 from api.router.user import user_dependency
+from api.function import generate_streaming_completion
 from utils.utils import generate_uuid
-from datetime import datetime
+from utils.error_handlers import handle_exception


 router = APIRouter(tags=["Bot_Specific"])
@@ -37,14 +38,15 @@ async def create_bot_id(
     bot_request: BotCreateRequest,
 ):

+    auth_response = check_user_authentication(user)
+    if auth_response:
+        return auth_response

     # Create a new bot entry
     try:
         # Create a new bot entry
         new_bot = Bot(
-            user_id=user.get("id"), bot_name=bot_request.
+            user_id=user.get("id"), bot_name=bot_request.name
         )  # Assuming user has an 'id' attribute

         db.add(new_bot)
@@ -69,8 +71,9 @@ async def create_bot_specific(
     bot_id: int,
     metadata_id: List[Optional[int]],  # Use the Pydantic model
 ):
+    auth_response = check_user_authentication(user)
+    if auth_response:
+        return auth_response

     try:
         # Create BotMeta instances for each metadata_id
@@ -99,8 +102,9 @@ async def update_bot_specific(
     bot_id: int,
     metadata_id: List[Optional[int]],  # Use the Pydantic model
 ):
+    auth_response = check_user_authentication(user)
+    if auth_response:
+        return auth_response

     try:
         # Fetch existing Bot_Meta entries related to bot_id
@@ -119,16 +123,16 @@ async def update_bot_specific(
         ]
         db.add_all(bot_meta_entries)
         db.commit()
+        return {
+            "status": "success",
+            "bot_meta": [entry.id for entry in bot_meta_entries],
+        }

     except SQLAlchemyError as e:
         db.rollback()  # Rollback in case of any database error
         return JSONResponse(status_code=500, content=f"Database error: {str(e)}")
     except Exception as e:
-        return JSONResponse(
-            status_code=500, content=f"An unexpected error occurred: {str(e)}"
-        )
-
-    return {"status": "success", "bot_meta": [entry.id for entry in bot_meta_entries]}
+        return handle_exception(e)


 @router.delete("/meta/{bot_id}/{metadata_id}")
@@ -138,8 +142,9 @@ async def delete_bot_specific(
     bot_id: int,
     metadata_id: int,  # Changed to int to specify a single metadata_id
 ):
+    auth_response = check_user_authentication(user)
+    if auth_response:
+        return auth_response

     try:
         # Delete the specific metadata entry for the given bot_id
@@ -155,19 +160,16 @@ async def delete_bot_specific(
         # Delete the found entry
         db.delete(bot_meta_entry)
         db.commit()
+        return {
+            "status": "success",
+            "deleted_entry_id": bot_meta_entry.id,
+        }

     except SQLAlchemyError as e:
         db.rollback()  # Rollback in case of any database error
         return JSONResponse(status_code=500, content=f"Database error: {str(e)}")
     except Exception as e:
-        return JSONResponse(
-            status_code=500, content=f"An unexpected error occurred: {str(e)}"
-        )
-
-    return {
-        "status": "success",
-        "deleted_entry_id": bot_meta_entry.id,
-    }
+        return handle_exception(e)


 @router.delete("/bot_all/{bot_id}")
@@ -176,8 +178,9 @@ async def delete_bot_id(
     db: db_dependency,
     bot_id: int,
 ):
+    auth_response = check_user_authentication(user)
+    if auth_response:
+        return auth_response

     try:
         # Fetch the bot to ensure it exists
@@ -207,18 +210,16 @@ async def delete_bot_id(
         db.rollback()  # Rollback in case of any database error
         return JSONResponse(status_code=500, content=f"Database error: {str(e)}")
     except Exception as e:
-        return JSONResponse(
-            status_code=500, content=f"An unexpected error occurred: {str(e)}"
-        )
+        return handle_exception(e)


 @router.post("/session/{bot_id}/new")
 async def create_new_session(user: user_dependency, db: db_dependency, bot_id: int):
     # Check if user is authenticated
+    auth_response = check_user_authentication(user)
+    if auth_response:
+        return auth_response

-    print(user.get("id"))
     user_id = user.get("id")
     # Ensure the bot belongs to the user
     bot_query = select(Bot).where(Bot.id == bot_id, Bot.user_id == user_id)
@@ -251,32 +252,30 @@ async def create_new_session(user: user_dependency, db: db_dependency, bot_id: int):
         }

     except Exception as e:
-        return JSONResponse(
-            status_code=500, content=f"An unexpected in retrieving session id {str(e)}"
-        )
+        return handle_exception(e)


 @router.get("/bot/all/{bot_id}")
 async def get_all_session_ids(user: user_dependency, db: db_dependency, bot_id: int):
+    auth_response = check_user_authentication(user)
+    if auth_response:
+        return auth_response

     try:
         query = select(SessionModel.id, SessionModel.updated_at).where(
-            SessionModel.user_id == user.get("id"),
-            SessionModel.bot_id == bot_id
+            SessionModel.user_id == user.get("id"), SessionModel.bot_id == bot_id
         )

         result = db.execute(query)
         sessions = result.all()
+        session_data = [
+            {"id": session.id, "updated_at": session.updated_at} for session in sessions
+        ]

         return session_data

     except Exception as e:
-        print(f"An error occurred while fetching session IDs: {e}")
-        return JSONResponse(status_code=400, content="Error retrieving session IDs")
+        return handle_exception(e)


 @router.post("/bot/{bot_id}/{session_id}")
@@ -287,9 +286,9 @@ async def bot_generator_spesific(
     session_id: str,
     user_prompt_request: UserPromptRequest,
 ):
+    auth_response = check_user_authentication(user)
+    if auth_response:
+        return auth_response
     langfuse_callback_handler = LlamaIndexCallbackHandler()
     langfuse_callback_handler.set_trace_params(
         user_id=user.get("username"), session_id=session_id
@@ -310,33 +309,30 @@ async def bot_generator_spesific(
     titles = result.scalars().all()
     print(titles)

-            generate_streaming_completion(
-                user_prompt_request.prompt,
-                session_id,
-            )
-        response, metadata, scores = generate_completion_non_streaming(
-            session_id, user_prompt_request.prompt, titles, type_bot="specific"
-        )
-
-        existing_session = (
-            db.query(SessionModel).filter(SessionModel.id == session_id).first()
-        )
-        existing_session.updated_at = datetime.now()
-        db.commit()
+    if user_prompt_request.streaming:
+        return EventSourceResponse(
+            generate_streaming_completion(
+                user_prompt_request.prompt,
+                session_id,
+            )
+        )
+    else:
+        bot_service = ChatCompletionService(session_id, user_prompt_request.prompt, titles, type_bot="specific")
+        response, metadata, scores = bot_service.generate_completion()

+        existing_session = (
+            db.query(SessionModel).filter(SessionModel.id == session_id).first()
+        )
+        existing_session.updated_at = datetime.now()
+        db.commit()

+        return BotResponse(
+            content=response,
+            metadata=metadata,
+            scores=scores,
+        )
+
+    except SQLAlchemyError as e:
+        return JSONResponse(status_code=500, content=f"Database error: {str(e)}")
+    except Exception as e:
+        return handle_exception(e)
api/router/category.py
CHANGED
@@ -1,6 +1,6 @@
 from api.router.user import user_dependency
 from fastapi.responses import JSONResponse
-from fastapi import APIRouter,
+from fastapi import APIRouter, Depends

 from db.models import Category
 from db.database import get_db
api/router/role.py
CHANGED
@@ -1,4 +1,4 @@
-from fastapi import APIRouter,
+from fastapi import APIRouter, Depends
 from fastapi.responses import JSONResponse
 from api.router.user import user_dependency
 from typing import Annotated
api/router/user.py
CHANGED
@@ -1,14 +1,16 @@
+from datetime import timedelta
+from typing import Annotated
+
 from fastapi import APIRouter, Depends, status
-from fastapi.security import OAuth2PasswordRequestForm
 from fastapi.responses import JSONResponse
+from fastapi.security import OAuth2PasswordRequestForm
+from passlib.context import CryptContext
+from sqlalchemy.orm import Session
+
 from db.models import User
 from db.database import get_db
 from api.auth import get_current_user, create_access_token
 from service.dto import CreateUserRequest, UserVerification, Token
-from typing import Annotated
-from passlib.context import CryptContext
-from sqlalchemy.orm import Session
-from datetime import timedelta


 router = APIRouter(tags=["User"])
@@ -38,27 +40,28 @@ async def login_for_access_token(
     try:
         access_token_expires = timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES)
         access_token = create_access_token(
-            user.username,
+            user.username,
+            user.name,
+            user.id,
+            user.role_id,
+            access_token_expires,
+            user.email,
         )

         return {"access_token": access_token, "token_type": "bearer"}

     except Exception as e:
         print(e)
-        return JSONResponse(
-            status_code=500, content="An error occuring when login"
-        )
+        return JSONResponse(status_code=500, content="An error occuring when login")


 @router.get("/login", response_model=dict)
 async def get_user(user: user_dependency):
     if user is None:
-        return JSONResponse(
-            status_code=401, content="Authentication Failed"
-        )
+        return JSONResponse(status_code=401, content="Authentication Failed")
     return {
         "username": user.get("username"),
-        "name"
+        "name": user.get("name"),
         "id": user.get("id"),
         "email": user.get("email"),
         "role": user.get("role_id"),
@@ -69,9 +72,7 @@ async def get_user(user: user_dependency):
 async def get_all_users(user: user_dependency, db: Session = Depends(get_db)):
     # Check if the current user has an admin role
     if user.get("role_id") != 1:  # Adjust this check based on how roles are represented
-        return JSONResponse(
-            status_code=401, content="Authentication Failed"
-        )
+        return JSONResponse(status_code=401, content="Authentication Failed")

     # Query the database to retrieve all users
     users = db.query(
@@ -96,9 +97,7 @@ async def register_user(db: db_dependency, create_user_request: CreateUserRequest):
     )

     if existing_user:
-        return JSONResponse(
-            status_code=400, content="Email is already registered"
-        )
+        return JSONResponse(status_code=400, content="Email is already registered")

     try:
         password_hash = bcrypt_context.hash(create_user_request.password)
@@ -133,21 +132,17 @@ async def change_password(
     user: user_dependency, db: db_dependency, user_verification: UserVerification
 ):
     if user is None:
-        return JSONResponse(
-            status_code=401, content="Authentication Failed"
-        )
+        return JSONResponse(status_code=401, content="Authentication Failed")
     user_model = db.query(User).filter(User.id == user.get("id")).first()

     if not bcrypt_context.verify(
         user_verification.password, user_model.hashed_password
     ):
-        return JSONResponse(
-            status_code=401, content="Error on password change"
-        )
+        return JSONResponse(status_code=401, content="Error on password change")

     user_model.hashed_password = bcrypt_context.hash(user_verification.new_password)
     db.add(user_model)
     db.commit()
     db.refresh(user_model)

-    return {"message": "User's password successfully changed", "user_id": user_model.id}
+    return {"message": "User's password successfully changed", "user_id": user_model.id}
config.py
CHANGED
@@ -1,6 +1,8 @@
 from pydantic_settings import BaseSettings
+from dotenv import load_dotenv
 import os

+load_dotenv()

 class MysqlConfig(BaseSettings):
     DB_HOST: str = ""
core/chat/bot_service.py
ADDED
@@ -0,0 +1,163 @@
import logging
import re
import os

from typing import List
from datetime import datetime
from fastapi.responses import JSONResponse
from script.vector_db import IndexManager
from llama_index.core.llms import MessageRole

from core.chat.engine import Engine
from core.chat.chatstore import ChatStore
from core.parser import clean_text, update_response, renumber_sources

from service.dto import ChatMessage
from pymongo.mongo_client import MongoClient


class ChatCompletionService:
    def __init__(self, session_id: str, user_request: str, titles: List = None, type_bot: str = "general"):
        self.session_id = session_id
        self.user_request = user_request
        self.titles = titles
        self.type_bot = type_bot
        self.client = MongoClient(os.getenv("MONGO_URI"))
        self.engine = Engine()
        self.index_manager = IndexManager()
        self.chatstore = ChatStore()

    def generate_completion(self):
        if not self._ping_mongo():
            return JSONResponse(status_code=500, content="Database Error: Unable to connect to MongoDB")

        try:
            # Load and retrieve chat engine with appropriate index
            index = self.index_manager.load_existing_indexes()
            chat_engine = self._get_chat_engine(index)

            # Generate chat response
            response = chat_engine.chat(self.user_request)
            sources = response.sources
            number_reference_sorted = self._extract_sorted_references(response)

            contents, metadata_collection, scores = self._process_sources(sources, number_reference_sorted)

            # Update response and renumber sources
            response = update_response(str(response))
            contents = renumber_sources(contents)

            # Add contents to metadata
            metadata_collection = self._attach_contents_to_metadata(contents, metadata_collection)

            # Save the message to chat store
            self._store_message_in_chatstore(response, metadata_collection)

        except Exception as e:
            logging.error(f"An error occurred in generate text: {e}")
            return JSONResponse(
                status_code=500,
                content=f"An internal server error occurred: {e}"
            )

        try:
            if self.type_bot == "specific":
                self._save_chat_history_to_db(response, metadata_collection)

            return str(response), metadata_collection, scores

        except Exception as e:
            logging.error(f"An error occurred while saving chat history: {e}")
            return JSONResponse(
                status_code=500,
                content=f"An internal server error occurred while saving chat history: {e}"
            )

    def _ping_mongo(self):
        try:
            self.client.admin.command("ping")
            print("Pinged your deployment. Successfully connected to MongoDB!")
            return True
        except Exception as e:
            logging.error(f"MongoDB connection failed: {e}")
            return False

    def _get_chat_engine(self, index):
        if self.type_bot == "general":
            return self.engine.get_chat_engine(self.session_id, index)
        return self.engine.get_chat_engine(self.session_id, index, self.titles, self.type_bot)

    def _extract_sorted_references(self, response):
        number_reference = list(set(re.findall(r"\[(\d+)\]", str(response))))
        return sorted(number_reference)

    def _process_sources(self, sources, number_reference_sorted):
        contents, metadata_collection, scores = [], [], []
        if not number_reference_sorted:
            print("There are no references")
            return contents, metadata_collection, scores

        for number in number_reference_sorted:
            number = int(number)
            if sources and sources[0].get("raw_output"):
                node = dict(sources[0])["raw_output"].source_nodes
                if 0 <= number - 1 < len(node):
                    content = clean_text(node[number - 1].node.get_text())
                    contents.append(content)
                    metadata = dict(node[number - 1].node.metadata)
                    metadata_collection.append(metadata)
                    score = node[number - 1].score
                    scores.append(score)
                else:
                    print(f"Invalid reference number: {number}")
            else:
                print("No sources available")

        return contents, metadata_collection, scores

    def _attach_contents_to_metadata(self, contents, metadata_collection):
        for i in range(min(len(contents), len(metadata_collection))):
            metadata_collection[i]["content"] = re.sub(r"source \d+:", "", contents[i])
        return metadata_collection

    def _store_message_in_chatstore(self, response, metadata_collection):
        message = ChatMessage(
            role=MessageRole.ASSISTANT,
            content=response,
            metadata=metadata_collection
        )
        self.chatstore.delete_last_message(self.session_id)
        self.chatstore.add_message(self.session_id, message)
        self.chatstore.clean_message(self.session_id)

    def _save_chat_history_to_db(self, response, metadata_collection):
        chat_history_db = [
            ChatMessage(
                role=MessageRole.SYSTEM,
                content=self.user_request,
                timestamp=datetime.now(),
                payment="free" if self.type_bot == "general" else None,
            ),
            ChatMessage(
                role=MessageRole.ASSISTANT,
                content=response,
                metadata=metadata_collection,
                timestamp=datetime.now(),
                payment="free" if self.type_bot == "general" else None,
            ),
        ]

        chat_history_json = [message.model_dump() for message in chat_history_db]

        db = self.client["bot_database"]  # Replace with your database name
        collection = db[self.session_id]  # Replace with your collection name
        result = collection.insert_many(chat_history_json)
        print("Data inserted with record ids", result.inserted_ids)


# Example usage
def generate_completion_non_streaming(session_id, user_request, titles=None, type_bot="general"):
    chat_service = ChatCompletionService(session_id, user_request, titles, type_bot)
    return chat_service.generate_completion()
core/chat/chatstore.py
CHANGED
@@ -2,7 +2,7 @@ import redis
 import os
 import json
 from fastapi.responses import JSONResponse
-from typing import Optional, List
+from typing import Optional, List, Dict
 from llama_index.storage.chat_store.redis import RedisChatStore
 from pymongo.mongo_client import MongoClient
 from llama_index.core.memory import ChatMemoryBuffer
@@ -56,6 +56,17 @@ class ChatStore:

         # Decode and parse each item into a dictionary
         return [json.loads(m.decode("utf-8")) for m in items]
+
+    def get_last_message(self, session_id: str) -> Optional[Dict]:
+        """Get the last message for a session_id."""
+        last_message = self.redis_client.lindex(session_id, -1)
+
+        if last_message is None:
+            return None  # Return None if there are no messages
+
+        # Decode and parse the last message into a dictionary
+        return json.loads(last_message.decode("utf-8"))
+

     def delete_last_message(self, session_id: str) -> Optional[ChatMessage]:
         """Delete last message for a session_id."""
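
For reference, a minimal sketch of how the new get_last_message helper could be called; the session id and the keys read from the stored message are hypothetical, assuming messages are kept as JSON dicts in the Redis list as the class above does:

    store = ChatStore()
    # Fetch the most recent message pushed to the Redis list for this session
    last = store.get_last_message("example-session-id")  # hypothetical session id
    if last is not None:
        print(last.get("role"), last.get("content"))  # assumes role/content keys in the stored dict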
core/prompt.py
CHANGED
@@ -1,5 +1,5 @@
 SYSTEM_BOT_TEMPLATE = """
-Kamu adalah Medbot yang selalu menggunakan tools
+Kamu adalah Medbot yang selalu menggunakan tools untuk menjawab pertanyaan medis. Jika pengguna bertanya tentang topik non-medis, arahkan mereka untuk bertanya di bidang medis. Tugasmu adalah memberikan jawaban yang informatif dan akurat berdasarkan tools yang tersedia. Pastikan kamu hanya memberikan informasi dari buku yang telah disediakan, jangan sampai menjawab pertanyaan yang tidak terdapat dalam buku atau tools yang kamu gunakan. {additional_information} Jika setelah itu tidak ada informasi yang ditemukan, katakan bahwa kamu tidak mengetahuinya dan berikan informasi dari apa yang kamu ketahui kemudian arahkan pengguna untuk bertanya ke dokter yang lebih ahli.

 **Instruksi**:

db/database.py
CHANGED
@@ -2,39 +2,45 @@ from sqlalchemy import create_engine
 from sqlalchemy.orm import sessionmaker
 from sqlalchemy.exc import OperationalError
 from config import MYSQL_CONFIG
-from fastapi import
+from fastapi.responses import JSONResponse
+from fastapi import HTTPException, status
+from dotenv import load_dotenv
+import io
 import os
 import base64

+load_dotenv()

 SQLALCHEMY_DATABASE_URL = MYSQL_CONFIG.DB_URI_SQL_ALCHEMY

 # Get the base64 encoded certificate from the environment variable
 ca_cert_base64 = os.getenv("CA_CERT_BASE64")

+# Retrieve the Base64-encoded CA certificate from the environment variable
+ca_cert_base64 = os.getenv("CA_CERT_BASE64")
+
 if ca_cert_base64:
+    # Decode the base64 content
     ca_cert_content = base64.b64decode(ca_cert_base64).decode("utf-8")

-    ca_cert_path = "/tmp/ca.pem"
+    # Use in-memory buffer to handle the decoded CA certificate content
+    ca_cert_in_memory = io.StringIO(ca_cert_content)
+
+    # Create SQLAlchemy engine with SSL configuration, using in-memory certificate
+    engine = create_engine(
+        SQLALCHEMY_DATABASE_URL,
+        connect_args={
+            "ssl": {
+                "sslmode": "REQUIRED",
+                "sslrootcert": ca_cert_in_memory,  # In-memory CA certificate
+                # Add other SSL options like client cert/key if required
+            }
+        },
+    )
 else:
     raise ValueError("CA_CERT_BASE64 environment variable is not set")

-# Use the decoded CA certificate in the SQLAlchemy engine
-engine = create_engine(
-    SQLALCHEMY_DATABASE_URL,
-    connect_args={
-        "ssl": {
-            "sslmode": "REQUIRED",
-            "ca": ca_cert_path,  # Path to the temporary CA certificate
-            # Add other SSL options as needed
-        }
-    },
-)

 SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
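
The routers in this commit depend on get_db from this module; its body is not shown in the hunk above, but a typical FastAPI session dependency over SessionLocal looks roughly like this (a sketch, not necessarily the exact implementation in db/database.py):

    def get_db():
        db = SessionLocal()
        try:
            yield db  # hand the session to the request handler
        finally:
            db.close()  # always release the connection back to the pool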
db/fetching.py
ADDED
@@ -0,0 +1,60 @@
from db.query.query_user_meta import UserMetaQuery
from db.query.query_book import BookQuery
from service.dto import MetadataResponse
from fastapi.responses import JSONResponse

class DataFetching:
    def __init__(self, user, db):
        self.user = user
        self.db = db

    def collection_fetching(self):
        user_meta_query = UserMetaQuery(self.user)
        user_meta_entries = user_meta_query.get_user_meta_entries(self.db)

        if not user_meta_entries:
            return {"info": "No book collection found"}

        # Extract relevant data from the user_meta_entries
        results = [
            {
                "user_id": user_meta.user_id,  # Ensure you include user_id if needed
                "metadata_id": metadata.id,
                "title": metadata.title,
                "author": metadata.author,
                "category_name": category.category,
                "year": metadata.year,
                "publisher": metadata.publisher,
            }
            for user_meta, metadata, category in user_meta_entries  # Unpack the tuple
        ]

        print("Hasil akhir ", results)

        # Extract relevant data from the user_meta_entries
        return results

    def metadata_fetching(self):
        book_query = BookQuery(self.user)
        book_query_entries = book_query.get_book(self.db)

        return [
            MetadataResponse(
                id=id,
                title=title,
                author=author,
                category=category,
                category_id=category_id,
                year=year,
                publisher=publisher,
                thumbnail=(
                    thumbnail if thumbnail else None
                ),  # Ensure None if thumbnail is not present
            )
            for id, title, author, category, category_id, year, publisher, thumbnail in book_query_entries
        ]
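
A rough sketch of how DataFetching might be consumed from a router; the endpoint path and the user/db dependencies here are assumptions, mirroring the dependencies used by the routers elsewhere in this commit:

    @router.get("/collection")  # hypothetical route
    async def get_collection(user: user_dependency, db: db_dependency):
        fetcher = DataFetching(user, db)
        # Returns a list of dicts, or {"info": ...} when the user has no books
        return fetcher.collection_fetching()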
db/models.py
CHANGED
@@ -159,6 +159,7 @@ class Session_Publisher(Base):

     id = mapped_column(String(36), primary_key=True, index=True, default=lambda: str(uuid.uuid4()))  # Store as string
     user_id = mapped_column(Integer, ForeignKey("user.id"))
+    bot_name = mapped_column(String(100), nullable=True)
     metadata_id = mapped_column(Integer, ForeignKey("metadata.id"))
     created_at : Mapped[timestamp_current]
     updated_at : Mapped[timestamp_update]
db/query/__init__.py
ADDED
File without changes
db/query/base_query.py
ADDED
@@ -0,0 +1,201 @@
from sqlalchemy import select, delete, update
from sqlalchemy.exc import SQLAlchemyError
from utils.error_handlers import handle_error, not_found_error, no_entries_found, handle_exception
from fastapi.responses import JSONResponse
from typing import List, Type, Optional


class BaseQuery:
    def __init__(self, user):
        self.user = user
        self.user_id = user.get("id")

    def _fetch(self, db, query, not_found_message, multiple: bool = False):
        """Fetch a single or multiple results based on the 'multiple' flag."""
        try:
            if multiple:
                results = db.execute(query).all()
                if not results:
                    return no_entries_found(not_found_message)
                return results
            else:
                result = db.execute(query).scalar_one_or_none()
                if not result:
                    return not_found_error(not_found_message)
                return result
        except Exception as e:
            return handle_error(
                e,
                "Failed to fetch entry" if not multiple else "Failed to fetch entries",
            )

    def _handle_commit(self, db):
        try:
            db.commit()
        except SQLAlchemyError as e:
            db.rollback()
            return handle_exception(e)
        except Exception as e:
            db.rollback()
            return handle_error(e, "Operation failed")

    def add(self, db, instance):
        """Add a new entry."""
        db.add(instance)
        return self._handle_commit(db)

    def insert_entries(self, db, entries):
        """Insert multiple entries."""
        db.add_all(entries)
        return self._handle_commit(db)

    def delete(self, db, model, id, filter_conditions=None):
        """Delete an entry by ID with optional filter conditions."""
        # Build the query to select the entry
        query = select(model).where(model.id == id)
        if filter_conditions:
            query = query.where(*filter_conditions)

        # Fetch the entry
        entry = self._fetch(db, query, f"Entry with ID {id} not found.", multiple=False)
        if isinstance(entry, JSONResponse):
            return entry

        # Build the delete query
        delete_query = delete(model).where(model.id == id)
        if filter_conditions:
            delete_query = delete_query.where(*filter_conditions)

        # Execute the delete query and commit
        db.execute(delete_query)
        return self._handle_commit(db)

    def delete_all(self, db, model, filter_conditions=None):
        """Delete all entries or based on filters."""
        query = delete(model)
        if filter_conditions:
            query = query.where(*filter_conditions)
        db.execute(query)
        return self._handle_commit(db)

    def update(self, db, model, id, update_data, filter_conditions=None):
        """Update an entry by ID."""
        query = select(model).where(model.id == id)
        if filter_conditions:
            query = query.where(*filter_conditions)

        not_found_message = f"Entry with ID {id} not found."
        entry = self._fetch(db, query, not_found_message, multiple=False)

        if isinstance(entry, JSONResponse):
            return entry

        db.execute(
            update(model)
            .where(model.id == id, model.user_id == self.user_id)
            .values(update_data)
        )
        return self._handle_commit(db)

    def update_entries(self, db, model, update_data, filter_conditions=None):
        """Update multiple entries with optional filtering."""
        query = select(model)
        if filter_conditions:
            query = query.where(*filter_conditions)
        not_found_message = "No entries found matching the filter conditions."
        results = self._fetch(
            db, query, not_found_message, multiple=True
        )
        if isinstance(results, JSONResponse):
            return results

        db.execute(update(model).where(*filter_conditions).values(update_data))
        return self._handle_commit(db)

    def get(
        self,
        db,
        model: Type = None,
        id: Optional[int] = None,
        filter_conditions=None,
        columns: Optional[List[str]] = None,
        multiple: bool = False,
    ):
        """Get one or multiple entries, filtered by ID or conditions."""
        if columns:
            query = select(*columns)
        else:
            query = select(model)

        # Apply filtering by user ID and optional conditions
        if id:
            query = query.where(model.id == id)
        if filter_conditions:
            query = query.where(*filter_conditions)

        return self._fetch(
            db,
            query,
            "Entry not found." if not multiple else "No entries found.",
            multiple=multiple,
        )

    def get_with_joins(
        self,
        db,
        join_models: List[Type],
        join_conditions: List = None,
        model: Type = None,
        filter_conditions=None,
        columns: Optional[List[str]] = None,
        multiple: bool = False,
    ):
        """Get one or multiple entries with joins and optional filters."""
        if columns:
            query = select(*columns)
        else:
            query = select(model, *join_models).select_from(model)

        # Apply joins
        if join_conditions:
            for join_model, join_condition in zip(join_models, join_conditions):
                query = query.join(join_model, join_condition)
        else:
            query = query.join(*join_models)

        # Apply filtering by user ID and optional conditions
        if filter_conditions:
            query = query.where(*filter_conditions)

        return self._fetch(
            db,
            query,
            "Entry not found." if not multiple else "No entries found.",
            multiple=multiple,
        )

    def get_columns(
        self,
        db,
        columns: List[str],
        model=None,
        filter_conditions=None,
        id: Optional[int] = None,
        multiple: bool = False,
    ):
        """Get specific columns by ID or filtering."""
        query = select(*columns).select_from(model)

        if id:
            query = query.where(model.id == id)
        if filter_conditions:
            query = query.where(*filter_conditions)

        return self._fetch(
            db,
            query,
            "Entry not found." if not multiple else "No entries found.",
            multiple=multiple,
        )
db/query/query_book.py
ADDED
@@ -0,0 +1,124 @@
from db.query.base_query import BaseQuery
from db.models import Metadata, Category, Session_Publisher
from fastapi.responses import JSONResponse


class BookQuery(BaseQuery):
    def __init__(self, user):
        super().__init__(user)

    def add_book(self, db, title, author, category_id, year, publisher):
        new_book = Metadata(
            title=title,
            author=author,
            category_id=category_id,
            year=year,
            publisher=publisher,
        )
        self.add(db, new_book)

    def get_book(self, db):
        model = Metadata
        metadata_columns = [
            Metadata.id,
            Metadata.title,
            Metadata.author,
            Category.category,  # Assuming this is the correct field for category name
            Category.id,
            Metadata.year,
            Metadata.publisher,
            Metadata.thumbnail,
        ]

        join_models = [Category]
        join_conditions = [Metadata.category_id == Category.id]

        result = self.get_with_joins(
            db,
            model=model,
            columns=metadata_columns,
            join_models=join_models,
            join_conditions=join_conditions,
            multiple=True,
        )
        print("result", result)

        return result

    def update_metadata_entry(
        self, db, metadata_id, title, author, category_id, year, publisher
    ):
        model = Metadata
        # Define filter conditions based on the metadata_id
        filter_conditions = [Metadata.id == metadata_id]

        # Prepare the update data
        update_data = {
            "title": title,
            "author": author,
            "category_id": category_id,
            "year": year,
            "publisher": publisher,
        }

        # Call the update_entries method to update the metadata
        update_response = self.update_entries(
            db,
            model=model,
            update_data=update_data,
            filter_conditions=filter_conditions,
        )

        if isinstance(update_response, JSONResponse):
            return update_response  # Return error response if any

        # Fetch the updated metadata to retrieve the new category
        updated_metadata = db.query(Metadata).filter(Metadata.id == metadata_id).first()
        if not updated_metadata:
            return JSONResponse(status_code=404, content="Metadata not found")

        return updated_metadata

    def update_book(self, db, book_id, title, author):
        update_data = {"title": title, "author": author}
        self.update(db, Metadata, book_id, update_data)

    def delete_book(self, db, book_id):
        self.delete(db, Metadata, book_id)

    def get_books(self, db):
        return self.get(db, model=Metadata, multiple=True)

    def get_metadata_books(self, db, metadata_id):
        return self.get(db, Metadata, id=metadata_id)

    # def get_title_from_session(self, db, metadata_id, session_id):
    #     model = Session_Publisher
    #     columns = [Metadata.title]
    #     join_models = [Session_Publisher.id == session_id, Metadata.id == metadata_id]

    #     titles = self.get_all_with_join_columns(db, model, columns, join_models)

    #     return titles

    def get_title_from_session(self, db, metadata_id, session_id):
        model = Session_Publisher
        columns = [Metadata.title]
        join_models = [Session_Publisher]
        join_conditions = [Metadata.id == metadata_id]
        filter_conditions = [
            Session_Publisher.user_id == self.user_id,
            Session_Publisher.id == session_id,
        ]

        titles = self.get_with_joins(
            db,
            model=model,
            columns=columns,
            join_models=join_models,
            join_conditions=join_conditions,
            filter_conditions=filter_conditions,
            multiple=True,
        )

        return titles
db/query/query_bot.py
ADDED
@@ -0,0 +1,57 @@
from db.models import Bot, Session_Publisher
from db.models import Session as SessionModel
from db.query.base_query import BaseQuery


class BotQuery(BaseQuery):
    def __init__(self, user):
        super().__init__(user)

    def add_bot(self, db, session_id, bot, metadata_id, type="bot_one"):
        # Create the new session
        if type == "bot_one":
            new_bot_one = Session_Publisher(
                id=session_id,
                user_id=self.user.get("id"),
                bot_name=bot,
                metadata_id=metadata_id,
            )

        else:
            new_bot_one = SessionModel(
                id=session_id,
                user_id=self.user.get("id"),
                bot_id=bot,
                metadata_id=metadata_id,
            )

        return self.add(db, new_bot_one)

    def get_session_ids_bot(self, db, metadata_id, type="bot_one"):
        model = Session_Publisher
        columns = [
            Session_Publisher.id,
            Session_Publisher.bot_name,
            Session_Publisher.updated_at,
        ]
        filter_conditions = [
            Session_Publisher.user_id == self.user.get("id"),
            Session_Publisher.metadata_id == metadata_id,
        ]

        sessions = self.get_columns(db, model=model, columns=columns, filter_conditions=filter_conditions, multiple=True)

        return sessions

    def update_session_bot(self, db, bot_id, name):
        update_data = {"name": name}
        self.update(db, Bot, bot_id, update_data)

    def delete_session_bot(self, db, bot_id):
        self.delete(db, Bot, bot_id)

    def get_bot(self, db, bot_id):
        return self.get(db, Bot, bot_id)

    # def get_all_bots(self, db):
    #     return self.get_all(db, Bot)
db/query/query_category.py
ADDED
@@ -0,0 +1,41 @@
from db.models import Category, Metadata
from db.query.base_query import BaseQuery
from sqlalchemy import select, delete, update

class CategoryQuery(BaseQuery):
    def __init__(self, user):
        super().__init__(user)

    def add_category(self, db, name):
        new_category = Category(name=name, user_id=self.user["id"])
        return self.add(db, new_category)

    def update_category(self, db, category_id, name):
        update_data = {"name": name}
        self.update(db, Category, category_id, update_data)

    def delete_category(self, db, category_id):
        self.delete(db, Category, category_id)

    def get_category(self, db, category_id):
        columns = [Category.category]
        model = Category
        return self.get_columns(db, model=model, columns=columns, id=category_id)

    def get_current_category(self, db, metadata_id):
        columns = [Category.category]
        join_models = [Metadata]
        where_conditions = [Metadata.id == metadata_id]

        result = self.get_with_joins(
            db,
            join_models=join_models,
            filter_conditions=where_conditions,
            columns=columns,
            multiple=False  # Assuming you want a single result
        )

        return result

    def get_all_categories(self, db):
        return self.get(db, Category, multiple=True)
db/query/query_role.py
ADDED
File without changes
db/query/query_user_meta.py
ADDED
@@ -0,0 +1,106 @@
from sqlalchemy import select
from db.models import User_Meta, Metadata, Category
from db.query.base_query import BaseQuery


class UserMetaQuery(BaseQuery):
    def __init__(self, user):
        super().__init__(user)

    # def get_user_meta_entries(self, db):
    #     """Fetch all user meta entries joined with metadata and category."""
    #     join_models = [Metadata, Category]
    #     print(join_models)
    #     join_conditions = [
    #         User_Meta.metadata_id == Metadata.id,
    #         Metadata.category_id == Category.id,
    #     ]
    #     print(join_conditions)

    #     result = self.get_all_with_joins(
    #         db,
    #         model=User_Meta,
    #         join_models=join_models,
    #         join_conditions=join_conditions,
    #     )
    #     return result

    def get_user_meta_entries(self, db):
        """Fetch all user meta entries joined with metadata and category."""
        join_models = [Metadata, Category]
        join_conditions = [
            User_Meta.metadata_id == Metadata.id,
            Metadata.category_id == Category.id,
        ]

        filter_conditions = [User_Meta.user_id == self.user_id]

        result = self.get_with_joins(
            db,
            model=User_Meta,
            join_models=join_models,
            join_conditions=join_conditions,
            filter_conditions=filter_conditions,
            multiple=True
        )
        return result

    def insert_user_meta_entries(self, db, metadata_ids):
        """Insert new user meta entries."""
        user_meta_entries = [
            User_Meta(user_id=self.user.get("id"), metadata_id=mid)
            for mid in metadata_ids
        ]

        # Use the method from BaseQuery to insert entries
        self.insert_entries(db, user_meta_entries)
        return {
            "message": "User meta entries added successfully.",
            "metadata_ids": metadata_ids,  # Include the metadata IDs in the result
        }

    def update_user_meta_entries(self, db, metadata_ids):
        """Update user meta entries: keep, delete, or add new entries based on metadata_ids."""
        filter_conditions = [User_Meta.user_id == self.user_id]

        # Fetch existing user meta entries
        existing_user_meta = self.get(db, model=User_Meta, filter_conditions=filter_conditions, multiple=True)
        existing_user_meta = [user_meta[0] for user_meta in existing_user_meta]
        existing_meta_ids = [entry.metadata_id for entry in existing_user_meta]

        # Convert both lists to sets once for efficiency
        metadata_ids_set = set(metadata_ids)
        existing_meta_ids_set = set(existing_meta_ids)

        # Find metadata to add, keep, or delete
        metadata_to_add = list(metadata_ids_set - existing_meta_ids_set)
        metadata_to_keep = list(metadata_ids_set & existing_meta_ids_set)
        metadata_to_delete = list(existing_meta_ids_set - metadata_ids_set)

        # Delete entries that are no longer in the updated metadata_ids list
        if metadata_to_delete:
            db.query(User_Meta).filter(User_Meta.user_id == self.user_id, User_Meta.metadata_id.in_(metadata_to_delete)).delete(synchronize_session=False)

        # Add new entries for metadata that are not in the existing user meta
        for meta_id in metadata_to_add:
            new_entry = User_Meta(user_id=self.user_id, metadata_id=meta_id)
            self.add(db, new_entry)

        db.commit()

        return {
            "status": "success",
            "added_meta": list(metadata_to_add),
            "deleted_meta": list(metadata_to_delete),
            "kept_meta": list(metadata_to_keep),
        }

    def delete_user_meta(self, db, metadata_id):
        """Delete user meta entries by metadata_id."""
        self.delete(db, model=User_Meta, id=metadata_id)
        return {"message": f"Book user with id {id} deleted successfully."}

    def delete_all_user_meta(self, db):
        """Delete all user meta entries for a user."""
        self.delete_all(db, model=User_Meta)
script/document_uploader.py
CHANGED
@@ -8,6 +8,7 @@ from fastapi import UploadFile,status
 from fastapi.responses import JSONResponse

 from llama_index.core.node_parser import (
+    SentenceSplitter,
     SemanticSplitterNodeParser,
 )

@@ -82,16 +83,23 @@ class Uploader:
         # Run the pipeline
         try:
             nodes_with_metadata = pipeline.run(documents=documents_with_metadata)
+            # nodes_with_metadata = splitter.get_nodes_from_documents(documents_with_metadata)
             return nodes_with_metadata

         except Exception as e:
+            try:
+                # If the first method fails, fallback to sentence splitter
+                sentence_splitter = SentenceSplitter(chunk_size=512)
+                nodes_with_metadata = sentence_splitter.get_nodes_from_documents(documents_with_metadata)
+                print("Pipeline processing completed with SentenceSplitter fallback.")
+                return nodes_with_metadata
+            except Exception as fallback_error:
+                # Log the second error and return JSONResponse for FastAPI
+                logging.error(f"Error with SentenceSplitter fallback: {fallback_error}")
+                return JSONResponse(
+                    status_code=500,
+                    content="An internal server error occurred during pipeline processing.",
+                )

     def filter_document(self, documents):
         api_key = PINECONE_CONFIG.PINECONE_API_KEY
script/vector_db.py
CHANGED
@@ -98,10 +98,10 @@ class IndexManager:

         return ids

-    def delete_vector_database(self,
+    def delete_vector_database(self, title):
         try :
             batch_size = 1000
-            all_ids = self.get_all_ids_from_index(
+            all_ids = self.get_all_ids_from_index(title)
             all_ids = list(all_ids)

             # Split ids into chunks of batch_size
@@ -115,11 +115,11 @@ class IndexManager:
         except Exception as e:
             return JSONResponse(status_code=500, content="An error occurred while delete metadata")

-    def update_vector_database(self,
+    def update_vector_database(self, current_reference, new_reference):

         reference = new_reference

-        all_ids = self.get_all_ids_from_index(
+        all_ids = self.get_all_ids_from_index(current_reference['title'])
         all_ids = list(all_ids)

         for id in all_ids:
service/dto.py
CHANGED
@@ -96,7 +96,7 @@ class BotMetaCreate(BaseModel):


 class BotCreateRequest(BaseModel):
+    name: str


 class BotResponse(BaseModel):
utils/error_handlers.py
ADDED
@@ -0,0 +1,20 @@
from fastapi.responses import JSONResponse


def handle_exception(e: Exception):
    """Helper function to handle exceptions in a consistent way."""
    return JSONResponse(
        status_code=500, content=f"An unexpected error occurred: {str(e)}"
    )


def handle_error(e, message):
    return JSONResponse(status_code=500, content={"error": f"{message}: {str(e)}"})


def not_found_error(message):
    return JSONResponse(status_code=404, content={"error": message})


def no_entries_found(message):
    return JSONResponse(status_code=404, content={"message": message})
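
These helpers centralize the repeated JSONResponse error blocks seen in the router diffs above; a minimal usage sketch, where the route path, the db_dependency annotation, and do_work are hypothetical placeholders:

    @router.get("/example")  # hypothetical route
    async def example_endpoint(db: db_dependency):
        try:
            return do_work(db)  # placeholder for the real query logic
        except Exception as e:
            return handle_exception(e)  # 500 with a consistent error payload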