# MAAS / routes.py — updated by Hammad712 (commit 27bc155, verified)
# routes.py
import logging
from fastapi import APIRouter, Body, HTTPException
from models import (
PageSpeedURLRequest,
ReportIngestRequest,
NewChatRequest,
ChatQueryRequest,
)
from config import GEMINI_API_KEY, PAGESPEED_API_KEY
from pagespeed_fetch import get_pagespeed_data
from vectorstore_manager import ingest_report
from gemini_report import generate_report_with_gemini
from chatbot import create_new_chat, get_chain_for_user, summarize_messages
from langchain_mongodb.chat_message_histories import MongoDBChatMessageHistory
# Configure logger to integrate with Uvicorn's logging so route logs
# appear alongside the server's own output.
logger = logging.getLogger("uvicorn.error")
# Shared router; mounted onto the main FastAPI app elsewhere.
router = APIRouter()
# ----- EXISTING ENDPOINTS -----
@router.post("/ingest-report")
async def ingest_user_report(request: ReportIngestRequest):
    """
    Ingest a user's PageSpeed report text into the FAISS vectorstore.

    Args:
        request: Carries ``user_id`` and the raw ``report_text`` to index.

    Returns:
        A JSON payload confirming the ingest succeeded.

    Raises:
        HTTPException: 500 when ingestion fails for any reason.
    """
    logger.info("[ingest-report] Received ingest request for user_id=%s", request.user_id)
    try:
        # The returned (vectorstore, retriever) handles were previously bound
        # but never used, so the call's side effect is all we need here.
        ingest_report(request.user_id, request.report_text)
        logger.info("[ingest-report] Successfully ingested report for user_id=%s", request.user_id)
        return {"status": "success", "message": f"Report ingested for user {request.user_id}"}
    except Exception as e:
        # logger.exception records the full traceback, not just the message.
        logger.exception("[ingest-report] Error ingesting report for user_id=%s", request.user_id)
        raise HTTPException(status_code=500, detail=str(e)) from e
@router.post("/generate-report")
async def generate_report(data: dict = Body(...)):
    """
    Generate a report with Gemini from arbitrary JSON data in the request body.

    Args:
        data: Free-form dict forwarded verbatim to the Gemini report helper.

    Returns:
        A JSON payload with the generated report text under ``"report"``.

    Raises:
        HTTPException: 500 when report generation fails.
    """
    # Log only the keys, not the values — payloads may be large or sensitive.
    logger.info("[generate-report] Received data for Gemini report generation: keys=%s", list(data.keys()))
    try:
        report = generate_report_with_gemini(data, GEMINI_API_KEY)
        logger.info("[generate-report] Successfully generated report")
        return {"report": report}
    except Exception as e:
        # logger.exception records the full traceback, not just the message.
        logger.exception("[generate-report] Error generating report")
        raise HTTPException(status_code=500, detail=str(e)) from e
@router.post("/fetch-pagespeed-data")
async def fetch_pagespeed_data(request: PageSpeedURLRequest):
    """
    Fetch PageSpeed Insights data for the provided target URL.

    Args:
        request: Carries the ``target_url`` to analyze.

    Returns:
        A JSON payload with the raw PageSpeed response under ``"data"``.

    Raises:
        HTTPException: 500 when the PageSpeed fetch fails.
    """
    logger.info("[fetch-pagespeed-data] Fetching PageSpeed for URL=%s", request.target_url)
    try:
        data = get_pagespeed_data(request.target_url, PAGESPEED_API_KEY)
        logger.info("[fetch-pagespeed-data] Successfully fetched PageSpeed data for URL=%s", request.target_url)
        return {"status": "success", "data": data}
    except Exception as e:
        # logger.exception records the full traceback, not just the message.
        logger.exception("[fetch-pagespeed-data] Error fetching PageSpeed data for URL=%s", request.target_url)
        raise HTTPException(status_code=500, detail=str(e)) from e
@router.get("/")
async def home():
    """
    Simple health check endpoint.

    Returns:
        A JSON object confirming the API is reachable.
    """
    logger.info("[home] Health check endpoint called")
    # BUG FIX: the original `{"API is UP and running"}` was a *set* literal,
    # which FastAPI serializes as a JSON array; return a proper object.
    return {"message": "API is UP and running"}
# ----- NEW ENDPOINTS FOR CHATBOT -----
@router.post("/new-chat")
async def new_chat(request: NewChatRequest):
    """
    Create a new chat session for the given user_id and return a chat_id.

    Expected JSON body: ``{"user_id": "<string>"}``

    Returns:
        A JSON payload with the freshly created ``chat_id``.

    Raises:
        HTTPException: 500 when the session cannot be created.
    """
    logger.info("[new-chat] Creating new chat session for user_id=%s", request.user_id)
    try:
        chat_id = create_new_chat(request.user_id)
        logger.info("[new-chat] Created chat_id=%s for user_id=%s", chat_id, request.user_id)
        return {"status": "success", "chat_id": chat_id}
    except Exception as e:
        # logger.exception records the full traceback, not just the message.
        logger.exception("[new-chat] Error creating chat session for user_id=%s", request.user_id)
        raise HTTPException(status_code=500, detail=str(e)) from e
@router.post("/chat")
async def chat_query(request: ChatQueryRequest):
    """
    Send a question to an existing chat session and return the AI's response.

    Expected JSON body:
        {
            "user_id": "<string>",
            "chat_id": "<string>",
            "query": "<string>"
        }

    Returns:
        A JSON payload with the answer text and any retriever source documents.

    Raises:
        HTTPException: 404 when no vectorstore exists for the user,
            500 for any other processing failure.
    """
    logger.info("[chat] Received query for user_id=%s, chat_id=%s", request.user_id, request.chat_id)
    try:
        # Reconstruct the ConversationalRetrievalChain for this user & chat_id.
        chain = get_chain_for_user(request.user_id, request.chat_id)
        # NOTE(review): assumes get_chain_for_user wires the Mongo-backed
        # message history onto chain.memory — confirm against chatbot.py.
        chat_history: MongoDBChatMessageHistory = chain.memory  # type: ignore
        # Optionally compress the history when it has grown too long.
        if summarize_messages(chat_history):
            logger.info("[chat] Summarized chat history for chat_id=%s", request.chat_id)
        # Record the user's message before invoking the chain.
        chat_history.add_user_message(request.query)
        logger.info('[chat] Added user message to history: "%s"', request.query)
        # Run the chain synchronously (non-streaming).
        result = chain({
            "question": request.query,
            "chat_history": chat_history.messages,
        })
        answer = result.get("answer", "").strip()
        logger.info('[chat] LLM returned answer="%s" for user_id=%s', answer, request.user_id)
        # Persist the AI's message only when the model actually produced one.
        if answer:
            chat_history.add_ai_message(answer)
            logger.info("[chat] Persisted AI message to history for chat_id=%s", request.chat_id)
        return {
            "status": "success",
            "answer": answer,
            "source_documents": result.get("source_documents", []),
        }
    except ValueError as ve:
        logger.warning("[chat] No vectorstore for user_id=%s: %s", request.user_id, ve)
        raise HTTPException(status_code=404, detail=str(ve)) from ve
    except Exception as e:
        # logger.exception records the full traceback, not just the message.
        logger.exception(
            "[chat] Error processing chat request for user_id=%s, chat_id=%s",
            request.user_id,
            request.chat_id,
        )
        raise HTTPException(status_code=500, detail=str(e)) from e