# Source: Hugging Face Space file app/main.py (commit 0e87c05, author mgbam).
"""
CryptoSentinel AI — High-performance FastAPI application.
Features:
- Fully asynchronous architecture using modern FastAPI lifespan and background tasks.
- Integrates a robust, async PriceFetcher with multi-API fallback.
- Provides real-time sentiment analysis via an efficient, non-polling SSE stream.
- Centralized state management for testability and clarity.
"""
import asyncio
import json
from contextlib import asynccontextmanager
import httpx
from fastapi import FastAPI, Request, BackgroundTasks
from fastapi.responses import HTMLResponse, StreamingResponse
from fastapi.templating import Jinja2Templates
from pydantic import BaseModel, constr
from .price_fetcher import PriceFetcher
from .sentiment import SentimentAnalyzer
# --- Configuration & Models ---
class SentimentRequest(BaseModel):
    """Pydantic model for validating sentiment analysis requests."""
    # Non-empty text to analyze; surrounding whitespace is stripped before
    # the min_length check, so an all-whitespace body is rejected with 422.
    text: constr(strip_whitespace=True, min_length=1)
# --- Application Lifespan Management ---
@asynccontextmanager
async def lifespan(app: FastAPI):
    """
    Manage application startup and shutdown events. This is the modern
    replacement for @app.on_event("startup") and "shutdown".

    Startup: create one shared httpx client, build the stateful services,
    stash them on ``app.state``, and launch the periodic price updater.
    Shutdown: cancel the updater and wait for it to acknowledge.
    """
    # -- Startup --
    # One shared AsyncClient for the app's lifetime reuses connections
    # across all price fetches.
    async with httpx.AsyncClient() as client:
        price_fetcher = PriceFetcher(client=client, coins=["bitcoin", "ethereum", "dogecoin"])
        sentiment_analyzer = SentimentAnalyzer()

        # Expose services via app.state so routes reach them through
        # request.app.state (centralized state, easier to test).
        app.state.price_fetcher = price_fetcher
        app.state.sentiment_analyzer = sentiment_analyzer
        app.state.request_counter = 0

        # Cancellable background task for periodic price updates.
        price_update_task = asyncio.create_task(
            run_periodic_updates(price_fetcher, interval_seconds=10)
        )
        print("🚀 CryptoSentinel AI started successfully.")
        try:
            yield  # The application is now running
        finally:
            # -- Shutdown --
            # try/finally guarantees the updater task is cancelled even if
            # an exception is thrown into the lifespan, so it never leaks.
            print("⏳ Shutting down background tasks...")
            price_update_task.cancel()
            try:
                await price_update_task
            except asyncio.CancelledError:
                print("Price update task cancelled successfully.")
            print("✅ Shutdown complete.")
async def run_periodic_updates(fetcher: "PriceFetcher", interval_seconds: int) -> None:
    """
    Run ``fetcher.update_prices_async()`` forever, once per ``interval_seconds``.

    A failed update is logged and retried on the next cycle instead of
    killing the task: the original loop died permanently on the first
    network/API error, silently freezing all price data. ``CancelledError``
    is a ``BaseException`` and therefore still propagates, so the lifespan
    shutdown can cancel this task cleanly.
    """
    while True:
        try:
            await fetcher.update_prices_async()
        except Exception as exc:
            # Best-effort updater: survive transient API failures.
            print(f"Price update failed (will retry): {exc!r}")
        await asyncio.sleep(interval_seconds)
# --- FastAPI App Initialization ---
# Jinja2 environment; template files live under app/templates/.
templates = Jinja2Templates(directory="app/templates")
# The lifespan context manager owns startup/shutdown of all services.
app = FastAPI(title="CryptoSentinel AI", lifespan=lifespan)
# --- Routes ---
@app.get("/", response_class=HTMLResponse)
async def index(request: Request):
    """Serve the single-page application shell."""
    context = {"request": request}
    return templates.TemplateResponse("index.html", context)
@app.get("/api/prices", response_class=HTMLResponse)
async def get_prices_fragment(request: Request):
    """
    Return an HTML fragment with the latest crypto prices.

    Designed to be called by HTMX. Reads the in-memory snapshot kept
    fresh by the background updater, so no network I/O happens here.
    """
    price_fetcher: PriceFetcher = request.app.state.price_fetcher
    prices = price_fetcher.get_current_prices()

    def _render(coin: str, price) -> str:
        # Non-numeric values (e.g. error placeholders) pass through as-is.
        price_str = f"${price:,.2f}" if isinstance(price, (int, float)) else price
        return f"<div><strong>{coin.capitalize()}:</strong> {price_str}</div>"

    # str.join instead of repeated += — avoids quadratic string building.
    html_fragment = "".join(_render(coin, price) for coin, price in prices.items())
    return HTMLResponse(content=html_fragment)
@app.post("/api/sentiment")
async def analyze_sentiment(
    payload: SentimentRequest,
    request: Request,
    background_tasks: BackgroundTasks
):
    """
    Validate incoming text and queue it for sentiment analysis.

    The heavy model inference runs as a background task, so this endpoint
    acknowledges immediately instead of blocking the caller.
    """
    state = request.app.state
    analyzer: SentimentAnalyzer = state.sentiment_analyzer

    # Monotonically increasing counter doubles as the SSE event id.
    state.request_counter += 1
    job_id = state.request_counter

    background_tasks.add_task(analyzer.compute_and_publish, payload.text, job_id)
    return {"status": "queued", "request_id": job_id}
@app.get("/api/sentiment/stream")
async def sentiment_stream(request: Request):
    """
    Server-Sent Events (SSE) endpoint.

    Holds a long-lived connection open, blocking on the analyzer's result
    queue and pushing each finished sentiment result to the client as a
    ``sentiment_update`` event.
    """
    analyzer: SentimentAnalyzer = request.app.state.sentiment_analyzer

    async def event_generator():
        try:
            while True:
                # Blocks efficiently until the analyzer publishes a result.
                result = await analyzer.get_next_result()
                body = json.dumps(result)
                yield f"id:{result['id']}\nevent: sentiment_update\ndata:{body}\n\n"
        except asyncio.CancelledError:
            # Raised when the client drops the connection.
            print("Client disconnected from SSE stream.")

    return StreamingResponse(event_generator(), media_type="text/event-stream")