from contextlib import asynccontextmanager
from typing import Optional

import expiringdict
from fastapi import APIRouter, FastAPI
import httpx
from pydantic import BaseModel, Field
from playwright.async_api import async_playwright, Browser
import logging
import uvicorn

from scrap import scrap_patent_async, scrap_patent_bulk_async
from search import (APISearchResults, APIPatentResults, query_bing_search,
                    query_brave_search, query_ddg_search, query_google_patents)

logging.basicConfig(
    level=logging.INFO,
    format='[%(asctime)s][%(levelname)s][%(filename)s:%(lineno)d]: %(message)s',
    datefmt='%Y-%m-%d %H:%M:%S'
)

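# Playwright handles shared by the browser-based search backends.
# They are started and shut down by the FastAPI lifespan handler below.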
playwright = None
pw_browser: Optional[Browser] = None

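# Shared HTTP client used by the patent scraping endpoints.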
httpx_client = httpx.AsyncClient(timeout=30, limits=httpx.Limits(
    max_connections=30, max_keepalive_connections=20))


@asynccontextmanager
async def api_lifespan(app: FastAPI):
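    """Starts Playwright and a headless Chromium browser on startup and closes them on shutdown."""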
    global playwright, pw_browser
    playwright = await async_playwright().start()
    pw_browser = await playwright.chromium.launch(headless=True)

    yield

    await pw_browser.close()
    await playwright.stop()


app = FastAPI(lifespan=api_lifespan)

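# Records backends that recently failed (e.g. got rate-limited); entries expire after 15 minutes.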
backend_status = expiringdict.ExpiringDict(max_len=5, max_age_seconds=15*60)

scrap_router = APIRouter(prefix="/scrap", tags=["scraping"])


@app.get('/')
async def status():
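    """Returns the API status along with the recorded status of each search backend."""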
    backend_keys = [k for k, _ in backend_status.items()]
    backend_status_dict = {k: backend_status.get(k) for k in backend_keys}
    return {"status": "running", "backend_status": backend_status_dict}


class APISearchParams(BaseModel):
    queries: list[str] = Field(...,
                               description="The list of queries to search for")
    n_results: int = Field(
        10, description="Number of results to return for each query. Valid values are 10, 25, 50 and 100")


@app.post("/search_scholar")
async def query_google_scholar(params: APISearchParams):
    """Queries Google Scholar for the specified queries. Not implemented yet."""
    return {"error": "Unimplemented"}


@app.post("/search_patents")
async def search_patents(params: APISearchParams) -> APIPatentResults:
    """Searches Google Patents for the specified queries and returns the found documents."""
    results = []
    last_exception: Optional[Exception] = None
    for q in params.queries:
        logging.info(f"Searching Google Patents with query `{q}`")
        try:
            res = await query_google_patents(pw_browser, q, params.n_results)
            results.extend(res)
        except Exception as e:
            last_exception = e
            backend_status["gpatents"] = "rate-limited"
            logging.error(
                f"Failed to query Google Patents with query `{q}`: {e}")
    return APIPatentResults(results=results, error=str(last_exception) if len(results) == 0 and last_exception else None)


@app.post("/search_brave")
async def search_brave(params: APISearchParams) -> APISearchResults:
    """Searches Brave Search for the specified queries and returns the found documents."""
    results = []
    last_exception: Optional[Exception] = None
    for q in params.queries:
        logging.info(f"Searching Brave search with query `{q}`")
        try:
            res = await query_brave_search(pw_browser, q, params.n_results)
            results.extend(res)
        except Exception as e:
            last_exception = e
            backend_status["brave"] = "rate-limited"
            logging.error(
                f"Failed to query Brave search with query `{q}`: {e}")

    return APISearchResults(results=results, error=str(last_exception) if len(results) == 0 and last_exception else None)


@app.post("/search_bing")
async def search_bing(params: APISearchParams) -> APISearchResults:
    """Searches Bing for the specified queries and returns the found documents."""
    results = []
    last_exception: Optional[Exception] = None
    for q in params.queries:
        logging.info(f"Searching Bing search with query `{q}`")
        try:
            res = await query_bing_search(pw_browser, q, params.n_results)
            results.extend(res)
        except Exception as e:
            last_exception = e
            backend_status["bing"] = "rate-limited"
            logging.error(
                f"Failed to query Bing search with query `{q}`: {e}")

    return APISearchResults(results=results, error=str(last_exception) if len(results) == 0 and last_exception else None)


@app.post("/search_duck")
async def search_duck(params: APISearchParams) -> APISearchResults:
    """Searches DuckDuckGo for the specified queries and returns the found documents."""
    results = []
    last_exception: Optional[Exception] = None

    for q in params.queries:
        logging.info(f"Querying DDG with query: `{q}`")
        try:
            res = await query_ddg_search(q, params.n_results)
            results.extend(res)
        except Exception as e:
            last_exception = e
            backend_status["duckduckgo"] = "rate-limited"
            logging.error(f"Failed to query DDG with query `{q}`: {e}")

    return APISearchResults(results=results, error=str(last_exception) if len(results) == 0 and last_exception else None)


@app.post("/search")
async def search(params: APISearchParams):
    """Attempts to search the specified queries using ALL backends"""
    results = []

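    # For each query, try the backends in order (DuckDuckGo, then Brave, then Bing)
    # and move on to the next query as soon as one of them returns results.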
    for q in params.queries:
        try:
            logging.info(f"Querying DDG with query: `{q}`")
            res = await query_ddg_search(q, params.n_results)
            results.extend(res)
            continue
        except Exception as e:
            logging.error(f"Failed to query DDG with query `{q}`: {e}")
            logging.info("Trying with next browser backend.")

        try:
            logging.info(f"Querying Brave Search with query: `{q}`")
            res = await query_brave_search(pw_browser, q, params.n_results)
            results.extend(res)
            continue
        except Exception as e:
            logging.error(
                f"Failed to query Brave Search with query `{q}`: {e}")
            logging.info("Trying with next browser backend.")

        try:
            logging.info(f"Querying Bing with query: `{q}`")
            res = await query_bing_search(pw_browser, q, params.n_results)
            results.extend(res)
            continue
        except Exception as e:
            logging.error(f"Failed to query Bing search with query `{q}`: {e}")
            logging.info("Trying with next browser backend.")

    if len(results) == 0:
        return APISearchResults(results=[], error="All backends are rate-limited.")

    return APISearchResults(results=results, error=None)


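# Patent scraping endpoints, mounted under the /scrap prefix.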
@scrap_router.get("/scrap_patent/{patent_id}")
async def scrap_patent(patent_id: str):
    """Scrapes the specified patent from Google Patents."""
    patent = await scrap_patent_async(httpx_client, f"https://patents.google.com/patent/{patent_id}/en")
    return patent


class ScrapPatentsRequest(BaseModel):
    """Request model for scraping multiple patents."""
    patent_ids: list[str] = Field(...,
                                  description="List of patent IDs to scrape")


@scrap_router.post("/scrap_patents_bulk")
async def scrap_patents(params: ScrapPatentsRequest):
    """Scrapes multiple patents from Google Patents."""
    patents = await scrap_patent_bulk_async(httpx_client, [
        f"https://patents.google.com/patent/{pid}/en" for pid in params.patent_ids])
    return patents


app.include_router(scrap_router)

if __name__ == "__main__":
    uvicorn.run(app, host="0.0.0.0", port=7860)