"""3GPP Document Finder.

FastAPI service that locates 3GPP TSG documents on the public 3GPP FTP
mirror (https://www.3gpp.org/ftp) from their TSG document IDs, caching
resolved URLs in a local JSON index.
"""
import requests
from bs4 import BeautifulSoup
import json
import os
import time
import warnings
from fastapi import FastAPI, HTTPException
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import FileResponse
from fastapi.staticfiles import StaticFiles
from pydantic import BaseModel
from typing import Dict, List, Optional
import uvicorn

# NOTE(review): blanket-ignoring every warning hides more than the intended
# urllib3 InsecureRequestWarning (triggered by verify=False below) — consider
# narrowing to that single category.
warnings.filterwarnings("ignore")

app = FastAPI(title="3GPP Document Finder API", description="API to find 3GPP documents based on TSG document IDs")
app.mount("/static", StaticFiles(directory="static"), name="static")

# NOTE(review): wildcard origins together with allow_credentials=True is
# disallowed by the CORS spec; browsers/Starlette will not send credentials
# for "*". List explicit origins if credentialed requests are required.
origins = [
    "*",
]
app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)


class DocRequest(BaseModel):
    """Request body for /find: a single TSG document ID."""
    tsg_doc_id: str


class DocResponse(BaseModel):
    """Response for /find: the resolved URL plus lookup wall time (s)."""
    tsg_doc_id: str
    url: str
    search_time: float


class BatchDocRequest(BaseModel):
    """Request body for /batch: several TSG document IDs."""
    tsg_doc_ids: List[str]


class BatchDocResponse(BaseModel):
    """Response for /batch: found id->url map, unfound ids, wall time (s)."""
    results: Dict[str, str]
    missing: List[str]
    search_time: float


class TsgDocFinder:
    """Resolves TSG document IDs to download URLs on the 3GPP FTP site.

    Successful lookups are persisted to ``indexed_docs.json`` so repeat
    queries skip the (slow) directory crawl.
    """

    def __init__(self):
        self.main_ftp_url = "https://www.3gpp.org/ftp"
        self.indexer_file = "indexed_docs.json"
        self.indexer = self.load_indexer()

    def load_indexer(self):
        """Load existing index if available, else return an empty one."""
        if os.path.exists(self.indexer_file):
            with open(self.indexer_file, "r", encoding="utf-8") as f:
                return json.load(f)
        return {}

    def save_indexer(self):
        """Persist the in-memory index to disk."""
        with open(self.indexer_file, "w", encoding="utf-8") as f:
            json.dump(self.indexer, f, indent=4, ensure_ascii=False)

    def get_workgroup(self, tsg_doc):
        """Parse a TSG document ID into (main_tsg, workgroup, doc_id).

        ``C…`` IDs map to tsg_ct, ``S…`` IDs to tsg_sa; the second character,
        when numeric, selects the working group (e.g. "C1…" -> "WG1").
        Returns (None, None, None) when the ID cannot be parsed.
        """
        # Guard: IDs shorter than 2 chars previously raised IndexError here
        # instead of reporting a parse failure.
        if len(tsg_doc) < 2:
            return None, None, None
        main_tsg = "tsg_ct" if tsg_doc[0] == "C" else "tsg_sa" if tsg_doc[0] == "S" else None
        if main_tsg is None:
            return None, None, None
        workgroup = f"WG{int(tsg_doc[1])}" if tsg_doc[1].isnumeric() else main_tsg.upper()
        return main_tsg, workgroup, tsg_doc

    def find_workgroup_url(self, main_tsg, workgroup):
        """Find the URL for the specific workgroup.

        Scrapes the TSG's directory listing for a link whose text contains
        the workgroup name; falls back to the guessed path on any failure.
        """
        fallback = f"{self.main_ftp_url}/{main_tsg}/{workgroup}"
        try:
            # SECURITY(review): verify=False disables TLS certificate checks;
            # kept for parity with the rest of the module, but worth fixing.
            response = requests.get(f"{self.main_ftp_url}/{main_tsg}", verify=False, timeout=10)
        except Exception as e:
            # A transient network error should degrade to the guessed URL
            # rather than bubble a 500 out of the endpoint.
            print(f"Error accessing {self.main_ftp_url}/{main_tsg}: {e}")
            return fallback
        soup = BeautifulSoup(response.text, 'html.parser')
        for item in soup.find_all("tr"):
            link = item.find("a")
            if link and workgroup in link.get_text():
                return f"{self.main_ftp_url}/{main_tsg}/{link.get_text()}"
        return fallback

    def get_docs_from_url(self, url):
        """Get list of documents/directories (link texts) from a URL."""
        try:
            response = requests.get(url, verify=False, timeout=10)
            soup = BeautifulSoup(response.text, "html.parser")
            return [item.get_text() for item in soup.select("tr td a")]
        except Exception as e:
            print(f"Error accessing {url}: {e}")
            return []

    def _matches(self, filename, tsg_doc, original_id):
        """True when a listed filename looks like the requested document.

        BUGFIX: the original compared the mixed-case ``tsg_doc`` against
        ``filename.lower()``, so the case-insensitive branch could never
        match (the ID always starts with uppercase C/S). Lowercase both.
        """
        return tsg_doc.lower() in filename.lower() or original_id in filename

    def search_document(self, tsg_doc_id):
        """Search for a specific document by its ID.

        Returns the document URL on success, or a human-readable error
        string ("Could not …" / "… not found") on failure — callers match
        on those substrings.
        """
        original_id = tsg_doc_id

        # Check if already indexed
        if original_id in self.indexer:
            return self.indexer[original_id]

        # Parse the document ID
        main_tsg, workgroup, tsg_doc = self.get_workgroup(tsg_doc_id)
        if not main_tsg:
            return f"Could not parse document ID: {tsg_doc_id}"

        print(f"Searching for {original_id} (parsed as {tsg_doc}) in {main_tsg}/{workgroup}...")

        # Find the workgroup URL
        wg_url = self.find_workgroup_url(main_tsg, workgroup)
        if not wg_url:
            return f"Could not find workgroup for {tsg_doc_id}"

        # Search in the workgroup's meeting directories
        meeting_folders = self.get_docs_from_url(wg_url)
        for folder in meeting_folders:
            meeting_url = f"{wg_url}/{folder}"
            meeting_contents = self.get_docs_from_url(meeting_url)
            if "Docs" in meeting_contents:
                docs_url = f"{meeting_url}/Docs"
                print(f"Checking {docs_url}...")
                files = self.get_docs_from_url(docs_url)

                # Check for the document in the main Docs folder
                for file in files:
                    if self._matches(file, tsg_doc, original_id):
                        doc_url = f"{docs_url}/{file}"
                        self.indexer[original_id] = doc_url
                        self.save_indexer()
                        return doc_url

                # Check in ZIP subfolder if it exists
                if "ZIP" in files:
                    zip_url = f"{docs_url}/ZIP"
                    print(f"Checking {zip_url}...")
                    zip_files = self.get_docs_from_url(zip_url)
                    for file in zip_files:
                        if self._matches(file, tsg_doc, original_id):
                            doc_url = f"{zip_url}/{file}"
                            self.indexer[original_id] = doc_url
                            self.save_indexer()
                            return doc_url

        return f"Document {tsg_doc_id} not found"


# Create a global instance of the finder
finder = TsgDocFinder()


@app.get("/")
async def main_menu():
    """Serve the single-page front end."""
    return FileResponse(os.path.join("templates", "index.html"))


@app.post("/find", response_model=DocResponse)
def find_document(request: DocRequest):
    """Resolve one document ID; 404 with the error string when unresolved."""
    start_time = time.time()
    result = finder.search_document(request.tsg_doc_id)
    if "not found" not in result and "Could not" not in result:
        return DocResponse(
            tsg_doc_id=request.tsg_doc_id,
            url=result,
            search_time=time.time() - start_time,
        )
    else:
        raise HTTPException(status_code=404, detail=result)


@app.post("/batch", response_model=BatchDocResponse)
def find_documents_batch(request: BatchDocRequest):
    """Resolve many IDs; unresolved ones are reported in ``missing``."""
    start_time = time.time()
    results = {}
    missing = []
    for doc_id in request.tsg_doc_ids:
        result = finder.search_document(doc_id)
        if "not found" not in result and "Could not" not in result:
            results[doc_id] = result
        else:
            missing.append(doc_id)
    return BatchDocResponse(
        results=results,
        missing=missing,
        search_time=time.time() - start_time,
    )


@app.get("/indexed", response_model=List[str])
def get_indexed_documents():
    """List every document ID already cached in the local index."""
    return list(finder.indexer.keys())


if __name__ == "__main__":
    # Allow running this module directly (uvicorn was imported but unused).
    uvicorn.run(app, host="0.0.0.0", port=8000)