AiDeveloper1 committed on
Commit c02a806 · verified · 1 Parent(s): 74f9ec4

Delete main.py

Files changed (1)
  1. main.py +0 -65
main.py DELETED
@@ -1,65 +0,0 @@
- from fastapi import FastAPI, HTTPException, Request
- from fastapi.responses import HTMLResponse
- from fastapi.templating import Jinja2Templates
- from fastapi.staticfiles import StaticFiles
- from pydantic import HttpUrl
- from scraper import scrape_page
- from summarizer import summarize_text
- from rich_card_builder import build_rich_card
- import asyncio
- from urllib.parse import urlparse
- import logging
-
- # Set up logging
- logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
-
- app = FastAPI(title="Website Scraper API (Enhanced for RCS)")
-
- # Mount static files
- app.mount("/static", StaticFiles(directory="static"), name="static")
-
- # Set up Jinja2 templates
- templates = Jinja2Templates(directory="templates")
-
- @app.get("/scrape")
- async def crawl_website(url: HttpUrl):
-     """Crawl a website and return rich card JSON for up to 20 pages (demo)."""
-     try:
-         visited = set()
-         to_visit = {str(url)}
-         base_domain = urlparse(str(url)).netloc
-         results = []
-
-         while to_visit and len(visited) < 20:  # Crawl limited to 20 pages for the demo
-             current_url = to_visit.pop()
-             if current_url in visited:
-                 continue
-             visited.add(current_url)
-
-             logging.info(f"Scraping page: {current_url}")
-             page_data, new_links = await scrape_page(current_url, visited, base_domain)
-             if page_data:
-                 logging.info(f"Scraped data: {page_data}")
-                 summary = await summarize_text(page_data["text"], page_data["url"])
-                 rich_card = build_rich_card(page_data, summary)
-                 results.append(rich_card)
-
-             to_visit.update(new_links)
-             await asyncio.sleep(0.5)
-
-         logging.info(f"Final response: {results}")
-         return {"rich_cards": results}
-
-     except Exception as e:
-         logging.error(f"Scraping failed: {str(e)}")
-         raise HTTPException(status_code=500, detail=f"Scraping failed: {str(e)}")
-
- @app.get("/", response_class=HTMLResponse)
- async def serve_home(request: Request):
-     """Serve the frontend HTML page."""
-     return templates.TemplateResponse("index.html", {"request": request})
-
- if __name__ == "__main__":
-     logging.info("Starting FastAPI server on port 7860")
-     import uvicorn
-     uvicorn.run(app, host="0.0.0.0", port=7860)
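For context, the deleted main.py imported three project-local modules that are not touched by this commit: scraper, summarizer, and rich_card_builder. The sketch below is a minimal, hypothetical stand-in inferred only from the call sites in the deleted file (scrape_page returning a (page_data, new_links) pair where page_data carries "url" and "text", summarize_text taking the text plus its URL, build_rich_card combining page data and summary into a dict); the actual modules in this repository may look quite different.

# Hypothetical stubs inferred from main.py's call sites; not the project's real modules.

# scraper.py (assumed interface)
async def scrape_page(url: str, visited: set, base_domain: str):
    """Fetch one page and return (page_data, new_links) as main.py expects.

    page_data must provide at least "url" and "text"; new_links should hold
    same-domain URLs not already in `visited`. A real implementation would
    fetch and parse HTML (e.g. with httpx + BeautifulSoup).
    """
    page_data = {"url": url, "text": ""}
    new_links: set = set()
    return page_data, new_links

# summarizer.py (assumed interface)
async def summarize_text(text: str, url: str) -> str:
    """Return a short summary of the scraped text (placeholder: first 200 chars)."""
    return text[:200]

# rich_card_builder.py (assumed interface)
def build_rich_card(page_data: dict, summary: str) -> dict:
    """Assemble a rich-card payload from the page data and its summary."""
    return {"title": page_data["url"], "description": summary, "url": page_data["url"]}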