import requests
from bs4 import BeautifulSoup
from fastapi import FastAPI, HTTPException
from fastapi.middleware.cors import CORSMiddleware
from scraper import Scraper


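# Debug helper: print every installed package at startup (presumably so the
# deployment environment can be checked from the logs).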
try:
    from pip._internal.operations import freeze
except ImportError:  # pip < 10.0
    from pip.operations import freeze

pkgs = freeze.freeze()
for pkg in pkgs:
    print(pkg)

app = FastAPI()
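# Allow cross-origin requests from any origin so browser front-ends can call
# the API directly (wide-open settings; tighten for production).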
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

def get_links(soup):
    # Collect the href attribute of every <a> tag on the page.
    links = []
    for link in soup.find_all('a'):
        href = link.get('href')
        links.append(href)
    return links


def get_text_content(soup):
    # Gather the text of common text-bearing tags (headings, paragraphs, spans).
    text_elements = []
    for tag in ['p', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'span']:
        elements = soup.find_all(tag)
        for element in elements:
            text_elements.append(element.get_text())
    return text_elements

def get_title(soup):
    # Return the text of the page's <title> tag.
    title = soup.find('title').get_text()
    return title

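# Fetch the page, parse it with BeautifulSoup, and return the title, links,
# and text content; fall back to Scraper.scrape() when no links are found.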
@app.get("/get_scraped_data")
async def get_data(url: str):
    headers = {'User-Agent': 'Mozilla/5.0'}
    response = requests.get(url, headers=headers)
    soup = BeautifulSoup(response.content, 'html.parser')

    title = Scraper.get_title(soup)
    links = Scraper.get_links(soup)
    text_content = Scraper.get_text_content(soup)

    if not links:
        print("Running alternative scraper")

        try:
            data = await Scraper.scrape(url)
            return data
        except Exception as e:
            raise HTTPException(status_code=500, detail=str(e))
    else:
        return {"title": title, "URL": links, "Content": text_content}
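
# Example usage (a sketch; assumes this file is saved as main.py and uvicorn
# is installed):
#
#   uvicorn main:app --reload
#   curl "http://127.0.0.1:8000/get_scraped_data?url=https://example.com"
#
# The endpoint responds with JSON of the form
# {"title": ..., "URL": [...], "Content": [...]} or, when no links are found,
# whatever Scraper.scrape() returns.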