Arafath10 commited on
Commit
e84fb4f
1 Parent(s): bac313f

Update main.py

Browse files
Files changed (1) hide show
  1. main.py +6 -15
main.py CHANGED
@@ -1,5 +1,4 @@
1
- from fastapi import FastAPI, File, UploadFile, HTTPException
2
- import nest_asyncio
3
  import asyncio
4
  from playwright.async_api import async_playwright
5
  from fastapi.responses import HTMLResponse
@@ -10,8 +9,6 @@ from pydantic import BaseModel
10
  from io import StringIO
11
  import os
12
 
13
-
14
-
15
  app = FastAPI()
16
  app.add_middleware(
17
  CORSMiddleware,
@@ -21,11 +18,6 @@ app.add_middleware(
21
  allow_headers=["*"],
22
  )
23
 
24
-
25
-
26
- # Apply nest_asyncio to allow nested asyncio.run() calls
27
- nest_asyncio.apply()
28
-
29
  async def scrape_links():
30
  async with async_playwright() as p:
31
  browser = await p.chromium.launch(headless=True)
@@ -58,12 +50,11 @@ async def scrape_links():
58
  await browser.close()
59
  return result
60
 
61
-
62
-
63
  @app.post("/get_webscrapet_data")
64
- async def get_webscrapet_data(url):
 
65
  # Run the scraping function
66
- results = asyncio.run(scrape_links())
67
- print(results)
68
  return results
69
-
 
 
1
+ from fastapi import FastAPI, HTTPException
 
2
  import asyncio
3
  from playwright.async_api import async_playwright
4
  from fastapi.responses import HTMLResponse
 
9
  from io import StringIO
10
  import os
11
 
 
 
12
  app = FastAPI()
13
  app.add_middleware(
14
  CORSMiddleware,
 
18
  allow_headers=["*"],
19
  )
20
 
 
 
 
 
 
21
  async def scrape_links():
22
  async with async_playwright() as p:
23
  browser = await p.chromium.launch(headless=True)
 
50
  await browser.close()
51
  return result
52
 
 
 
53
@app.post("/get_webscrapet_data")
async def get_webscrapet_data(url: str):
    """Run the Playwright scraper and return its result.

    Parameters:
        url: accepted for API compatibility but currently unused —
             ``scrape_links()`` takes no arguments and targets its own
             hard-coded page. NOTE(review): consider threading ``url``
             through to ``scrape_links`` so the endpoint's parameter
             actually controls the scrape target.

    Returns:
        Whatever ``scrape_links()`` produces (built elsewhere in this file).

    Raises:
        HTTPException: 500 carrying the original error message if the
            scrape fails for any reason.
    """
    try:
        # Await directly: we are already inside FastAPI's event loop, so the
        # previous asyncio.run(...) call would raise RuntimeError here.
        return await scrape_links()
    except HTTPException:
        # Let deliberate HTTP errors pass through unwrapped.
        raise
    except Exception as e:
        # Chain the original exception so the traceback keeps the real cause.
        raise HTTPException(status_code=500, detail=str(e)) from e