Spaces:
Sleeping
Sleeping
File size: 1,186 Bytes
2ed2129 821284f 2ed2129 821284f 2ed2129 821284f 2ed2129 821284f 2ed2129 821284f 2ed2129 821284f 2ed2129 821284f 2ed2129 821284f 2ed2129 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 |
import pandas as pd
from pathlib import Path
import logging
from course_search.scraper.course_scraper import CourseScraper
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


class DataPipeline:
    """Fetch course data via CourseScraper, with a pickle file as a cache."""

    def __init__(self) -> None:
        # Scraper used only when there is no usable cached pickle.
        self.scraper = CourseScraper()

    def run_pipeline(self, save_path: str, force_scrape: bool = False) -> pd.DataFrame:
        """Return the course DataFrame, preferring cached data when available.

        Args:
            save_path: Path of the pickle file used both to read the cache
                and to persist freshly scraped data.
            force_scrape: When True, ignore any existing cache and re-scrape.

        Returns:
            The course data as a pandas DataFrame.

        Raises:
            Exception: Any error from scraping or (de)serialization is logged
                with its traceback and re-raised to the caller.
        """
        data_path = Path(save_path)
        try:
            # Cache hit: skip scraping entirely.
            if not force_scrape and data_path.exists():
                logger.info("Loading cached data...")
                return pd.read_pickle(data_path)

            # No cached data (or force_scrape=True): scrape fresh data.
            logger.info("Scraping course data...")
            df = self.scraper.scrape_all_courses()

            # Ensure the target directory exists before writing the cache;
            # to_pickle does not create intermediate directories itself.
            data_path.parent.mkdir(parents=True, exist_ok=True)
            logger.info("Saving data to %s", data_path)
            df.to_pickle(data_path)
            return df
        except Exception:
            # exception() (unlike error()) records the full traceback.
            logger.exception("Error in data pipeline")
            raise