import logging
from pathlib import Path

import pandas as pd

from course_search.scraper.course_scraper import CourseScraper

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


class DataPipeline:
    def __init__(self):
        self.scraper = CourseScraper()

    def run_pipeline(self, save_path: str, force_scrape: bool = False) -> pd.DataFrame:
        """Run the data pipeline, with the option to reuse cached data."""
        try:
            data_path = Path(save_path)

            # Check if cached data exists
            if not force_scrape and data_path.exists():
                logger.info("Loading cached data...")
                return pd.read_pickle(data_path)

            # If there is no cached data, or force_scrape is True, scrape fresh data
            logger.info("Scraping course data...")
            df = self.scraper.scrape_all_courses()

            # Save the data for future runs
            logger.info(f"Saving data to {save_path}")
            df.to_pickle(save_path)

            return df

        except Exception as e:
            logger.error(f"Error in data pipeline: {str(e)}")
            raise
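

# --- Illustrative usage: a minimal sketch, not part of the pipeline itself ---
# Assumes the CourseScraper imported above exposes scrape_all_courses() returning a
# pandas DataFrame (as run_pipeline implies); the cache filename "courses.pkl" is a
# hypothetical example path.
if __name__ == "__main__":
    pipeline = DataPipeline()
    # First run scrapes and writes the pickle; later runs load it unless force_scrape=True.
    courses_df = pipeline.run_pipeline(save_path="courses.pkl", force_scrape=False)
    logger.info(f"Pipeline returned {len(courses_df)} rows")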