import json
import logging
from io import BytesIO
from pathlib import Path
from typing import Dict, List

import requests
from datasets import Dataset
from PIL import Image

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


class DatasetProcessor:
    def __init__(self, raw_data_path: str = 'dataset/raw_data.json'):
        self.raw_data_path = Path(raw_data_path)

    def load_raw_data(self) -> List[Dict]:
        """Load scraped data from JSON"""
        with open(self.raw_data_path) as f:
            return json.load(f)

    def validate_image(self, url: str) -> bool:
        """Check if image URL is valid and image can be loaded"""
        try:
            response = requests.get(url, timeout=10)
            response.raise_for_status()
            img = Image.open(BytesIO(response.content))
            img.verify()  # parse the payload to catch truncated/corrupt images
            return True
        except (requests.RequestException, OSError):
            # Pillow raises UnidentifiedImageError (an OSError subclass) on bad data
            return False
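
    # Note: validate_image downloads every file in full. For large scrapes a
    # cheaper (if less thorough) pre-check is a HEAD request, e.g.:
    #
    #     resp = requests.head(url, timeout=10, allow_redirects=True)
    #     ok = resp.ok and resp.headers.get("Content-Type", "").startswith("image/")
    #
    # This is a sketch only: servers may reject HEAD or omit Content-Type.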

    def process_data(self) -> Dataset:
        """Process raw data into HuggingFace dataset"""
        raw_data = self.load_raw_data()
        # Filter valid images and restructure data
        processed_data = {
            'image_url': [],
            'category': [],
            'metadata': []
        }
        for entry in raw_data:
            if self.validate_image(entry['image_url']):
                processed_data['image_url'].append(entry['image_url'])
                processed_data['category'].append(entry['category'])
                processed_data['metadata'].append(entry['metadata'])
        # Create HuggingFace dataset
        dataset = Dataset.from_dict(processed_data)
        logger.info(f"Created dataset with {len(dataset)} entries")
        return dataset

    def save_to_hub(self, dataset: Dataset, repo_id: str):
        """Push dataset to HuggingFace Hub (requires an authenticated token, e.g. via `huggingface-cli login`)"""
        dataset.push_to_hub(repo_id)
        logger.info(f"Pushed dataset to {repo_id}")


if __name__ == "__main__":
    processor = DatasetProcessor()
    dataset = processor.process_data()
    # Uncomment to push to hub:
    # processor.save_to_hub(dataset, "your-username/stalker-dataset")
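
# Sketch of the consumer side, assuming the dataset was pushed under the
# placeholder repo id above:
#
#     from datasets import load_dataset
#     ds = load_dataset("your-username/stalker-dataset", split="train")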