andrewsunanda committed
Commit feed263 · 1 Parent(s): 199c1bb

Update eda.py

Files changed (1)
  1. eda.py +4 -27
eda.py CHANGED
@@ -27,47 +27,24 @@ def run():
 
 
     st.markdown('---')
-    import os
-    import torch
-    import torchvision.transforms as transforms
-    from torch.utils.data import DataLoader
-    from datasets import load_dataset
-
+
     # Define the path to the dataset
-    dataset_path = 'andrewsunanda/fast_food_image_classification'
 
     # Load the dataset from Hugging Face
-    dataset = load_dataset(dataset_path)
+    from datasets import load_dataset
+    dataset = load_dataset("andrewsunanda/fast_food_image_classification")
 
     # Define the batch size and image size
     batch_size = 256
     img_size = (64, 64)
 
     # Define the paths to the train, validation, and test folders
+    dataset_path = "andrewsunanda/fast_food_image_classification"
     train_path = os.path.join(dataset_path, 'Train')
     valid_path = os.path.join(dataset_path, 'Valid')
     test_path = os.path.join(dataset_path, 'Test')
 
-    # Define the transforms for the dataset
-    transform = transforms.Compose([
-        transforms.Resize(img_size),
-        transforms.ToTensor(),
-    ])
-
-    # Load the training dataset
-    train_dataset = dataset['train']
-    train_dataset = train_dataset.map(lambda x: {'image': transform(x['image']), 'label': x['label']})
-    train_loader = DataLoader(train_dataset, batch_size=batch_size, shuffle=True)
-
-    # Load the validation dataset
-    valid_dataset = dataset['validation']
-    valid_dataset = valid_dataset.map(lambda x: {'image': transform(x['image']), 'label': x['label']})
-    valid_loader = DataLoader(valid_dataset, batch_size=batch_size, shuffle=False)
 
-    # Load the testing dataset
-    test_dataset = dataset['test']
-    test_dataset = test_dataset.map(lambda x: {'image': transform(x['image']), 'label': x['label']})
-    test_loader = DataLoader(test_dataset, batch_size=batch_size, shuffle=False)
     # Create data generators for training, validation, and testing
     train_datagen = ImageDataGenerator(
         rescale=1./255,
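
After this commit the file keeps only the Keras `ImageDataGenerator` pipeline plus a bare `load_dataset` call, dropping the torchvision transforms and PyTorch `DataLoader`s. For context, here is a minimal sketch of how the dataset returned by `load_dataset` could be inspected in the Streamlit EDA page; it assumes a `train` split with the `image`/`label` columns that the removed code referenced, and that `label` is a `ClassLabel` feature. This is illustrative only, not part of the commit.

```python
import streamlit as st
from datasets import load_dataset

# Load the same Hugging Face dataset used in the commit
dataset = load_dataset("andrewsunanda/fast_food_image_classification")
train = dataset["train"]  # assumes a 'train' split, as the removed code implied

# If 'label' is a ClassLabel feature, .names maps label indices to class names
class_names = train.features["label"].names
st.write(f"{len(train)} training images across {len(class_names)} classes")

# Show one sample image with its human-readable class name
sample = train[0]  # 'image' decodes to a PIL.Image, 'label' is an int index
st.image(sample["image"], caption=class_names[sample["label"]], width=256)
```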