andrewsunanda committed on
Commit 4045f11 · 1 Parent(s): dfee1e5

Update eda.py

Files changed (1)
  1. eda.py +38 -6
eda.py CHANGED
@@ -27,15 +27,47 @@ def run():
 
 
     st.markdown('---')
-    main_path= 'D:\\tugas_andrew_DS\\phase_2\\m2\\food'
+    import os
+    import torch
+    import torchvision.transforms as transforms
+    from torch.utils.data import DataLoader
+    from datasets import load_dataset
 
-    # Define batch size and image size
+    # Define the path to the dataset
+    dataset_path = 'andrewsunanda/fast_food_image_classification'
+
+    # Load the dataset from Hugging Face
+    dataset = load_dataset(dataset_path)
+
+    # Define the batch size and image size
     batch_size = 256
     img_size = (64, 64)
-    # Define paths to the data folders
-    train_path = os.path.join(main_path, 'Train')
-    valid_path = os.path.join(main_path, 'Valid')
-    test_path = os.path.join(main_path, 'Test')
+
+    # Define the paths to the train, validation, and test folders
+    train_path = os.path.join(dataset_path, 'Train')
+    valid_path = os.path.join(dataset_path, 'Valid')
+    test_path = os.path.join(dataset_path, 'Test')
+
+    # Define the transforms for the dataset
+    transform = transforms.Compose([
+        transforms.Resize(img_size),
+        transforms.ToTensor(),
+    ])
+
+    # Load the training dataset
+    train_dataset = dataset['train']
+    train_dataset = train_dataset.map(lambda x: {'image': transform(x['image']), 'label': x['label']})
+    train_loader = DataLoader(train_dataset, batch_size=batch_size, shuffle=True)
+
+    # Load the validation dataset
+    valid_dataset = dataset['validation']
+    valid_dataset = valid_dataset.map(lambda x: {'image': transform(x['image']), 'label': x['label']})
+    valid_loader = DataLoader(valid_dataset, batch_size=batch_size, shuffle=False)
+
+    # Load the testing dataset
+    test_dataset = dataset['test']
+    test_dataset = test_dataset.map(lambda x: {'image': transform(x['image']), 'label': x['label']})
+    test_loader = DataLoader(test_dataset, batch_size=batch_size, shuffle=False)
     # Create data generators for training, validation, and testing
     train_datagen = ImageDataGenerator(
        rescale=1./255,
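
Usage note (not part of the commit): the new code applies the torchvision transform eagerly with Dataset.map before handing each split to a DataLoader. A lighter-weight pattern with the datasets library is to attach the transform lazily via with_transform and stack batches in a small collate function. The sketch below is only an illustration of that pattern; it assumes the hub repo andrewsunanda/fast_food_image_classification exposes a 'train' split with 'image' and 'label' columns, and the names would need adjusting to the actual dataset.

import torch
import torchvision.transforms as transforms
from torch.utils.data import DataLoader
from datasets import load_dataset

batch_size = 256
img_size = (64, 64)

# Resize and convert PIL images to float tensors in [0, 1]
transform = transforms.Compose([
    transforms.Resize(img_size),
    transforms.ToTensor(),
])

def apply_transform(examples):
    # Runs lazily on each accessed batch; 'image'/'label' column names are assumed
    examples['image'] = [transform(img.convert('RGB')) for img in examples['image']]
    return examples

def collate(examples):
    # Stack per-example tensors into a batch
    images = torch.stack([ex['image'] for ex in examples])
    labels = torch.tensor([ex['label'] for ex in examples])
    return images, labels

dataset = load_dataset('andrewsunanda/fast_food_image_classification')
train_loader = DataLoader(dataset['train'].with_transform(apply_transform),
                          batch_size=batch_size, shuffle=True, collate_fn=collate)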