andrewsunanda committed
Commit 93a274c · 1 Parent(s): e384564

Update prediction.py

Files changed (1):
  1. prediction.py +36 -5

prediction.py CHANGED
@@ -25,18 +25,49 @@ def preprocess_input_image(img_path):
 
 
 
-# Define batch size and image size
+import os
+import torch
+import torchvision.transforms as transforms
+from torch.utils.data import DataLoader
+from datasets import load_dataset
+
+# Define the path to the dataset
+dataset_path = 'andrewsunanda/fast_food_image_classification'
+
+# Load the dataset from Hugging Face
+dataset = load_dataset(dataset_path)
+
+# Define the batch size and image size
 batch_size = 256
 img_size = (64, 64)
-# Define paths to the data folders
-dataset_path = 'andrewsunanda/fast_food_image_classification'
-
-
+
 # Define the paths to the train, validation, and test folders
 train_path = os.path.join(dataset_path, 'Train')
 valid_path = os.path.join(dataset_path, 'Valid')
 test_path = os.path.join(dataset_path, 'Test')
 
+# Define the transforms for the dataset
+transform = transforms.Compose([
+    transforms.Resize(img_size),
+    transforms.ToTensor(),
+])
+
+# Load the training dataset
+train_dataset = dataset['train']
+train_dataset = train_dataset.map(lambda x: {'image': transform(x['image']), 'label': x['label']})
+train_loader = DataLoader(train_dataset, batch_size=batch_size, shuffle=True)
+
+# Load the validation dataset
+valid_dataset = dataset['validation']
+valid_dataset = valid_dataset.map(lambda x: {'image': transform(x['image']), 'label': x['label']})
+valid_loader = DataLoader(valid_dataset, batch_size=batch_size, shuffle=False)
+
+# Load the testing dataset
+test_dataset = dataset['test']
+test_dataset = test_dataset.map(lambda x: {'image': transform(x['image']), 'label': x['label']})
+test_loader = DataLoader(test_dataset, batch_size=batch_size, shuffle=False)
+
+
 # Create data generators for training, validation, and testing
 train_datagen = ImageDataGenerator(
     rescale=1./255,
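
For context, the block below is a minimal, self-contained sketch of the data-loading pipeline this commit introduces, not part of the commit itself. It keeps the same assumptions the committed code makes (the dataset exposes 'train'/'validation'/'test' splits with 'image' and 'label' columns), but swaps the eager .map(...) call for the datasets library's on-the-fly with_transform() plus a DataLoader collate_fn, so transformed tensors are computed per batch rather than written back into the Arrow cache.

import torch
import torchvision.transforms as transforms
from torch.utils.data import DataLoader
from datasets import load_dataset

# Split and column names follow the assumptions already made in the committed code above.
dataset = load_dataset('andrewsunanda/fast_food_image_classification')

batch_size = 256
img_size = (64, 64)

# Resize to 64x64 and convert to a float tensor in [0, 1]
# (ToTensor plays the role of rescale=1./255 in the Keras generator).
transform = transforms.Compose([
    transforms.Resize(img_size),
    transforms.ToTensor(),
])

def apply_transform(batch):
    # Applied lazily on access instead of eagerly via .map(), so the transformed
    # tensors never get serialized into the dataset cache.
    batch['pixel_values'] = [transform(img.convert('RGB')) for img in batch['image']]
    return batch

def collate(examples):
    # Stack per-example tensors into a (B, C, H, W) batch plus a label vector.
    images = torch.stack([ex['pixel_values'] for ex in examples])
    labels = torch.tensor([ex['label'] for ex in examples])
    return images, labels

train_loader = DataLoader(dataset['train'].with_transform(apply_transform),
                          batch_size=batch_size, shuffle=True, collate_fn=collate)
valid_loader = DataLoader(dataset['validation'].with_transform(apply_transform),
                          batch_size=batch_size, shuffle=False, collate_fn=collate)
test_loader = DataLoader(dataset['test'].with_transform(apply_transform),
                         batch_size=batch_size, shuffle=False, collate_fn=collate)

# Example usage: pull one batch.
images, labels = next(iter(train_loader))
print(images.shape, labels.shape)  # expected: torch.Size([256, 3, 64, 64]) torch.Size([256])

Note also that os.path.join(dataset_path, 'Train') and the ImageDataGenerator block retained as context in the hunk operate on local folder paths, whereas load_dataset(dataset_path) resolves a Hugging Face Hub repository id; the sketch above only covers the PyTorch side added by this commit.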