andrewsunanda committed
Commit 199c1bb · 1 Parent(s): 4045f11

Update prediction.py

Files changed (1)
  1. prediction.py +39 -7
prediction.py CHANGED
@@ -20,15 +20,47 @@ def preprocess_input_image(img_path):
     x /= 255.
     return x, img1
 
-main_path= 'D:\\tugas_andrew_DS\\phase_2\\m2\\food'
-
-# Define batch size and image size
+import os
+import torch
+import torchvision.transforms as transforms
+from torch.utils.data import DataLoader
+from datasets import load_dataset
+
+# Define the path to the dataset
+dataset_path = 'andrewsunanda/fast_food_image_classification'
+
+# Load the dataset from Hugging Face
+dataset = load_dataset(dataset_path)
+
+# Define the batch size and image size
 batch_size = 256
 img_size = (64, 64)
-# Define paths to the data folders
-train_path = os.path.join(main_path, 'Train')
-valid_path = os.path.join(main_path, 'Valid')
-test_path = os.path.join(main_path, 'Test')
+
+# Define the paths to the train, validation, and test folders
+train_path = os.path.join(dataset_path, 'Train')
+valid_path = os.path.join(dataset_path, 'Valid')
+test_path = os.path.join(dataset_path, 'Test')
+
+# Define the transforms for the dataset
+transform = transforms.Compose([
+    transforms.Resize(img_size),
+    transforms.ToTensor(),
+])
+
+# Load the training dataset
+train_dataset = dataset['train']
+train_dataset = train_dataset.map(lambda x: {'image': transform(x['image']), 'label': x['label']})
+train_loader = DataLoader(train_dataset, batch_size=batch_size, shuffle=True)
+
+# Load the validation dataset
+valid_dataset = dataset['validation']
+valid_dataset = valid_dataset.map(lambda x: {'image': transform(x['image']), 'label': x['label']})
+valid_loader = DataLoader(valid_dataset, batch_size=batch_size, shuffle=False)
+
+# Load the testing dataset
+test_dataset = dataset['test']
+test_dataset = test_dataset.map(lambda x: {'image': transform(x['image']), 'label': x['label']})
+test_loader = DataLoader(test_dataset, batch_size=batch_size, shuffle=False)
 
 # Create data generators for training, validation, and testing
 train_datagen = ImageDataGenerator(
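
Note (not part of the commit): below is a minimal sketch of how the new PyTorch loaders might be exercised to pull a single batch. It assumes the Hugging Face dataset exposes PIL images under an 'image' column and integer labels under 'label', and that the split names ('train', 'validation', 'test') match the diff. It also swaps the diff's Dataset.map() call for Dataset.set_transform() plus a custom collate function, so images arrive as stacked tensors rather than the nested lists that map() would store; this is an illustrative alternative, not the author's committed code.

# Illustrative sketch only -- assumes 'image' (PIL) and 'label' (int) columns
# and a 'train' split, as implied by the diff; not part of the committed file.
import torch
import torchvision.transforms as transforms
from datasets import load_dataset
from torch.utils.data import DataLoader

dataset = load_dataset('andrewsunanda/fast_food_image_classification')
img_size = (64, 64)
batch_size = 256

transform = transforms.Compose([
    transforms.Resize(img_size),
    transforms.ToTensor(),
])

def apply_transform(batch):
    # Applied lazily on access, so images stay torch tensors instead of being
    # serialized back into the Arrow table as nested lists.
    batch['image'] = [transform(img.convert('RGB')) for img in batch['image']]
    return batch

train_dataset = dataset['train']
train_dataset.set_transform(apply_transform)

def collate(examples):
    # Stack per-example tensors into a single (B, C, H, W) batch.
    images = torch.stack([ex['image'] for ex in examples])
    labels = torch.tensor([ex['label'] for ex in examples])
    return images, labels

train_loader = DataLoader(train_dataset, batch_size=batch_size,
                          shuffle=True, collate_fn=collate)

images, labels = next(iter(train_loader))
print(images.shape, labels.shape)  # expected: [256, 3, 64, 64] and [256]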