Truptidand committed on
Commit 7f723fa · 1 Parent(s): 0ea1ef9

Upload Data_preprocessing.py

Files changed (1)
  1. Data_preprocessing.py +93 -0
Data_preprocessing.py ADDED
@@ -0,0 +1,93 @@
+ #!/usr/bin/env python
+ # coding: utf-8
+
+ # In[1]:
+
+
+ # Core data-handling and plotting libraries.
+ import numpy as np
+ import pandas as pd
+ import matplotlib.pyplot as plt
+ import seaborn as sns
+
+
+ # In[2]:
+
+
+ # Keras imports, all routed through tensorflow.keras so the standalone
+ # keras package and TensorFlow's bundled Keras are not mixed.
+ import tensorflow as tf
+ from tensorflow import keras
+ from tensorflow.keras import Sequential
+ from tensorflow.keras.layers import Dense, Convolution2D, Flatten, Dropout, BatchNormalization, MaxPooling2D
+ from tensorflow.keras.preprocessing.image import ImageDataGenerator
+
+
+ # In[ ]:
+
+
+ # Earlier attempt at loading a separate validation directory, kept for reference.
+ # val_data = keras.utils.image_dataset_from_directory(
+ #     directory="E:\\DSspec\\Internship\\CUB-200-2011\\cub_200_2011_64x64_for_fid_10k\\cub_200_2011_64x64_10k",
+ #     labels="inferred",
+ #     label_mode="int",
+ #     batch_size=32,
+ #     image_size=(256, 256),
+ # )
+
+
+ # In[3]:
+
+
+ # Training split: 80% of the CUB-200-2011 images, batched and resized to 256x256.
+ train = keras.utils.image_dataset_from_directory(
+     directory="E:\\DSspec\\Internship\\CUB-200-2011\\cub_200_2011_64x64_for_fid_10k",
+     labels="inferred",
+     validation_split=0.2,
+     subset="training",
+     seed=1337,
+     label_mode="int",
+     batch_size=32,
+     image_size=(256, 256))
+
+
+ # In[4]:
+
+
+ # Validation split: the remaining 20%, using the same seed so the two subsets do not overlap.
+ test = keras.utils.image_dataset_from_directory(
+     directory="E:\\DSspec\\Internship\\CUB-200-2011\\cub_200_2011_64x64_for_fid_10k",
+     labels="inferred",
+     validation_split=0.2,
+     subset="validation",
+     seed=1337,
+     label_mode="int",
+     batch_size=32,
+     image_size=(256, 256))
+
+
+ # In[5]:
+
+
+ # Visual sanity check: display the last image of each of the first two training batches.
+ for image, label in train.take(2):
+     plt.imshow(image[31].numpy().astype("uint8"))
+     plt.show()
+
+
+ # In[8]:
+
+
+ # Random augmentation pipeline applied on the fly during training.
+ from tensorflow.keras import layers
+ data_augmentation = keras.Sequential(
+     [
+         layers.RandomFlip("horizontal", input_shape=(256, 256, 3)),
+         layers.RandomRotation(0.3),
+         layers.RandomZoom(0.3),
+     ]
+ )
+
+
+ # In[9]:
+
+
+ # Apply the augmentation to every training batch; labels pass through unchanged.
+ train_gen = train.map(lambda x, y: (data_augmentation(x, training=True), y))
+
+
+ # In[ ]:
+
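
A minimal usage sketch for the datasets built above, assuming `train_gen` and `test` exist as defined in Data_preprocessing.py: prefetch both pipelines and pull a single batch to confirm the expected shapes.

# Sketch only: prefetch the augmented training pipeline and the validation
# pipeline, then fetch one batch as a shape check.
import tensorflow as tf

AUTOTUNE = tf.data.AUTOTUNE
train_ready = train_gen.prefetch(AUTOTUNE)   # augmented training batches
test_ready = test.prefetch(AUTOTUNE)         # un-augmented validation batches

images, labels = next(iter(train_ready))
print(images.shape)   # expected: (32, 256, 256, 3)
print(labels.shape)   # expected: (32,)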