-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathgetdata.py
More file actions
70 lines (53 loc) · 1.83 KB
/
getdata.py
File metadata and controls
70 lines (53 loc) · 1.83 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
import tensorflow as tf
from tensorflow import keras
import numpy as np
import matplotlib.pyplot as plt
import os
import cv2
import random
import pickle
def create_training_data(img_size, datadirs, categories):
    """Load grayscale images from each data directory and label them by
    directory index.

    Parameters
    ----------
    img_size : int
        Images are resized to (img_size, img_size).
    datadirs : list[str]
        One directory per class; the list index becomes the class label.
    categories : list[str]
        Sub-folder names. NOTE(review): only ``categories[0]`` is scanned
        (originally only the 'train' sub-folder) — confirm the remaining
        categories are intentionally ignored here.

    Returns
    -------
    list[[numpy.ndarray, int]]
        Pairs of resized grayscale image and integer class label.
    """
    training_data = []
    for label, datadir in enumerate(datadirs):
        directory = os.path.join(datadir, categories[0])
        for img_name in os.listdir(directory):
            img_array = cv2.imread(os.path.join(directory, img_name),
                                   cv2.IMREAD_GRAYSCALE)
            if img_array is None:
                # cv2.imread returns None for unreadable / non-image files;
                # skip explicitly instead of letting resize raise and being
                # swallowed by a bare except (the original behavior).
                continue
            try:
                new_array = cv2.resize(img_array, (img_size, img_size))
            except cv2.error:
                # Corrupt image data — keep the original best-effort skip,
                # but only for the resize error, not every exception.
                continue
            training_data.append([new_array, label])
    return training_data
def reshaper(training_data, img_size):
    """Split a list of [image, label] pairs into model-ready arrays.

    Parameters
    ----------
    training_data : list[[array-like, int]]
        Pairs of image data and integer label.
    img_size : int
        Side length used to reshape images to (N, img_size, img_size, 1).

    Returns
    -------
    tuple[numpy.ndarray, numpy.ndarray]
        ``x`` with shape (N, img_size, img_size, 1) and ``y`` with shape (N,).
    """
    # Unzip the (image, label) pairs via comprehensions.
    images = [sample for sample, _ in training_data]
    labels = [label for _, label in training_data]
    # Trailing channel dimension of 1 marks the images as single-channel.
    x = np.array(images).reshape(-1, img_size, img_size, 1)
    return x, np.array(labels)
def main():
    """Build the red-blood-cell image dataset and pickle it to disk.

    Loads images from the two hard-coded class directories, shuffles the
    combined samples, reshapes them into (N, 50, 50, 1) / (N,) arrays, and
    writes them to ``x.pickle`` and ``y.pickle`` in the working directory.
    """
    img_size = 50
    # NOTE(review): absolute Windows paths — this script only runs as-is on
    # the original author's machine; consider making these CLI arguments.
    datadirs = [r"C:\Users\MarkScheble.Jr\Desktop\data_rbc\dataset_regular",
                r"C:\Users\MarkScheble.Jr\Desktop\data_rbc\dataset_spiky"]
    categories = ['train', 'test', 'validation']
    training_data = create_training_data(img_size, datadirs, categories)
    # A single shuffle is a full uniform permutation; the original shuffled
    # twice, which adds nothing.
    random.shuffle(training_data)
    # Spot-check that the labels are mixed after shuffling.
    for sample in training_data[:10]:
        print(sample[1])
    x, y = reshaper(training_data, img_size)
    # with-statements guarantee the handles close even if dump() raises
    # (the original used manual open/close pairs).
    with open("x.pickle", "wb") as pickle_out:
        pickle.dump(x, pickle_out)
    with open("y.pickle", "wb") as pickle_out:
        pickle.dump(y, pickle_out)
    print("success")


if __name__ == "__main__":
    main()