
5. Python

83541. cat_and_dog.ipynb

패스트코드블로그 2020. 5. 29. 21:58

 

# CPU processing
import tensorflow.compat.v1 as tf
tf.disable_v2_behavior()
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Conv2D, Flatten, Dropout, MaxPooling2D
from tensorflow.keras.preprocessing.image import ImageDataGenerator
import os
import matplotlib.pyplot as plt
from tensorflow.python.keras import datasets
from tensorflow import keras

 

batch_size = 128
epochs = 1  # 1 epoch to save time
IMG_HEIGHT = 150
IMG_WIDTH = 150
train_dir = None
validation_dir = None
train_cats_dir = None
train_dogs_dir = None
validation_cats_dir = None
validation_dogs_dir = None
train_data_gen = None
total_train = None
total_val = None
val_data_gen = None

 

(train_images, train_labels), (test_images, test_labels) = datasets.mnist.load_data()
train_images = train_images.reshape((60000, 28, 28, 1))
test_images = test_images.reshape((10000, 28, 28, 1))
# Normalize pixel values to the 0-1 range.
train_images, test_images = train_images / 255.0, test_images / 255.0
_URL = 'https://storage.googleapis.com/mledu-datasets/cats_and_dogs_filtered.zip'
path_to_zip = tf.keras.utils.get_file('cats_and_dogs.zip', origin=_URL, extract=True)
PATH = os.path.join(os.path.dirname(path_to_zip), 'cats_and_dogs_filtered')
train_dir = os.path.join(PATH, 'train')
validation_dir = os.path.join(PATH, 'validation')
train_cats_dir = os.path.join(train_dir, 'cats')  # directory with our training cat pictures
train_dogs_dir = os.path.join(train_dir, 'dogs')  # directory with our training dog pictures
validation_cats_dir = os.path.join(validation_dir, 'cats')  # directory with our validation cat pictures
validation_dogs_dir = os.path.join(validation_dir, 'dogs')  # directory with our validation dog pictures
num_cats_tr = len(os.listdir(train_cats_dir))
num_dogs_tr = len(os.listdir(train_dogs_dir))
num_cats_val = len(os.listdir(validation_cats_dir))
num_dogs_val = len(os.listdir(validation_dogs_dir))
total_train = num_cats_tr + num_dogs_tr
total_val = num_cats_val + num_dogs_val
print('total training cat images:', num_cats_tr)
print('total training dog images:', num_dogs_tr)
print('total validation cat images:', num_cats_val)
print('total validation dog images:', num_dogs_val)
print("--")
print("Total training images:", total_train)
print("Total validation images:", total_val)

 

Downloading data from https://storage.googleapis.com/tensorflow/tf-keras-datasets/mnist.npz
11493376/11490434 [==============================] - 0s 0us/step
Downloading data from https://storage.googleapis.com/mledu-datasets/cats_and_dogs_filtered.zip
68608000/68606236 [==============================] - 0s 0us/step
total training cat images: 1000
total training dog images: 1000
total validation cat images: 500
total validation dog images: 500
--
Total training images: 2000
Total validation images: 1000

 

train_image_generator = ImageDataGenerator(rescale=1. / 255)  # Generator for our training data
validation_image_generator = ImageDataGenerator(rescale=1. / 255)  # Generator for our validation data
train_data_gen = train_image_generator.flow_from_directory(batch_size=batch_size,
                                                            directory=train_dir,
                                                            shuffle=True,
                                                            target_size=(IMG_HEIGHT, IMG_WIDTH),
                                                            class_mode='binary')
val_data_gen = validation_image_generator.flow_from_directory(batch_size=batch_size,
                                                              directory=validation_dir,
                                                              target_size=(IMG_HEIGHT, IMG_WIDTH),
                                                              class_mode='binary')
sample_training_images, _ = next(train_data_gen)

 

Found 2000 images belonging to 2 classes.
Found 1000 images belonging to 2 classes.
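
The last line above pulls one batch of (images, labels) from the generator, but `sample_training_images` is never displayed. A minimal sketch for inspecting a few of those images with matplotlib (the choice of 5 images and the figure size are arbitrary, not from the original notebook):

# Show the first five images of the sampled training batch (already rescaled to [0, 1]).
fig, axes = plt.subplots(1, 5, figsize=(20, 4))
for img, ax in zip(sample_training_images[:5], axes):
    ax.imshow(img)
    ax.axis('off')
plt.tight_layout()
plt.show()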

 

model = Sequential([
            Conv2D(16, 3, padding='same',
                   activation='relu',
                   input_shape=(IMG_HEIGHT, IMG_WIDTH, 3)),
            MaxPooling2D(),
            Conv2D(32, 3, padding='same', activation='relu'),
            MaxPooling2D(),
            Conv2D(64, 3, padding='same', activation='relu'),
            MaxPooling2D(),
            Flatten(),
            Dense(512, activation='relu'),
            Dense(1, activation='sigmoid')
        ])
model.compile(optimizer='adam',loss='binary_crossentropy',metrics=['accuracy'])
print('---------- MODEL SUMMARY -------------')
print(model.summary())
model.save('cats_and_dogs.h5')
print('======= Model built and saved ======')
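
For orientation on the summary output: each of the three MaxPooling2D layers halves the spatial size (with floor division), so the 150x150 input shrinks to 75, then 37, then 18 before Flatten, which therefore emits 18 * 18 * 64 = 20,736 values. A quick check of where most parameters sit (plain arithmetic, assuming the default 2x2 pooling):

# Parameter count of the Dense(512) layer that follows Flatten.
flatten_units = 18 * 18 * 64              # 20736 values leaving Flatten
dense_params = flatten_units * 512 + 512  # weights + biases, about 10.6 million
print(flatten_units, dense_params)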

 

 

def train_model():
    print('Loading the saved model with Keras')
    model = keras.models.load_model('cats_and_dogs.h5')
    history = model.fit_generator(train_data_gen,
                                steps_per_epoch=total_train // batch_size,
                                epochs=1,
                                validation_data=val_data_gen,
                                validation_steps=total_val //batch_size
    )
    return history
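
`fit_generator` still works in the TensorFlow version used here but is deprecated; since TensorFlow 2.1, `model.fit` accepts generators directly. A minimal equivalent sketch of the same training call (hypothetical name `train_model_v2`, otherwise identical arguments):

def train_model_v2():
    # Same training loop via model.fit, which accepts Keras generators in TF 2.1+.
    model = keras.models.load_model('cats_and_dogs.h5')
    history = model.fit(train_data_gen,
                        steps_per_epoch=total_train // batch_size,
                        epochs=1,
                        validation_data=val_data_gen,
                        validation_steps=total_val // batch_size)
    return history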

 

history = train_model()
acc = history.history['acc']
val_acc = history.history['val_acc']
loss = history.history['loss']
val_loss = history.history['val_loss']
epochs_range = range(1)  # epochs = 1 to save time
plt.figure(figsize=(8, 8))
plt.subplot(1, 2, 1)
plt.plot(epochs_range, acc, label='Training Accuracy')
plt.plot(epochs_range, val_acc, label='Validation Accuracy')
plt.legend(loc='lower right')
plt.title('Training and Validation Accuracy')
plt.subplot(1, 2, 2)
plt.plot(epochs_range, loss, label='Training Loss')
plt.plot(epochs_range, val_loss, label='Validation Loss')
plt.legend(loc='upper right')
plt.title('Training and Validation Loss')
plt.show()
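
Depending on the TensorFlow version, the history keys may be named 'accuracy'/'val_accuracy' instead of 'acc'/'val_acc', in which case the lookups above raise a KeyError. A small defensive sketch that works with either naming (assumes only the key names differ):

# Pick whichever accuracy key this TensorFlow version recorded.
acc_key = 'acc' if 'acc' in history.history else 'accuracy'
acc = history.history[acc_key]
val_acc = history.history['val_' + acc_key]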

 
