import os
import numpy as np
import pandas as pd
from glob import glob

# Stick to the tf.keras namespace throughout to avoid mixing standalone keras and tf.keras objects
import tensorflow as tf
from tensorflow.keras.utils import load_img, img_to_array
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.models import Sequential, load_model
from tensorflow.keras.layers import GlobalAveragePooling2D, Dense, Dropout
from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint
from tensorflow.keras.applications import ResNet50V2
import plotly.express as px
import matplotlib.pyplot as plt
from zipfile import ZipFile
# Unzip the dataset from Google Drive into the working directory
z = ZipFile('/content/drive/MyDrive/lesson_data/animal_data.zip')
z.extractall()
root_path = './Animal Classification/Animal Classification/Training Data/'
test_path = './Animal Classification/Animal Classification/Testing Data/'
valid_path = './Animal Classification/Animal Classification/Validation Data/'
# One sub-directory per class inside the training folder
class_names = sorted(os.listdir(root_path))
n_classes = len(class_names)
# Rescale pixels to [0, 1]; apply light augmentation to the training split only
train_gen = ImageDataGenerator(rescale=1/255., rotation_range=10, horizontal_flip=True)
valid_gen = ImageDataGenerator(rescale=1/255.)
test_gen = ImageDataGenerator(rescale=1/255.)
# class_mode='categorical' yields one-hot labels, matching the softmax output and
# categorical_crossentropy loss below; only the training split is shuffled
train_ds = train_gen.flow_from_directory(root_path, class_mode='categorical', target_size=(256, 256), shuffle=True, batch_size=32)
valid_ds = valid_gen.flow_from_directory(valid_path, class_mode='categorical', target_size=(256, 256), shuffle=False, batch_size=32)
test_ds = test_gen.flow_from_directory(test_path, class_mode='categorical', target_size=(256, 256), shuffle=False, batch_size=32)
# Load the ImageNet-pretrained ResNet50V2 backbone without its classification head
with tf.device("/GPU:0"):
    base_model = ResNet50V2(input_shape=(256, 256, 3), include_top=False, weights='imagenet')
# Freeze the backbone so only the new classification head is trained
base_model.trainable = False
name = 'ResNet50V2'
model = Sequential([
    base_model,
    GlobalAveragePooling2D(),
    Dense(256, activation='relu', kernel_initializer='he_normal'),
    Dropout(0.2),
    Dense(n_classes, activation='softmax')
], name=name)
# Stop early once validation loss stalls and keep the best checkpoint on disk
cbs = [
    EarlyStopping(patience=3, restore_best_weights=True),
    ModelCheckpoint(name + ".h5", save_best_only=True)
]
opt = tf.keras.optimizers.Adam(learning_rate=2e-3)
model.compile(loss='categorical_crossentropy', optimizer=opt, metrics=['accuracy'])
history = model.fit(train_ds, validation_data=valid_ds, callbacks=cbs, epochs=15)
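# The test generator defined above is never scored in this listing. A minimal
# follow-up sketch (assuming the 'ResNet50V2.h5' checkpoint written by the
# ModelCheckpoint callback above is present): reload the best weights and
# evaluate them on the held-out test split.
best_model = load_model(name + ".h5")
test_loss, test_acc = best_model.evaluate(test_ds)
print(f"Test loss: {test_loss:.4f} | Test accuracy: {test_acc:.4f}")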