# model.py: model architecture and training script
# philipismyen, committed 2021-12-20 04:57

import os

from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Conv2D, MaxPooling2D, Flatten, Dense, Dropout
def generator(directory, gen=ImageDataGenerator(rescale=1. / 255), shuffle=True,
              batch_size=1, target_size=(24, 24), class_mode='categorical'):
    """Return a directory iterator of rescaled grayscale image batches."""
    return gen.flow_from_directory(directory, batch_size=batch_size, shuffle=shuffle,
                                   color_mode='grayscale', class_mode=class_mode,
                                   target_size=target_size)
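
# flow_from_directory expects one subdirectory per class; the layout assumed
# below is illustrative (the actual class folder names come from the dataset):
#   dataset/train/<class_0>/*.png   dataset/train/<class_1>/*.png
#   dataset/test/<class_0>/*.png    dataset/test/<class_1>/*.png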
BS = 32        # batch size
TS = (24, 24)  # target image size, matching the model's (24, 24, 1) input
train_batch = generator('dataset/train', shuffle=True, batch_size=BS, target_size=TS)
valid_batch = generator('dataset/test', shuffle=True, batch_size=BS, target_size=TS)
SPE = len(train_batch.classes) // BS  # steps per epoch
VS = len(valid_batch.classes) // BS   # validation steps
print(SPE, VS)
model = Sequential([
    Conv2D(32, kernel_size=(3, 3), activation='relu', input_shape=(24, 24, 1)),
    MaxPooling2D(pool_size=(1, 1)),  # pool_size=(1, 1) leaves the spatial size unchanged
    Conv2D(32, (3, 3), activation='relu'),
    MaxPooling2D(pool_size=(1, 1)),
    Conv2D(64, (3, 3), activation='relu'),
    MaxPooling2D(pool_size=(1, 1)),
    # randomly drop units during training to reduce overfitting
    # Dropout(0.25),
    # flatten the feature maps into a vector; we only want a classification output
    Flatten(),
    # fully connected layer to combine the extracted features
    Dense(128, activation='relu'),
    # one more dropout for regularization's sake :)
    # Dropout(0.5),
    # softmax output squashes the scores into class probabilities
    Dense(2, activation='softmax')
])
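
# Optional sanity check: print layer output shapes and parameter counts.
model.summary()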
model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
model.fit(train_batch, validation_data=valid_batch, epochs=15, steps_per_epoch=SPE, validation_steps=VS)
os.makedirs('models', exist_ok=True)  # make sure the output directory exists before saving
model.save('models/cnn_dds.h5', overwrite=True)
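
# ---------------------------------------------------------------------------
# Minimal inference sketch: reload the saved model and score one image.
# It assumes the model saved above at 'models/cnn_dds.h5'; the image path
# below is a hypothetical placeholder, and the class index order follows the
# alphabetical order of the class subdirectories seen by flow_from_directory.
# ---------------------------------------------------------------------------
import numpy as np
from tensorflow.keras.models import load_model
from tensorflow.keras.preprocessing import image

cnn = load_model('models/cnn_dds.h5')

IMG_PATH = 'dataset/test/example.png'  # hypothetical path; point this at a real image file

# Preprocess the same way the training generator did: grayscale, 24x24, scaled to [0, 1].
img = image.load_img(IMG_PATH, color_mode='grayscale', target_size=(24, 24))
x = image.img_to_array(img) / 255.0  # shape (24, 24, 1)
x = np.expand_dims(x, axis=0)        # add batch dimension -> (1, 24, 24, 1)

probs = cnn.predict(x)[0]
print('class probabilities:', probs)
print('predicted class index:', int(np.argmax(probs)))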