The 10-monkey-species dataset contains images of ten different monkey species. It was downloaded from the Kaggle platform and used locally.
import tensorflow as tf
from tensorflow import keras
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt

# Restrict TensorFlow to the CPU
cpu = tf.config.list_physical_devices("CPU")
tf.config.set_visible_devices(cpu)
print(tf.config.list_logical_devices())
train_dir = './training/training/'
valid_dir = './validation/validation/'
label_file = './monkey_labels.txt'

# Read the label descriptions (first row is the header)
labels = pd.read_csv(label_file, header = 0)
labels
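The header names in monkey_labels.txt often carry padding spaces, so a small clean-up makes the table easier to work with. This is just a sketch; the exact column contents are an assumption, so check labels.columns against the actual file first.

# Sketch: strip padding spaces from the headers before using them (assumed layout)
labels.columns = [c.strip() for c in labels.columns]
print(labels.columns.tolist())
print(labels.head())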
# Image data generators
# Training images are rescaled and randomly augmented (rotation, shifts,
# shear, zoom, flips); validation images are only rescaled.
train_datagen = keras.preprocessing.image.ImageDataGenerator(
    rescale = 1.0 / 255,
    rotation_range = 40,
    width_shift_range = 0.2,
    height_shift_range = 0.2,
    shear_range = 0.2,
    zoom_range = 0.2,
    horizontal_flip = True,
    vertical_flip = True,
    fill_mode = 'nearest')

height = 128
width = 128
channels = 3
batch_size = 32
num_classes = 10

train_generator = train_datagen.flow_from_directory(
    train_dir,
    target_size = (height, width),
    batch_size = batch_size,
    shuffle = True,
    seed = 7,
    class_mode = 'categorical')

valid_datagen = keras.preprocessing.image.ImageDataGenerator(rescale = 1. / 255)
valid_generator = valid_datagen.flow_from_directory(
    valid_dir,
    target_size = (height, width),
    batch_size = batch_size,
    shuffle = True,
    seed = 7,
    class_mode = 'categorical')

print(train_generator.samples)  # 1098
print(valid_generator.samples)  # 272
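As a quick sanity check (a sketch, not part of the original pipeline), one batch can be pulled from the training generator to confirm the tensor shapes and the folder-to-index mapping produced by class_mode = 'categorical':

# Sanity check: one augmented batch and the one-hot label layout
x_batch, y_batch = next(train_generator)
print(x_batch.shape)                  # expected: (32, 128, 128, 3)
print(y_batch.shape)                  # expected: (32, 10)
print(train_generator.class_indices)  # folder name -> class index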
# A plain CNN: three Conv-Conv-MaxPool blocks followed by dense layers
model = keras.models.Sequential()
model.add(keras.layers.Conv2D(filters = 32, kernel_size = 3, padding = 'same',
                              activation = 'relu', input_shape = (128, 128, 3)))
model.add(keras.layers.Conv2D(filters = 32, kernel_size = 3, padding = 'same', activation = 'relu'))
model.add(keras.layers.MaxPool2D())  # pooling
model.add(keras.layers.Conv2D(filters = 64, kernel_size = 3, padding = 'same', activation = 'relu'))
model.add(keras.layers.Conv2D(filters = 64, kernel_size = 3, padding = 'same', activation = 'relu'))
model.add(keras.layers.MaxPool2D())  # pooling
model.add(keras.layers.Conv2D(filters = 128, kernel_size = 3, padding = 'same', activation = 'relu'))
model.add(keras.layers.Conv2D(filters = 128, kernel_size = 3, padding = 'same', activation = 'relu'))
model.add(keras.layers.MaxPool2D())  # pooling
model.add(keras.layers.Flatten())
model.add(keras.layers.Dense(32, activation = 'relu'))
model.add(keras.layers.Dense(32, activation = 'relu'))
model.add(keras.layers.Dense(10, activation = 'softmax'))

# The generator yields one-hot labels, so use categorical_crossentropy
# (not the sparse variant)
model.compile(loss = 'categorical_crossentropy',
              optimizer = 'adam',
              metrics = ['accuracy'])
model.summary()
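To see where most of the parameters come from, the shapes can be traced layer by layer: the three MaxPool2D layers shrink 128x128 to 16x16, so Flatten feeds 16 * 16 * 128 = 32768 values into the first Dense(32) layer, roughly a million weights. A small sketch that pushes a dummy batch through the layers:

# Trace the spatial size: 128 -> 64 -> 32 -> 16 after the three pooling layers,
# so Flatten outputs 16 * 16 * 128 = 32768 features.
dummy = tf.zeros((1, 128, 128, 3))
for layer in model.layers:
    dummy = layer(dummy)
    print(layer.name, dummy.shape)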
train_num = train_generator.samples
valid_num = valid_generator.samples
print(train_num, valid_num, batch_size)  # 1098 272 32

history = model.fit(train_generator,
                    steps_per_epoch = train_num // batch_size,
                    epochs = 10,
                    validation_data = valid_generator,
                    validation_steps = valid_num // batch_size)
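The returned History object records the loss and accuracy per epoch; a short matplotlib sketch (key names follow the metrics passed to compile) visualizes the training and validation curves:

# Plot training vs. validation curves from the History object
def plot_history(history):
    pd.DataFrame(history.history).plot(figsize = (8, 5))
    plt.grid(True)
    plt.xlabel('epoch')
    plt.show()

plot_history(history)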
# Functional-style VGG. Each config lists the conv filter counts; 'M' marks a max-pooling layer.
cfgs = {
    'vgg11': [64, 'M', 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512, 'M'],
    'vgg13': [64, 64, 'M', 128, 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512, 'M'],
    'vgg16': [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 'M', 512, 512, 512, 'M', 512, 512, 512, 'M'],
    'vgg19': [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 256, 'M', 512, 512, 512, 512, 'M', 512, 512, 512, 512, 'M'],
}

def make_feature(cfg):
    feature_layers = []
    for v in cfg:
        if v == 'M':
            feature_layers.append(keras.layers.MaxPool2D(pool_size = 2, strides = 2))
        else:
            feature_layers.append(keras.layers.Conv2D(v, kernel_size = 3,
                                                      padding = 'SAME',
                                                      activation = 'relu'))
    # Wrap the convolutional stack so it can be used as a single layer
    return keras.Sequential(feature_layers, name = 'feature')
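A quick way to check a config before wiring up the full model is to run the feature stack on a dummy tensor; with five 'M' entries the spatial size is divided by 32. A sketch using vgg11 to keep it light:

# Sketch: the vgg11 feature stack halves the spatial size five times (128 / 32 = 4)
feat = make_feature(cfgs['vgg11'])
print(feat(tf.zeros((1, 128, 128, 3))).shape)  # expected: (1, 4, 4, 512)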
# Define the network structure
def VGG(feature, im_height = 224, im_width = 224, num_classes = 1000):
    input_image = keras.layers.Input(shape = (im_height, im_width, 3), dtype = 'float32')
    x = feature(input_image)
    x = keras.layers.Flatten()(x)            # apply Flatten as a function on the tensor
    x = keras.layers.Dropout(rate = 0.5)(x)  # dropout to reduce overfitting
    x = keras.layers.Dense(512, activation = 'relu')(x)  # 4096 in the original paper
    x = keras.layers.Dropout(rate = 0.5)(x)
    x = keras.layers.Dense(512, activation = 'relu')(x)
    x = keras.layers.Dense(num_classes)(x)
    output = keras.layers.Softmax()(x)
    model = keras.models.Model(inputs = input_image, outputs = output)
    return model
# Define the network model: build a VGG variant by name
def vgg(model_name = 'vgg16', im_height = 224, im_width = 224, num_classes = 1000):
    cfg = cfgs[model_name]
    model = VGG(make_feature(cfg), im_height = im_height, im_width = im_width,
                num_classes = num_classes)
    return model

# The generators yield 128x128 images, so build the model with a matching input size
vgg16 = vgg(im_height = height, im_width = width, num_classes = 10)
vgg16.compile(optimizer = 'adam',
              loss = 'categorical_crossentropy',
              metrics = ['accuracy'])
history = vgg16.fit(train_generator,
                    steps_per_epoch = train_generator.samples // batch_size,
                    epochs = 10,
                    validation_data = valid_generator,
                    validation_steps = valid_generator.samples // batch_size)
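After training, the model can be persisted and reloaded with the standard Keras save/load calls; the file name below is only an illustration.

# Save the trained VGG16 and reload it (the path is a hypothetical example)
vgg16.save('vgg16_monkeys.h5')
restored = keras.models.load_model('vgg16_monkeys.h5')
print(restored.evaluate(valid_generator, steps = valid_generator.samples // batch_size))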
# Functional-style AlexNet
def AlexNet(im_height = 224, im_width = 224, num_classes = 1000):
    # Input layer
    input_image = keras.layers.Input(shape = (im_height, im_width, 3), dtype = tf.float32)
    # Manual padding: zero-fill around the image
    x = keras.layers.ZeroPadding2D(((1, 2), (1, 2)))(input_image)
    # First convolution + pooling
    x = keras.layers.Conv2D(48, kernel_size = 11, strides = 4, activation = 'relu')(x)
    x = keras.layers.MaxPool2D(pool_size = 3, strides = 2)(x)
    # Second convolution + pooling
    x = keras.layers.Conv2D(128, kernel_size = 5, padding = 'same', activation = 'relu')(x)
    x = keras.layers.MaxPool2D(pool_size = 3, strides = 2)(x)
    # Three more convolutions, then pooling
    x = keras.layers.Conv2D(192, kernel_size = 3, padding = 'same', activation = 'relu')(x)
    x = keras.layers.Conv2D(192, kernel_size = 3, padding = 'same', activation = 'relu')(x)
    x = keras.layers.Conv2D(128, kernel_size = 3, padding = 'same', activation = 'relu')(x)
    x = keras.layers.MaxPool2D(pool_size = 3, strides = 2)(x)
    # Fully connected part with dropout to reduce overfitting
    x = keras.layers.Flatten()(x)
    x = keras.layers.Dropout(0.2)(x)
    x = keras.layers.Dense(2048, activation = 'relu')(x)
    x = keras.layers.Dropout(0.2)(x)
    x = keras.layers.Dense(2048, activation = 'relu')(x)
    # Output layer and softmax prediction
    x = keras.layers.Dense(num_classes)(x)
    predict = keras.layers.Softmax()(x)
    model = keras.models.Model(inputs = input_image, outputs = predict)
    return model
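The asymmetric ZeroPadding2D(((1, 2), (1, 2))) reproduces the classic AlexNet arithmetic: a 224 input is padded to 227, and the 11x11 stride-4 convolution gives (227 - 11) / 4 + 1 = 55; with the 128x128 generators used here the same layers yield 131 and then 31. A small sketch confirms this:

# Check the padded and convolved sizes for the classic 224 input and the 128 used here
for size in (224, 128):
    padded = keras.layers.ZeroPadding2D(((1, 2), (1, 2)))(tf.zeros((1, size, size, 3)))
    conved = keras.layers.Conv2D(48, kernel_size = 11, strides = 4)(padded)
    print(size, padded.shape, conved.shape)  # 224 -> 227 -> 55, 128 -> 131 -> 31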
# Again, build with the generators' 128x128 input size
model = AlexNet(im_height = height, im_width = width, num_classes = 10)

model.compile(optimizer = 'adam',
              loss = 'categorical_crossentropy',
              metrics = ['accuracy'])
history = model.fit(train_generator,
                    steps_per_epoch = train_generator.samples // batch_size,
                    epochs = 10,
                    validation_data = valid_generator,
                    validation_steps = valid_generator.samples // batch_size)
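Finally, predictions on one validation batch can be compared against the ground truth by taking the argmax over the softmax outputs (a sketch; the batch comes straight from the validation generator):

# Predict one validation batch and compare predicted vs. true class indices
x_val, y_val = next(valid_generator)
pred = model.predict(x_val)
print(np.argmax(pred, axis = 1))
print(np.argmax(y_val, axis = 1))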