from imageai.Prediction.Custom import ModelTraining

# Create an instance of the ModelTraining class
model_trainer = ModelTraining()
# Set the model type to ResNet; other model types can also be used:
# setModelTypeAsSqueezeNet()
# setModelTypeAsInceptionV3()
# setModelTypeAsDenseNet()
model_trainer.setModelTypeAsResNet()
# Set the path to the training dataset
model_trainer.setDataDirectory(DATASET_DIR)
# Train the model; the parameters are explained in the notes below
model_trainer.trainModel(num_objects=3, num_experiments=10, enhance_data=True, batch_size=32, show_network_summary=True)
Parameter notes
num_objects: the number of object classes (image categories) in the dataset; for simplicity only three classes are used here (a quick check of the expected folder layout is sketched after these notes)
num_experiments: the number of training epochs, i.e. how many times the model trains over the whole image set
enhance_data (optional): whether to generate augmented copies of the training images, which usually improves accuracy
batch_size: the number of images processed in each training batch
show_network_summary: whether to print the network structure in the console before training starts
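Before training, it is worth confirming that the dataset folder matches what num_objects expects. The snippet below is a minimal sketch, assuming the usual ImageAI custom-training layout of train/ and test/ subfolders with one folder per class; the folder name food_dataset and the three class names are hypothetical examples, not taken from this text.

import os

DATASET_DIR = "food_dataset"              # hypothetical dataset folder
classes = ["burger", "pizza", "salad"]    # three class folders, matching num_objects=3

# Count the images found in each train/test class folder
for split in ("train", "test"):
    for cls in classes:
        folder = os.path.join(DATASET_DIR, split, cls)
        count = len(os.listdir(folder)) if os.path.isdir(folder) else 0
        print(folder, "->", count, "images")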
Run results
The structure of the model is printed first, followed by the training results.
__________________________________________________________________________________________________
Layer (type)                    Output Shape         Param #   Connected to
==================================================================================================
input_2 (InputLayer)            (None, 224, 224, 3)  0
conv2d_54 (Conv2D)              (None, 112, 112, 64) 9472      input_2[0][0]
batch_normalization_54 (BatchNo (None, 112, 112, 64) 256       conv2d_54[0][0]
activation_51 (Activation)      (None, 112, 112, 64) 0         batch_normalization_54[0][0]
max_pooling2d_2 (MaxPooling2D)  (None, 55, 55, 64)   0         activation_51[0][0]
conv2d_56 (Conv2D)              (None, 55, 55, 64)   4160      max_pooling2d_2[0][0]
batch_normalization_56 (BatchNo (None, 55, 55, 64)   256       conv2d_56[0][0]
activation_52 (Activation)      (None, 55, 55, 64)   0         batch_normalization_56[0][0]
conv2d_57 (Conv2D)              (None, 55, 55, 64)   36928     activation_52[0][0]
batch_normalization_57 (BatchNo (None, 55, 55, 64)   256       conv2d_57[0][0]
activation_53 (Activation)      (None, 55, 55, 64)   0         batch_normalization_57[0][0]
conv2d_58 (Conv2D)              (None, 55, 55, 256)  16640     activation_53[0][0]
conv2d_55 (Conv2D)              (None, 55, 55, 256)  16640     max_pooling2d_2[0][0]
batch_normalization_58 (BatchNo (None, 55, 55, 256)  1024      conv2d_58[0][0]
batch_normalization_55 (BatchNo (None, 55, 55, 256)  1024      conv2d_55[0][0]
add_17 (Add)                    (None, 55, 55, 256)  0         batch_normalization_58[0][0], batch_normalization_55[0][0]
activation_54 (Activation)      (None, 55, 55, 256)  0         add_17[0][0]
conv2d_59 (Conv2D)              (None, 55, 55, 64)   16448     activation_54[0][0]
batch_normalization_59 (BatchNo (None, 55, 55, 64)   256       conv2d_59[0][0]
activation_55 (Activation)      (None, 55, 55, 64)   0         batch_normalization_59[0][0]
conv2d_60 (Conv2D)              (None, 55, 55, 64)   36928     activation_55[0][0]
batch_normalization_60 (BatchNo (None, 55, 55, 64)   256       conv2d_60[0][0]
activation_56 (Activation)      (None, 55, 55, 64)   0         batch_normalization_60[0][0]
conv2d_61 (Conv2D)              (None, 55, 55, 256)  16640     activation_56[0][0]
add_18 (Add)                    (None, 55, 55, 256)  0         batch_normalization_61[0][0], activation_54[0][0]
activation_57 (Activation)      (None, 55, 55, 256)  0         add_18[0][0]
conv2d_62 (Conv2D)              (None, 55, 55, 64)   16448     activation_57[0][0]
batch_normalization_62 (BatchNo (None, 55, 55, 64)   256       conv2d_62[0][0]
activation_58 (Activation)      (None, 55, 55, 64)   0         batch_normalization_62[0][0]
conv2d_63 (Conv2D)              (None, 55, 55, 64)   36928     activation_58[0][0]
batch_normalization_63 (BatchNo (None, 55, 55, 64)   256       conv2d_63[0][0]
activation_59 (Activation)      (None, 55, 55, 64)   0         batch_normalization_63[0][0]
conv2d_64 (Conv2D)              (None, 55, 55, 256)  16640     activation_59[0][0]
batch_normalization_64 (BatchNo (None, 55, 55, 256)  1024      conv2d_64[0][0]
add_19 (Add)                    (None, 55, 55, 256)  0         batch_normalization_64[0][0], activation_57[0][0]
activation_60 (Activation)      (None, 55, 55, 256)  0         add_19[0][0]
conv2d_66 (Conv2D)              (None, 28, 28, 128)  32896     activation_60[0][0]
batch_normalization_66 (BatchNo (None, 28, 28, 128)  512       conv2d_66[0][0]
activation_61 (Activation)      (None, 28, 28, 128)  0         batch_normalization_66[0][0]
conv2d_67 (Conv2D)              (None, 28, 28, 128)  147584    activation_61[0][0]
batch_normalization_67 (BatchNo (None, 28, 28, 128)  512       conv2d_67[0][0]
activation_62 (Activation)      (None, 28, 28, 128)  0         batch_normalization_67[0][0]
conv2d_68 (Conv2D)              (None, 28, 28, 512)  66048     activation_62[0][0]
conv2d_65 (Conv2D)              (None, 28, 28, 512)  131584    activation_60[0][0]
batch_normalization_68 (BatchNo (None, 28, 28, 512)  2048      conv2d_68[0][0]
batch_normalization_65 (BatchNo (None, 28, 28, 512)  2048      conv2d_65[0][0]
add_20 (Add)                    (None, 28, 28, 512)  0         batch_normalization_68[0][0], batch_normalization_65[0][0]
activation_63 (Activation)      (None, 28, 28, 512)  0         add_20[0][0]
conv2d_69 (Conv2D)              (None, 28, 28, 128)  65664     activation_63[0][0]
batch_normalization_69 (BatchNo (None, 28, 28, 128)  512       conv2d_69[0][0]
activation_64 (Activation)      (None, 28, 28, 128)  0         batch_normalization_69[0][0]
conv2d_70 (Conv2D)              (None, 28, 28, 128)  147584    activation_64[0][0]
batch_normalization_70 (BatchNo (None, 28, 28, 128)  512       conv2d_70[0][0]
activation_65 (Activation)      (None, 28, 28, 128)  0         batch_normalization_70[0][0]
batch_normalization_71 (BatchNo (None, 28, 28, 512)  2048      conv2d_71[0][0]
add_21 (Add)                    (None, 28, 28, 512)  0         batch_normalization_71[0][0], activation_63[0][0]
activation_66 (Activation)      (None, 28, 28, 512)  0         add_21[0][0]
conv2d_72 (Conv2D)              (None, 28, 28, 128)  65664     activation_66[0][0]
batch_normalization_72 (BatchNo (None, 28, 28, 128)  512       conv2d_72[0][0]
activation_67 (Activation)      (None, 28, 28, 128)  0         batch_normalization_72[0][0]
conv2d_73 (Conv2D)              (None, 28, 28, 128)  147584    activation_67[0][0]
batch_normalization_73 (BatchNo (None, 28, 28, 128)  512       conv2d_73[0][0]
activation_68 (Activation)      (None, 28, 28, 128)  0         batch_normalization_73[0][0]
conv2d_74 (Conv2D)              (None, 28, 28, 512)  66048     activation_68[0][0]
batch_normalization_74 (BatchNo (None, 28, 28, 512)  2048      conv2d_74[0][0]
add_22 (Add)                    (None, 28, 28, 512)  0         batch_normalization_74[0][0], activation_66[0][0]
activation_69 (Activation)      (None, 28, 28, 512)  0         add_22[0][0]
conv2d_75 (Conv2D)              (None, 28, 28, 128)  65664     activation_69[0][0]
batch_normalization_75 (BatchNo (None, 28, 28, 128)  512       conv2d_75[0][0]
activation_70 (Activation)      (None, 28, 28, 128)  0         batch_normalization_75[0][0]
conv2d_76 (Conv2D)              (None, 28, 28, 128)  147584    activation_70[0][0]
batch_normalization_76 (BatchNo (None, 28, 28, 128)  512       conv2d_76[0][0]
activation_71 (Activation)      (None, 28, 28, 128)  0         batch_normalization_76[0][0]
conv2d_77 (Conv2D)              (None, 28, 28, 512)  66048     activation_71[0][0]
batch_normalization_77 (BatchNo (None, 28, 28, 512)  2048      conv2d_77[0][0]
add_23 (Add)                    (None, 28, 28, 512)  0         batch_normalization_77[0][0], activation_69[0][0]
activation_72 (Activation)      (None, 28, 28, 512)  0         add_23[0][0]
conv2d_79 (Conv2D)              (None, 14, 14, 256)  131328    activation_72[0][0]
batch_normalization_79 (BatchNo (None, 14, 14, 256)  1024      conv2d_79[0][0]
activation_73 (Activation)      (None, 14, 14, 256)  0         batch_normalization_79[0][0]
conv2d_80 (Conv2D)              (None, 14, 14, 256)  590080    activation_73[0][0]
batch_normalization_80 (BatchNo (None, 14, 14, 256)  1024      conv2d_80[0][0]
activation_74 (Activation)      (None, 14, 14, 256)  0         batch_normalization_80[0][0]
conv2d_81 (Conv2D)              (None, 14, 14, 1024) 263168    activation_74[0][0]
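The repeating Conv2D, BatchNormalization, and Activation rows that end in a two-input Add are ResNet's residual (bottleneck) blocks. The sketch below is a minimal Keras illustration of that pattern, not the exact code ImageAI generates; the filter counts and layer order are chosen to mirror the 55x55x256 blocks in the summary above.

import tensorflow as tf
from tensorflow.keras import layers

def bottleneck_block(x, filters=64, out_filters=256):
    # One identity residual block: three Conv2D + BatchNormalization stages
    # and an Add that merges the block output with its unchanged input,
    # which is why each add_* row above lists two connected layers.
    shortcut = x
    y = layers.Conv2D(filters, 1)(x)                   # 1x1 conv (e.g. conv2d_59, 16448 params)
    y = layers.BatchNormalization()(y)
    y = layers.Activation("relu")(y)
    y = layers.Conv2D(filters, 3, padding="same")(y)   # 3x3 conv (e.g. conv2d_60, 36928 params)
    y = layers.BatchNormalization()(y)
    y = layers.Activation("relu")(y)
    y = layers.Conv2D(out_filters, 1)(y)               # 1x1 conv back to 256 channels (16640 params)
    y = layers.BatchNormalization()(y)
    y = layers.Add()([y, shortcut])                    # residual connection
    return layers.Activation("relu")(y)

inputs = layers.Input(shape=(55, 55, 256))
tf.keras.Model(inputs, bottleneck_block(inputs)).summary()  # prints a table like the one above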