EGB_MLP Network Visualization

Goals:

  1. Understand how the features of a neural network's hidden layers change during training

  2. Learn the end-to-end gradient boosting (EGB) algorithm

The base learner used in the code is a multilayer perceptron (MLP); its additive structure is summarized below.
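
In the implementation below, EGB grows an additive model one base classifier at a time: the last-hidden-layer outputs $f_m(x)$ of all base classifiers built so far are summed and mapped to class probabilities by a final dense softmax layer, roughly

$$\hat{y}(x) = \mathrm{softmax}\Big(W\sum_{m=1}^{M} f_m(x) + b\Big),$$

and when the $(M+1)$-th classifier is added, the layers of $f_1,\dots,f_M$ are frozen so that only the new classifier (and the output layer) is trained.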

Reference:

葛家驿,杨乃森,唐宏,徐朋磊,纪超.端到端的梯度提升网络分类过程可视化[J].信号处理,2022,38(02):355-366.DOI:10.16798/j.issn.1003-0530.2022.02.015.

Research Preparation

Environment Setup

[ ]:
'''
Visualization of the classification process of an end-to-end gradient boosting model
in which every base classifier is a fully connected network
'''

import numpy as np
from sklearn.preprocessing import StandardScaler
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import backend as K
from tensorflow.keras import layers
from tensorflow.keras.callbacks import CSVLogger
from sklearn.preprocessing import MinMaxScaler
from matplotlib.colors import ListedColormap
from matplotlib import cm
import matplotlib as mpl
import matplotlib.pyplot as plt
from sklearn.datasets import make_circles  # concentric-circles toy data
from sklearn.model_selection import train_test_split
import sys
sys.setrecursionlimit(500000)
import imageio
%pylab inline
Populating the interactive namespace from numpy and matplotlib

Generate the Synthetic Data

[ ]:
n_samples = 1000  # number of sample points
X, y = make_circles(n_samples=n_samples, factor=.4, noise=.06, random_state=0)  # concentric-circles data
test_size = 0.5

# split into training and test sets
X_train, X_test, Y_train, Y_test = train_test_split(X, y, test_size=test_size, random_state=2)

# 50x50 grid covering the data range, used later to visualize decision regions
c, r = np.mgrid[[slice(X.min() - .2, X.max() + .2, 50j)] * 2]
p = np.c_[c.flat, r.flat]

# standardize features (scaler fitted on the training set only)
ss = StandardScaler().fit(X_train)
X = ss.transform(X)
p = ss.transform(p)
X_train = ss.transform(X_train)
X_test = ss.transform(X_test)
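
The grid `p` built above is intended for colouring the model's decision regions. A minimal, purely illustrative sketch of that use, assuming the colormap `cm_bright` defined in the next cell and a `boosting_model` trained by the cells further below:

[ ]:
# colour the plane by the predicted probability of class 1 on the grid `p`
proba = boosting_model.predict(p)[:, 1].reshape(c.shape)
cc = p[:, 0].reshape(c.shape)  # grid x-coordinates in the standardized space
rr = p[:, 1].reshape(r.shape)  # grid y-coordinates in the standardized space
plt.contourf(cc, rr, proba, levels=20, cmap=cm_bright, alpha=0.4)
plt.scatter(*X_train.T, c=Y_train, cmap=cm_bright, edgecolors='white', s=20, linewidths=0.5)
plt.axis('equal')
plt.show()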

Configure the Plotting Environment

[ ]:
# set up the figure size and build a two-tone colormap
fig = plt.figure(figsize=(9, 3))
top = cm.get_cmap('Oranges_r', 512)
bottom = cm.get_cmap('Blues', 512)
newcolors = np.vstack((top(np.linspace(0.55, 1, 512)),
                       bottom(np.linspace(0, 0.75, 512))))
cm_bright = ListedColormap(newcolors, name='OrangeBlue')

# visualize the training data
plt.subplot(121)
m1 = plt.scatter(*X_train.T, c=Y_train, cmap=cm_bright, edgecolors='white', s=20, linewidths=0.5)
plt.title(f'train data ({int(n_samples*(1-test_size))} points)')
plt.axis('equal')

# visualize the test data
plt.subplot(122)
m2 = plt.scatter(*X_test.T, c=Y_test, cmap=cm_bright, edgecolors='white', s=20, linewidths=0.5)
plt.title(f'test data ({int(n_samples*test_size)} points)')
plt.axis('equal')
ax = fig.get_axes()
plt.colorbar(ax=ax)
#plt.savefig(f'data_{n_samples}_points.png')
#plt.savefig(f'data_{n_samples}_points.pdf')
plt.show()

# visualize the full dataset
fig = plt.figure(figsize=(7, 6))
plt.scatter(*X.T, c=y, cmap=cm_bright, edgecolors='white', s=20, linewidths=0.5)
plt.title(f'Raw data ({n_samples} points)')
plt.axis('equal')
#plt.savefig(f'Raw data ({n_samples} points).png')
#plt.savefig(f'Raw data ({n_samples} points).pdf')
plt.show()
[Output: two figures, the train/test scatter plots and the full-dataset scatter]

Prepare the Data and Define the Loss-Curve Plotting Function

[ ]:
num_classes = 2  # number of classes
y_train = keras.utils.to_categorical(Y_train, num_classes)  # convert class labels to one-hot encoding
y_test = keras.utils.to_categorical(Y_test, num_classes)

# define the loss/accuracy curve plotting function
def plot_loss_accuracy(history, title_text, file_name):

    fig, ax1 = plt.subplots()
    ax2 = ax1.twinx()
    ax1.set_xlabel("Epoch")
    ax1.set_ylabel("Accuracy")
    ax2.set_ylabel("Loss")

    #ax1.set_ylim(-0.01,1.01)
    ax1.plot(history.epoch,
             history.history['accuracy'],
             label="Training Accuracy")
    ax1.plot(history.epoch,
             history.history['val_accuracy'],
             linestyle='--',
             label="Test Accuracy")

    #ax2.set_ylim(-0.01,1.01)
    # share one colour cycle across both axes (relies on a private Matplotlib API;
    # may break on newer Matplotlib versions)
    ax2._get_lines.prop_cycler = ax1._get_lines.prop_cycler

    ax2.plot(history.epoch,
             history.history['loss'],
             label="Training Loss")
    ax2.plot(history.epoch,
             history.history['val_loss'],
             linestyle='--',
             label="Test Loss")

    ax1.legend()
    ax2.legend()
    plt.suptitle(title_text)
    plt.savefig(file_name)

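Once the training loop defined below has filled the `history` list, the curves for, say, the first boosting round can be drawn with a call like the following (title and file name are illustrative):

[ ]:
plot_loss_accuracy(history[0],
                   'EGB with 1 base classifier',
                   'loss_accuracy_clf0.png')
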
Build the End-to-End Gradient Boosting Model from Fully Connected Base Classifiers

[ ]:
# define a fully connected block
def FullyConnected_Block(name_of_classifiers,
                         number_of_layers,
                         num_neurons_of_layer,
                         inputs):
    x = inputs
    for n in range(number_of_layers):
        x = layers.Dense(num_neurons_of_layer,
                         activation=tf.nn.relu,
                         name=f'{name_of_classifiers}th-clf_{n}th-hidden')(x)
    return x

# build one fully connected base classifier
def build_fully_connected_model(name_of_classifiers,
                                number_of_layers,
                                num_neurons_of_layer,
                                inputs):
    x = FullyConnected_Block(name_of_classifiers,
                             number_of_layers,
                             num_neurons_of_layer,
                             inputs)
    outputs = x
    #outputs = layers.Dense(num_classes,name=f'{name_of_classifiers}th-clf_logits')(x)
    fully_connected_model = keras.Model(inputs, outputs)
    return fully_connected_model

# build the EGB network
def build_boosting_model(classifiers,
                         name_of_classifiers,
                         number_of_layers,
                         num_neurons_of_layer,
                         inputs):
    model_logits = build_fully_connected_model(name_of_classifiers,
                                               number_of_layers,
                                               num_neurons_of_layer,
                                               inputs)
    classifiers.append(model_logits)
    # with more than one classifier, sum the last-layer outputs of all classifiers, then apply softmax
    if len(classifiers) > 1:
        fully_boost_model = layers.Add(name='classifiers_Add')([item.outputs[0] for item in classifiers])
    else:
        fully_boost_model = model_logits.outputs[0]
    outputs = layers.Dense(2, activation='softmax', name='activation')(fully_boost_model)
    boosting_model = keras.Model(inputs, outputs)
    return boosting_model, model_logits
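
As a quick, illustrative sanity check of the builders, one can construct a single base classifier on a fresh input and print its structure; the summary should list four Dense hidden layers with two neurons each followed by the softmax output layer (the names below are placeholders):

[ ]:
demo_classifiers = []
demo_inputs = keras.Input(shape=(2, ))
demo_boost, demo_logits = build_boosting_model(demo_classifiers, 0, 4, 2, demo_inputs)
demo_boost.summary()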

Define the Training Procedure

[ ]:
# define the model training loop
def train_FullyBoost_model(number_of_weak_classifiers,
                           number_of_layers,
                           num_neurons_of_layer,
                           batch_size,
                           epochs):

    classifiers = []
    history = []
    boosting_models = []

    inputs = keras.Input(shape=(2, ))

    # add one base classifier at a time and fit it to the training data
    for n_th_weak in range(number_of_weak_classifiers):
        name_of_classifiers = n_th_weak
        boosting_model, model_logits = build_boosting_model(classifiers,
                                                            name_of_classifiers,
                                                            number_of_layers,
                                                            num_neurons_of_layer,
                                                            inputs)

        boosting_model.compile(loss=keras.losses.categorical_crossentropy,
                               optimizer=keras.optimizers.Adam(lr=3e-4),  # `lr` is deprecated in newer Keras; use `learning_rate`
                               metrics=['accuracy'])
        #csv_logger = CSVLogger(f'training_{name_of_classifiers}.log') # save the training log
        new_history = boosting_model.fit(X_train,
                                         y_train,
                                         batch_size=batch_size,
                                         epochs=epochs,
                                         verbose=2,
                                         #callbacks=[csv_logger],
                                         validation_data=(X_test, y_test))

        # freeze every layer of the classifier that was just trained, so earlier
        # base classifiers stay fixed while the next one is fitted
        for layer in classifiers[-1].layers:
            layer.trainable = False
        history.append(new_history)
        boosting_models.append(boosting_model)
        #model_logits.save_weights(f"single_classifier_{name_of_classifiers}.h5") # save the base classifier
        print(f'Finished training weak classifier {n_th_weak + 1}')

    #boosting_model.save(f'boosting_model_with_{number_of_weak_classifiers}_classifiers.h5') # save the full model
    return boosting_model, boosting_models, history
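
Because each round freezes the layers of the classifier it has just fitted, every Dense layer belonging to a base classifier should report `trainable=False` in the final model, while the added softmax output layer stays trainable. A quick illustrative check after training:

[ ]:
for layer in boosting_model.layers:
    print(f'{layer.name:35s} trainable={layer.trainable}')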

Run the Experiment

Set the Training Parameters

[ ]:
# set parameters

epochs = 100                    # number of training epochs
batch_size = 32                 # batch size
number_of_weak_classifiers = 5  # number of base classifiers
number_of_layers = 4            # number of hidden layers per base classifier
num_neurons_of_layer = 2        # number of neurons per hidden layer

# train the model
boosting_model, boosting_models, history = train_FullyBoost_model(number_of_weak_classifiers,
                                                                  number_of_layers,
                                                                  num_neurons_of_layer,
                                                                  batch_size,
                                                                  epochs)
/usr/local/lib/python3.7/dist-packages/keras/optimizers/optimizer_v2/adam.py:110: UserWarning: The `lr` argument is deprecated, use `learning_rate` instead.
  super(Adam, self).__init__(name, **kwargs)
Epoch 1/100
16/16 - 2s - loss: 1.0253 - accuracy: 0.5240 - val_loss: 1.1483 - val_accuracy: 0.4640 - 2s/epoch - 148ms/step
Epoch 2/100
16/16 - 0s - loss: 1.0180 - accuracy: 0.5140 - val_loss: 1.1382 - val_accuracy: 0.4640 - 273ms/epoch - 17ms/step
Epoch 3/100
16/16 - 0s - loss: 1.0109 - accuracy: 0.4980 - val_loss: 1.1285 - val_accuracy: 0.4400 - 165ms/epoch - 10ms/step
Epoch 4/100
16/16 - 0s - loss: 1.0039 - accuracy: 0.4620 - val_loss: 1.1193 - val_accuracy: 0.4200 - 260ms/epoch - 16ms/step
Epoch 5/100
16/16 - 0s - loss: 0.9971 - accuracy: 0.4380 - val_loss: 1.1103 - val_accuracy: 0.3820 - 157ms/epoch - 10ms/step
Epoch 6/100
16/16 - 0s - loss: 0.9908 - accuracy: 0.3760 - val_loss: 1.1015 - val_accuracy: 0.3160 - 289ms/epoch - 18ms/step
Epoch 7/100
16/16 - 0s - loss: 0.9846 - accuracy: 0.3540 - val_loss: 1.0926 - val_accuracy: 0.3320 - 188ms/epoch - 12ms/step
Epoch 8/100
16/16 - 0s - loss: 0.9780 - accuracy: 0.4020 - val_loss: 1.0848 - val_accuracy: 0.3820 - 132ms/epoch - 8ms/step
Epoch 9/100
16/16 - 0s - loss: 0.9722 - accuracy: 0.4780 - val_loss: 1.0769 - val_accuracy: 0.4840 - 135ms/epoch - 8ms/step
Epoch 10/100
16/16 - 0s - loss: 0.9665 - accuracy: 0.5040 - val_loss: 1.0687 - val_accuracy: 0.4840 - 143ms/epoch - 9ms/step
Epoch 11/100
16/16 - 0s - loss: 0.9610 - accuracy: 0.5040 - val_loss: 1.0608 - val_accuracy: 0.4800 - 193ms/epoch - 12ms/step
Epoch 12/100
16/16 - 0s - loss: 0.9553 - accuracy: 0.5020 - val_loss: 1.0539 - val_accuracy: 0.4780 - 169ms/epoch - 11ms/step
Epoch 13/100
16/16 - 0s - loss: 0.9503 - accuracy: 0.5020 - val_loss: 1.0463 - val_accuracy: 0.4800 - 200ms/epoch - 13ms/step
Epoch 14/100
16/16 - 0s - loss: 0.9450 - accuracy: 0.4960 - val_loss: 1.0393 - val_accuracy: 0.4780 - 208ms/epoch - 13ms/step
Epoch 15/100
16/16 - 0s - loss: 0.9401 - accuracy: 0.4920 - val_loss: 1.0322 - val_accuracy: 0.4760 - 166ms/epoch - 10ms/step
Epoch 16/100
16/16 - 0s - loss: 0.9351 - accuracy: 0.4880 - val_loss: 1.0255 - val_accuracy: 0.4740 - 192ms/epoch - 12ms/step
Epoch 17/100
16/16 - 0s - loss: 0.9304 - accuracy: 0.4860 - val_loss: 1.0190 - val_accuracy: 0.4720 - 92ms/epoch - 6ms/step
Epoch 18/100
16/16 - 0s - loss: 0.9257 - accuracy: 0.4860 - val_loss: 1.0128 - val_accuracy: 0.4720 - 126ms/epoch - 8ms/step
Epoch 19/100
16/16 - 0s - loss: 0.9213 - accuracy: 0.4820 - val_loss: 1.0064 - val_accuracy: 0.4700 - 185ms/epoch - 12ms/step
Epoch 20/100
16/16 - 0s - loss: 0.9170 - accuracy: 0.4820 - val_loss: 0.9999 - val_accuracy: 0.4700 - 164ms/epoch - 10ms/step
Epoch 21/100
16/16 - 0s - loss: 0.9126 - accuracy: 0.4840 - val_loss: 0.9937 - val_accuracy: 0.4720 - 140ms/epoch - 9ms/step
Epoch 22/100
16/16 - 0s - loss: 0.9083 - accuracy: 0.4840 - val_loss: 0.9880 - val_accuracy: 0.4720 - 146ms/epoch - 9ms/step
Epoch 23/100
16/16 - 0s - loss: 0.9044 - accuracy: 0.4840 - val_loss: 0.9825 - val_accuracy: 0.4720 - 236ms/epoch - 15ms/step
Epoch 24/100
16/16 - 0s - loss: 0.9003 - accuracy: 0.4840 - val_loss: 0.9773 - val_accuracy: 0.4720 - 132ms/epoch - 8ms/step
Epoch 25/100
16/16 - 0s - loss: 0.8968 - accuracy: 0.4860 - val_loss: 0.9714 - val_accuracy: 0.4720 - 384ms/epoch - 24ms/step
Epoch 26/100
16/16 - 0s - loss: 0.8927 - accuracy: 0.4840 - val_loss: 0.9662 - val_accuracy: 0.4700 - 294ms/epoch - 18ms/step
Epoch 27/100
16/16 - 0s - loss: 0.8891 - accuracy: 0.4840 - val_loss: 0.9610 - val_accuracy: 0.4680 - 212ms/epoch - 13ms/step
Epoch 28/100
16/16 - 0s - loss: 0.8855 - accuracy: 0.4860 - val_loss: 0.9562 - val_accuracy: 0.4680 - 152ms/epoch - 10ms/step
Epoch 29/100
16/16 - 0s - loss: 0.8822 - accuracy: 0.4840 - val_loss: 0.9509 - val_accuracy: 0.4680 - 354ms/epoch - 22ms/step
Epoch 30/100
16/16 - 0s - loss: 0.8786 - accuracy: 0.4780 - val_loss: 0.9460 - val_accuracy: 0.4660 - 116ms/epoch - 7ms/step
Epoch 31/100
16/16 - 0s - loss: 0.8751 - accuracy: 0.4780 - val_loss: 0.9414 - val_accuracy: 0.4660 - 237ms/epoch - 15ms/step
Epoch 32/100
16/16 - 0s - loss: 0.8718 - accuracy: 0.4780 - val_loss: 0.9366 - val_accuracy: 0.4640 - 175ms/epoch - 11ms/step
Epoch 33/100
16/16 - 0s - loss: 0.8684 - accuracy: 0.4780 - val_loss: 0.9322 - val_accuracy: 0.4620 - 128ms/epoch - 8ms/step
Epoch 34/100
16/16 - 0s - loss: 0.8653 - accuracy: 0.4780 - val_loss: 0.9275 - val_accuracy: 0.4600 - 222ms/epoch - 14ms/step
Epoch 35/100
16/16 - 0s - loss: 0.8622 - accuracy: 0.4780 - val_loss: 0.9227 - val_accuracy: 0.4600 - 130ms/epoch - 8ms/step
Epoch 36/100
16/16 - 0s - loss: 0.8588 - accuracy: 0.4760 - val_loss: 0.9185 - val_accuracy: 0.4600 - 308ms/epoch - 19ms/step
Epoch 37/100
16/16 - 0s - loss: 0.8559 - accuracy: 0.4740 - val_loss: 0.9141 - val_accuracy: 0.4600 - 136ms/epoch - 8ms/step
Epoch 38/100
16/16 - 0s - loss: 0.8528 - accuracy: 0.4740 - val_loss: 0.9101 - val_accuracy: 0.4600 - 218ms/epoch - 14ms/step
Epoch 39/100
16/16 - 0s - loss: 0.8499 - accuracy: 0.4740 - val_loss: 0.9059 - val_accuracy: 0.4580 - 280ms/epoch - 17ms/step
Epoch 40/100
16/16 - 0s - loss: 0.8470 - accuracy: 0.4740 - val_loss: 0.9020 - val_accuracy: 0.4560 - 184ms/epoch - 11ms/step
Epoch 41/100
16/16 - 0s - loss: 0.8441 - accuracy: 0.4740 - val_loss: 0.8982 - val_accuracy: 0.4560 - 149ms/epoch - 9ms/step
Epoch 42/100
16/16 - 0s - loss: 0.8415 - accuracy: 0.4740 - val_loss: 0.8938 - val_accuracy: 0.4540 - 147ms/epoch - 9ms/step
Epoch 43/100
16/16 - 0s - loss: 0.8385 - accuracy: 0.4760 - val_loss: 0.8901 - val_accuracy: 0.4560 - 221ms/epoch - 14ms/step
Epoch 44/100
16/16 - 0s - loss: 0.8359 - accuracy: 0.4760 - val_loss: 0.8864 - val_accuracy: 0.4540 - 184ms/epoch - 11ms/step
Epoch 45/100
16/16 - 0s - loss: 0.8333 - accuracy: 0.4740 - val_loss: 0.8826 - val_accuracy: 0.4540 - 194ms/epoch - 12ms/step
Epoch 46/100
16/16 - 0s - loss: 0.8306 - accuracy: 0.4740 - val_loss: 0.8793 - val_accuracy: 0.4540 - 190ms/epoch - 12ms/step
Epoch 47/100
16/16 - 0s - loss: 0.8281 - accuracy: 0.4740 - val_loss: 0.8757 - val_accuracy: 0.4520 - 228ms/epoch - 14ms/step
Epoch 48/100
16/16 - 0s - loss: 0.8256 - accuracy: 0.4740 - val_loss: 0.8723 - val_accuracy: 0.4520 - 278ms/epoch - 17ms/step
Epoch 49/100
16/16 - 0s - loss: 0.8231 - accuracy: 0.4720 - val_loss: 0.8689 - val_accuracy: 0.4520 - 214ms/epoch - 13ms/step
Epoch 50/100
16/16 - 0s - loss: 0.8207 - accuracy: 0.4700 - val_loss: 0.8655 - val_accuracy: 0.4500 - 182ms/epoch - 11ms/step
Epoch 51/100
16/16 - 0s - loss: 0.8182 - accuracy: 0.4700 - val_loss: 0.8623 - val_accuracy: 0.4520 - 220ms/epoch - 14ms/step
Epoch 52/100
16/16 - 0s - loss: 0.8159 - accuracy: 0.4660 - val_loss: 0.8589 - val_accuracy: 0.4500 - 151ms/epoch - 9ms/step
Epoch 53/100
16/16 - 0s - loss: 0.8136 - accuracy: 0.4660 - val_loss: 0.8557 - val_accuracy: 0.4480 - 104ms/epoch - 7ms/step
Epoch 54/100
16/16 - 0s - loss: 0.8112 - accuracy: 0.4680 - val_loss: 0.8528 - val_accuracy: 0.4460 - 134ms/epoch - 8ms/step
Epoch 55/100
16/16 - 0s - loss: 0.8090 - accuracy: 0.4660 - val_loss: 0.8497 - val_accuracy: 0.4460 - 263ms/epoch - 16ms/step
Epoch 56/100
16/16 - 0s - loss: 0.8068 - accuracy: 0.4640 - val_loss: 0.8466 - val_accuracy: 0.4460 - 124ms/epoch - 8ms/step
Epoch 57/100
16/16 - 0s - loss: 0.8045 - accuracy: 0.4660 - val_loss: 0.8435 - val_accuracy: 0.4460 - 133ms/epoch - 8ms/step
Epoch 58/100
16/16 - 0s - loss: 0.8023 - accuracy: 0.4680 - val_loss: 0.8407 - val_accuracy: 0.4440 - 134ms/epoch - 8ms/step
Epoch 59/100
16/16 - 0s - loss: 0.8002 - accuracy: 0.4680 - val_loss: 0.8378 - val_accuracy: 0.4420 - 145ms/epoch - 9ms/step
Epoch 60/100
16/16 - 0s - loss: 0.7981 - accuracy: 0.4660 - val_loss: 0.8350 - val_accuracy: 0.4420 - 203ms/epoch - 13ms/step
Epoch 61/100
16/16 - 0s - loss: 0.7960 - accuracy: 0.4700 - val_loss: 0.8322 - val_accuracy: 0.4420 - 174ms/epoch - 11ms/step
Epoch 62/100
16/16 - 0s - loss: 0.7939 - accuracy: 0.4700 - val_loss: 0.8294 - val_accuracy: 0.4420 - 170ms/epoch - 11ms/step
Epoch 63/100
16/16 - 0s - loss: 0.7920 - accuracy: 0.4700 - val_loss: 0.8266 - val_accuracy: 0.4420 - 211ms/epoch - 13ms/step
Epoch 64/100
16/16 - 0s - loss: 0.7899 - accuracy: 0.4700 - val_loss: 0.8241 - val_accuracy: 0.4420 - 187ms/epoch - 12ms/step
Epoch 65/100
16/16 - 0s - loss: 0.7879 - accuracy: 0.4680 - val_loss: 0.8214 - val_accuracy: 0.4400 - 188ms/epoch - 12ms/step
Epoch 66/100
16/16 - 0s - loss: 0.7860 - accuracy: 0.4680 - val_loss: 0.8187 - val_accuracy: 0.4420 - 129ms/epoch - 8ms/step
Epoch 67/100
16/16 - 0s - loss: 0.7840 - accuracy: 0.4680 - val_loss: 0.8163 - val_accuracy: 0.4420 - 138ms/epoch - 9ms/step
Epoch 68/100
16/16 - 0s - loss: 0.7821 - accuracy: 0.4680 - val_loss: 0.8137 - val_accuracy: 0.4420 - 158ms/epoch - 10ms/step
Epoch 69/100
16/16 - 0s - loss: 0.7801 - accuracy: 0.4680 - val_loss: 0.8113 - val_accuracy: 0.4420 - 180ms/epoch - 11ms/step
Epoch 70/100
16/16 - 0s - loss: 0.7782 - accuracy: 0.4680 - val_loss: 0.8088 - val_accuracy: 0.4420 - 138ms/epoch - 9ms/step
Epoch 71/100
16/16 - 0s - loss: 0.7764 - accuracy: 0.4680 - val_loss: 0.8063 - val_accuracy: 0.4420 - 134ms/epoch - 8ms/step
Epoch 72/100
16/16 - 0s - loss: 0.7746 - accuracy: 0.4680 - val_loss: 0.8037 - val_accuracy: 0.4420 - 181ms/epoch - 11ms/step
Epoch 73/100
16/16 - 0s - loss: 0.7727 - accuracy: 0.4680 - val_loss: 0.8014 - val_accuracy: 0.4420 - 402ms/epoch - 25ms/step
Epoch 74/100
16/16 - 0s - loss: 0.7709 - accuracy: 0.4680 - val_loss: 0.7992 - val_accuracy: 0.4420 - 279ms/epoch - 17ms/step
Epoch 75/100
16/16 - 0s - loss: 0.7691 - accuracy: 0.4680 - val_loss: 0.7968 - val_accuracy: 0.4420 - 339ms/epoch - 21ms/step
Epoch 76/100
16/16 - 0s - loss: 0.7673 - accuracy: 0.4680 - val_loss: 0.7947 - val_accuracy: 0.4420 - 183ms/epoch - 11ms/step
Epoch 77/100
16/16 - 0s - loss: 0.7655 - accuracy: 0.4680 - val_loss: 0.7925 - val_accuracy: 0.4420 - 68ms/epoch - 4ms/step
Epoch 78/100
16/16 - 0s - loss: 0.7639 - accuracy: 0.4660 - val_loss: 0.7901 - val_accuracy: 0.4420 - 74ms/epoch - 5ms/step
Epoch 79/100
16/16 - 0s - loss: 0.7620 - accuracy: 0.4660 - val_loss: 0.7880 - val_accuracy: 0.4420 - 111ms/epoch - 7ms/step
Epoch 80/100
16/16 - 0s - loss: 0.7603 - accuracy: 0.4660 - val_loss: 0.7860 - val_accuracy: 0.4420 - 116ms/epoch - 7ms/step
Epoch 81/100
16/16 - 0s - loss: 0.7587 - accuracy: 0.4660 - val_loss: 0.7837 - val_accuracy: 0.4420 - 69ms/epoch - 4ms/step
Epoch 82/100
16/16 - 0s - loss: 0.7570 - accuracy: 0.4660 - val_loss: 0.7816 - val_accuracy: 0.4420 - 66ms/epoch - 4ms/step
Epoch 83/100
16/16 - 0s - loss: 0.7554 - accuracy: 0.4660 - val_loss: 0.7796 - val_accuracy: 0.4420 - 71ms/epoch - 4ms/step
Epoch 84/100
16/16 - 0s - loss: 0.7537 - accuracy: 0.4660 - val_loss: 0.7776 - val_accuracy: 0.4420 - 108ms/epoch - 7ms/step
Epoch 85/100
16/16 - 0s - loss: 0.7521 - accuracy: 0.4660 - val_loss: 0.7755 - val_accuracy: 0.4420 - 76ms/epoch - 5ms/step
Epoch 86/100
16/16 - 0s - loss: 0.7504 - accuracy: 0.4660 - val_loss: 0.7734 - val_accuracy: 0.4420 - 72ms/epoch - 4ms/step
Epoch 87/100
16/16 - 0s - loss: 0.7488 - accuracy: 0.4660 - val_loss: 0.7715 - val_accuracy: 0.4420 - 112ms/epoch - 7ms/step
Epoch 88/100
16/16 - 0s - loss: 0.7472 - accuracy: 0.4660 - val_loss: 0.7694 - val_accuracy: 0.4400 - 67ms/epoch - 4ms/step
Epoch 89/100
16/16 - 0s - loss: 0.7457 - accuracy: 0.4660 - val_loss: 0.7675 - val_accuracy: 0.4420 - 66ms/epoch - 4ms/step
Epoch 90/100
16/16 - 0s - loss: 0.7441 - accuracy: 0.4660 - val_loss: 0.7657 - val_accuracy: 0.4420 - 68ms/epoch - 4ms/step
Epoch 91/100
16/16 - 0s - loss: 0.7425 - accuracy: 0.4660 - val_loss: 0.7636 - val_accuracy: 0.4420 - 76ms/epoch - 5ms/step
Epoch 92/100
16/16 - 0s - loss: 0.7409 - accuracy: 0.4660 - val_loss: 0.7617 - val_accuracy: 0.4420 - 110ms/epoch - 7ms/step
Epoch 93/100
16/16 - 0s - loss: 0.7394 - accuracy: 0.4660 - val_loss: 0.7598 - val_accuracy: 0.4420 - 67ms/epoch - 4ms/step
Epoch 94/100
16/16 - 0s - loss: 0.7378 - accuracy: 0.4660 - val_loss: 0.7580 - val_accuracy: 0.4420 - 72ms/epoch - 4ms/step
Epoch 95/100
16/16 - 0s - loss: 0.7364 - accuracy: 0.4680 - val_loss: 0.7561 - val_accuracy: 0.4420 - 113ms/epoch - 7ms/step
Epoch 96/100
16/16 - 0s - loss: 0.7348 - accuracy: 0.4660 - val_loss: 0.7542 - val_accuracy: 0.4420 - 127ms/epoch - 8ms/step
Epoch 97/100
16/16 - 0s - loss: 0.7334 - accuracy: 0.4680 - val_loss: 0.7524 - val_accuracy: 0.4420 - 77ms/epoch - 5ms/step
Epoch 98/100
16/16 - 0s - loss: 0.7318 - accuracy: 0.4680 - val_loss: 0.7506 - val_accuracy: 0.4420 - 82ms/epoch - 5ms/step
Epoch 99/100
16/16 - 0s - loss: 0.7304 - accuracy: 0.4680 - val_loss: 0.7489 - val_accuracy: 0.4420 - 111ms/epoch - 7ms/step
Epoch 100/100
16/16 - 0s - loss: 0.7289 - accuracy: 0.4680 - val_loss: 0.7472 - val_accuracy: 0.4420 - 88ms/epoch - 5ms/step
Finished training weak classifier 1
Epoch 1/100
16/16 - 1s - loss: 0.6325 - accuracy: 0.4980 - val_loss: 0.5948 - val_accuracy: 0.5420 - 676ms/epoch - 42ms/step
Epoch 2/100
16/16 - 0s - loss: 0.6305 - accuracy: 0.5020 - val_loss: 0.5932 - val_accuracy: 0.5480 - 75ms/epoch - 5ms/step
Epoch 3/100
16/16 - 0s - loss: 0.6286 - accuracy: 0.5000 - val_loss: 0.5916 - val_accuracy: 0.5560 - 64ms/epoch - 4ms/step
Epoch 4/100
16/16 - 0s - loss: 0.6267 - accuracy: 0.5000 - val_loss: 0.5900 - val_accuracy: 0.5580 - 107ms/epoch - 7ms/step
Epoch 5/100
16/16 - 0s - loss: 0.6248 - accuracy: 0.5040 - val_loss: 0.5885 - val_accuracy: 0.5640 - 70ms/epoch - 4ms/step
Epoch 6/100
16/16 - 0s - loss: 0.6229 - accuracy: 0.5140 - val_loss: 0.5870 - val_accuracy: 0.5700 - 71ms/epoch - 4ms/step
Epoch 7/100
16/16 - 0s - loss: 0.6211 - accuracy: 0.5140 - val_loss: 0.5854 - val_accuracy: 0.5720 - 113ms/epoch - 7ms/step
Epoch 8/100
16/16 - 0s - loss: 0.6193 - accuracy: 0.5140 - val_loss: 0.5838 - val_accuracy: 0.5780 - 87ms/epoch - 5ms/step
Epoch 9/100
16/16 - 0s - loss: 0.6174 - accuracy: 0.5220 - val_loss: 0.5823 - val_accuracy: 0.5780 - 75ms/epoch - 5ms/step
Epoch 10/100
16/16 - 0s - loss: 0.6156 - accuracy: 0.5300 - val_loss: 0.5807 - val_accuracy: 0.5800 - 80ms/epoch - 5ms/step
Epoch 11/100
16/16 - 0s - loss: 0.6138 - accuracy: 0.5340 - val_loss: 0.5792 - val_accuracy: 0.5860 - 111ms/epoch - 7ms/step
Epoch 12/100
16/16 - 0s - loss: 0.6120 - accuracy: 0.5360 - val_loss: 0.5776 - val_accuracy: 0.5880 - 68ms/epoch - 4ms/step
Epoch 13/100
16/16 - 0s - loss: 0.6102 - accuracy: 0.5360 - val_loss: 0.5761 - val_accuracy: 0.5860 - 70ms/epoch - 4ms/step
Epoch 14/100
16/16 - 0s - loss: 0.6085 - accuracy: 0.5400 - val_loss: 0.5746 - val_accuracy: 0.5880 - 123ms/epoch - 8ms/step
Epoch 15/100
16/16 - 0s - loss: 0.6067 - accuracy: 0.5440 - val_loss: 0.5730 - val_accuracy: 0.5900 - 114ms/epoch - 7ms/step
Epoch 16/100
16/16 - 0s - loss: 0.6049 - accuracy: 0.5500 - val_loss: 0.5714 - val_accuracy: 0.5920 - 76ms/epoch - 5ms/step
Epoch 17/100
16/16 - 0s - loss: 0.6032 - accuracy: 0.5500 - val_loss: 0.5699 - val_accuracy: 0.5960 - 91ms/epoch - 6ms/step
Epoch 18/100
16/16 - 0s - loss: 0.6014 - accuracy: 0.5520 - val_loss: 0.5684 - val_accuracy: 0.5940 - 112ms/epoch - 7ms/step
Epoch 19/100
16/16 - 0s - loss: 0.5997 - accuracy: 0.5600 - val_loss: 0.5668 - val_accuracy: 0.5940 - 110ms/epoch - 7ms/step
Epoch 20/100
16/16 - 0s - loss: 0.5979 - accuracy: 0.5600 - val_loss: 0.5652 - val_accuracy: 0.5960 - 72ms/epoch - 4ms/step
Epoch 21/100
16/16 - 0s - loss: 0.5962 - accuracy: 0.5640 - val_loss: 0.5637 - val_accuracy: 0.5980 - 71ms/epoch - 4ms/step
Epoch 22/100
16/16 - 0s - loss: 0.5945 - accuracy: 0.5680 - val_loss: 0.5622 - val_accuracy: 0.6020 - 72ms/epoch - 4ms/step
Epoch 23/100
16/16 - 0s - loss: 0.5928 - accuracy: 0.5720 - val_loss: 0.5607 - val_accuracy: 0.6040 - 109ms/epoch - 7ms/step
Epoch 24/100
16/16 - 0s - loss: 0.5911 - accuracy: 0.5780 - val_loss: 0.5592 - val_accuracy: 0.6040 - 73ms/epoch - 5ms/step
Epoch 25/100
16/16 - 0s - loss: 0.5894 - accuracy: 0.5800 - val_loss: 0.5576 - val_accuracy: 0.6040 - 115ms/epoch - 7ms/step
Epoch 26/100
16/16 - 0s - loss: 0.5877 - accuracy: 0.5840 - val_loss: 0.5561 - val_accuracy: 0.6060 - 110ms/epoch - 7ms/step
Epoch 27/100
16/16 - 0s - loss: 0.5860 - accuracy: 0.5840 - val_loss: 0.5545 - val_accuracy: 0.6060 - 70ms/epoch - 4ms/step
Epoch 28/100
16/16 - 0s - loss: 0.5843 - accuracy: 0.5900 - val_loss: 0.5530 - val_accuracy: 0.6060 - 76ms/epoch - 5ms/step
Epoch 29/100
16/16 - 0s - loss: 0.5826 - accuracy: 0.5940 - val_loss: 0.5515 - val_accuracy: 0.6060 - 75ms/epoch - 5ms/step
Epoch 30/100
16/16 - 0s - loss: 0.5810 - accuracy: 0.5980 - val_loss: 0.5500 - val_accuracy: 0.6080 - 74ms/epoch - 5ms/step
Epoch 31/100
16/16 - 0s - loss: 0.5792 - accuracy: 0.6000 - val_loss: 0.5484 - val_accuracy: 0.6120 - 119ms/epoch - 7ms/step
Epoch 32/100
16/16 - 0s - loss: 0.5775 - accuracy: 0.6000 - val_loss: 0.5469 - val_accuracy: 0.6100 - 113ms/epoch - 7ms/step
Epoch 33/100
16/16 - 0s - loss: 0.5758 - accuracy: 0.6020 - val_loss: 0.5454 - val_accuracy: 0.6160 - 114ms/epoch - 7ms/step
Epoch 34/100
16/16 - 0s - loss: 0.5742 - accuracy: 0.6120 - val_loss: 0.5439 - val_accuracy: 0.6180 - 76ms/epoch - 5ms/step
Epoch 35/100
16/16 - 0s - loss: 0.5725 - accuracy: 0.6180 - val_loss: 0.5424 - val_accuracy: 0.6220 - 113ms/epoch - 7ms/step
Epoch 36/100
16/16 - 0s - loss: 0.5708 - accuracy: 0.6240 - val_loss: 0.5408 - val_accuracy: 0.6240 - 111ms/epoch - 7ms/step
Epoch 37/100
16/16 - 0s - loss: 0.5691 - accuracy: 0.6320 - val_loss: 0.5394 - val_accuracy: 0.6280 - 73ms/epoch - 5ms/step
Epoch 38/100
16/16 - 0s - loss: 0.5674 - accuracy: 0.6360 - val_loss: 0.5378 - val_accuracy: 0.6340 - 76ms/epoch - 5ms/step
Epoch 39/100
16/16 - 0s - loss: 0.5657 - accuracy: 0.6360 - val_loss: 0.5364 - val_accuracy: 0.6420 - 114ms/epoch - 7ms/step
Epoch 40/100
16/16 - 0s - loss: 0.5641 - accuracy: 0.6420 - val_loss: 0.5349 - val_accuracy: 0.6460 - 110ms/epoch - 7ms/step
Epoch 41/100
16/16 - 0s - loss: 0.5624 - accuracy: 0.6400 - val_loss: 0.5333 - val_accuracy: 0.6500 - 66ms/epoch - 4ms/step
Epoch 42/100
16/16 - 0s - loss: 0.5607 - accuracy: 0.6460 - val_loss: 0.5318 - val_accuracy: 0.6500 - 69ms/epoch - 4ms/step
Epoch 43/100
16/16 - 0s - loss: 0.5590 - accuracy: 0.6460 - val_loss: 0.5303 - val_accuracy: 0.6580 - 70ms/epoch - 4ms/step
Epoch 44/100
16/16 - 0s - loss: 0.5573 - accuracy: 0.6540 - val_loss: 0.5288 - val_accuracy: 0.6600 - 115ms/epoch - 7ms/step
Epoch 45/100
16/16 - 0s - loss: 0.5557 - accuracy: 0.6560 - val_loss: 0.5273 - val_accuracy: 0.6580 - 71ms/epoch - 4ms/step
Epoch 46/100
16/16 - 0s - loss: 0.5540 - accuracy: 0.6560 - val_loss: 0.5258 - val_accuracy: 0.6640 - 70ms/epoch - 4ms/step
Epoch 47/100
16/16 - 0s - loss: 0.5523 - accuracy: 0.6600 - val_loss: 0.5243 - val_accuracy: 0.6700 - 70ms/epoch - 4ms/step
Epoch 48/100
16/16 - 0s - loss: 0.5506 - accuracy: 0.6620 - val_loss: 0.5228 - val_accuracy: 0.6840 - 73ms/epoch - 5ms/step
Epoch 49/100
16/16 - 0s - loss: 0.5489 - accuracy: 0.6640 - val_loss: 0.5213 - val_accuracy: 0.6860 - 110ms/epoch - 7ms/step
Epoch 50/100
16/16 - 0s - loss: 0.5473 - accuracy: 0.6680 - val_loss: 0.5198 - val_accuracy: 0.6880 - 71ms/epoch - 4ms/step
Epoch 51/100
16/16 - 0s - loss: 0.5456 - accuracy: 0.6740 - val_loss: 0.5182 - val_accuracy: 0.6920 - 89ms/epoch - 6ms/step
Epoch 52/100
16/16 - 0s - loss: 0.5439 - accuracy: 0.6780 - val_loss: 0.5168 - val_accuracy: 0.6920 - 80ms/epoch - 5ms/step
Epoch 53/100
16/16 - 0s - loss: 0.5423 - accuracy: 0.6860 - val_loss: 0.5153 - val_accuracy: 0.6940 - 71ms/epoch - 4ms/step
Epoch 54/100
16/16 - 0s - loss: 0.5406 - accuracy: 0.6960 - val_loss: 0.5138 - val_accuracy: 0.7000 - 75ms/epoch - 5ms/step
Epoch 55/100
16/16 - 0s - loss: 0.5389 - accuracy: 0.7000 - val_loss: 0.5123 - val_accuracy: 0.7000 - 75ms/epoch - 5ms/step
Epoch 56/100
16/16 - 0s - loss: 0.5373 - accuracy: 0.7060 - val_loss: 0.5108 - val_accuracy: 0.7060 - 74ms/epoch - 5ms/step
Epoch 57/100
16/16 - 0s - loss: 0.5357 - accuracy: 0.7100 - val_loss: 0.5093 - val_accuracy: 0.7080 - 73ms/epoch - 5ms/step
Epoch 58/100
16/16 - 0s - loss: 0.5340 - accuracy: 0.7160 - val_loss: 0.5078 - val_accuracy: 0.7100 - 109ms/epoch - 7ms/step
Epoch 59/100
16/16 - 0s - loss: 0.5324 - accuracy: 0.7220 - val_loss: 0.5063 - val_accuracy: 0.7120 - 64ms/epoch - 4ms/step
Epoch 60/100
16/16 - 0s - loss: 0.5307 - accuracy: 0.7220 - val_loss: 0.5049 - val_accuracy: 0.7140 - 69ms/epoch - 4ms/step
Epoch 61/100
16/16 - 0s - loss: 0.5290 - accuracy: 0.7260 - val_loss: 0.5034 - val_accuracy: 0.7220 - 69ms/epoch - 4ms/step
Epoch 62/100
16/16 - 0s - loss: 0.5274 - accuracy: 0.7280 - val_loss: 0.5019 - val_accuracy: 0.7240 - 70ms/epoch - 4ms/step
Epoch 63/100
16/16 - 0s - loss: 0.5257 - accuracy: 0.7320 - val_loss: 0.5004 - val_accuracy: 0.7260 - 112ms/epoch - 7ms/step
Epoch 64/100
16/16 - 0s - loss: 0.5241 - accuracy: 0.7360 - val_loss: 0.4990 - val_accuracy: 0.7260 - 74ms/epoch - 5ms/step
Epoch 65/100
16/16 - 0s - loss: 0.5225 - accuracy: 0.7480 - val_loss: 0.4975 - val_accuracy: 0.7340 - 73ms/epoch - 5ms/step
Epoch 66/100
16/16 - 0s - loss: 0.5208 - accuracy: 0.7500 - val_loss: 0.4961 - val_accuracy: 0.7360 - 69ms/epoch - 4ms/step
Epoch 67/100
16/16 - 0s - loss: 0.5192 - accuracy: 0.7580 - val_loss: 0.4947 - val_accuracy: 0.7380 - 67ms/epoch - 4ms/step
Epoch 68/100
16/16 - 0s - loss: 0.5176 - accuracy: 0.7600 - val_loss: 0.4933 - val_accuracy: 0.7440 - 67ms/epoch - 4ms/step
Epoch 69/100
16/16 - 0s - loss: 0.5160 - accuracy: 0.7620 - val_loss: 0.4919 - val_accuracy: 0.7440 - 68ms/epoch - 4ms/step
Epoch 70/100
16/16 - 0s - loss: 0.5144 - accuracy: 0.7680 - val_loss: 0.4905 - val_accuracy: 0.7560 - 76ms/epoch - 5ms/step
Epoch 71/100
16/16 - 0s - loss: 0.5128 - accuracy: 0.7700 - val_loss: 0.4890 - val_accuracy: 0.7600 - 66ms/epoch - 4ms/step
Epoch 72/100
16/16 - 0s - loss: 0.5112 - accuracy: 0.7740 - val_loss: 0.4876 - val_accuracy: 0.7640 - 69ms/epoch - 4ms/step
Epoch 73/100
16/16 - 0s - loss: 0.5096 - accuracy: 0.7760 - val_loss: 0.4862 - val_accuracy: 0.7700 - 107ms/epoch - 7ms/step
Epoch 74/100
16/16 - 0s - loss: 0.5081 - accuracy: 0.7760 - val_loss: 0.4849 - val_accuracy: 0.7740 - 74ms/epoch - 5ms/step
Epoch 75/100
16/16 - 0s - loss: 0.5065 - accuracy: 0.7760 - val_loss: 0.4836 - val_accuracy: 0.7800 - 112ms/epoch - 7ms/step
Epoch 76/100
16/16 - 0s - loss: 0.5049 - accuracy: 0.7780 - val_loss: 0.4822 - val_accuracy: 0.7820 - 72ms/epoch - 5ms/step
Epoch 77/100
16/16 - 0s - loss: 0.5034 - accuracy: 0.7800 - val_loss: 0.4808 - val_accuracy: 0.7840 - 69ms/epoch - 4ms/step
Epoch 78/100
16/16 - 0s - loss: 0.5019 - accuracy: 0.7860 - val_loss: 0.4795 - val_accuracy: 0.7880 - 67ms/epoch - 4ms/step
Epoch 79/100
16/16 - 0s - loss: 0.5003 - accuracy: 0.7880 - val_loss: 0.4781 - val_accuracy: 0.7860 - 71ms/epoch - 4ms/step
Epoch 80/100
16/16 - 0s - loss: 0.4988 - accuracy: 0.7900 - val_loss: 0.4768 - val_accuracy: 0.7880 - 69ms/epoch - 4ms/step
Epoch 81/100
16/16 - 0s - loss: 0.4973 - accuracy: 0.7920 - val_loss: 0.4755 - val_accuracy: 0.7920 - 74ms/epoch - 5ms/step
Epoch 82/100
16/16 - 0s - loss: 0.4958 - accuracy: 0.7940 - val_loss: 0.4742 - val_accuracy: 0.7960 - 108ms/epoch - 7ms/step
Epoch 83/100
16/16 - 0s - loss: 0.4943 - accuracy: 0.7960 - val_loss: 0.4729 - val_accuracy: 0.8000 - 74ms/epoch - 5ms/step
Epoch 84/100
16/16 - 0s - loss: 0.4928 - accuracy: 0.8020 - val_loss: 0.4716 - val_accuracy: 0.8040 - 111ms/epoch - 7ms/step
Epoch 85/100
16/16 - 0s - loss: 0.4913 - accuracy: 0.8060 - val_loss: 0.4704 - val_accuracy: 0.8040 - 114ms/epoch - 7ms/step
Epoch 86/100
16/16 - 0s - loss: 0.4898 - accuracy: 0.8080 - val_loss: 0.4691 - val_accuracy: 0.8060 - 109ms/epoch - 7ms/step
Epoch 87/100
16/16 - 0s - loss: 0.4883 - accuracy: 0.8120 - val_loss: 0.4678 - val_accuracy: 0.8140 - 71ms/epoch - 4ms/step
Epoch 88/100
16/16 - 0s - loss: 0.4868 - accuracy: 0.8140 - val_loss: 0.4665 - val_accuracy: 0.8140 - 111ms/epoch - 7ms/step
Epoch 89/100
16/16 - 0s - loss: 0.4854 - accuracy: 0.8160 - val_loss: 0.4652 - val_accuracy: 0.8140 - 112ms/epoch - 7ms/step
Epoch 90/100
16/16 - 0s - loss: 0.4839 - accuracy: 0.8180 - val_loss: 0.4639 - val_accuracy: 0.8160 - 68ms/epoch - 4ms/step
Epoch 91/100
16/16 - 0s - loss: 0.4824 - accuracy: 0.8240 - val_loss: 0.4627 - val_accuracy: 0.8180 - 68ms/epoch - 4ms/step
Epoch 92/100
16/16 - 0s - loss: 0.4810 - accuracy: 0.8320 - val_loss: 0.4614 - val_accuracy: 0.8200 - 68ms/epoch - 4ms/step
Epoch 93/100
16/16 - 0s - loss: 0.4795 - accuracy: 0.8340 - val_loss: 0.4601 - val_accuracy: 0.8240 - 77ms/epoch - 5ms/step
Epoch 94/100
16/16 - 0s - loss: 0.4781 - accuracy: 0.8360 - val_loss: 0.4588 - val_accuracy: 0.8280 - 69ms/epoch - 4ms/step
Epoch 95/100
16/16 - 0s - loss: 0.4766 - accuracy: 0.8400 - val_loss: 0.4576 - val_accuracy: 0.8380 - 76ms/epoch - 5ms/step
Epoch 96/100
16/16 - 0s - loss: 0.4752 - accuracy: 0.8460 - val_loss: 0.4563 - val_accuracy: 0.8420 - 71ms/epoch - 4ms/step
Epoch 97/100
16/16 - 0s - loss: 0.4737 - accuracy: 0.8480 - val_loss: 0.4551 - val_accuracy: 0.8440 - 71ms/epoch - 4ms/step
Epoch 98/100
16/16 - 0s - loss: 0.4723 - accuracy: 0.8480 - val_loss: 0.4538 - val_accuracy: 0.8440 - 77ms/epoch - 5ms/step
Epoch 99/100
16/16 - 0s - loss: 0.4708 - accuracy: 0.8520 - val_loss: 0.4525 - val_accuracy: 0.8480 - 74ms/epoch - 5ms/step
Epoch 100/100
16/16 - 0s - loss: 0.4694 - accuracy: 0.8540 - val_loss: 0.4512 - val_accuracy: 0.8520 - 77ms/epoch - 5ms/step
Finished training weak classifier 2
Epoch 1/100
16/16 - 1s - loss: 0.8410 - accuracy: 0.5060 - val_loss: 0.8266 - val_accuracy: 0.5080 - 764ms/epoch - 48ms/step
Epoch 2/100
16/16 - 0s - loss: 0.8354 - accuracy: 0.5060 - val_loss: 0.8204 - val_accuracy: 0.5120 - 69ms/epoch - 4ms/step
Epoch 3/100
16/16 - 0s - loss: 0.8300 - accuracy: 0.5040 - val_loss: 0.8142 - val_accuracy: 0.5100 - 119ms/epoch - 7ms/step
Epoch 4/100
16/16 - 0s - loss: 0.8246 - accuracy: 0.5080 - val_loss: 0.8083 - val_accuracy: 0.5120 - 75ms/epoch - 5ms/step
Epoch 5/100
16/16 - 0s - loss: 0.8191 - accuracy: 0.5040 - val_loss: 0.8022 - val_accuracy: 0.5120 - 76ms/epoch - 5ms/step
Epoch 6/100
16/16 - 0s - loss: 0.8139 - accuracy: 0.5060 - val_loss: 0.7959 - val_accuracy: 0.5160 - 110ms/epoch - 7ms/step
Epoch 7/100
16/16 - 0s - loss: 0.8085 - accuracy: 0.5060 - val_loss: 0.7900 - val_accuracy: 0.5160 - 78ms/epoch - 5ms/step
Epoch 8/100
16/16 - 0s - loss: 0.8033 - accuracy: 0.5060 - val_loss: 0.7844 - val_accuracy: 0.5220 - 108ms/epoch - 7ms/step
Epoch 9/100
16/16 - 0s - loss: 0.7982 - accuracy: 0.5080 - val_loss: 0.7786 - val_accuracy: 0.5220 - 73ms/epoch - 5ms/step
Epoch 10/100
16/16 - 0s - loss: 0.7929 - accuracy: 0.5100 - val_loss: 0.7732 - val_accuracy: 0.5220 - 110ms/epoch - 7ms/step
Epoch 11/100
16/16 - 0s - loss: 0.7880 - accuracy: 0.5100 - val_loss: 0.7677 - val_accuracy: 0.5320 - 76ms/epoch - 5ms/step
Epoch 12/100
16/16 - 0s - loss: 0.7830 - accuracy: 0.5100 - val_loss: 0.7620 - val_accuracy: 0.5360 - 85ms/epoch - 5ms/step
Epoch 13/100
16/16 - 0s - loss: 0.7779 - accuracy: 0.5100 - val_loss: 0.7567 - val_accuracy: 0.5360 - 80ms/epoch - 5ms/step
Epoch 14/100
16/16 - 0s - loss: 0.7731 - accuracy: 0.5100 - val_loss: 0.7513 - val_accuracy: 0.5380 - 71ms/epoch - 4ms/step
Epoch 15/100
16/16 - 0s - loss: 0.7682 - accuracy: 0.5120 - val_loss: 0.7458 - val_accuracy: 0.5400 - 113ms/epoch - 7ms/step
Epoch 16/100
16/16 - 0s - loss: 0.7633 - accuracy: 0.5140 - val_loss: 0.7408 - val_accuracy: 0.5400 - 113ms/epoch - 7ms/step
Epoch 17/100
16/16 - 0s - loss: 0.7588 - accuracy: 0.5140 - val_loss: 0.7351 - val_accuracy: 0.5440 - 73ms/epoch - 5ms/step
Epoch 18/100
16/16 - 0s - loss: 0.7539 - accuracy: 0.5160 - val_loss: 0.7302 - val_accuracy: 0.5460 - 72ms/epoch - 4ms/step
Epoch 19/100
16/16 - 0s - loss: 0.7493 - accuracy: 0.5180 - val_loss: 0.7250 - val_accuracy: 0.5460 - 119ms/epoch - 7ms/step
Epoch 20/100
16/16 - 0s - loss: 0.7444 - accuracy: 0.5200 - val_loss: 0.7203 - val_accuracy: 0.5460 - 120ms/epoch - 7ms/step
Epoch 21/100
16/16 - 0s - loss: 0.7401 - accuracy: 0.5220 - val_loss: 0.7151 - val_accuracy: 0.5520 - 73ms/epoch - 5ms/step
Epoch 22/100
16/16 - 0s - loss: 0.7354 - accuracy: 0.5240 - val_loss: 0.7102 - val_accuracy: 0.5560 - 112ms/epoch - 7ms/step
Epoch 23/100
16/16 - 0s - loss: 0.7308 - accuracy: 0.5240 - val_loss: 0.7056 - val_accuracy: 0.5580 - 72ms/epoch - 5ms/step
Epoch 24/100
16/16 - 0s - loss: 0.7264 - accuracy: 0.5260 - val_loss: 0.7006 - val_accuracy: 0.5620 - 73ms/epoch - 5ms/step
Epoch 25/100
16/16 - 0s - loss: 0.7219 - accuracy: 0.5320 - val_loss: 0.6959 - val_accuracy: 0.5620 - 80ms/epoch - 5ms/step
Epoch 26/100
16/16 - 0s - loss: 0.7175 - accuracy: 0.5320 - val_loss: 0.6911 - val_accuracy: 0.5620 - 76ms/epoch - 5ms/step
Epoch 27/100
16/16 - 0s - loss: 0.7133 - accuracy: 0.5340 - val_loss: 0.6865 - val_accuracy: 0.5660 - 76ms/epoch - 5ms/step
Epoch 28/100
16/16 - 0s - loss: 0.7089 - accuracy: 0.5440 - val_loss: 0.6821 - val_accuracy: 0.5700 - 116ms/epoch - 7ms/step
Epoch 29/100
16/16 - 0s - loss: 0.7047 - accuracy: 0.5540 - val_loss: 0.6777 - val_accuracy: 0.5700 - 76ms/epoch - 5ms/step
Epoch 30/100
16/16 - 0s - loss: 0.7006 - accuracy: 0.5540 - val_loss: 0.6731 - val_accuracy: 0.5780 - 76ms/epoch - 5ms/step
Epoch 31/100
16/16 - 0s - loss: 0.6963 - accuracy: 0.5580 - val_loss: 0.6689 - val_accuracy: 0.5840 - 70ms/epoch - 4ms/step
Epoch 32/100
16/16 - 0s - loss: 0.6923 - accuracy: 0.5620 - val_loss: 0.6646 - val_accuracy: 0.5860 - 111ms/epoch - 7ms/step
Epoch 33/100
16/16 - 0s - loss: 0.6882 - accuracy: 0.5660 - val_loss: 0.6603 - val_accuracy: 0.5900 - 112ms/epoch - 7ms/step
Epoch 34/100
16/16 - 0s - loss: 0.6842 - accuracy: 0.5720 - val_loss: 0.6561 - val_accuracy: 0.5940 - 110ms/epoch - 7ms/step
Epoch 35/100
16/16 - 0s - loss: 0.6803 - accuracy: 0.5740 - val_loss: 0.6517 - val_accuracy: 0.5940 - 81ms/epoch - 5ms/step
Epoch 36/100
16/16 - 0s - loss: 0.6763 - accuracy: 0.5760 - val_loss: 0.6477 - val_accuracy: 0.6040 - 76ms/epoch - 5ms/step
Epoch 37/100
16/16 - 0s - loss: 0.6724 - accuracy: 0.5780 - val_loss: 0.6436 - val_accuracy: 0.6080 - 71ms/epoch - 4ms/step
Epoch 38/100
16/16 - 0s - loss: 0.6687 - accuracy: 0.5800 - val_loss: 0.6393 - val_accuracy: 0.6100 - 115ms/epoch - 7ms/step
Epoch 39/100
16/16 - 0s - loss: 0.6647 - accuracy: 0.5860 - val_loss: 0.6355 - val_accuracy: 0.6200 - 72ms/epoch - 5ms/step
Epoch 40/100
16/16 - 0s - loss: 0.6610 - accuracy: 0.5880 - val_loss: 0.6316 - val_accuracy: 0.6260 - 75ms/epoch - 5ms/step
Epoch 41/100
16/16 - 0s - loss: 0.6572 - accuracy: 0.5920 - val_loss: 0.6276 - val_accuracy: 0.6300 - 70ms/epoch - 4ms/step
Epoch 42/100
16/16 - 0s - loss: 0.6535 - accuracy: 0.6020 - val_loss: 0.6237 - val_accuracy: 0.6340 - 67ms/epoch - 4ms/step
Epoch 43/100
16/16 - 0s - loss: 0.6497 - accuracy: 0.6120 - val_loss: 0.6199 - val_accuracy: 0.6400 - 113ms/epoch - 7ms/step
Epoch 44/100
16/16 - 0s - loss: 0.6461 - accuracy: 0.6200 - val_loss: 0.6162 - val_accuracy: 0.6500 - 70ms/epoch - 4ms/step
Epoch 45/100
16/16 - 0s - loss: 0.6425 - accuracy: 0.6280 - val_loss: 0.6123 - val_accuracy: 0.6560 - 126ms/epoch - 8ms/step
Epoch 46/100
16/16 - 0s - loss: 0.6388 - accuracy: 0.6260 - val_loss: 0.6085 - val_accuracy: 0.6580 - 68ms/epoch - 4ms/step
Epoch 47/100
16/16 - 0s - loss: 0.6353 - accuracy: 0.6320 - val_loss: 0.6047 - val_accuracy: 0.6580 - 68ms/epoch - 4ms/step
Epoch 48/100
16/16 - 0s - loss: 0.6317 - accuracy: 0.6320 - val_loss: 0.6011 - val_accuracy: 0.6620 - 111ms/epoch - 7ms/step
Epoch 49/100
16/16 - 0s - loss: 0.6282 - accuracy: 0.6320 - val_loss: 0.5975 - val_accuracy: 0.6620 - 69ms/epoch - 4ms/step
Epoch 50/100
16/16 - 0s - loss: 0.6248 - accuracy: 0.6340 - val_loss: 0.5938 - val_accuracy: 0.6660 - 72ms/epoch - 4ms/step
Epoch 51/100
16/16 - 0s - loss: 0.6214 - accuracy: 0.6400 - val_loss: 0.5903 - val_accuracy: 0.6700 - 72ms/epoch - 4ms/step
Epoch 52/100
16/16 - 0s - loss: 0.6180 - accuracy: 0.6460 - val_loss: 0.5870 - val_accuracy: 0.6780 - 116ms/epoch - 7ms/step
Epoch 53/100
16/16 - 0s - loss: 0.6147 - accuracy: 0.6540 - val_loss: 0.5836 - val_accuracy: 0.6860 - 73ms/epoch - 5ms/step
Epoch 54/100
16/16 - 0s - loss: 0.6113 - accuracy: 0.6560 - val_loss: 0.5802 - val_accuracy: 0.6920 - 68ms/epoch - 4ms/step
Epoch 55/100
16/16 - 0s - loss: 0.6081 - accuracy: 0.6580 - val_loss: 0.5768 - val_accuracy: 0.6960 - 109ms/epoch - 7ms/step
Epoch 56/100
16/16 - 0s - loss: 0.6048 - accuracy: 0.6640 - val_loss: 0.5734 - val_accuracy: 0.6980 - 89ms/epoch - 6ms/step
Epoch 57/100
16/16 - 0s - loss: 0.6016 - accuracy: 0.6660 - val_loss: 0.5701 - val_accuracy: 0.7020 - 112ms/epoch - 7ms/step
Epoch 58/100
16/16 - 0s - loss: 0.5984 - accuracy: 0.6740 - val_loss: 0.5669 - val_accuracy: 0.7060 - 76ms/epoch - 5ms/step
Epoch 59/100
16/16 - 0s - loss: 0.5954 - accuracy: 0.6800 - val_loss: 0.5638 - val_accuracy: 0.7060 - 76ms/epoch - 5ms/step
Epoch 60/100
16/16 - 0s - loss: 0.5922 - accuracy: 0.6920 - val_loss: 0.5608 - val_accuracy: 0.7140 - 72ms/epoch - 5ms/step
Epoch 61/100
16/16 - 0s - loss: 0.5891 - accuracy: 0.6980 - val_loss: 0.5577 - val_accuracy: 0.7180 - 70ms/epoch - 4ms/step
Epoch 62/100
16/16 - 0s - loss: 0.5861 - accuracy: 0.7000 - val_loss: 0.5546 - val_accuracy: 0.7220 - 75ms/epoch - 5ms/step
Epoch 63/100
16/16 - 0s - loss: 0.5832 - accuracy: 0.7100 - val_loss: 0.5515 - val_accuracy: 0.7300 - 69ms/epoch - 4ms/step
Epoch 64/100
16/16 - 0s - loss: 0.5802 - accuracy: 0.7180 - val_loss: 0.5485 - val_accuracy: 0.7360 - 70ms/epoch - 4ms/step
Epoch 65/100
16/16 - 0s - loss: 0.5773 - accuracy: 0.7180 - val_loss: 0.5456 - val_accuracy: 0.7400 - 80ms/epoch - 5ms/step
Epoch 66/100
16/16 - 0s - loss: 0.5744 - accuracy: 0.7200 - val_loss: 0.5427 - val_accuracy: 0.7460 - 70ms/epoch - 4ms/step
Epoch 67/100
16/16 - 0s - loss: 0.5715 - accuracy: 0.7220 - val_loss: 0.5399 - val_accuracy: 0.7500 - 70ms/epoch - 4ms/step
Epoch 68/100
16/16 - 0s - loss: 0.5687 - accuracy: 0.7260 - val_loss: 0.5370 - val_accuracy: 0.7560 - 120ms/epoch - 8ms/step
Epoch 69/100
16/16 - 0s - loss: 0.5660 - accuracy: 0.7320 - val_loss: 0.5341 - val_accuracy: 0.7580 - 68ms/epoch - 4ms/step
Epoch 70/100
16/16 - 0s - loss: 0.5632 - accuracy: 0.7320 - val_loss: 0.5314 - val_accuracy: 0.7600 - 113ms/epoch - 7ms/step
Epoch 71/100
16/16 - 0s - loss: 0.5605 - accuracy: 0.7340 - val_loss: 0.5287 - val_accuracy: 0.7600 - 70ms/epoch - 4ms/step
Epoch 72/100
16/16 - 0s - loss: 0.5578 - accuracy: 0.7340 - val_loss: 0.5260 - val_accuracy: 0.7660 - 75ms/epoch - 5ms/step
Epoch 73/100
16/16 - 0s - loss: 0.5551 - accuracy: 0.7360 - val_loss: 0.5233 - val_accuracy: 0.7680 - 77ms/epoch - 5ms/step
Epoch 74/100
16/16 - 0s - loss: 0.5525 - accuracy: 0.7380 - val_loss: 0.5207 - val_accuracy: 0.7760 - 75ms/epoch - 5ms/step
Epoch 75/100
16/16 - 0s - loss: 0.5499 - accuracy: 0.7380 - val_loss: 0.5180 - val_accuracy: 0.7820 - 112ms/epoch - 7ms/step
Epoch 76/100
16/16 - 0s - loss: 0.5473 - accuracy: 0.7440 - val_loss: 0.5154 - val_accuracy: 0.7860 - 114ms/epoch - 7ms/step
Epoch 77/100
16/16 - 0s - loss: 0.5448 - accuracy: 0.7460 - val_loss: 0.5129 - val_accuracy: 0.7880 - 73ms/epoch - 5ms/step
Epoch 78/100
16/16 - 0s - loss: 0.5423 - accuracy: 0.7500 - val_loss: 0.5102 - val_accuracy: 0.7900 - 116ms/epoch - 7ms/step
Epoch 79/100
16/16 - 0s - loss: 0.5398 - accuracy: 0.7580 - val_loss: 0.5077 - val_accuracy: 0.7960 - 82ms/epoch - 5ms/step
Epoch 80/100
16/16 - 0s - loss: 0.5374 - accuracy: 0.7620 - val_loss: 0.5052 - val_accuracy: 0.8000 - 69ms/epoch - 4ms/step
Epoch 81/100
16/16 - 0s - loss: 0.5349 - accuracy: 0.7660 - val_loss: 0.5028 - val_accuracy: 0.8000 - 68ms/epoch - 4ms/step
Epoch 82/100
16/16 - 0s - loss: 0.5325 - accuracy: 0.7720 - val_loss: 0.5005 - val_accuracy: 0.8020 - 69ms/epoch - 4ms/step
Epoch 83/100
16/16 - 0s - loss: 0.5302 - accuracy: 0.7780 - val_loss: 0.4981 - val_accuracy: 0.8040 - 80ms/epoch - 5ms/step
Epoch 84/100
16/16 - 0s - loss: 0.5278 - accuracy: 0.7800 - val_loss: 0.4957 - val_accuracy: 0.8040 - 70ms/epoch - 4ms/step
Epoch 85/100
16/16 - 0s - loss: 0.5255 - accuracy: 0.7840 - val_loss: 0.4933 - val_accuracy: 0.8040 - 87ms/epoch - 5ms/step
Epoch 86/100
16/16 - 0s - loss: 0.5231 - accuracy: 0.7860 - val_loss: 0.4910 - val_accuracy: 0.8040 - 72ms/epoch - 5ms/step
Epoch 87/100
16/16 - 0s - loss: 0.5209 - accuracy: 0.7900 - val_loss: 0.4887 - val_accuracy: 0.8040 - 73ms/epoch - 5ms/step
Epoch 88/100
16/16 - 0s - loss: 0.5186 - accuracy: 0.7920 - val_loss: 0.4863 - val_accuracy: 0.8060 - 79ms/epoch - 5ms/step
Epoch 89/100
16/16 - 0s - loss: 0.5164 - accuracy: 0.7920 - val_loss: 0.4841 - val_accuracy: 0.8100 - 111ms/epoch - 7ms/step
Epoch 90/100
16/16 - 0s - loss: 0.5141 - accuracy: 0.7960 - val_loss: 0.4819 - val_accuracy: 0.8240 - 72ms/epoch - 4ms/step
Epoch 91/100
16/16 - 0s - loss: 0.5120 - accuracy: 0.7940 - val_loss: 0.4796 - val_accuracy: 0.8240 - 98ms/epoch - 6ms/step
Epoch 92/100
16/16 - 0s - loss: 0.5097 - accuracy: 0.7960 - val_loss: 0.4775 - val_accuracy: 0.8260 - 74ms/epoch - 5ms/step
Epoch 93/100
16/16 - 0s - loss: 0.5076 - accuracy: 0.7980 - val_loss: 0.4753 - val_accuracy: 0.8260 - 71ms/epoch - 4ms/step
Epoch 94/100
16/16 - 0s - loss: 0.5054 - accuracy: 0.8020 - val_loss: 0.4732 - val_accuracy: 0.8280 - 69ms/epoch - 4ms/step
Epoch 95/100
16/16 - 0s - loss: 0.5032 - accuracy: 0.8020 - val_loss: 0.4710 - val_accuracy: 0.8280 - 72ms/epoch - 5ms/step
Epoch 96/100
16/16 - 0s - loss: 0.5011 - accuracy: 0.8020 - val_loss: 0.4690 - val_accuracy: 0.8340 - 70ms/epoch - 4ms/step
Epoch 97/100
16/16 - 0s - loss: 0.4991 - accuracy: 0.8020 - val_loss: 0.4669 - val_accuracy: 0.8340 - 148ms/epoch - 9ms/step
Epoch 98/100
16/16 - 0s - loss: 0.4970 - accuracy: 0.8020 - val_loss: 0.4649 - val_accuracy: 0.8360 - 68ms/epoch - 4ms/step
Epoch 99/100
16/16 - 0s - loss: 0.4949 - accuracy: 0.8040 - val_loss: 0.4628 - val_accuracy: 0.8400 - 76ms/epoch - 5ms/step
Epoch 100/100
16/16 - 0s - loss: 0.4929 - accuracy: 0.8060 - val_loss: 0.4608 - val_accuracy: 0.8460 - 67ms/epoch - 4ms/step
Finished training weak classifier 3
Epoch 1/100
16/16 - 1s - loss: 0.7409 - accuracy: 0.5380 - val_loss: 0.7494 - val_accuracy: 0.5180 - 759ms/epoch - 47ms/step
Epoch 2/100
16/16 - 0s - loss: 0.7337 - accuracy: 0.5400 - val_loss: 0.7404 - val_accuracy: 0.5260 - 74ms/epoch - 5ms/step
Epoch 3/100
16/16 - 0s - loss: 0.7270 - accuracy: 0.5440 - val_loss: 0.7317 - val_accuracy: 0.5320 - 71ms/epoch - 4ms/step
Epoch 4/100
16/16 - 0s - loss: 0.7204 - accuracy: 0.5500 - val_loss: 0.7238 - val_accuracy: 0.5360 - 73ms/epoch - 5ms/step
Epoch 5/100
16/16 - 0s - loss: 0.7142 - accuracy: 0.5520 - val_loss: 0.7161 - val_accuracy: 0.5480 - 78ms/epoch - 5ms/step
Epoch 6/100
16/16 - 0s - loss: 0.7082 - accuracy: 0.5640 - val_loss: 0.7087 - val_accuracy: 0.5520 - 84ms/epoch - 5ms/step
Epoch 7/100
16/16 - 0s - loss: 0.7025 - accuracy: 0.5660 - val_loss: 0.7013 - val_accuracy: 0.5660 - 111ms/epoch - 7ms/step
Epoch 8/100
16/16 - 0s - loss: 0.6968 - accuracy: 0.5700 - val_loss: 0.6944 - val_accuracy: 0.5740 - 115ms/epoch - 7ms/step
Epoch 9/100
16/16 - 0s - loss: 0.6915 - accuracy: 0.5780 - val_loss: 0.6877 - val_accuracy: 0.5760 - 72ms/epoch - 4ms/step
Epoch 10/100
16/16 - 0s - loss: 0.6862 - accuracy: 0.5860 - val_loss: 0.6815 - val_accuracy: 0.5820 - 72ms/epoch - 5ms/step
Epoch 11/100
16/16 - 0s - loss: 0.6813 - accuracy: 0.5880 - val_loss: 0.6751 - val_accuracy: 0.6000 - 112ms/epoch - 7ms/step
Epoch 12/100
16/16 - 0s - loss: 0.6763 - accuracy: 0.6000 - val_loss: 0.6694 - val_accuracy: 0.6140 - 71ms/epoch - 4ms/step
Epoch 13/100
16/16 - 0s - loss: 0.6716 - accuracy: 0.6180 - val_loss: 0.6636 - val_accuracy: 0.6320 - 113ms/epoch - 7ms/step
Epoch 14/100
16/16 - 0s - loss: 0.6670 - accuracy: 0.6260 - val_loss: 0.6579 - val_accuracy: 0.6460 - 73ms/epoch - 5ms/step
Epoch 15/100
16/16 - 0s - loss: 0.6624 - accuracy: 0.6340 - val_loss: 0.6524 - val_accuracy: 0.6580 - 79ms/epoch - 5ms/step
Epoch 16/100
16/16 - 0s - loss: 0.6580 - accuracy: 0.6480 - val_loss: 0.6472 - val_accuracy: 0.6680 - 69ms/epoch - 4ms/step
Epoch 17/100
16/16 - 0s - loss: 0.6536 - accuracy: 0.6580 - val_loss: 0.6421 - val_accuracy: 0.6720 - 81ms/epoch - 5ms/step
Epoch 18/100
16/16 - 0s - loss: 0.6494 - accuracy: 0.6700 - val_loss: 0.6370 - val_accuracy: 0.6780 - 116ms/epoch - 7ms/step
Epoch 19/100
16/16 - 0s - loss: 0.6452 - accuracy: 0.6760 - val_loss: 0.6321 - val_accuracy: 0.6880 - 113ms/epoch - 7ms/step
Epoch 20/100
16/16 - 0s - loss: 0.6412 - accuracy: 0.6860 - val_loss: 0.6272 - val_accuracy: 0.6880 - 114ms/epoch - 7ms/step
Epoch 21/100
16/16 - 0s - loss: 0.6371 - accuracy: 0.6900 - val_loss: 0.6226 - val_accuracy: 0.6960 - 77ms/epoch - 5ms/step
Epoch 22/100
16/16 - 0s - loss: 0.6332 - accuracy: 0.6900 - val_loss: 0.6181 - val_accuracy: 0.6980 - 69ms/epoch - 4ms/step
Epoch 23/100
16/16 - 0s - loss: 0.6294 - accuracy: 0.7000 - val_loss: 0.6136 - val_accuracy: 0.7000 - 77ms/epoch - 5ms/step
Epoch 24/100
16/16 - 0s - loss: 0.6255 - accuracy: 0.7040 - val_loss: 0.6092 - val_accuracy: 0.7020 - 71ms/epoch - 4ms/step
Epoch 25/100
16/16 - 0s - loss: 0.6217 - accuracy: 0.7040 - val_loss: 0.6051 - val_accuracy: 0.7040 - 73ms/epoch - 5ms/step
Epoch 26/100
16/16 - 0s - loss: 0.6182 - accuracy: 0.7040 - val_loss: 0.6007 - val_accuracy: 0.7120 - 78ms/epoch - 5ms/step
Epoch 27/100
16/16 - 0s - loss: 0.6145 - accuracy: 0.7060 - val_loss: 0.5968 - val_accuracy: 0.7120 - 75ms/epoch - 5ms/step
Epoch 28/100
16/16 - 0s - loss: 0.6110 - accuracy: 0.7080 - val_loss: 0.5926 - val_accuracy: 0.7160 - 84ms/epoch - 5ms/step
Epoch 29/100
16/16 - 0s - loss: 0.6076 - accuracy: 0.7120 - val_loss: 0.5886 - val_accuracy: 0.7160 - 75ms/epoch - 5ms/step
Epoch 30/100
16/16 - 0s - loss: 0.6040 - accuracy: 0.7160 - val_loss: 0.5847 - val_accuracy: 0.7220 - 119ms/epoch - 7ms/step
Epoch 31/100
16/16 - 0s - loss: 0.6007 - accuracy: 0.7240 - val_loss: 0.5810 - val_accuracy: 0.7280 - 115ms/epoch - 7ms/step
Epoch 32/100
16/16 - 0s - loss: 0.5974 - accuracy: 0.7280 - val_loss: 0.5774 - val_accuracy: 0.7300 - 112ms/epoch - 7ms/step
Epoch 33/100
16/16 - 0s - loss: 0.5941 - accuracy: 0.7360 - val_loss: 0.5737 - val_accuracy: 0.7300 - 70ms/epoch - 4ms/step
Epoch 34/100
16/16 - 0s - loss: 0.5910 - accuracy: 0.7400 - val_loss: 0.5700 - val_accuracy: 0.7320 - 110ms/epoch - 7ms/step
Epoch 35/100
16/16 - 0s - loss: 0.5877 - accuracy: 0.7420 - val_loss: 0.5665 - val_accuracy: 0.7340 - 67ms/epoch - 4ms/step
Epoch 36/100
16/16 - 0s - loss: 0.5845 - accuracy: 0.7400 - val_loss: 0.5629 - val_accuracy: 0.7400 - 72ms/epoch - 5ms/step
Epoch 37/100
16/16 - 0s - loss: 0.5813 - accuracy: 0.7400 - val_loss: 0.5594 - val_accuracy: 0.7480 - 79ms/epoch - 5ms/step
Epoch 38/100
16/16 - 0s - loss: 0.5782 - accuracy: 0.7420 - val_loss: 0.5559 - val_accuracy: 0.7540 - 75ms/epoch - 5ms/step
Epoch 39/100
16/16 - 0s - loss: 0.5751 - accuracy: 0.7420 - val_loss: 0.5526 - val_accuracy: 0.7620 - 83ms/epoch - 5ms/step
Epoch 40/100
16/16 - 0s - loss: 0.5721 - accuracy: 0.7460 - val_loss: 0.5493 - val_accuracy: 0.7620 - 72ms/epoch - 4ms/step
Epoch 41/100
16/16 - 0s - loss: 0.5691 - accuracy: 0.7460 - val_loss: 0.5459 - val_accuracy: 0.7620 - 76ms/epoch - 5ms/step
Epoch 42/100
16/16 - 0s - loss: 0.5661 - accuracy: 0.7500 - val_loss: 0.5427 - val_accuracy: 0.7640 - 75ms/epoch - 5ms/step
Epoch 43/100
16/16 - 0s - loss: 0.5631 - accuracy: 0.7520 - val_loss: 0.5395 - val_accuracy: 0.7620 - 69ms/epoch - 4ms/step
Epoch 44/100
16/16 - 0s - loss: 0.5602 - accuracy: 0.7520 - val_loss: 0.5362 - val_accuracy: 0.7640 - 117ms/epoch - 7ms/step
Epoch 45/100
16/16 - 0s - loss: 0.5572 - accuracy: 0.7560 - val_loss: 0.5332 - val_accuracy: 0.7640 - 70ms/epoch - 4ms/step
Epoch 46/100
16/16 - 0s - loss: 0.5543 - accuracy: 0.7620 - val_loss: 0.5302 - val_accuracy: 0.7640 - 71ms/epoch - 4ms/step
Epoch 47/100
16/16 - 0s - loss: 0.5515 - accuracy: 0.7560 - val_loss: 0.5270 - val_accuracy: 0.7640 - 71ms/epoch - 4ms/step
Epoch 48/100
16/16 - 0s - loss: 0.5487 - accuracy: 0.7580 - val_loss: 0.5240 - val_accuracy: 0.7620 - 71ms/epoch - 4ms/step
Epoch 49/100
16/16 - 0s - loss: 0.5459 - accuracy: 0.7580 - val_loss: 0.5211 - val_accuracy: 0.7620 - 75ms/epoch - 5ms/step
Epoch 50/100
16/16 - 0s - loss: 0.5432 - accuracy: 0.7600 - val_loss: 0.5181 - val_accuracy: 0.7620 - 69ms/epoch - 4ms/step
Epoch 51/100
16/16 - 0s - loss: 0.5405 - accuracy: 0.7580 - val_loss: 0.5152 - val_accuracy: 0.7600 - 111ms/epoch - 7ms/step
Epoch 52/100
16/16 - 0s - loss: 0.5379 - accuracy: 0.7540 - val_loss: 0.5124 - val_accuracy: 0.7600 - 72ms/epoch - 5ms/step
Epoch 53/100
16/16 - 0s - loss: 0.5352 - accuracy: 0.7580 - val_loss: 0.5096 - val_accuracy: 0.7620 - 76ms/epoch - 5ms/step
Epoch 54/100
16/16 - 0s - loss: 0.5326 - accuracy: 0.7580 - val_loss: 0.5068 - val_accuracy: 0.7680 - 113ms/epoch - 7ms/step
Epoch 55/100
16/16 - 0s - loss: 0.5301 - accuracy: 0.7560 - val_loss: 0.5039 - val_accuracy: 0.7740 - 115ms/epoch - 7ms/step
Epoch 56/100
16/16 - 0s - loss: 0.5275 - accuracy: 0.7580 - val_loss: 0.5012 - val_accuracy: 0.7760 - 71ms/epoch - 4ms/step
Epoch 57/100
16/16 - 0s - loss: 0.5249 - accuracy: 0.7600 - val_loss: 0.4986 - val_accuracy: 0.7800 - 74ms/epoch - 5ms/step
Epoch 58/100
16/16 - 0s - loss: 0.5224 - accuracy: 0.7600 - val_loss: 0.4958 - val_accuracy: 0.7820 - 84ms/epoch - 5ms/step
Epoch 59/100
16/16 - 0s - loss: 0.5199 - accuracy: 0.7600 - val_loss: 0.4931 - val_accuracy: 0.7880 - 78ms/epoch - 5ms/step
Epoch 60/100
16/16 - 0s - loss: 0.5174 - accuracy: 0.7600 - val_loss: 0.4905 - val_accuracy: 0.7920 - 74ms/epoch - 5ms/step
Epoch 61/100
16/16 - 0s - loss: 0.5149 - accuracy: 0.7620 - val_loss: 0.4879 - val_accuracy: 0.7960 - 113ms/epoch - 7ms/step
Epoch 62/100
16/16 - 0s - loss: 0.5124 - accuracy: 0.7680 - val_loss: 0.4854 - val_accuracy: 0.7960 - 88ms/epoch - 6ms/step
Epoch 63/100
16/16 - 0s - loss: 0.5100 - accuracy: 0.7740 - val_loss: 0.4828 - val_accuracy: 0.7980 - 76ms/epoch - 5ms/step
Epoch 64/100
16/16 - 0s - loss: 0.5076 - accuracy: 0.7820 - val_loss: 0.4803 - val_accuracy: 0.8060 - 72ms/epoch - 4ms/step
Epoch 65/100
16/16 - 0s - loss: 0.5052 - accuracy: 0.7880 - val_loss: 0.4778 - val_accuracy: 0.8080 - 111ms/epoch - 7ms/step
Epoch 66/100
16/16 - 0s - loss: 0.5027 - accuracy: 0.7940 - val_loss: 0.4753 - val_accuracy: 0.8120 - 72ms/epoch - 5ms/step
Epoch 67/100
16/16 - 0s - loss: 0.5004 - accuracy: 0.8040 - val_loss: 0.4729 - val_accuracy: 0.8160 - 112ms/epoch - 7ms/step
Epoch 68/100
16/16 - 0s - loss: 0.4981 - accuracy: 0.8060 - val_loss: 0.4705 - val_accuracy: 0.8160 - 115ms/epoch - 7ms/step
Epoch 69/100
16/16 - 0s - loss: 0.4957 - accuracy: 0.8120 - val_loss: 0.4682 - val_accuracy: 0.8180 - 72ms/epoch - 4ms/step
Epoch 70/100
16/16 - 0s - loss: 0.4935 - accuracy: 0.8120 - val_loss: 0.4658 - val_accuracy: 0.8280 - 69ms/epoch - 4ms/step
Epoch 71/100
16/16 - 0s - loss: 0.4911 - accuracy: 0.8120 - val_loss: 0.4633 - val_accuracy: 0.8320 - 72ms/epoch - 5ms/step
Epoch 72/100
16/16 - 0s - loss: 0.4888 - accuracy: 0.8120 - val_loss: 0.4609 - val_accuracy: 0.8340 - 116ms/epoch - 7ms/step
Epoch 73/100
16/16 - 0s - loss: 0.4865 - accuracy: 0.8140 - val_loss: 0.4586 - val_accuracy: 0.8400 - 78ms/epoch - 5ms/step
Epoch 74/100
16/16 - 0s - loss: 0.4842 - accuracy: 0.8160 - val_loss: 0.4563 - val_accuracy: 0.8420 - 66ms/epoch - 4ms/step
Epoch 75/100
16/16 - 0s - loss: 0.4820 - accuracy: 0.8160 - val_loss: 0.4541 - val_accuracy: 0.8420 - 81ms/epoch - 5ms/step
Epoch 76/100
16/16 - 0s - loss: 0.4797 - accuracy: 0.8200 - val_loss: 0.4518 - val_accuracy: 0.8420 - 114ms/epoch - 7ms/step
Epoch 77/100
16/16 - 0s - loss: 0.4776 - accuracy: 0.8200 - val_loss: 0.4494 - val_accuracy: 0.8440 - 72ms/epoch - 5ms/step
Epoch 78/100
16/16 - 0s - loss: 0.4753 - accuracy: 0.8220 - val_loss: 0.4472 - val_accuracy: 0.8500 - 71ms/epoch - 4ms/step
Epoch 79/100
16/16 - 0s - loss: 0.4731 - accuracy: 0.8260 - val_loss: 0.4451 - val_accuracy: 0.8560 - 82ms/epoch - 5ms/step
Epoch 80/100
16/16 - 0s - loss: 0.4709 - accuracy: 0.8280 - val_loss: 0.4429 - val_accuracy: 0.8560 - 69ms/epoch - 4ms/step
Epoch 81/100
16/16 - 0s - loss: 0.4688 - accuracy: 0.8340 - val_loss: 0.4407 - val_accuracy: 0.8560 - 71ms/epoch - 4ms/step
Epoch 82/100
16/16 - 0s - loss: 0.4666 - accuracy: 0.8400 - val_loss: 0.4387 - val_accuracy: 0.8560 - 70ms/epoch - 4ms/step
Epoch 83/100
16/16 - 0s - loss: 0.4646 - accuracy: 0.8420 - val_loss: 0.4364 - val_accuracy: 0.8620 - 69ms/epoch - 4ms/step
Epoch 84/100
16/16 - 0s - loss: 0.4623 - accuracy: 0.8440 - val_loss: 0.4344 - val_accuracy: 0.8640 - 109ms/epoch - 7ms/step
Epoch 85/100
16/16 - 0s - loss: 0.4603 - accuracy: 0.8460 - val_loss: 0.4323 - val_accuracy: 0.8640 - 82ms/epoch - 5ms/step
Epoch 86/100
16/16 - 0s - loss: 0.4582 - accuracy: 0.8500 - val_loss: 0.4302 - val_accuracy: 0.8620 - 72ms/epoch - 5ms/step
Epoch 87/100
16/16 - 0s - loss: 0.4561 - accuracy: 0.8540 - val_loss: 0.4281 - val_accuracy: 0.8640 - 71ms/epoch - 4ms/step
Epoch 88/100
16/16 - 0s - loss: 0.4541 - accuracy: 0.8560 - val_loss: 0.4260 - val_accuracy: 0.8680 - 72ms/epoch - 5ms/step
Epoch 89/100
16/16 - 0s - loss: 0.4520 - accuracy: 0.8580 - val_loss: 0.4240 - val_accuracy: 0.8780 - 77ms/epoch - 5ms/step
Epoch 90/100
16/16 - 0s - loss: 0.4500 - accuracy: 0.8620 - val_loss: 0.4220 - val_accuracy: 0.8820 - 69ms/epoch - 4ms/step
Epoch 91/100
16/16 - 0s - loss: 0.4480 - accuracy: 0.8640 - val_loss: 0.4201 - val_accuracy: 0.8840 - 74ms/epoch - 5ms/step
Epoch 92/100
16/16 - 0s - loss: 0.4460 - accuracy: 0.8680 - val_loss: 0.4181 - val_accuracy: 0.8860 - 73ms/epoch - 5ms/step
Epoch 93/100
16/16 - 0s - loss: 0.4440 - accuracy: 0.8680 - val_loss: 0.4161 - val_accuracy: 0.8920 - 70ms/epoch - 4ms/step
Epoch 94/100
16/16 - 0s - loss: 0.4420 - accuracy: 0.8700 - val_loss: 0.4142 - val_accuracy: 0.8920 - 69ms/epoch - 4ms/step
Epoch 95/100
16/16 - 0s - loss: 0.4400 - accuracy: 0.8760 - val_loss: 0.4123 - val_accuracy: 0.8960 - 67ms/epoch - 4ms/step
Epoch 96/100
16/16 - 0s - loss: 0.4381 - accuracy: 0.8780 - val_loss: 0.4105 - val_accuracy: 0.8960 - 113ms/epoch - 7ms/step
Epoch 97/100
16/16 - 0s - loss: 0.4361 - accuracy: 0.8800 - val_loss: 0.4085 - val_accuracy: 0.9020 - 76ms/epoch - 5ms/step
Epoch 98/100
16/16 - 0s - loss: 0.4342 - accuracy: 0.8860 - val_loss: 0.4066 - val_accuracy: 0.9020 - 119ms/epoch - 7ms/step
Epoch 99/100
16/16 - 0s - loss: 0.4321 - accuracy: 0.8880 - val_loss: 0.4047 - val_accuracy: 0.9040 - 109ms/epoch - 7ms/step
Epoch 100/100
16/16 - 0s - loss: 0.4301 - accuracy: 0.8880 - val_loss: 0.4028 - val_accuracy: 0.9040 - 67ms/epoch - 4ms/step
The 4th weak classifier has finished training
Epoch 1/100
16/16 - 1s - loss: 0.6546 - accuracy: 0.4740 - val_loss: 0.6093 - val_accuracy: 0.5260 - 842ms/epoch - 53ms/step
Epoch 2/100
16/16 - 0s - loss: 0.6510 - accuracy: 0.4740 - val_loss: 0.6065 - val_accuracy: 0.5260 - 67ms/epoch - 4ms/step
Epoch 3/100
16/16 - 0s - loss: 0.6474 - accuracy: 0.4740 - val_loss: 0.6038 - val_accuracy: 0.5260 - 71ms/epoch - 4ms/step
Epoch 4/100
16/16 - 0s - loss: 0.6442 - accuracy: 0.4740 - val_loss: 0.6011 - val_accuracy: 0.5260 - 110ms/epoch - 7ms/step
Epoch 5/100
16/16 - 0s - loss: 0.6407 - accuracy: 0.4740 - val_loss: 0.5985 - val_accuracy: 0.5260 - 68ms/epoch - 4ms/step
Epoch 6/100
16/16 - 0s - loss: 0.6375 - accuracy: 0.4740 - val_loss: 0.5959 - val_accuracy: 0.5260 - 72ms/epoch - 4ms/step
Epoch 7/100
16/16 - 0s - loss: 0.6344 - accuracy: 0.4740 - val_loss: 0.5935 - val_accuracy: 0.5260 - 72ms/epoch - 4ms/step
Epoch 8/100
16/16 - 0s - loss: 0.6314 - accuracy: 0.4740 - val_loss: 0.5911 - val_accuracy: 0.5260 - 71ms/epoch - 4ms/step
Epoch 9/100
16/16 - 0s - loss: 0.6286 - accuracy: 0.4740 - val_loss: 0.5887 - val_accuracy: 0.5260 - 67ms/epoch - 4ms/step
Epoch 10/100
16/16 - 0s - loss: 0.6257 - accuracy: 0.4740 - val_loss: 0.5864 - val_accuracy: 0.5260 - 71ms/epoch - 4ms/step
Epoch 11/100
16/16 - 0s - loss: 0.6229 - accuracy: 0.4740 - val_loss: 0.5842 - val_accuracy: 0.5260 - 81ms/epoch - 5ms/step
Epoch 12/100
16/16 - 0s - loss: 0.6203 - accuracy: 0.4740 - val_loss: 0.5819 - val_accuracy: 0.5260 - 78ms/epoch - 5ms/step
Epoch 13/100
16/16 - 0s - loss: 0.6176 - accuracy: 0.4740 - val_loss: 0.5797 - val_accuracy: 0.5260 - 71ms/epoch - 4ms/step
Epoch 14/100
16/16 - 0s - loss: 0.6151 - accuracy: 0.4740 - val_loss: 0.5776 - val_accuracy: 0.5260 - 69ms/epoch - 4ms/step
Epoch 15/100
16/16 - 0s - loss: 0.6125 - accuracy: 0.4740 - val_loss: 0.5755 - val_accuracy: 0.5260 - 71ms/epoch - 4ms/step
Epoch 16/100
16/16 - 0s - loss: 0.6101 - accuracy: 0.4740 - val_loss: 0.5734 - val_accuracy: 0.5260 - 69ms/epoch - 4ms/step
Epoch 17/100
16/16 - 0s - loss: 0.6077 - accuracy: 0.4740 - val_loss: 0.5713 - val_accuracy: 0.5280 - 74ms/epoch - 5ms/step
Epoch 18/100
16/16 - 0s - loss: 0.6053 - accuracy: 0.4780 - val_loss: 0.5692 - val_accuracy: 0.5280 - 71ms/epoch - 4ms/step
Epoch 19/100
16/16 - 0s - loss: 0.6030 - accuracy: 0.4800 - val_loss: 0.5671 - val_accuracy: 0.5300 - 68ms/epoch - 4ms/step
Epoch 20/100
16/16 - 0s - loss: 0.6007 - accuracy: 0.4820 - val_loss: 0.5651 - val_accuracy: 0.5320 - 110ms/epoch - 7ms/step
Epoch 21/100
16/16 - 0s - loss: 0.5984 - accuracy: 0.4820 - val_loss: 0.5631 - val_accuracy: 0.5320 - 67ms/epoch - 4ms/step
Epoch 22/100
16/16 - 0s - loss: 0.5962 - accuracy: 0.4860 - val_loss: 0.5611 - val_accuracy: 0.5360 - 72ms/epoch - 5ms/step
Epoch 23/100
16/16 - 0s - loss: 0.5939 - accuracy: 0.4900 - val_loss: 0.5591 - val_accuracy: 0.5360 - 70ms/epoch - 4ms/step
Epoch 24/100
16/16 - 0s - loss: 0.5917 - accuracy: 0.4900 - val_loss: 0.5571 - val_accuracy: 0.5400 - 71ms/epoch - 4ms/step
Epoch 25/100
16/16 - 0s - loss: 0.5896 - accuracy: 0.4940 - val_loss: 0.5553 - val_accuracy: 0.5420 - 76ms/epoch - 5ms/step
Epoch 26/100
16/16 - 0s - loss: 0.5874 - accuracy: 0.5000 - val_loss: 0.5533 - val_accuracy: 0.5440 - 75ms/epoch - 5ms/step
Epoch 27/100
16/16 - 0s - loss: 0.5853 - accuracy: 0.5000 - val_loss: 0.5514 - val_accuracy: 0.5440 - 69ms/epoch - 4ms/step
Epoch 28/100
16/16 - 0s - loss: 0.5831 - accuracy: 0.5000 - val_loss: 0.5494 - val_accuracy: 0.5460 - 68ms/epoch - 4ms/step
Epoch 29/100
16/16 - 0s - loss: 0.5811 - accuracy: 0.5040 - val_loss: 0.5475 - val_accuracy: 0.5480 - 68ms/epoch - 4ms/step
Epoch 30/100
16/16 - 0s - loss: 0.5789 - accuracy: 0.5080 - val_loss: 0.5456 - val_accuracy: 0.5480 - 82ms/epoch - 5ms/step
Epoch 31/100
16/16 - 0s - loss: 0.5769 - accuracy: 0.5120 - val_loss: 0.5436 - val_accuracy: 0.5520 - 75ms/epoch - 5ms/step
Epoch 32/100
16/16 - 0s - loss: 0.5748 - accuracy: 0.5180 - val_loss: 0.5417 - val_accuracy: 0.5600 - 74ms/epoch - 5ms/step
Epoch 33/100
16/16 - 0s - loss: 0.5727 - accuracy: 0.5220 - val_loss: 0.5399 - val_accuracy: 0.5620 - 71ms/epoch - 4ms/step
Epoch 34/100
16/16 - 0s - loss: 0.5706 - accuracy: 0.5260 - val_loss: 0.5380 - val_accuracy: 0.5680 - 72ms/epoch - 4ms/step
Epoch 35/100
16/16 - 0s - loss: 0.5686 - accuracy: 0.5300 - val_loss: 0.5360 - val_accuracy: 0.5700 - 70ms/epoch - 4ms/step
Epoch 36/100
16/16 - 0s - loss: 0.5665 - accuracy: 0.5320 - val_loss: 0.5341 - val_accuracy: 0.5720 - 112ms/epoch - 7ms/step
Epoch 37/100
16/16 - 0s - loss: 0.5644 - accuracy: 0.5380 - val_loss: 0.5321 - val_accuracy: 0.5720 - 113ms/epoch - 7ms/step
Epoch 38/100
16/16 - 0s - loss: 0.5624 - accuracy: 0.5420 - val_loss: 0.5302 - val_accuracy: 0.5820 - 75ms/epoch - 5ms/step
Epoch 39/100
16/16 - 0s - loss: 0.5602 - accuracy: 0.5440 - val_loss: 0.5283 - val_accuracy: 0.5860 - 75ms/epoch - 5ms/step
Epoch 40/100
16/16 - 0s - loss: 0.5582 - accuracy: 0.5460 - val_loss: 0.5263 - val_accuracy: 0.5900 - 67ms/epoch - 4ms/step
Epoch 41/100
16/16 - 0s - loss: 0.5561 - accuracy: 0.5460 - val_loss: 0.5243 - val_accuracy: 0.5920 - 72ms/epoch - 5ms/step
Epoch 42/100
16/16 - 0s - loss: 0.5540 - accuracy: 0.5600 - val_loss: 0.5225 - val_accuracy: 0.5980 - 78ms/epoch - 5ms/step
Epoch 43/100
16/16 - 0s - loss: 0.5519 - accuracy: 0.5680 - val_loss: 0.5205 - val_accuracy: 0.6000 - 80ms/epoch - 5ms/step
Epoch 44/100
16/16 - 0s - loss: 0.5498 - accuracy: 0.5700 - val_loss: 0.5186 - val_accuracy: 0.6000 - 70ms/epoch - 4ms/step
Epoch 45/100
16/16 - 0s - loss: 0.5478 - accuracy: 0.5760 - val_loss: 0.5166 - val_accuracy: 0.6120 - 69ms/epoch - 4ms/step
Epoch 46/100
16/16 - 0s - loss: 0.5458 - accuracy: 0.5820 - val_loss: 0.5149 - val_accuracy: 0.6180 - 69ms/epoch - 4ms/step
Epoch 47/100
16/16 - 0s - loss: 0.5436 - accuracy: 0.5840 - val_loss: 0.5128 - val_accuracy: 0.6180 - 72ms/epoch - 4ms/step
Epoch 48/100
16/16 - 0s - loss: 0.5415 - accuracy: 0.5860 - val_loss: 0.5109 - val_accuracy: 0.6220 - 69ms/epoch - 4ms/step
Epoch 49/100
16/16 - 0s - loss: 0.5395 - accuracy: 0.5900 - val_loss: 0.5089 - val_accuracy: 0.6260 - 71ms/epoch - 4ms/step
Epoch 50/100
16/16 - 0s - loss: 0.5374 - accuracy: 0.6000 - val_loss: 0.5070 - val_accuracy: 0.6300 - 89ms/epoch - 6ms/step
Epoch 51/100
16/16 - 0s - loss: 0.5353 - accuracy: 0.6040 - val_loss: 0.5050 - val_accuracy: 0.6360 - 71ms/epoch - 4ms/step
Epoch 52/100
16/16 - 0s - loss: 0.5333 - accuracy: 0.6040 - val_loss: 0.5031 - val_accuracy: 0.6440 - 70ms/epoch - 4ms/step
Epoch 53/100
16/16 - 0s - loss: 0.5312 - accuracy: 0.6140 - val_loss: 0.5011 - val_accuracy: 0.6540 - 108ms/epoch - 7ms/step
Epoch 54/100
16/16 - 0s - loss: 0.5291 - accuracy: 0.6260 - val_loss: 0.4992 - val_accuracy: 0.6580 - 72ms/epoch - 4ms/step
Epoch 55/100
16/16 - 0s - loss: 0.5271 - accuracy: 0.6300 - val_loss: 0.4973 - val_accuracy: 0.6700 - 114ms/epoch - 7ms/step
Epoch 56/100
16/16 - 0s - loss: 0.5250 - accuracy: 0.6400 - val_loss: 0.4953 - val_accuracy: 0.6780 - 108ms/epoch - 7ms/step
Epoch 57/100
16/16 - 0s - loss: 0.5229 - accuracy: 0.6500 - val_loss: 0.4934 - val_accuracy: 0.6880 - 113ms/epoch - 7ms/step
Epoch 58/100
16/16 - 0s - loss: 0.5209 - accuracy: 0.6600 - val_loss: 0.4915 - val_accuracy: 0.6960 - 75ms/epoch - 5ms/step
Epoch 59/100
16/16 - 0s - loss: 0.5188 - accuracy: 0.6660 - val_loss: 0.4895 - val_accuracy: 0.7040 - 76ms/epoch - 5ms/step
Epoch 60/100
16/16 - 0s - loss: 0.5167 - accuracy: 0.6720 - val_loss: 0.4876 - val_accuracy: 0.7040 - 111ms/epoch - 7ms/step
Epoch 61/100
16/16 - 0s - loss: 0.5146 - accuracy: 0.6760 - val_loss: 0.4856 - val_accuracy: 0.7120 - 70ms/epoch - 4ms/step
Epoch 62/100
16/16 - 0s - loss: 0.5126 - accuracy: 0.6780 - val_loss: 0.4837 - val_accuracy: 0.7220 - 70ms/epoch - 4ms/step
Epoch 63/100
16/16 - 0s - loss: 0.5105 - accuracy: 0.6820 - val_loss: 0.4817 - val_accuracy: 0.7260 - 67ms/epoch - 4ms/step
Epoch 64/100
16/16 - 0s - loss: 0.5085 - accuracy: 0.6880 - val_loss: 0.4797 - val_accuracy: 0.7360 - 74ms/epoch - 5ms/step
Epoch 65/100
16/16 - 0s - loss: 0.5064 - accuracy: 0.6920 - val_loss: 0.4778 - val_accuracy: 0.7400 - 71ms/epoch - 4ms/step
Epoch 66/100
16/16 - 0s - loss: 0.5044 - accuracy: 0.7000 - val_loss: 0.4759 - val_accuracy: 0.7440 - 111ms/epoch - 7ms/step
Epoch 67/100
16/16 - 0s - loss: 0.5023 - accuracy: 0.7100 - val_loss: 0.4740 - val_accuracy: 0.7540 - 119ms/epoch - 7ms/step
Epoch 68/100
16/16 - 0s - loss: 0.5003 - accuracy: 0.7160 - val_loss: 0.4720 - val_accuracy: 0.7580 - 69ms/epoch - 4ms/step
Epoch 69/100
16/16 - 0s - loss: 0.4982 - accuracy: 0.7160 - val_loss: 0.4701 - val_accuracy: 0.7600 - 71ms/epoch - 4ms/step
Epoch 70/100
16/16 - 0s - loss: 0.4962 - accuracy: 0.7280 - val_loss: 0.4682 - val_accuracy: 0.7720 - 115ms/epoch - 7ms/step
Epoch 71/100
16/16 - 0s - loss: 0.4942 - accuracy: 0.7300 - val_loss: 0.4663 - val_accuracy: 0.7760 - 67ms/epoch - 4ms/step
Epoch 72/100
16/16 - 0s - loss: 0.4922 - accuracy: 0.7360 - val_loss: 0.4644 - val_accuracy: 0.7780 - 85ms/epoch - 5ms/step
Epoch 73/100
16/16 - 0s - loss: 0.4902 - accuracy: 0.7440 - val_loss: 0.4626 - val_accuracy: 0.7780 - 79ms/epoch - 5ms/step
Epoch 74/100
16/16 - 0s - loss: 0.4881 - accuracy: 0.7520 - val_loss: 0.4606 - val_accuracy: 0.7800 - 71ms/epoch - 4ms/step
Epoch 75/100
16/16 - 0s - loss: 0.4862 - accuracy: 0.7540 - val_loss: 0.4587 - val_accuracy: 0.7800 - 110ms/epoch - 7ms/step
Epoch 76/100
16/16 - 0s - loss: 0.4842 - accuracy: 0.7580 - val_loss: 0.4568 - val_accuracy: 0.7880 - 81ms/epoch - 5ms/step
Epoch 77/100
16/16 - 0s - loss: 0.4822 - accuracy: 0.7620 - val_loss: 0.4550 - val_accuracy: 0.7900 - 81ms/epoch - 5ms/step
Epoch 78/100
16/16 - 0s - loss: 0.4803 - accuracy: 0.7700 - val_loss: 0.4530 - val_accuracy: 0.7940 - 70ms/epoch - 4ms/step
Epoch 79/100
16/16 - 0s - loss: 0.4783 - accuracy: 0.7740 - val_loss: 0.4512 - val_accuracy: 0.7980 - 70ms/epoch - 4ms/step
Epoch 80/100
16/16 - 0s - loss: 0.4764 - accuracy: 0.7800 - val_loss: 0.4494 - val_accuracy: 0.8040 - 70ms/epoch - 4ms/step
Epoch 81/100
16/16 - 0s - loss: 0.4744 - accuracy: 0.7860 - val_loss: 0.4476 - val_accuracy: 0.8060 - 71ms/epoch - 4ms/step
Epoch 82/100
16/16 - 0s - loss: 0.4725 - accuracy: 0.7880 - val_loss: 0.4457 - val_accuracy: 0.8100 - 110ms/epoch - 7ms/step
Epoch 83/100
16/16 - 0s - loss: 0.4706 - accuracy: 0.7920 - val_loss: 0.4439 - val_accuracy: 0.8160 - 73ms/epoch - 5ms/step
Epoch 84/100
16/16 - 0s - loss: 0.4687 - accuracy: 0.7980 - val_loss: 0.4420 - val_accuracy: 0.8200 - 132ms/epoch - 8ms/step
Epoch 85/100
16/16 - 0s - loss: 0.4668 - accuracy: 0.8020 - val_loss: 0.4402 - val_accuracy: 0.8300 - 70ms/epoch - 4ms/step
Epoch 86/100
16/16 - 0s - loss: 0.4649 - accuracy: 0.8020 - val_loss: 0.4383 - val_accuracy: 0.8340 - 110ms/epoch - 7ms/step
Epoch 87/100
16/16 - 0s - loss: 0.4630 - accuracy: 0.8120 - val_loss: 0.4366 - val_accuracy: 0.8420 - 112ms/epoch - 7ms/step
Epoch 88/100
16/16 - 0s - loss: 0.4612 - accuracy: 0.8160 - val_loss: 0.4348 - val_accuracy: 0.8460 - 68ms/epoch - 4ms/step
Epoch 89/100
16/16 - 0s - loss: 0.4593 - accuracy: 0.8180 - val_loss: 0.4330 - val_accuracy: 0.8480 - 67ms/epoch - 4ms/step
Epoch 90/100
16/16 - 0s - loss: 0.4574 - accuracy: 0.8300 - val_loss: 0.4312 - val_accuracy: 0.8600 - 71ms/epoch - 4ms/step
Epoch 91/100
16/16 - 0s - loss: 0.4556 - accuracy: 0.8360 - val_loss: 0.4294 - val_accuracy: 0.8620 - 71ms/epoch - 4ms/step
Epoch 92/100
16/16 - 0s - loss: 0.4537 - accuracy: 0.8360 - val_loss: 0.4276 - val_accuracy: 0.8660 - 74ms/epoch - 5ms/step
Epoch 93/100
16/16 - 0s - loss: 0.4519 - accuracy: 0.8360 - val_loss: 0.4258 - val_accuracy: 0.8660 - 115ms/epoch - 7ms/step
Epoch 94/100
16/16 - 0s - loss: 0.4501 - accuracy: 0.8460 - val_loss: 0.4241 - val_accuracy: 0.8720 - 69ms/epoch - 4ms/step
Epoch 95/100
16/16 - 0s - loss: 0.4482 - accuracy: 0.8500 - val_loss: 0.4223 - val_accuracy: 0.8720 - 86ms/epoch - 5ms/step
Epoch 96/100
16/16 - 0s - loss: 0.4464 - accuracy: 0.8520 - val_loss: 0.4205 - val_accuracy: 0.8740 - 70ms/epoch - 4ms/step
Epoch 97/100
16/16 - 0s - loss: 0.4446 - accuracy: 0.8540 - val_loss: 0.4188 - val_accuracy: 0.8740 - 68ms/epoch - 4ms/step
Epoch 98/100
16/16 - 0s - loss: 0.4428 - accuracy: 0.8540 - val_loss: 0.4170 - val_accuracy: 0.8800 - 72ms/epoch - 4ms/step
Epoch 99/100
16/16 - 0s - loss: 0.4410 - accuracy: 0.8580 - val_loss: 0.4154 - val_accuracy: 0.8900 - 115ms/epoch - 7ms/step
Epoch 100/100
16/16 - 0s - loss: 0.4392 - accuracy: 0.8600 - val_loss: 0.4136 - val_accuracy: 0.8940 - 71ms/epoch - 4ms/step
The 5th weak classifier has finished training
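
The logs above were produced by training the five base classifiers one round at a time: each round appends a new two-unit branch to the running sum, trains the combined network for 100 epochs, and then freezes what has just been learned. The cell that actually produced these logs appears earlier in the notebook and is not repeated here; the block below is only a minimal sketch of one way such a stage-wise loop could be written with the Keras functional API. The layer names follow the model summary printed further down, but the branch activation, the optimizer and learning rate, the exact freezing scheme, and the batch size (32, which matches the 16 steps per epoch seen in the logs for 500 training samples) are assumptions, not the original settings.

[ ]:
#Minimal sketch of a stage-wise (end-to-end gradient boosting) training loop;
#this is NOT the original cell of this notebook, only an illustration of the idea.
number_of_weak_classifiers = 5
boosting_models, history = [], []

inputs = keras.Input(shape=(2,))
out_layer = layers.Dense(2, activation='softmax', name='activation')  #shared output layer
branch_outputs = []

for k in range(number_of_weak_classifiers):
    #Append one base classifier: a single two-unit hidden layer (the activation is an assumption)
    branch = layers.Dense(2, activation='tanh', name=f'{k}th-clf_0th-hidden')(inputs)
    branch_outputs.append(branch)

    #Sum all branches built so far and map the sum to class probabilities
    if k == 0:
        summed = branch_outputs[0]
    else:
        summed = layers.Add(name='classifiers_Add')(branch_outputs)
    model = keras.Model(inputs, out_layer(summed))

    #Optimizer, learning rate and batch size are assumptions; batch_size=32 gives
    #the 16 steps per epoch seen in the logs for 500 training samples
    model.compile(optimizer=keras.optimizers.Adam(1e-3),
                  loss='categorical_crossentropy', metrics=['accuracy'])
    h = model.fit(X_train, y_train, validation_data=(X_test, y_test),
                  epochs=100, batch_size=32, verbose=2)

    #Freeze the branch that was just trained, so later rounds only update
    #the newly added branch and the shared output layer
    model.get_layer(f'{k}th-clf_0th-hidden').trainable = False

    boosting_models.append(model)
    history.append(h)
    print(f'The {k+1}th weak classifier has finished training')

boosting_model = boosting_models[-1]  #the full five-classifier EGB network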

Visualizing the results

Plotting the training curves

[ ]:
#Plot the training curve of each base classifier
for i in range(number_of_weak_classifiers):
    plot_loss_accuracy(history[i],
                       f'Training curve of classifier {i+1}',
                       f'Training curve of classifier {i+1}')
../../_images/1stPart_Homework.1_EGB_mlp_19_0.png
../../_images/1stPart_Homework.1_EGB_mlp_19_1.png
../../_images/1stPart_Homework.1_EGB_mlp_19_2.png
../../_images/1stPart_Homework.1_EGB_mlp_19_3.png
../../_images/1stPart_Homework.1_EGB_mlp_19_4.png

Model structure information

[ ]:
#Print the model structure and parameter counts (summary() itself prints and returns None,
#which is why a trailing "None" appears in the output below)
print(boosting_model.summary())
Model: "model_9"
__________________________________________________________________________________________________
 Layer (type)                   Output Shape         Param #     Connected to
==================================================================================================
 input_1 (InputLayer)           [(None, 2)]          0           []

 0th-clf_0th-hidden (Dense)     (None, 2)            6           ['input_1[0][0]']

 1th-clf_0th-hidden (Dense)     (None, 2)            6           ['input_1[0][0]']

 2th-clf_0th-hidden (Dense)     (None, 2)            6           ['input_1[0][0]']

 3th-clf_0th-hidden (Dense)     (None, 2)            6           ['input_1[0][0]']

 4th-clf_0th-hidden (Dense)     (None, 2)            6           ['input_1[0][0]']

 classifiers_Add (Add)          (None, 2)            0           ['0th-clf_0th-hidden[0][0]',
                                                                  '1th-clf_0th-hidden[0][0]',
                                                                  '2th-clf_0th-hidden[0][0]',
                                                                  '3th-clf_0th-hidden[0][0]',
                                                                  '4th-clf_0th-hidden[0][0]']

 activation (Dense)             (None, 2)            6           ['classifiers_Add[0][0]']

==================================================================================================
Total params: 36
Trainable params: 6
Non-trainable params: 30
__________________________________________________________________________________________________
None
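
The Trainable/Non-trainable split above is the signature of the boosting scheme: the five 2-to-2 branches (6 parameters each, 30 in total) were frozen after their own training rounds, so only the shared output layer ('activation', 6 parameters) is still trainable. A quick layer-by-layer check, added here purely for illustration (not part of the original notebook):

[ ]:
#List each layer of the final EGB model with its parameter count and trainable flag
for layer in boosting_model.layers:
    print(f'{layer.name:25s} params={layer.count_params():3d} trainable={layer.trainable}')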

Visualizing the partition of the original feature space

[ ]:
#Visualize how the EGB model partitions the original feature space

#Plot the original feature space
fig, ax1= plt.subplots(1,1, figsize=(7, 4),subplot_kw = {'aspect':'equal'})
mp = ax1.scatter(*X.T,c = y,cmap = cm_bright,edgecolors='white',s = 20,linewidths = 0.5)
plt.colorbar(mp,ax = [ax1]);
plt.title(f'Raw data')
plt.savefig(f'Raw data.png')
plt.savefig(f'Raw data.pdf')

#Plot the partition of the feature space after each additional base classifier is stacked
for i in range(number_of_weak_classifiers):
    prob = boosting_models[i].predict(p)[:,1] #predicted probability of class 1 on the grid points
    fig, ax1= plt.subplots(1,1, figsize=(7, 4),subplot_kw = {'aspect':'equal'})
    ax1.scatter(*p.T,c = prob,cmap = cm_bright) #grid points colored by the predicted probability
    mp = ax1.scatter(*X.T,c = y,cmap = cm_bright,edgecolors='white',s = 20,linewidths = 0.5)
    plt.colorbar(mp,ax = [ax1]);
    plt.title(f'Space division of {i+1} classifiers')
    plt.savefig(f'Space division of {i+1} classifiers.png')
    plt.savefig(f'Space division of {i+1} classifiers.pdf')

#Generate an animated GIF from the saved frames
def create_gif(image_list, gif_name, duration=1):
    frames = []
    for image_name in image_list:
        frames.append(imageio.imread(image_name))
    imageio.mimsave(gif_name, frames, 'GIF', duration=duration)
    return

def main():
    image_list = ['Raw data.png']
    for i in range(number_of_weak_classifiers):
        image_list.append(f'Space division of {i+1} classifiers.png')
    gif_name = '空间剖分动图.gif'  #GIF of the space partition
    duration = 0.8
    create_gif(image_list, gif_name, duration)

main()
79/79 [==============================] - 0s 1ms/step
79/79 [==============================] - 0s 2ms/step
79/79 [==============================] - 0s 1ms/step
79/79 [==============================] - 0s 1ms/step
79/79 [==============================] - 0s 1ms/step
../../_images/1stPart_Homework.1_EGB_mlp_23_1.png
../../_images/1stPart_Homework.1_EGB_mlp_23_2.png
../../_images/1stPart_Homework.1_EGB_mlp_23_3.png
../../_images/1stPart_Homework.1_EGB_mlp_23_4.png
../../_images/1stPart_Homework.1_EGB_mlp_23_5.png
../../_images/1stPart_Homework.1_EGB_mlp_23_6.png
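
The scatter backgrounds above can also be rendered as filled probability contours, which makes the decision boundary explicit. The following is only a sketch (not in the original notebook) that reuses the 50×50 grid `p` built earlier and the final five-classifier model:

[ ]:
#Reshape the class-1 probability of the full EGB model back onto the 50x50 grid
cc = p[:,0].reshape(c.shape)   #grid x coordinates in the standardized space
rr = p[:,1].reshape(c.shape)   #grid y coordinates in the standardized space
prob_grid = boosting_models[-1].predict(p)[:,1].reshape(c.shape)

fig, ax1 = plt.subplots(1,1, figsize=(7, 4), subplot_kw={'aspect':'equal'})
ax1.contourf(cc, rr, prob_grid, levels=20, cmap=cm_bright, alpha=0.8)      #probability map
ax1.contour(cc, rr, prob_grid, levels=[0.5], colors='black', linewidths=1) #decision boundary
ax1.scatter(*X.T, c=y, cmap=cm_bright, edgecolors='white', s=20, linewidths=0.5)
plt.title('Decision boundary of the full EGB model')
plt.show()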

Showing the raw samples on a different background style

[ ]:
#Show the raw samples again on the ggplot background style

mpl.style.use('ggplot')

#Rebuild the orange-blue colormap so this cell is self-contained
top = cm.get_cmap('Oranges_r', 512)
bottom = cm.get_cmap('Blues', 512)
newcolors = np.vstack((top(np.linspace(0.55, 1, 512)),
                       bottom(np.linspace(0, 0.75, 512))))
cm_bright = ListedColormap(newcolors, name='OrangeBlue')

fig = plt.figure(figsize = (8,6))
m3 = plt.scatter(*X.T,c = y,cmap = cm_bright,edgecolors='white',s = 20,linewidths = 0.5)
plt.title(f'Raw data ({n_samples} points)')
plt.axis('equal')
plt.colorbar()
plt.savefig(f'Raw data ({n_samples} points).png') #explicit .png extension; this file is reused by the GIF cell below
plt.savefig(f'Raw data ({n_samples} points).pdf')
plt.show()
../../_images/1stPart_Homework.1_EGB_mlp_25_1.png

Visualizing the feature-transformation process

[ ]:
#Visualize how the EGB model transforms the sample features

#Collect the layer whose output is the summed 2-D feature after each boosting round;
#the first round has no Add layer, so its hidden layer is used directly
clf_add_layers = []
clf_add_layers.append(boosting_models[0].get_layer('0th-clf_0th-hidden'))
for i in range(len(boosting_models)-1):
    add_layer = boosting_models[i+1].get_layer('classifiers_Add')
    clf_add_layers.append(add_layer)

#Backend functions mapping the input samples to each intermediate output
inp = boosting_model.input
outputs = [layer.output for layer in clf_add_layers]
functors = [K.function([inp], [out]) for out in outputs]
boosting_model_outs = [func([X]) for func in functors]

#Plot the transformed sample features after each additional base classifier is stacked
for idx in range(len(boosting_model_outs)):
    fig = plt.figure(figsize = (8,6))
    plt.scatter(boosting_model_outs[idx][0][:,0],boosting_model_outs[idx][0][:,1],
                c = y,cmap = cm_bright,edgecolors='white',s = 30,linewidths = 0.1)
    plt.axis('equal')
    plt.title(f'Outputs of {idx+1} classifiers')
    plt.colorbar()
    plt.savefig(f'Outputs of {idx+1} classifiers.png')
    plt.savefig(f'Outputs of {idx+1} classifiers.pdf')
    plt.show()

#Generate an animated GIF from the saved frames
def create_gif(image_list, gif_name, duration=1):
    frames = []
    for image_name in image_list:
        frames.append(imageio.imread(image_name))
    imageio.mimsave(gif_name, frames, 'GIF', duration=duration)
    return

def main():
    image_list = [f'Raw data ({n_samples} points).png']
    for i in range(len(boosting_model_outs)):
        image_list.append(f'Outputs of {i+1} classifiers.png')
    gif_name = '特征变换动图.gif'  #GIF of the feature transformation
    duration = 0.8
    create_gif(image_list, gif_name, duration)

main()
../../_images/1stPart_Homework.1_EGB_mlp_27_0.png
../../_images/1stPart_Homework.1_EGB_mlp_27_1.png
../../_images/1stPart_Homework.1_EGB_mlp_27_2.png
../../_images/1stPart_Homework.1_EGB_mlp_27_3.png
../../_images/1stPart_Homework.1_EGB_mlp_27_4.png
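
The same intermediate features can also be read out with an ordinary Keras sub-model instead of the backend `K.function` calls used above. A small sketch, assuming the `inp` and `clf_add_layers` defined in the cell above are available:

[ ]:
#A multi-output sub-model that returns the summed features after each boosting round
feature_extractor = keras.Model(inputs=inp,
                                outputs=[layer.output for layer in clf_add_layers])
intermediate_feats = feature_extractor.predict(X)  #list of five (1000, 2) arrays

for idx, feats in enumerate(intermediate_feats):
    print(f'After {idx+1} classifiers: feature matrix shape = {feats.shape}')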
[ ]: