tensorflow2图像训练

173 阅读1分钟

tensorflow视觉训练

import pandas as pd
import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt
# Fashion-MNIST image classification: 60k train / 10k test grayscale
# 28x28 images across 10 clothing classes.
(train, train_label), (test, test_label) = tf.keras.datasets.fashion_mnist.load_data()
# Uncomment to preview a sample image:
#plt.imshow(train[0])
#plt.show()
# Scale raw uint8 pixel intensities [0, 255] down to [0, 1] floats —
# keeps gradients in a well-behaved range during training.
train, test = train / 255, test / 255
# Build the classifier. 2-D image input must be flattened to a 1-D
# vector before it can feed fully-connected layers.
model = tf.keras.Sequential([
    tf.keras.layers.Flatten(input_shape=(28,28)),    # 28x28 image -> 784-vector
    tf.keras.layers.Dense(128, activation='relu'),
    tf.keras.layers.Dropout(rate=0.5),               # dropout layer to curb overfitting
    tf.keras.layers.Dense(128, activation='relu'),
    tf.keras.layers.Dropout(rate=0.5),               # randomly drops half the activations
    tf.keras.layers.Dense(10, activation='softmax'), # turns logits into class probabilities
])
# Compile and train.
# Labels here are plain integer class ids (e.g. [1, 2, 2, 3]), so the loss is
# sparse_categorical_crossentropy; one-hot labels (e.g. [[0,0,0,1],[0,1,0,0]])
# would require categorical_crossentropy instead.
model.compile(
    optimizer='adam',
    loss='sparse_categorical_crossentropy',
    metrics=['acc'],
)
# validation_data makes fit() report loss/accuracy on the test set after every epoch.
history = model.fit(train, train_label, epochs=5, validation_data=(test, test_label))
# Visualize training progress: loss and accuracy curves for both the
# training and validation sets, one point per epoch.
plt.plot(history.epoch,history.history.get('loss'),label='loss')
plt.plot(history.epoch,history.history.get('val_loss'),label='val_loss')
plt.plot(history.epoch,history.history.get('acc'),label='acc')
# Fix: this curve plots val_acc but was labeled 'acc', creating a
# duplicate legend entry that made the two accuracy lines indistinguishable.
plt.plot(history.epoch,history.history.get('val_acc'),label='val_acc')
plt.legend()
plt.show()