clc
clear
close all
% Load the dataset: expects `images` (H x W x C x K) and `labels` (1 x K)
% in images.mat — TODO(review): confirm shapes against the .mat file.
load('images.mat')
rng(1) % fix the random seed so the train/test split is reproducible
% Visualize a sample of the data: 20 images, one every 64 samples.
figure
for i=1:1:20
subplot(4,5,i);
imshow(images(:,:,:,(i-1)*64+7))
end
[M,N] = size(images(:,:,1)); % spatial size of one image
Y = categorical(labels'); % labels as a categorical column vector
X = images;
idx = randperm(size(images,4)); % random permutation of the sample indices
% BUG FIX: length(X) returns the LARGEST dimension of the 4-D array, which
% equals the sample count only by coincidence; size(X,4) is the sample count.
num_train = round(0.8*size(X,4)); % 80% of the samples go to the training set
% Split the images into training and test sets along the 4th (sample) dim.
X_train = X(:,:,:,idx(1:num_train));
X_test = X(:,:,:,idx(num_train+1:end)); % remaining 20% is the test set
% Split the labels with the same random permutation.
Y_train = Y(idx(1:num_train),:);
Y_test = Y(idx(num_train+1:end),:);
unique(labels) % display the set of distinct class labels
%% Define the network layers and train
% resnet18Layers() is a project-local helper returning the layer graph.
layers = resnet18Layers();
figure
plot(layers)
% options = trainingOptions("sgdm", ...
% InitialLearnRate=0.001, ...
% LearnRateSchedule="piecewise", ...
% L2Regularization=1.0000e-04, ...
% MaxEpochs=20, ...
% MiniBatchSize=16, ...
% ValidationFrequency=20, ...
% Plots="training-progress", ...
% Metrics="accuracy");
options = trainingOptions('sgdm', ... % SGD with momentum (not Adam, despite old comment)
'MaxEpochs',20, ... % maximum number of epochs
'MiniBatchSize',50, ... % mini-batch size
'InitialLearnRate', 5e-4, ... % initial learning rate 0.0005
'LearnRateSchedule', 'piecewise', ... % drop the learning rate stepwise
'LearnRateDropFactor', 0.1, ... % multiply the rate by 0.1 at each drop
'LearnRateDropPeriod', 400, ... % NOTE(review): drop period is in epochs, so with MaxEpochs=20 the drop never fires
'L2Regularization', 0.0001, ... % weight decay
'Shuffle', 'every-epoch', ... % reshuffle the training data each epoch
'Plots', 'training-progress', ... % show the live training-progress plot
'Verbose', false); % suppress command-window iteration output
net_cnn = trainNetwork(X_train,Y_train,layers,options);
% Evaluate on the held-out test set.
testLabel = classify(net_cnn,X_test);
precision = sum(testLabel==Y_test)/numel(testLabel); % fraction of correct predictions (accuracy)
disp(['测试集分类准确率为',num2str(precision*100),'%'])
save resnet18_checkpoints.mat net_cnn
%%
%% Confusion matrix
% Plot a confusion chart with row/column normalization, then widen the
% figure by 50% so the summary panels fit.
fig = figure;
cm = confusionchart(Y_test,testLabel,'RowSummary','row-normalized','ColumnSummary','column-normalized');
pos = fig.Position;
pos(3) = 1.5*pos(3);
fig.Position = pos;