
A PyTorch classification model: the softmax classifier

I. Overview

Working within the PyTorch framework, this article starts from the principle of the softmax classifier, presents the source code of a softmax classifier, and uses it to classify images. The construction of the network is explained alongside the code, the training result is checked against a test set, and the complete code is given at the end of the article.
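
For reference, the classifier built below is standard softmax regression; the definitions are restated here only so the code in the next section can be read against them. For a flattened image $x$ the logits are $o = xW + b$, the predicted class probabilities are

$$\hat{y}_j = \mathrm{softmax}(o)_j = \frac{\exp(o_j)}{\sum_{k=1}^{q} \exp(o_k)},$$

and training minimizes the cross-entropy between the one-hot label $y$ and the prediction,

$$\ell(y, \hat{y}) = -\sum_{j=1}^{q} y_j \log \hat{y}_j,$$

averaged over each mini-batch, with $q = 10$ classes for FashionMNIST.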

II. Building the softmax classifier

1. Download the dataset

import numpy as np
import torch
from torch.utils.data import DataLoader, Dataset
from torchvision import transforms
import gzip
import os
import torchvision
import matplotlib.pyplot as plt
import random

# Download the FashionMNIST dataset
mnist_train = torchvision.datasets.FashionMNIST(root='~/Datasets/FashionMNIST', train=True,
                                                download=True, transform=transforms.ToTensor())
mnist_test = torchvision.datasets.FashionMNIST(root='~/Datasets/FashionMNIST', train=False,
                                               download=True, transform=transforms.ToTensor())
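
If the download above succeeds, the raw arrays can also be pulled straight from the dataset objects instead of parsing the gzip files by hand in the next step. This is only a sketch and assumes the .data and .targets attributes exposed by torchvision's MNIST-style datasets; the pipeline used in this article reads the files with gzip instead.

# Minimal alternative: take the raw arrays directly from the torchvision objects
train_data_alt = mnist_train.data.numpy()        # uint8 array of shape (60000, 28, 28)
train_labels_alt = mnist_train.targets.numpy()   # int64 array of shape (60000,)
test_data_alt = mnist_test.data.numpy()          # uint8 array of shape (10000, 28, 28)
test_labels_alt = mnist_test.targets.numpy()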

2. Export the training and test data

# Export the data
def load_data(data_folder, data_name, label_name):
    with gzip.open(os.path.join(data_folder, label_name), 'rb') as lbpath:   # 'rb' reads binary data
        y_train = np.frombuffer(lbpath.read(), np.uint8, offset=8)           # offset=8 skips the label file header
    with gzip.open(os.path.join(data_folder, data_name), 'rb') as imgpath:
        x_train = np.frombuffer(imgpath.read(), np.uint8, offset=16).reshape(len(y_train), 28, 28)  # offset=16 skips the image file header
    return (x_train, y_train)

folder = 'D:/jupyter_data/FashionMNIST/raw'
train_data_name = "train-images-idx3-ubyte.gz"
train_label_name = "train-labels-idx1-ubyte.gz"
test_data_name = "t10k-images-idx3-ubyte.gz"
test_label_name = "t10k-labels-idx1-ubyte.gz"
train_data, train_labels = load_data(folder, train_data_name, train_label_name)
test_data, test_labels = load_data(folder, test_data_name, test_label_name)
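
The offsets 8 and 16 skip the IDX file headers (magic number and item count for the label file; magic number, item count and image dimensions for the image file). A quick sanity check of what load_data returns, with the shapes expected for FashionMNIST:

# Sanity check of the loaded arrays
print(train_data.shape, train_labels.shape)   # expected: (60000, 28, 28) (60000,)
print(test_data.shape, test_labels.shape)     # expected: (10000, 28, 28) (10000,)
print(train_data.dtype)                       # uint8, pixel values in [0, 255]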

3. Build a mini-batch iterator

# Select mini-batches of data
def Data_Iter(Data, Label, Batch_Size):
    index = list(range(len(Data)))
    random.shuffle(index)
    for i in range(0, len(index), Batch_Size):
        j = index[i:min(i + Batch_Size, len(Data))]
        yield torch.tensor(Data[j], dtype=torch.float), torch.tensor(Label[j])
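
A quick way to check the iterator is to pull a single batch and look at its shapes:

# Take one mini-batch and inspect it (float images, integer labels)
data, label = next(Data_Iter(train_data, train_labels, 200))
print(data.shape)    # torch.Size([200, 28, 28])
print(label.shape)   # torch.Size([200])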

4. Define the softmax function

# Define the softmax classification function
def SoftMax(X):
    X_exp = X.exp()
    partition = X_exp.sum(dim=1, keepdims=True)
    return X_exp / partition
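
One caveat worth noting: exponentiating large logits can overflow to inf. A common remedy, shown below only as a sketch, is to subtract the row-wise maximum before exponentiating, which leaves the softmax value unchanged:

# Numerically stable variant: softmax is invariant to subtracting a per-row constant
def SoftMax_Stable(X):
    X = X - X.max(dim=1, keepdim=True)[0]   # shift so the largest logit in each row is 0
    X_exp = X.exp()
    return X_exp / X_exp.sum(dim=1, keepdim=True)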

5. Define the sample labels

# Define the sample labels (one-hot encoding)
def Target_Function(num_outputs, batch_size, Label):
    Y = torch.zeros(batch_size, num_outputs, dtype=torch.float)
    for i in range(batch_size):
        Y[i, Label[i]] = 1
    return Y
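
The loop above writes one element per sample. The same one-hot matrix can be built in a single call with scatter_, which may be convenient for larger batches (a sketch, assuming Label holds integer class indices):

# Vectorized one-hot encoding: scatter a 1 into column Label[i] of row i
def Target_Function_Vec(num_outputs, batch_size, Label):
    Label = torch.as_tensor(Label, dtype=torch.long).view(-1, 1)
    return torch.zeros(batch_size, num_outputs).scatter_(1, Label, 1.0)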

6. Define the cross-entropy loss function

# Define the cross-entropy loss function
def cross_entropy(Y, Y_hat, Batch_Size):
    return -(Y * Y_hat.log()).sum() / Batch_Size
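
Because the one-hot vector has a single 1, the same loss can also be computed without building Y at all, by picking out the predicted probability of the true class for each sample (a sketch, assuming Label holds integer class indices):

# Equivalent loss using integer labels directly: -log(p_true_class), averaged over the batch
def cross_entropy_from_indices(Y_hat, Label, Batch_Size):
    Label = torch.as_tensor(Label, dtype=torch.long).view(-1, 1)
    return -Y_hat.gather(1, Label).log().sum() / Batch_Size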

7. Initialize the training parameters

# Initialize the training parameters
batch_size = 200
num_inputs = 784
num_outputs = 10
lr = 0.03
num_epochs = 5
w = torch.tensor(np.random.normal(0, 0.01, (num_inputs, num_outputs)), dtype=torch.float, requires_grad=True)
b = torch.tensor(np.zeros((1, num_outputs)), dtype=torch.float, requires_grad=True)   # one bias per output class
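
For comparison, the same number of parameters (a 784x10 weight matrix plus 10 biases, 7,850 values in total) is what torch.nn.Linear(784, 10) would create, with the weight stored transposed; the manual tensors above are kept so that the update step stays explicit. A sketch of the built-in equivalent:

# Built-in equivalent of w and b (shown for comparison; not used by the training loop below)
linear = torch.nn.Linear(num_inputs, num_outputs)    # weight of shape (10, 784), bias of shape (10,)
print(sum(p.numel() for p in linear.parameters()))   # 7850 = 784*10 + 10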

8. Start training

# Start training
for epoch in range(num_epochs):
    train_loss, train_acc, n = 0.0, 0.0, 0
    for data, label in Data_Iter(train_data, train_labels, batch_size):
        # Normalize the image data
        x = data.reshape(batch_size, -1) / 255
        # Compute the model's estimate for each sample
        y_hat = SoftMax(torch.mm(x, w) + b)
        # Ground-truth labels in one-hot form
        y = Target_Function(num_outputs, batch_size, np.array(label))
        # Compute the loss
        loss = cross_entropy(y, y_hat, batch_size)
        # Compute the gradients
        loss.backward()
        # Update the training parameters
        w.data -= lr * w.grad
        b.data -= lr * b.grad
        # Reset the gradients to zero
        w.grad.data.zero_()
        b.grad.data.zero_()
        # Loss of this mini-batch after the update
        l = cross_entropy(y, SoftMax(torch.mm(x, w) + b), batch_size)
        # Accumulate the training loss
        train_loss += l.item()
        # Accumulate the training accuracy
        val, indices = y_hat.max(dim=1)
        acc = (indices == label).float().sum()
        train_acc += acc.item()
        n += batch_size
    print('loss= %.4f' % (train_loss / n), ' ', 'accuracy= %.4f' % (train_acc / n))

The training results are shown in the figure below:

9. Prediction
Use the test set to check how well the network has been trained.

# Prediction
text_labels = ['t-shirt', 'trouser', 'pullover', 'dress', 'coat',
               'sandal', 'shirt', 'sneaker', 'bag', 'ankle boot']

# Define an image display function
def Show_Img(Img, True_Label, Pred_Label, Num):
    plt.figure(figsize=(30, 30))
    for i in range(Num):
        plt.subplot(1, Num, i + 1)
        plt.imshow(Img[i])
        # Remove the per-image axis ticks
        plt.xticks([])
        plt.yticks([])
        # Title: true label on top, predicted label below
        plt.title(text_labels[True_Label[i]] + '\n' + text_labels[Pred_Label[i]])
    plt.show()

show_num = 10
for Img, true_l in Data_Iter(test_data, test_labels, batch_size):
    img = Img.reshape(batch_size, -1) / 255
    # Compute the model's estimate for each sample
    l_hat = SoftMax(torch.mm(img, w) + b)
    value, pred_l = l_hat.max(dim=1)
    Show_Img(Img, true_l, pred_l, show_num)
    break

The prediction results are shown in the figures below:

Clearly, the softmax classifier achieves a fairly high recognition accuracy; almost all of the displayed test images are classified correctly.
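
This visual spot-check can be backed up with a quantitative estimate. The sketch below reuses the helpers defined earlier and runs the whole test set through the trained w and b to report the overall accuracy:

# Estimate the overall accuracy on the test set with the trained parameters
correct, total = 0.0, 0
for data, label in Data_Iter(test_data, test_labels, batch_size):
    x = data.reshape(-1, num_inputs) / 255
    y_hat = SoftMax(torch.mm(x, w) + b)
    correct += (y_hat.argmax(dim=1) == label).float().sum().item()
    total += len(label)
print('test accuracy = %.4f' % (correct / total))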
III. Full code

import numpy as np
import torch
from torch.utils.data import DataLoader, Dataset
from torchvision import transforms
import gzip
import os
import torchvision
import matplotlib.pyplot as plt
import random

# # Download the dataset
# mnist_train = torchvision.datasets.FashionMNIST(root='~/Datasets/FashionMNIST', train=True,
#                                                 download=True, transform=transforms.ToTensor())
# mnist_test = torchvision.datasets.FashionMNIST(root='~/Datasets/FashionMNIST', train=False,
#                                                download=True, transform=transforms.ToTensor())

# Export the data
def load_data(data_folder, data_name, label_name):
    with gzip.open(os.path.join(data_folder, label_name), 'rb') as lbpath:   # 'rb' reads binary data
        y_train = np.frombuffer(lbpath.read(), np.uint8, offset=8)
    with gzip.open(os.path.join(data_folder, data_name), 'rb') as imgpath:
        x_train = np.frombuffer(imgpath.read(), np.uint8, offset=16).reshape(len(y_train), 28, 28)
    return (x_train, y_train)

folder = 'D:/jupyter_data/FashionMNIST/raw'
train_data_name = "train-images-idx3-ubyte.gz"
train_label_name = "train-labels-idx1-ubyte.gz"
test_data_name = "t10k-images-idx3-ubyte.gz"
test_label_name = "t10k-labels-idx1-ubyte.gz"
train_data, train_labels = load_data(folder, train_data_name, train_label_name)
test_data, test_labels = load_data(folder, test_data_name, test_label_name)

# Select mini-batches of data
def Data_Iter(Data, Label, Batch_Size):
    index = list(range(len(Data)))
    random.shuffle(index)
    for i in range(0, len(index), Batch_Size):
        j = index[i:min(i + Batch_Size, len(Data))]
        yield torch.tensor(Data[j], dtype=torch.float), torch.tensor(Label[j])

# Define the softmax classification function
def SoftMax(X):
    X_exp = X.exp()
    partition = X_exp.sum(dim=1, keepdims=True)
    return X_exp / partition

# Define the sample labels (one-hot encoding)
def Target_Function(num_outputs, batch_size, Label):
    Y = torch.zeros(batch_size, num_outputs, dtype=torch.float)
    for i in range(batch_size):
        Y[i, Label[i]] = 1
    return Y

# Define the cross-entropy loss function
def cross_entropy(Y, Y_hat, Batch_Size):
    return -(Y * Y_hat.log()).sum() / Batch_Size

# Initialize the training parameters
batch_size = 200
num_inputs = 784
num_outputs = 10
lr = 0.03
num_epochs = 5
w = torch.tensor(np.random.normal(0, 0.01, (num_inputs, num_outputs)), dtype=torch.float, requires_grad=True)
b = torch.tensor(np.zeros((1, num_outputs)), dtype=torch.float, requires_grad=True)   # one bias per output class

# Start training
for epoch in range(num_epochs):
    train_loss, train_acc, n = 0.0, 0.0, 0
    for data, label in Data_Iter(train_data, train_labels, batch_size):
        # Normalize the image data
        x = data.reshape(batch_size, -1) / 255
        # Compute the model's estimate for each sample
        y_hat = SoftMax(torch.mm(x, w) + b)
        # Ground-truth labels in one-hot form
        y = Target_Function(num_outputs, batch_size, np.array(label))
        # Compute the loss
        loss = cross_entropy(y, y_hat, batch_size)
        # Compute the gradients
        loss.backward()
        # Update the training parameters
        w.data -= lr * w.grad
        b.data -= lr * b.grad
        # Reset the gradients to zero
        w.grad.data.zero_()
        b.grad.data.zero_()
        # Loss of this mini-batch after the update
        l = cross_entropy(y, SoftMax(torch.mm(x, w) + b), batch_size)
        # Accumulate the training loss
        train_loss += l.item()
        # Accumulate the training accuracy
        val, indices = y_hat.max(dim=1)
        acc = (indices == label).float().sum()
        train_acc += acc.item()
        n += batch_size
    print('loss= %.4f' % (train_loss / n), ' ', 'accuracy= %.4f' % (train_acc / n))

# Prediction
text_labels = ['t-shirt', 'trouser', 'pullover', 'dress', 'coat',
               'sandal', 'shirt', 'sneaker', 'bag', 'ankle boot']

# Define an image display function
def Show_Img(Img, True_Label, Pred_Label, Num):
    plt.figure(figsize=(30, 30))
    for i in range(Num):
        plt.subplot(1, Num, i + 1)
        plt.imshow(Img[i])
        # Remove the per-image axis ticks
        plt.xticks([])
        plt.yticks([])
        # Title: true label on top, predicted label below
        plt.title(text_labels[True_Label[i]] + '\n' + text_labels[Pred_Label[i]])
    plt.show()

show_num = 10
for Img, true_l in Data_Iter(test_data, test_labels, batch_size):
    img = Img.reshape(batch_size, -1) / 255
    # Compute the model's estimate for each sample
    l_hat = SoftMax(torch.mm(img, w) + b)
    value, pred_l = l_hat.max(dim=1)
    Show_Img(Img, true_l, pred_l, show_num)
    break

