PyTorch MNIST framework

Revisiting old ground — so I never again find myself unable to write this from scratch!

import torch
import torch.nn as nn
import torch.tensor
import torch.nn.functional as F
import torch.optim as optim
import os
from torchvision import datasets, transforms

# ---- Hyper-parameters ----
BATCH_SIZE = 5             # samples per mini-batch
HIDDEN_SIZE = 512          # width of the hidden fully-connected layer
WIDTH = 28                 # MNIST image width in pixels
HEIGHT = 28                # MNIST image height in pixels
PIC_SIZE = WIDTH * HEIGHT  # flattened image length (784)
LEARNING_RATE = 1e-5
# Fix: this was hard-coded to True, which makes every .cuda() call below
# raise on a machine without a GPU; detect availability instead (still
# True whenever a GPU is actually present, so behavior is unchanged there).
USE_CUDA = torch.cuda.is_available()
SAVE_PATH = './model/dnn'

# ---- Data loaders ----
# Download (if needed) the MNIST train/test splits into ./data and wrap
# them in batched loaders.
# NOTE(review): the training loader has no shuffle=True, so every epoch
# sees samples in the same fixed order — confirm this is intentional.
train_loader = torch.utils.data.DataLoader(
    datasets.MNIST('./data', train=True, download=True,
                   transform=transforms.Compose([transforms.ToTensor()])  # convert PIL image to a float tensor
                   ), batch_size=BATCH_SIZE
)
test_loader = torch.utils.data.DataLoader(
    datasets.MNIST('./data', train=False, download=True,
                   transform=transforms.Compose([transforms.ToTensor()])
                   ), batch_size=BATCH_SIZE
)


class NN(torch.nn.Module):
    """Apply a sequence of sub-modules, in order, to flattened input.

    `models` is expected to be an iterable of modules (an nn.ModuleList
    here) so that their parameters register with this module.
    """

    def __init__(self, models):
        super(NN, self).__init__()
        self.models = models

    def forward(self, input):
        # Flatten each image into a PIC_SIZE-long row vector, then pipe
        # it through every sub-module in sequence.
        out = input.reshape([-1, PIC_SIZE])
        for layer in self.models:
            out = layer(out)
        return out


# ---- Model ----
# Layers held in an nn.ModuleList so NN registers their parameters:
# 784 -> Linear -> 512 -> ReLU -> Linear -> 10 class logits.
models = nn.ModuleList([
    nn.Linear(PIC_SIZE, HIDDEN_SIZE),
    nn.ReLU(),
    nn.Linear(HIDDEN_SIZE, 10),
])
model = NN(models)
print(model)

# Move the model to the (single) default GPU when CUDA is enabled.
# NOTE(review): this raises if USE_CUDA is True but no GPU is present.
if USE_CUDA: model = model.cuda()

# Adam over all parameters registered through the ModuleList above.
optimizer = optim.Adam(model.parameters(), lr=LEARNING_RATE)


def train(epoch):
    """Run one full pass over the training set.

    Args:
        epoch: 1-based epoch index, used only for progress logging.
    """
    for step, (source, label) in enumerate(train_loader):
        if USE_CUDA: source, label = source.cuda(), label.cuda()
        out = model(source)
        loss = F.cross_entropy(out, label)
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
        # Log every 100 steps, skipping step 0.
        if step % 100 == 0 and step != 0:
            # Fix: print the scalar loss value via .item(), not the
            # tensor repr ("tensor(2.30, grad_fn=...)").
            print('Epoch {}, step {} | loss: {}'.format(epoch, step, loss.item()))


def test():
    """Evaluate classification accuracy on the MNIST test split.

    Returns:
        Fraction of correctly classified test samples, in [0, 1].
    """
    correct = 0
    total = 0
    with torch.no_grad():  # fix: evaluation needs no autograd graph
        for source, label in test_loader:
            if USE_CUDA: source, label = source.cuda(), label.cuda()
            out = model(source)
            pred = out.argmax(dim=1)
            correct += (pred == label).sum().item()
            # Fix: count the actual batch size instead of assuming
            # BATCH_SIZE — correct even when the last batch is smaller.
            total += label.size(0)
    return float(correct) / float(total)


if __name__ == '__main__':
    # NOTE(review): range(1, 10) runs 9 epochs — confirm 10 wasn't intended.
    for epoch in range(1, 10):
        train(epoch)
        print('Epoch {} | Accuracy {}'.format(epoch, test()))
        if not os.path.exists(SAVE_PATH): os.makedirs(SAVE_PATH)
        # Fix: build the checkpoint path from SAVE_PATH instead of
        # repeating the directory as a hard-coded string literal.
        torch.save(model, os.path.join(SAVE_PATH, 'checkpoint_{}.pt'.format(epoch)))

最后編輯于
?著作權(quán)歸作者所有,轉(zhuǎn)載或內(nèi)容合作請聯(lián)系作者
【社區(qū)內(nèi)容提示】社區(qū)部分內(nèi)容疑似由AI輔助生成,瀏覽時(shí)請結(jié)合常識(shí)與多方信息審慎甄別。
平臺(tái)聲明:文章內(nèi)容(如有圖片或視頻亦包括在內(nèi))由作者上傳并發(fā)布,文章內(nèi)容僅代表作者本人觀點(diǎn),簡書系信息發(fā)布平臺(tái),僅提供信息存儲(chǔ)服務(wù)。

相關(guān)閱讀更多精彩內(nèi)容

  • 你沒看錯(cuò),現(xiàn)在的我,已經(jīng)畢業(yè)了,語言文字表達(dá)這種感覺顯得太蒼白無力,附圖幾張~ 人家班主任都跟個(gè)傳說似的,神龍見首...
    唯恐天下不亂閱讀 378評論 0 1
  • 2017.12.02爸爸媽媽帶我來到了美麗的海南城市三亞游玩,開啟了我人生的第一次旅途 第一天:入住京海假日大酒店...
    夢涵_0725閱讀 278評論 0 0
  • 語花慢閱讀 278評論 0 1

友情鏈接更多精彩內(nèi)容