In [ ]:
! pip install hiddenlayer graphviz torchviz
Requirement already satisfied: hiddenlayer in /usr/local/lib/python3.6/dist-packages (0.3)
Requirement already satisfied: graphviz in /usr/local/lib/python3.6/dist-packages (0.10.1)
Collecting torchviz
  Downloading https://files.pythonhosted.org/packages/8f/8e/a9630c7786b846d08b47714dd363a051f5e37b4ea0e534460d8cdfc1644b/torchviz-0.0.1.tar.gz (41kB)
Requirement already satisfied: torch in /usr/local/lib/python3.6/dist-packages (from torchviz) (1.6.0+cu101)
Requirement already satisfied: future in /usr/local/lib/python3.6/dist-packages (from torch->torchviz) (0.16.0)
Requirement already satisfied: numpy in /usr/local/lib/python3.6/dist-packages (from torch->torchviz) (1.18.5)
Building wheels for collected packages: torchviz
  Building wheel for torchviz (setup.py) ... done
  Created wheel for torchviz: filename=torchviz-0.0.1-cp36-none-any.whl size=3520 sha256=ca7ad615a188fcf4865ec3d8f0bdce1c431a43f322315ecd05c99571f98c5fac
  Stored in directory: /root/.cache/pip/wheels/2a/c2/c5/b8b4d0f7992c735f6db5bfa3c5f354cf36502037ca2b585667
Successfully built torchviz
Installing collected packages: torchviz
Successfully installed torchviz-0.0.1
In [67]:
import numpy as np
import torch
import torch.nn as nn
import torch.optim as optim
import torch.utils.data as Data
from torchvision import datasets, transforms
import torch.nn.functional as F
import timeit
import unittest


torch.manual_seed(0)
torch.backends.cudnn.deterministic = True
torch.backends.cudnn.benchmark = False
np.random.seed(0)
In [68]:
# check availability of GPU and set the device accordingly

device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')


# define transforms for preparing the dataset
transform = transforms.Compose([
        transforms.CenterCrop(26),
        transforms.Resize((28,28)),
        transforms.ColorJitter(brightness=0.05, contrast=0.05, saturation=0.05, hue=0.05),
        transforms.RandomRotation(10),      
        transforms.RandomAffine(5),
        
        # convert the image to a pytorch tensor
        transforms.ToTensor(), 
        
        # normalise the images with mean and std of the dataset
        transforms.Normalize((0.1307,), (0.3081,)) 
        ])
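As a quick sanity check, the transform above can be applied to one raw (PIL) MNIST image to confirm that the cropped, resized, augmented and normalised output is still a 1x28x28 tensor. This is a minimal, unexecuted sketch; `raw_mnist` is a hypothetical name introduced only for this check.
In [ ]:
# Hypothetical sanity check: apply the transform to a single raw MNIST image.
raw_mnist = datasets.MNIST('./data', train=True, download=True)  # no transform -> PIL images
img, label = raw_mnist[0]
sample = transform(img)
print(sample.shape)          # expected: torch.Size([1, 28, 28])
print(sample.mean().item())  # should be roughly zero-mean after Normalize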
In [69]:
# Load the MNIST training and test datasets via `torchvision.datasets.MNIST`,
# applying the transform defined above

train_dataset = datasets.MNIST('./data',train=True,transform=transform,download=True)
test_dataset =  datasets.MNIST('./data',train=False,transform=transform,download=True)
In [70]:
# create dataloaders for the training and test datasets with shuffle=True
# note: a batch size of 128 is used here; the dataloader unit test below expects
# the originally suggested 32, which is why that test fails

train_dataloader = Data.DataLoader(dataset=train_dataset, batch_size=128, shuffle=True)
test_dataloader = Data.DataLoader(dataset=test_dataset, batch_size=128, shuffle=True)
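A minimal, unexecuted sketch to confirm the dataloader output: pull one batch from the training loader and inspect its shapes (with the batch size of 128 configured above).
In [ ]:
# Hypothetical check: grab one batch from the training dataloader and inspect it.
images, labels = next(iter(train_dataloader))
print(images.shape)   # expected: torch.Size([128, 1, 28, 28])
print(labels.shape)   # expected: torch.Size([128])
print(labels[:10])    # first ten class labels of the batch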
In [81]:
# My Net

class Net(nn.Module):
    def __init__(self):
        super(Net, self).__init__()
        
        # first conv block: four parallel conv layers, each with 16 output channels, stride 1, and kernel sizes 3, 5, 7, 9
        self.conv11 = nn.Conv2d(1, 16, 3, 1) # Input = 1x28x28  Output = 16x26x26
        self.conv12 = nn.Conv2d(1, 16, 5, 1) # Input = 1x28x28  Output = 16x24x24
        self.conv13 = nn.Conv2d(1, 16, 7, 1) # Input = 1x28x28  Output = 16x22x22
        self.conv14 = nn.Conv2d(1, 16, 9, 1) # Input = 1x28x28  Output = 16x20x20

        # second conv block: four parallel conv layers, each with 32 output channels, stride 1, and kernel sizes 3, 5, 7, 9
        self.conv21 = nn.Conv2d(16, 32, 3, 1) # Input = 16x26x26 Output = 32x24x24
        self.conv22 = nn.Conv2d(16, 32, 5, 1) # Input = 16x24x24 Output = 32x20x20
        self.conv23 = nn.Conv2d(16, 32, 7, 1) # Input = 16x22x22 Output = 32x16x16
        self.conv24 = nn.Conv2d(16, 32, 9, 1) # Input = 16x20x20  Output = 32x12x12

        # third conv block: four parallel conv layers, each with 64 output channels, stride 1, and kernel sizes 3, 5, 7, 9
        self.conv31 = nn.Conv2d(32, 64, 3, 1) # Input = 32x24x24 Output = 64x22x22
        self.conv32 = nn.Conv2d(32, 64, 5, 1) # Input = 32x20x20 Output = 64x16x16
        self.conv33 = nn.Conv2d(32, 64, 7, 1) # Input = 32x16x16 Output = 64x10x10
        self.conv34 = nn.Conv2d(32, 64, 9, 1) # Input = 32x12x12 Output = 64x4x4
        

        # define a max pooling layer with kernel size 2 (shared by all four branches)
        self.maxpool = nn.MaxPool2d(2) # e.g. 64x22x22 -> 64x11x11 for the 3x3 branch
        #self.maxpool1 = nn.MaxPool2d(1)
        # define dropout layer with a probability of 0.25
        self.dropout1 = nn.Dropout(0.25)
        # define dropout layer with a probability of 0.5
        self.dropout2 = nn.Dropout(0.5)

        # define a linear(dense) layer with 256 output features for each branch (input size matches that branch's flattened feature map)
        self.fc11 = nn.Linear(64*11*11, 256)
        self.fc12 = nn.Linear(64*8*8, 256)      # after maxpooling 2x2
        self.fc13 = nn.Linear(64*5*5, 256)
        self.fc14 = nn.Linear(64*2*2, 256)

        # define a second linear(dense) layer with 128 output features for each branch
        self.fc21 = nn.Linear(256, 128)
        self.fc22 = nn.Linear(256, 128)
        self.fc23 = nn.Linear(256, 128)
        self.fc24 = nn.Linear(256, 128)

        self.fc33 = nn.Linear(128*4, 10)  # final classifier over the four concatenated 128-d branch outputs (10 classes)
        #self.fc33 = nn.Linear(64*3,10)
        

    def forward(self, inp):
        # Use the layers defined above in a sequential way (follow the same grouping as the
        # layer definitions) and write the forward pass; apply a ReLU activation after each
        # conv layer and after each first dense layer (fc1x).
        

        x = F.relu(self.conv11(inp))
        x = F.relu(self.conv21(x))
        x = F.relu(self.maxpool(self.conv31(x)))
        #print(x.shape)
        #x = torch.flatten(x, 1)
        x = x.view(-1,64*11*11)
        x = self.dropout1(x)
        x = F.relu(self.fc11(x))
        x = self.dropout2(x)
        x = self.fc21(x)

        y = F.relu(self.conv12(inp))
        y = F.relu(self.conv22(y))
        y = F.relu(self.maxpool(self.conv32(y)))
        #x = torch.flatten(x, 1)
        y = y.view(-1,64*8*8)
        y = self.dropout1(y)
        y = F.relu(self.fc12(y))
        y = self.dropout2(y)
        y = self.fc22(y)

        z = F.relu(self.conv13(inp))
        z = F.relu(self.conv23(z))
        z = F.relu(self.maxpool(self.conv33(z)))
        #x = torch.flatten(x, 1)
        z = z.view(-1,64*5*5)
        z = self.dropout1(z)
        z = F.relu(self.fc13(z))
        z = self.dropout2(z)
        z = self.fc23(z)

        ze = F.relu(self.conv14(inp))
        ze = F.relu(self.conv24(ze))
        ze = F.relu(self.maxpool(self.conv34(ze)))
        #x = torch.flatten(x, 1)
        ze = ze.view(-1,64*2*2)
        ze = self.dropout1(ze)
        ze = F.relu(self.fc14(ze))
        ze = self.dropout2(ze)
        ze = self.fc24(ze)

        out_f = torch.cat((x, y, z, ze), dim=1)
        #out_f1 = torch.cat((out_f, ze), dim=1)
        out = self.fc33(out_f)
        
        output = F.log_softmax(out, dim=1)
        return output
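Before training, it can help to confirm that the four branches really produce the flattened sizes assumed by `fc11`–`fc14`. A minimal, unexecuted sketch passing a dummy batch through a fresh `Net` instance (the names `dummy` and `net` are introduced only for this check):
In [ ]:
# Hypothetical shape check with a dummy batch of two 1x28x28 images.
dummy = torch.zeros(2, 1, 28, 28).to(device)
net = Net().to(device)
with torch.no_grad():
    out = net(dummy)
print(out.shape)             # expected: torch.Size([2, 10])
print(out.exp().sum(dim=1))  # log_softmax output -> probabilities sum to ~1 per sample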
In [82]:
torch.cat((torch.tensor([1,2]),torch.tensor([2,3]),torch.tensor([3,4]),torch.tensor([3,4])),dim=0)
Out[82]:
tensor([1, 2, 2, 3, 3, 4, 3, 4])
In [83]:
torch.tensor(1)
Out[83]:
tensor(1)
In [84]:
model = Net().to(device)
In [85]:
print(model.parameters)  # prints the bound `parameters` method; its repr includes the module architecture
<bound method Module.parameters of Net(
  (conv11): Conv2d(1, 16, kernel_size=(3, 3), stride=(1, 1))
  (conv12): Conv2d(1, 16, kernel_size=(5, 5), stride=(1, 1))
  (conv13): Conv2d(1, 16, kernel_size=(7, 7), stride=(1, 1))
  (conv14): Conv2d(1, 16, kernel_size=(9, 9), stride=(1, 1))
  (conv21): Conv2d(16, 32, kernel_size=(3, 3), stride=(1, 1))
  (conv22): Conv2d(16, 32, kernel_size=(5, 5), stride=(1, 1))
  (conv23): Conv2d(16, 32, kernel_size=(7, 7), stride=(1, 1))
  (conv24): Conv2d(16, 32, kernel_size=(9, 9), stride=(1, 1))
  (conv31): Conv2d(32, 64, kernel_size=(3, 3), stride=(1, 1))
  (conv32): Conv2d(32, 64, kernel_size=(5, 5), stride=(1, 1))
  (conv33): Conv2d(32, 64, kernel_size=(7, 7), stride=(1, 1))
  (conv34): Conv2d(32, 64, kernel_size=(9, 9), stride=(1, 1))
  (maxpool): MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False)
  (dropout1): Dropout(p=0.25, inplace=False)
  (dropout2): Dropout(p=0.5, inplace=False)
  (fc11): Linear(in_features=7744, out_features=256, bias=True)
  (fc12): Linear(in_features=4096, out_features=256, bias=True)
  (fc13): Linear(in_features=1600, out_features=256, bias=True)
  (fc14): Linear(in_features=256, out_features=256, bias=True)
  (fc21): Linear(in_features=256, out_features=128, bias=True)
  (fc22): Linear(in_features=256, out_features=128, bias=True)
  (fc23): Linear(in_features=256, out_features=128, bias=True)
  (fc24): Linear(in_features=256, out_features=128, bias=True)
  (fc33): Linear(in_features=512, out_features=10, bias=True)
)>
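The commented-out assertion in the unit tests below checks the total parameter count; a minimal, unexecuted cell to compute it directly:
In [ ]:
# Hypothetical: count the trainable parameters of the model defined above.
total_params = sum(p.numel() for p in model.parameters() if p.requires_grad)
print(f'Total trainable parameters: {total_params:,}')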
In [86]:
import unittest

class TestImplementations(unittest.TestCase):
    
    # Dataloading tests
    def test_dataset(self):
        self.dataset_classes = ['0 - zero',
                                '1 - one',
                                '2 - two',
                                '3 - three',
                                '4 - four',
                                '5 - five',
                                '6 - six',
                                '7 - seven',
                                '8 - eight',
                                '9 - nine']
        self.assertTrue(train_dataset.classes == self.dataset_classes)
        self.assertTrue(train_dataset.train == True)
    
    def test_dataloader(self):        
        self.assertTrue(train_dataloader.batch_size == 32)
        self.assertTrue(test_dataloader.batch_size == 32)      
         
    def test_total_parameters(self):
        model = Net().to(device)
        #self.assertTrue(sum(p.numel() for p in model.parameters()) == 1015946)

suite = unittest.TestLoader().loadTestsFromModule(TestImplementations())
unittest.TextTestRunner().run(suite)
F..
======================================================================
FAIL: test_dataloader (__main__.TestImplementations)
----------------------------------------------------------------------
Traceback (most recent call last):
  File "<ipython-input-86-80e7f3fdfcef>", line 21, in test_dataloader
    self.assertTrue(train_dataloader.batch_size == 32)
AssertionError: False is not true

----------------------------------------------------------------------
Ran 3 tests in 0.062s

FAILED (failures=1)
Out[86]:
<unittest.runner.TextTestResult run=3 errors=0 failures=1>
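The failure above is the batch-size mismatch noted earlier (128 in the dataloaders versus the 32 the test expects). A hypothetical, unexecuted adjustment that asserts against the batch size actually configured:
In [ ]:
# Hypothetical: test against the batch size the dataloaders were actually built with.
class TestDataloaderBatchSize(unittest.TestCase):
    def test_dataloader(self):
        self.assertTrue(train_dataloader.batch_size == 128)
        self.assertTrue(test_dataloader.batch_size == 128)

unittest.TextTestRunner().run(
    unittest.TestLoader().loadTestsFromTestCase(TestDataloaderBatchSize))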
In [87]:
losses_1 = []
losses_2 = []

def train(model, device, train_loader, optimizer, epoch):
    model.train()
    
    for batch_idx, (data, target) in enumerate(train_loader):
        # send the image, target to the device
        data, target = data.to(device), target.to(device)
        # flush out the gradients stored in optimizer
        optimizer.zero_grad()
        # pass the image to the model and assign the output to variable named output
        output = model(data)
        # calculate the loss (use nll_loss in pytorch)
        loss = F.nll_loss(output, target)
        # do a backward pass
        loss.backward()
        # update the weights
        optimizer.step()
      
        if batch_idx % 100 == 0:
            print('Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}'.format(
                epoch, batch_idx * len(data), len(train_loader.dataset),
                100. * batch_idx / len(train_loader), loss.item()))
            losses_1.append(loss.item())
            losses_2.append(100. * batch_idx / len(train_loader))
        
In [88]:
accuracy = []
avg_loss = []
def test(model, device, test_loader):
    model.eval()
    test_loss = 0
    correct = 0
    with torch.no_grad():
        for data, target in test_loader:
          
            # send the image, target to the device
            data, target = data.to(device), target.to(device)
            # pass the image to the model and assign the output to variable named output
            output = model(data)
            test_loss += F.nll_loss(output, target, reduction='sum').item() # sum up batch loss
          
            pred = output.argmax(dim=1, keepdim=True)  # get the index of the max log-probability
            correct += pred.eq(target.view_as(pred)).sum().item()

    test_loss /= len(test_loader.dataset)

    print('\nTest set: Average loss: {:.4f}, Accuracy: {}/{} ({:.0f}%)\n'.format(
        test_loss, correct, len(test_loader.dataset),
        100. * correct / len(test_loader.dataset)))
    avg_loss.append(test_loss)
    accuracy.append(100. * correct / len(test_loader.dataset))
In [89]:
model = Net().to(device)
learning_rate = []
def adjust_learning_rate(optimizer, iter, each):
    # sets the learning rate to the initial LR (0.001) decayed by a factor of 0.95 every 'each' epochs
    lr = 0.001 * (0.95 ** (iter // each))
    state_dict = optimizer.state_dict()
    for param_group in state_dict['param_groups']:
        param_group['lr'] = lr
    optimizer.load_state_dict(state_dict)
    print("Learning rate = ",lr)
    return lr


## Define Adam optimiser with a learning rate of 0.001
optimizer =  torch.optim.Adam(model.parameters(),lr=0.001)

start = timeit.default_timer()
for epoch in range(1,100):
  lr = adjust_learning_rate(optimizer, epoch, 1.616)  # 'each' of 1.616 decays the LR roughly every other epoch
  learning_rate.append(lr)
  train(model, device, train_dataloader, optimizer, epoch)
  test(model, device, test_dataloader)
stop = timeit.default_timer()
print('Total time taken: {} seconds'.format(int(stop - start)))
Learning rate =  0.001
Train Epoch: 1 [0/60000 (0%)]	Loss: 2.301726
Train Epoch: 1 [12800/60000 (21%)]	Loss: 0.183894
Train Epoch: 1 [25600/60000 (43%)]	Loss: 0.100056
Train Epoch: 1 [38400/60000 (64%)]	Loss: 0.034865
Train Epoch: 1 [51200/60000 (85%)]	Loss: 0.075558

Test set: Average loss: 0.0457, Accuracy: 9848/10000 (98%)

Learning rate =  0.00095
Train Epoch: 2 [0/60000 (0%)]	Loss: 0.062097
Train Epoch: 2 [12800/60000 (21%)]	Loss: 0.054302
Train Epoch: 2 [25600/60000 (43%)]	Loss: 0.023979
Train Epoch: 2 [38400/60000 (64%)]	Loss: 0.058818
Train Epoch: 2 [51200/60000 (85%)]	Loss: 0.143367

Test set: Average loss: 0.0386, Accuracy: 9884/10000 (99%)

Learning rate =  0.00095
Train Epoch: 3 [0/60000 (0%)]	Loss: 0.023573
Train Epoch: 3 [12800/60000 (21%)]	Loss: 0.081923
Train Epoch: 3 [25600/60000 (43%)]	Loss: 0.057554
Train Epoch: 3 [38400/60000 (64%)]	Loss: 0.073468
Train Epoch: 3 [51200/60000 (85%)]	Loss: 0.133310

Test set: Average loss: 0.0375, Accuracy: 9887/10000 (99%)

Learning rate =  0.0009025
Train Epoch: 4 [0/60000 (0%)]	Loss: 0.151612
Train Epoch: 4 [12800/60000 (21%)]	Loss: 0.015873
Train Epoch: 4 [25600/60000 (43%)]	Loss: 0.066844
Train Epoch: 4 [38400/60000 (64%)]	Loss: 0.071160
Train Epoch: 4 [51200/60000 (85%)]	Loss: 0.067345

Test set: Average loss: 0.0358, Accuracy: 9882/10000 (99%)

Learning rate =  0.000857375
Train Epoch: 5 [0/60000 (0%)]	Loss: 0.041029
Train Epoch: 5 [12800/60000 (21%)]	Loss: 0.007885
Train Epoch: 5 [25600/60000 (43%)]	Loss: 0.034582
Train Epoch: 5 [38400/60000 (64%)]	Loss: 0.020356
Train Epoch: 5 [51200/60000 (85%)]	Loss: 0.011873

Test set: Average loss: 0.0295, Accuracy: 9911/10000 (99%)

Learning rate =  0.000857375
Train Epoch: 6 [0/60000 (0%)]	Loss: 0.036332
Train Epoch: 6 [12800/60000 (21%)]	Loss: 0.035254
Train Epoch: 6 [25600/60000 (43%)]	Loss: 0.025454
Train Epoch: 6 [38400/60000 (64%)]	Loss: 0.086189
Train Epoch: 6 [51200/60000 (85%)]	Loss: 0.029599

Test set: Average loss: 0.0351, Accuracy: 9884/10000 (99%)

Learning rate =  0.0008145062499999999
Train Epoch: 7 [0/60000 (0%)]	Loss: 0.005426
Train Epoch: 7 [12800/60000 (21%)]	Loss: 0.029572
Train Epoch: 7 [25600/60000 (43%)]	Loss: 0.060426
Train Epoch: 7 [38400/60000 (64%)]	Loss: 0.050103
Train Epoch: 7 [51200/60000 (85%)]	Loss: 0.004074

Test set: Average loss: 0.0206, Accuracy: 9928/10000 (99%)

Learning rate =  0.0008145062499999999
Train Epoch: 8 [0/60000 (0%)]	Loss: 0.002737
Train Epoch: 8 [12800/60000 (21%)]	Loss: 0.012506
Train Epoch: 8 [25600/60000 (43%)]	Loss: 0.003289
Train Epoch: 8 [38400/60000 (64%)]	Loss: 0.037941
Train Epoch: 8 [51200/60000 (85%)]	Loss: 0.052138

Test set: Average loss: 0.0223, Accuracy: 9931/10000 (99%)

Learning rate =  0.0007737809374999998
Train Epoch: 9 [0/60000 (0%)]	Loss: 0.021300
Train Epoch: 9 [12800/60000 (21%)]	Loss: 0.002368
Train Epoch: 9 [25600/60000 (43%)]	Loss: 0.043856
Train Epoch: 9 [38400/60000 (64%)]	Loss: 0.024022
Train Epoch: 9 [51200/60000 (85%)]	Loss: 0.043918

Test set: Average loss: 0.0269, Accuracy: 9926/10000 (99%)

Learning rate =  0.0007350918906249999
Train Epoch: 10 [0/60000 (0%)]	Loss: 0.040984
Train Epoch: 10 [12800/60000 (21%)]	Loss: 0.001695
Train Epoch: 10 [25600/60000 (43%)]	Loss: 0.011522
Train Epoch: 10 [38400/60000 (64%)]	Loss: 0.061423
Train Epoch: 10 [51200/60000 (85%)]	Loss: 0.002001

Test set: Average loss: 0.0214, Accuracy: 9934/10000 (99%)

Learning rate =  0.0007350918906249999
Train Epoch: 11 [0/60000 (0%)]	Loss: 0.006208
Train Epoch: 11 [12800/60000 (21%)]	Loss: 0.043491
Train Epoch: 11 [25600/60000 (43%)]	Loss: 0.004927
Train Epoch: 11 [38400/60000 (64%)]	Loss: 0.088628
Train Epoch: 11 [51200/60000 (85%)]	Loss: 0.012003

Test set: Average loss: 0.0281, Accuracy: 9911/10000 (99%)

Learning rate =  0.0006983372960937497
Train Epoch: 12 [0/60000 (0%)]	Loss: 0.012479
Train Epoch: 12 [12800/60000 (21%)]	Loss: 0.019884
Train Epoch: 12 [25600/60000 (43%)]	Loss: 0.010930
Train Epoch: 12 [38400/60000 (64%)]	Loss: 0.071494
Train Epoch: 12 [51200/60000 (85%)]	Loss: 0.024340

Test set: Average loss: 0.0277, Accuracy: 9919/10000 (99%)

Learning rate =  0.0006634204312890623
Train Epoch: 13 [0/60000 (0%)]	Loss: 0.018359
Train Epoch: 13 [12800/60000 (21%)]	Loss: 0.002581
Train Epoch: 13 [25600/60000 (43%)]	Loss: 0.009119
Train Epoch: 13 [38400/60000 (64%)]	Loss: 0.012950
Train Epoch: 13 [51200/60000 (85%)]	Loss: 0.011662

Test set: Average loss: 0.0254, Accuracy: 9935/10000 (99%)

Learning rate =  0.0006634204312890623
Train Epoch: 14 [0/60000 (0%)]	Loss: 0.000234
Train Epoch: 14 [12800/60000 (21%)]	Loss: 0.003305
Train Epoch: 14 [25600/60000 (43%)]	Loss: 0.025527
Train Epoch: 14 [38400/60000 (64%)]	Loss: 0.019675
Train Epoch: 14 [51200/60000 (85%)]	Loss: 0.009907

Test set: Average loss: 0.0204, Accuracy: 9936/10000 (99%)

Learning rate =  0.0006302494097246091
Train Epoch: 15 [0/60000 (0%)]	Loss: 0.008930
Train Epoch: 15 [12800/60000 (21%)]	Loss: 0.019481
Train Epoch: 15 [25600/60000 (43%)]	Loss: 0.004442
Train Epoch: 15 [38400/60000 (64%)]	Loss: 0.000202
Train Epoch: 15 [51200/60000 (85%)]	Loss: 0.081121

Test set: Average loss: 0.0213, Accuracy: 9936/10000 (99%)

Learning rate =  0.0006302494097246091
Train Epoch: 16 [0/60000 (0%)]	Loss: 0.020234
Train Epoch: 16 [12800/60000 (21%)]	Loss: 0.003627
Train Epoch: 16 [25600/60000 (43%)]	Loss: 0.002248
Train Epoch: 16 [38400/60000 (64%)]	Loss: 0.002779
Train Epoch: 16 [51200/60000 (85%)]	Loss: 0.005351

Test set: Average loss: 0.0241, Accuracy: 9919/10000 (99%)

Learning rate =  0.0005987369392383787
Train Epoch: 17 [0/60000 (0%)]	Loss: 0.003025
Train Epoch: 17 [12800/60000 (21%)]	Loss: 0.000471
Train Epoch: 17 [25600/60000 (43%)]	Loss: 0.000029
Train Epoch: 17 [38400/60000 (64%)]	Loss: 0.003522
Train Epoch: 17 [51200/60000 (85%)]	Loss: 0.033848

Test set: Average loss: 0.0192, Accuracy: 9940/10000 (99%)

Learning rate =  0.0005688000922764596
Train Epoch: 18 [0/60000 (0%)]	Loss: 0.001633
Train Epoch: 18 [12800/60000 (21%)]	Loss: 0.003090
Train Epoch: 18 [25600/60000 (43%)]	Loss: 0.008347
Train Epoch: 18 [38400/60000 (64%)]	Loss: 0.036387
Train Epoch: 18 [51200/60000 (85%)]	Loss: 0.009319

Test set: Average loss: 0.0225, Accuracy: 9936/10000 (99%)

Learning rate =  0.0005688000922764596
Train Epoch: 19 [0/60000 (0%)]	Loss: 0.014034
Train Epoch: 19 [12800/60000 (21%)]	Loss: 0.005067
Train Epoch: 19 [25600/60000 (43%)]	Loss: 0.005205
Train Epoch: 19 [38400/60000 (64%)]	Loss: 0.030434
Train Epoch: 19 [51200/60000 (85%)]	Loss: 0.001558

Test set: Average loss: 0.0223, Accuracy: 9946/10000 (99%)

Learning rate =  0.0005403600876626366
Train Epoch: 20 [0/60000 (0%)]	Loss: 0.037629
Train Epoch: 20 [12800/60000 (21%)]	Loss: 0.014070
Train Epoch: 20 [25600/60000 (43%)]	Loss: 0.042333
Train Epoch: 20 [38400/60000 (64%)]	Loss: 0.001674
Train Epoch: 20 [51200/60000 (85%)]	Loss: 0.000658

Test set: Average loss: 0.0295, Accuracy: 9918/10000 (99%)

Learning rate =  0.0005403600876626366
Train Epoch: 21 [0/60000 (0%)]	Loss: 0.010162
Train Epoch: 21 [12800/60000 (21%)]	Loss: 0.060639
Train Epoch: 21 [25600/60000 (43%)]	Loss: 0.000253
Train Epoch: 21 [38400/60000 (64%)]	Loss: 0.002128
Train Epoch: 21 [51200/60000 (85%)]	Loss: 0.044032

Test set: Average loss: 0.0234, Accuracy: 9942/10000 (99%)

Learning rate =  0.0005133420832795048
Train Epoch: 22 [0/60000 (0%)]	Loss: 0.007296
Train Epoch: 22 [12800/60000 (21%)]	Loss: 0.003267
Train Epoch: 22 [25600/60000 (43%)]	Loss: 0.000104
Train Epoch: 22 [38400/60000 (64%)]	Loss: 0.010029
Train Epoch: 22 [51200/60000 (85%)]	Loss: 0.003976

Test set: Average loss: 0.0246, Accuracy: 9938/10000 (99%)

Learning rate =  0.00048767497911552955
Train Epoch: 23 [0/60000 (0%)]	Loss: 0.008708
Train Epoch: 23 [12800/60000 (21%)]	Loss: 0.000913
Train Epoch: 23 [25600/60000 (43%)]	Loss: 0.000396
Train Epoch: 23 [38400/60000 (64%)]	Loss: 0.012965
Train Epoch: 23 [51200/60000 (85%)]	Loss: 0.016098

Test set: Average loss: 0.0199, Accuracy: 9948/10000 (99%)

Learning rate =  0.00048767497911552955
Train Epoch: 24 [0/60000 (0%)]	Loss: 0.004991
Train Epoch: 24 [12800/60000 (21%)]	Loss: 0.001519
Train Epoch: 24 [25600/60000 (43%)]	Loss: 0.031811
Train Epoch: 24 [38400/60000 (64%)]	Loss: 0.000082
Train Epoch: 24 [51200/60000 (85%)]	Loss: 0.001252

Test set: Average loss: 0.0231, Accuracy: 9938/10000 (99%)

Learning rate =  0.000463291230159753
Train Epoch: 25 [0/60000 (0%)]	Loss: 0.003644
Train Epoch: 25 [12800/60000 (21%)]	Loss: 0.000591
Train Epoch: 25 [25600/60000 (43%)]	Loss: 0.003577
Train Epoch: 25 [38400/60000 (64%)]	Loss: 0.000553
Train Epoch: 25 [51200/60000 (85%)]	Loss: 0.013818

Test set: Average loss: 0.0223, Accuracy: 9942/10000 (99%)

Learning rate =  0.00044012666865176535
Train Epoch: 26 [0/60000 (0%)]	Loss: 0.000107
Train Epoch: 26 [12800/60000 (21%)]	Loss: 0.000144
Train Epoch: 26 [25600/60000 (43%)]	Loss: 0.002753
Train Epoch: 26 [38400/60000 (64%)]	Loss: 0.000335
Train Epoch: 26 [51200/60000 (85%)]	Loss: 0.001168

Test set: Average loss: 0.0262, Accuracy: 9939/10000 (99%)

Learning rate =  0.00044012666865176535
Train Epoch: 27 [0/60000 (0%)]	Loss: 0.015810
Train Epoch: 27 [12800/60000 (21%)]	Loss: 0.000907
Train Epoch: 27 [25600/60000 (43%)]	Loss: 0.000207
Train Epoch: 27 [38400/60000 (64%)]	Loss: 0.016557
Train Epoch: 27 [51200/60000 (85%)]	Loss: 0.000317

Test set: Average loss: 0.0207, Accuracy: 9941/10000 (99%)

Learning rate =  0.0004181203352191771
Train Epoch: 28 [0/60000 (0%)]	Loss: 0.005923
Train Epoch: 28 [12800/60000 (21%)]	Loss: 0.051334
Train Epoch: 28 [25600/60000 (43%)]	Loss: 0.085315
Train Epoch: 28 [38400/60000 (64%)]	Loss: 0.000335
Train Epoch: 28 [51200/60000 (85%)]	Loss: 0.000258

Test set: Average loss: 0.0220, Accuracy: 9943/10000 (99%)

Learning rate =  0.0004181203352191771
Train Epoch: 29 [0/60000 (0%)]	Loss: 0.032029
Train Epoch: 29 [12800/60000 (21%)]	Loss: 0.002672
Train Epoch: 29 [25600/60000 (43%)]	Loss: 0.001691
Train Epoch: 29 [38400/60000 (64%)]	Loss: 0.010483
Train Epoch: 29 [51200/60000 (85%)]	Loss: 0.046918

Test set: Average loss: 0.0238, Accuracy: 9944/10000 (99%)

Learning rate =  0.0003972143184582182
Train Epoch: 30 [0/60000 (0%)]	Loss: 0.000488
Train Epoch: 30 [12800/60000 (21%)]	Loss: 0.001079
Train Epoch: 30 [25600/60000 (43%)]	Loss: 0.010696
Train Epoch: 30 [38400/60000 (64%)]	Loss: 0.002220
Train Epoch: 30 [51200/60000 (85%)]	Loss: 0.002937

Test set: Average loss: 0.0225, Accuracy: 9944/10000 (99%)

Learning rate =  0.00037735360253530727
Train Epoch: 31 [0/60000 (0%)]	Loss: 0.000515
Train Epoch: 31 [12800/60000 (21%)]	Loss: 0.000357
Train Epoch: 31 [25600/60000 (43%)]	Loss: 0.035446
Train Epoch: 31 [38400/60000 (64%)]	Loss: 0.003316
Train Epoch: 31 [51200/60000 (85%)]	Loss: 0.108663

Test set: Average loss: 0.0217, Accuracy: 9943/10000 (99%)

Learning rate =  0.00037735360253530727
Train Epoch: 32 [0/60000 (0%)]	Loss: 0.000074
Train Epoch: 32 [12800/60000 (21%)]	Loss: 0.003480
Train Epoch: 32 [25600/60000 (43%)]	Loss: 0.000117
Train Epoch: 32 [38400/60000 (64%)]	Loss: 0.007595
Train Epoch: 32 [51200/60000 (85%)]	Loss: 0.001376

Test set: Average loss: 0.0219, Accuracy: 9952/10000 (100%)

Learning rate =  0.0003584859224085419
Train Epoch: 33 [0/60000 (0%)]	Loss: 0.000496
Train Epoch: 33 [12800/60000 (21%)]	Loss: 0.018251
Train Epoch: 33 [25600/60000 (43%)]	Loss: 0.003300
Train Epoch: 33 [38400/60000 (64%)]	Loss: 0.027350
Train Epoch: 33 [51200/60000 (85%)]	Loss: 0.044863

Test set: Average loss: 0.0219, Accuracy: 9948/10000 (99%)

Learning rate =  0.0003405616262881148
Train Epoch: 34 [0/60000 (0%)]	Loss: 0.003798
Train Epoch: 34 [12800/60000 (21%)]	Loss: 0.000045
Train Epoch: 34 [25600/60000 (43%)]	Loss: 0.003400
Train Epoch: 34 [38400/60000 (64%)]	Loss: 0.001285
Train Epoch: 34 [51200/60000 (85%)]	Loss: 0.000020

Test set: Average loss: 0.0257, Accuracy: 9941/10000 (99%)

Learning rate =  0.0003405616262881148
Train Epoch: 35 [0/60000 (0%)]	Loss: 0.000751
Train Epoch: 35 [12800/60000 (21%)]	Loss: 0.000168
Train Epoch: 35 [25600/60000 (43%)]	Loss: 0.000070
Train Epoch: 35 [38400/60000 (64%)]	Loss: 0.002789
Train Epoch: 35 [51200/60000 (85%)]	Loss: 0.003033

Test set: Average loss: 0.0212, Accuracy: 9940/10000 (99%)

Learning rate =  0.000323533544973709
Train Epoch: 36 [0/60000 (0%)]	Loss: 0.003999
Train Epoch: 36 [12800/60000 (21%)]	Loss: 0.001404
Train Epoch: 36 [25600/60000 (43%)]	Loss: 0.003314
Train Epoch: 36 [38400/60000 (64%)]	Loss: 0.000092
Train Epoch: 36 [51200/60000 (85%)]	Loss: 0.001052

Test set: Average loss: 0.0218, Accuracy: 9951/10000 (100%)

Learning rate =  0.000323533544973709
Train Epoch: 37 [0/60000 (0%)]	Loss: 0.000241
Train Epoch: 37 [12800/60000 (21%)]	Loss: 0.000025
Train Epoch: 37 [25600/60000 (43%)]	Loss: 0.000090
Train Epoch: 37 [38400/60000 (64%)]	Loss: 0.000005
Train Epoch: 37 [51200/60000 (85%)]	Loss: 0.003032

Test set: Average loss: 0.0226, Accuracy: 9943/10000 (99%)

Learning rate =  0.00030735686772502356
Train Epoch: 38 [0/60000 (0%)]	Loss: 0.000760
Train Epoch: 38 [12800/60000 (21%)]	Loss: 0.000893
Train Epoch: 38 [25600/60000 (43%)]	Loss: 0.001294
Train Epoch: 38 [38400/60000 (64%)]	Loss: 0.036439
Train Epoch: 38 [51200/60000 (85%)]	Loss: 0.004461

Test set: Average loss: 0.0210, Accuracy: 9946/10000 (99%)

Learning rate =  0.0002919890243387724
Train Epoch: 39 [0/60000 (0%)]	Loss: 0.002320
Train Epoch: 39 [12800/60000 (21%)]	Loss: 0.000082
Train Epoch: 39 [25600/60000 (43%)]	Loss: 0.000351
Train Epoch: 39 [38400/60000 (64%)]	Loss: 0.000012
Train Epoch: 39 [51200/60000 (85%)]	Loss: 0.000596

Test set: Average loss: 0.0197, Accuracy: 9946/10000 (99%)

Learning rate =  0.0002919890243387724
Train Epoch: 40 [0/60000 (0%)]	Loss: 0.000678
Train Epoch: 40 [12800/60000 (21%)]	Loss: 0.005464
Train Epoch: 40 [25600/60000 (43%)]	Loss: 0.000150
Train Epoch: 40 [38400/60000 (64%)]	Loss: 0.000004
Train Epoch: 40 [51200/60000 (85%)]	Loss: 0.000051

Test set: Average loss: 0.0252, Accuracy: 9946/10000 (99%)

Learning rate =  0.00027738957312183375
Train Epoch: 41 [0/60000 (0%)]	Loss: 0.007213
Train Epoch: 41 [12800/60000 (21%)]	Loss: 0.005018
Train Epoch: 41 [25600/60000 (43%)]	Loss: 0.000173
Train Epoch: 41 [38400/60000 (64%)]	Loss: 0.000299
Train Epoch: 41 [51200/60000 (85%)]	Loss: 0.000402

Test set: Average loss: 0.0211, Accuracy: 9954/10000 (100%)

Learning rate =  0.00027738957312183375
Train Epoch: 42 [0/60000 (0%)]	Loss: 0.000005
Train Epoch: 42 [12800/60000 (21%)]	Loss: 0.000131
Train Epoch: 42 [25600/60000 (43%)]	Loss: 0.002701
Train Epoch: 42 [38400/60000 (64%)]	Loss: 0.000092
Train Epoch: 42 [51200/60000 (85%)]	Loss: 0.000016

Test set: Average loss: 0.0221, Accuracy: 9945/10000 (99%)

Learning rate =  0.00026352009446574203
Train Epoch: 43 [0/60000 (0%)]	Loss: 0.000013
Train Epoch: 43 [12800/60000 (21%)]	Loss: 0.000025
Train Epoch: 43 [25600/60000 (43%)]	Loss: 0.001637
Train Epoch: 43 [38400/60000 (64%)]	Loss: 0.000017
Train Epoch: 43 [51200/60000 (85%)]	Loss: 0.000034

Test set: Average loss: 0.0221, Accuracy: 9948/10000 (99%)

Learning rate =  0.00025034408974245495
Train Epoch: 44 [0/60000 (0%)]	Loss: 0.000193
Train Epoch: 44 [12800/60000 (21%)]	Loss: 0.026843
Train Epoch: 44 [25600/60000 (43%)]	Loss: 0.000526
Train Epoch: 44 [38400/60000 (64%)]	Loss: 0.000276
Train Epoch: 44 [51200/60000 (85%)]	Loss: 0.000016

Test set: Average loss: 0.0220, Accuracy: 9952/10000 (100%)

Learning rate =  0.00025034408974245495
Train Epoch: 45 [0/60000 (0%)]	Loss: 0.003983
Train Epoch: 45 [12800/60000 (21%)]	Loss: 0.000031
Train Epoch: 45 [25600/60000 (43%)]	Loss: 0.000045
Train Epoch: 45 [38400/60000 (64%)]	Loss: 0.000378
Train Epoch: 45 [51200/60000 (85%)]	Loss: 0.000044

Test set: Average loss: 0.0258, Accuracy: 9944/10000 (99%)

Learning rate =  0.00023782688525533216
Train Epoch: 46 [0/60000 (0%)]	Loss: 0.000151
Train Epoch: 46 [12800/60000 (21%)]	Loss: 0.000075
Train Epoch: 46 [25600/60000 (43%)]	Loss: 0.000118
Train Epoch: 46 [38400/60000 (64%)]	Loss: 0.000474
Train Epoch: 46 [51200/60000 (85%)]	Loss: 0.000635

Test set: Average loss: 0.0227, Accuracy: 9954/10000 (100%)

Learning rate =  0.00022593554099256555
Train Epoch: 47 [0/60000 (0%)]	Loss: 0.000002
Train Epoch: 47 [12800/60000 (21%)]	Loss: 0.000127
Train Epoch: 47 [25600/60000 (43%)]	Loss: 0.010186
Train Epoch: 47 [38400/60000 (64%)]	Loss: 0.000123
Train Epoch: 47 [51200/60000 (85%)]	Loss: 0.000361

Test set: Average loss: 0.0250, Accuracy: 9937/10000 (99%)

Learning rate =  0.00022593554099256555
Train Epoch: 48 [0/60000 (0%)]	Loss: 0.005299
Train Epoch: 48 [12800/60000 (21%)]	Loss: 0.000046
Train Epoch: 48 [25600/60000 (43%)]	Loss: 0.000125
Train Epoch: 48 [38400/60000 (64%)]	Loss: 0.000376
Train Epoch: 48 [51200/60000 (85%)]	Loss: 0.000002

Test set: Average loss: 0.0241, Accuracy: 9950/10000 (100%)

Learning rate =  0.00021463876394293727
Train Epoch: 49 [0/60000 (0%)]	Loss: 0.000006
Train Epoch: 49 [12800/60000 (21%)]	Loss: 0.000161
Train Epoch: 49 [25600/60000 (43%)]	Loss: 0.000148
Train Epoch: 49 [38400/60000 (64%)]	Loss: 0.000098
Train Epoch: 49 [51200/60000 (85%)]	Loss: 0.000104

Test set: Average loss: 0.0255, Accuracy: 9941/10000 (99%)

Learning rate =  0.00021463876394293727
Train Epoch: 50 [0/60000 (0%)]	Loss: 0.005233
Train Epoch: 50 [12800/60000 (21%)]	Loss: 0.000431
Train Epoch: 50 [25600/60000 (43%)]	Loss: 0.000016
Train Epoch: 50 [38400/60000 (64%)]	Loss: 0.002419
Train Epoch: 50 [51200/60000 (85%)]	Loss: 0.000276

Test set: Average loss: 0.0241, Accuracy: 9953/10000 (100%)

Learning rate =  0.00020390682574579038
Train Epoch: 51 [0/60000 (0%)]	Loss: 0.004802
Train Epoch: 51 [12800/60000 (21%)]	Loss: 0.000025
Train Epoch: 51 [25600/60000 (43%)]	Loss: 0.000009
Train Epoch: 51 [38400/60000 (64%)]	Loss: 0.000015
Train Epoch: 51 [51200/60000 (85%)]	Loss: 0.000031

Test set: Average loss: 0.0208, Accuracy: 9955/10000 (100%)

Learning rate =  0.00019371148445850088
Train Epoch: 52 [0/60000 (0%)]	Loss: 0.000142
Train Epoch: 52 [12800/60000 (21%)]	Loss: 0.000015
Train Epoch: 52 [25600/60000 (43%)]	Loss: 0.000309
Train Epoch: 52 [38400/60000 (64%)]	Loss: 0.001971
Train Epoch: 52 [51200/60000 (85%)]	Loss: 0.003264

Test set: Average loss: 0.0237, Accuracy: 9953/10000 (100%)

Learning rate =  0.00019371148445850088
Train Epoch: 53 [0/60000 (0%)]	Loss: 0.002356
Train Epoch: 53 [12800/60000 (21%)]	Loss: 0.000006
Train Epoch: 53 [25600/60000 (43%)]	Loss: 0.010515
Train Epoch: 53 [38400/60000 (64%)]	Loss: 0.000532
Train Epoch: 53 [51200/60000 (85%)]	Loss: 0.000014

Test set: Average loss: 0.0215, Accuracy: 9955/10000 (100%)

Learning rate =  0.00018402591023557584
Train Epoch: 54 [0/60000 (0%)]	Loss: 0.000134
Train Epoch: 54 [12800/60000 (21%)]	Loss: 0.002094
Train Epoch: 54 [25600/60000 (43%)]	Loss: 0.007953
Train Epoch: 54 [38400/60000 (64%)]	Loss: 0.000002
Train Epoch: 54 [51200/60000 (85%)]	Loss: 0.000001

Test set: Average loss: 0.0232, Accuracy: 9955/10000 (100%)

Learning rate =  0.000174824614723797
Train Epoch: 55 [0/60000 (0%)]	Loss: 0.000000
Train Epoch: 55 [12800/60000 (21%)]	Loss: 0.000040
Train Epoch: 55 [25600/60000 (43%)]	Loss: 0.029384
Train Epoch: 55 [38400/60000 (64%)]	Loss: 0.000005
Train Epoch: 55 [51200/60000 (85%)]	Loss: 0.000013

Test set: Average loss: 0.0219, Accuracy: 9956/10000 (100%)

Learning rate =  0.000174824614723797
Train Epoch: 56 [0/60000 (0%)]	Loss: 0.000001
Train Epoch: 56 [12800/60000 (21%)]	Loss: 0.000250
Train Epoch: 56 [25600/60000 (43%)]	Loss: 0.008892
Train Epoch: 56 [38400/60000 (64%)]	Loss: 0.000283
Train Epoch: 56 [51200/60000 (85%)]	Loss: 0.000015

Test set: Average loss: 0.0215, Accuracy: 9953/10000 (100%)

Learning rate =  0.00016608338398760718
Train Epoch: 57 [0/60000 (0%)]	Loss: 0.000048
Train Epoch: 57 [12800/60000 (21%)]	Loss: 0.000044
Train Epoch: 57 [25600/60000 (43%)]	Loss: 0.000027
Train Epoch: 57 [38400/60000 (64%)]	Loss: 0.000250
Train Epoch: 57 [51200/60000 (85%)]	Loss: 0.000009

Test set: Average loss: 0.0218, Accuracy: 9949/10000 (99%)

Learning rate =  0.00016608338398760718
Train Epoch: 58 [0/60000 (0%)]	Loss: 0.000125
Train Epoch: 58 [12800/60000 (21%)]	Loss: 0.012908
Train Epoch: 58 [25600/60000 (43%)]	Loss: 0.000675
Train Epoch: 58 [38400/60000 (64%)]	Loss: 0.000088
Train Epoch: 58 [51200/60000 (85%)]	Loss: 0.000001

Test set: Average loss: 0.0209, Accuracy: 9948/10000 (99%)

Learning rate =  0.0001577792147882268
Train Epoch: 59 [0/60000 (0%)]	Loss: 0.000001
Train Epoch: 59 [12800/60000 (21%)]	Loss: 0.000054
Train Epoch: 59 [25600/60000 (43%)]	Loss: 0.006602
Train Epoch: 59 [38400/60000 (64%)]	Loss: 0.000306
Train Epoch: 59 [51200/60000 (85%)]	Loss: 0.000611

Test set: Average loss: 0.0239, Accuracy: 9949/10000 (99%)

Learning rate =  0.00014989025404881545
Train Epoch: 60 [0/60000 (0%)]	Loss: 0.000039
Train Epoch: 60 [12800/60000 (21%)]	Loss: 0.000142
Train Epoch: 60 [25600/60000 (43%)]	Loss: 0.000024
Train Epoch: 60 [38400/60000 (64%)]	Loss: 0.000015
Train Epoch: 60 [51200/60000 (85%)]	Loss: 0.000034

Test set: Average loss: 0.0248, Accuracy: 9954/10000 (100%)

Learning rate =  0.00014989025404881545
Train Epoch: 61 [0/60000 (0%)]	Loss: 0.000022
Train Epoch: 61 [12800/60000 (21%)]	Loss: 0.000123
Train Epoch: 61 [25600/60000 (43%)]	Loss: 0.000003
Train Epoch: 61 [38400/60000 (64%)]	Loss: 0.000183
Train Epoch: 61 [51200/60000 (85%)]	Loss: 0.000001

Test set: Average loss: 0.0292, Accuracy: 9939/10000 (99%)

Learning rate =  0.00014239574134637466
Train Epoch: 62 [0/60000 (0%)]	Loss: 0.005839
Train Epoch: 62 [12800/60000 (21%)]	Loss: 0.000006
Train Epoch: 62 [25600/60000 (43%)]	Loss: 0.000223
Train Epoch: 62 [38400/60000 (64%)]	Loss: 0.000771
Train Epoch: 62 [51200/60000 (85%)]	Loss: 0.000004

Test set: Average loss: 0.0224, Accuracy: 9954/10000 (100%)

Learning rate =  0.00014239574134637466
Train Epoch: 63 [0/60000 (0%)]	Loss: 0.069960
Train Epoch: 63 [12800/60000 (21%)]	Loss: 0.000060
Train Epoch: 63 [25600/60000 (43%)]	Loss: 0.000018
Train Epoch: 63 [38400/60000 (64%)]	Loss: 0.000010
Train Epoch: 63 [51200/60000 (85%)]	Loss: 0.000565

Test set: Average loss: 0.0244, Accuracy: 9954/10000 (100%)

Learning rate =  0.00013527595427905592
Train Epoch: 64 [0/60000 (0%)]	Loss: 0.000003
Train Epoch: 64 [12800/60000 (21%)]	Loss: 0.000041
Train Epoch: 64 [25600/60000 (43%)]	Loss: 0.000000
Train Epoch: 64 [38400/60000 (64%)]	Loss: 0.000425
Train Epoch: 64 [51200/60000 (85%)]	Loss: 0.000019

Test set: Average loss: 0.0262, Accuracy: 9949/10000 (99%)

Learning rate =  0.00012851215656510312
Train Epoch: 65 [0/60000 (0%)]	Loss: 0.004285
Train Epoch: 65 [12800/60000 (21%)]	Loss: 0.000054
Train Epoch: 65 [25600/60000 (43%)]	Loss: 0.000001
Train Epoch: 65 [38400/60000 (64%)]	Loss: 0.000003
Train Epoch: 65 [51200/60000 (85%)]	Loss: 0.000019

Test set: Average loss: 0.0241, Accuracy: 9957/10000 (100%)

Learning rate =  0.00012851215656510312
Train Epoch: 66 [0/60000 (0%)]	Loss: 0.000165
Train Epoch: 66 [12800/60000 (21%)]	Loss: 0.000002
Train Epoch: 66 [25600/60000 (43%)]	Loss: 0.000004
Train Epoch: 66 [38400/60000 (64%)]	Loss: 0.001548
Train Epoch: 66 [51200/60000 (85%)]	Loss: 0.021460

Test set: Average loss: 0.0264, Accuracy: 9946/10000 (99%)

Learning rate =  0.00012208654873684796
Train Epoch: 67 [0/60000 (0%)]	Loss: 0.000001
Train Epoch: 67 [12800/60000 (21%)]	Loss: 0.000122
Train Epoch: 67 [25600/60000 (43%)]	Loss: 0.006705
Train Epoch: 67 [38400/60000 (64%)]	Loss: 0.000005
Train Epoch: 67 [51200/60000 (85%)]	Loss: 0.000012

Test set: Average loss: 0.0241, Accuracy: 9952/10000 (100%)

Learning rate =  0.00011598222130000556
Train Epoch: 68 [0/60000 (0%)]	Loss: 0.000110
Train Epoch: 68 [12800/60000 (21%)]	Loss: 0.000000
Train Epoch: 68 [25600/60000 (43%)]	Loss: 0.000066
Train Epoch: 68 [38400/60000 (64%)]	Loss: 0.000648
Train Epoch: 68 [51200/60000 (85%)]	Loss: 0.067552

Test set: Average loss: 0.0249, Accuracy: 9949/10000 (99%)

Learning rate =  0.00011598222130000556
Train Epoch: 69 [0/60000 (0%)]	Loss: 0.000136
Train Epoch: 69 [12800/60000 (21%)]	Loss: 0.006516
Train Epoch: 69 [25600/60000 (43%)]	Loss: 0.000718
Train Epoch: 69 [38400/60000 (64%)]	Loss: 0.000008
Train Epoch: 69 [51200/60000 (85%)]	Loss: 0.000557

Test set: Average loss: 0.0283, Accuracy: 9945/10000 (99%)

Learning rate =  0.00011018311023500529
Train Epoch: 70 [0/60000 (0%)]	Loss: 0.000100
Train Epoch: 70 [12800/60000 (21%)]	Loss: 0.000757
Train Epoch: 70 [25600/60000 (43%)]	Loss: 0.114676
Train Epoch: 70 [38400/60000 (64%)]	Loss: 0.000010
Train Epoch: 70 [51200/60000 (85%)]	Loss: 0.000105

Test set: Average loss: 0.0237, Accuracy: 9952/10000 (100%)

Learning rate =  0.00011018311023500529
Train Epoch: 71 [0/60000 (0%)]	Loss: 0.000047
Train Epoch: 71 [12800/60000 (21%)]	Loss: 0.031124
Train Epoch: 71 [25600/60000 (43%)]	Loss: 0.000006
Train Epoch: 71 [38400/60000 (64%)]	Loss: 0.000000
Train Epoch: 71 [51200/60000 (85%)]	Loss: 0.000036

Test set: Average loss: 0.0286, Accuracy: 9949/10000 (99%)

Learning rate =  0.00010467395472325501
Train Epoch: 72 [0/60000 (0%)]	Loss: 0.008876
Train Epoch: 72 [12800/60000 (21%)]	Loss: 0.000007
Train Epoch: 72 [25600/60000 (43%)]	Loss: 0.000003
Train Epoch: 72 [38400/60000 (64%)]	Loss: 0.000367
Train Epoch: 72 [51200/60000 (85%)]	Loss: 0.000000

Test set: Average loss: 0.0255, Accuracy: 9952/10000 (100%)

Learning rate =  9.944025698709225e-05
Train Epoch: 73 [0/60000 (0%)]	Loss: 0.000011
Train Epoch: 73 [12800/60000 (21%)]	Loss: 0.004449
Train Epoch: 73 [25600/60000 (43%)]	Loss: 0.000537
Train Epoch: 73 [38400/60000 (64%)]	Loss: 0.000333
Train Epoch: 73 [51200/60000 (85%)]	Loss: 0.041916

Test set: Average loss: 0.0258, Accuracy: 9948/10000 (99%)

Learning rate =  9.944025698709225e-05
Train Epoch: 74 [0/60000 (0%)]	Loss: 0.000118
Train Epoch: 74 [12800/60000 (21%)]	Loss: 0.000001
Train Epoch: 74 [25600/60000 (43%)]	Loss: 0.000056
Train Epoch: 74 [38400/60000 (64%)]	Loss: 0.000033
Train Epoch: 74 [51200/60000 (85%)]	Loss: 0.002626

Test set: Average loss: 0.0270, Accuracy: 9953/10000 (100%)

Learning rate =  9.446824413773763e-05
Train Epoch: 75 [0/60000 (0%)]	Loss: 0.000027
Train Epoch: 75 [12800/60000 (21%)]	Loss: 0.000029
Train Epoch: 75 [25600/60000 (43%)]	Loss: 0.000082
Train Epoch: 75 [38400/60000 (64%)]	Loss: 0.000001
Train Epoch: 75 [51200/60000 (85%)]	Loss: 0.000290

Test set: Average loss: 0.0268, Accuracy: 9949/10000 (99%)

Learning rate =  8.974483193085076e-05
Train Epoch: 76 [0/60000 (0%)]	Loss: 0.000083
Train Epoch: 76 [12800/60000 (21%)]	Loss: 0.000104
Train Epoch: 76 [25600/60000 (43%)]	Loss: 0.000020
Train Epoch: 76 [38400/60000 (64%)]	Loss: 0.000007
Train Epoch: 76 [51200/60000 (85%)]	Loss: 0.000258

Test set: Average loss: 0.0261, Accuracy: 9952/10000 (100%)

Learning rate =  8.974483193085076e-05
Train Epoch: 77 [0/60000 (0%)]	Loss: 0.000249
Train Epoch: 77 [12800/60000 (21%)]	Loss: 0.000044
Train Epoch: 77 [25600/60000 (43%)]	Loss: 0.000027
Train Epoch: 77 [38400/60000 (64%)]	Loss: 0.003077
Train Epoch: 77 [51200/60000 (85%)]	Loss: 0.000297

Test set: Average loss: 0.0248, Accuracy: 9955/10000 (100%)

Learning rate =  8.52575903343082e-05
Train Epoch: 78 [0/60000 (0%)]	Loss: 0.006712
Train Epoch: 78 [12800/60000 (21%)]	Loss: 0.000125
Train Epoch: 78 [25600/60000 (43%)]	Loss: 0.000231
Train Epoch: 78 [38400/60000 (64%)]	Loss: 0.001154
Train Epoch: 78 [51200/60000 (85%)]	Loss: 0.000002

Test set: Average loss: 0.0240, Accuracy: 9952/10000 (100%)

Learning rate =  8.52575903343082e-05
Train Epoch: 79 [0/60000 (0%)]	Loss: 0.000010
Train Epoch: 79 [12800/60000 (21%)]	Loss: 0.000335
Train Epoch: 79 [25600/60000 (43%)]	Loss: 0.000037
Train Epoch: 79 [38400/60000 (64%)]	Loss: 0.000456
Train Epoch: 79 [51200/60000 (85%)]	Loss: 0.000005

Test set: Average loss: 0.0244, Accuracy: 9956/10000 (100%)

Learning rate =  8.099471081759279e-05
Train Epoch: 80 [0/60000 (0%)]	Loss: 0.000042
Train Epoch: 80 [12800/60000 (21%)]	Loss: 0.000053
Train Epoch: 80 [25600/60000 (43%)]	Loss: 0.000047
Train Epoch: 80 [38400/60000 (64%)]	Loss: 0.000084
Train Epoch: 80 [51200/60000 (85%)]	Loss: 0.000022

Test set: Average loss: 0.0271, Accuracy: 9945/10000 (99%)

Learning rate =  7.694497527671315e-05
Train Epoch: 81 [0/60000 (0%)]	Loss: 0.000236
Train Epoch: 81 [12800/60000 (21%)]	Loss: 0.000002
Train Epoch: 81 [25600/60000 (43%)]	Loss: 0.000001
Train Epoch: 81 [38400/60000 (64%)]	Loss: 0.010537
Train Epoch: 81 [51200/60000 (85%)]	Loss: 0.000000

Test set: Average loss: 0.0219, Accuracy: 9954/10000 (100%)

Learning rate =  7.694497527671315e-05
Train Epoch: 82 [0/60000 (0%)]	Loss: 0.000002
Train Epoch: 82 [12800/60000 (21%)]	Loss: 0.021764
Train Epoch: 82 [25600/60000 (43%)]	Loss: 0.000001
Train Epoch: 82 [38400/60000 (64%)]	Loss: 0.000242
Train Epoch: 82 [51200/60000 (85%)]	Loss: 0.000102

Test set: Average loss: 0.0225, Accuracy: 9949/10000 (99%)

Learning rate =  7.30977265128775e-05
Train Epoch: 83 [0/60000 (0%)]	Loss: 0.000011
Train Epoch: 83 [12800/60000 (21%)]	Loss: 0.002845
Train Epoch: 83 [25600/60000 (43%)]	Loss: 0.000022
Train Epoch: 83 [38400/60000 (64%)]	Loss: 0.000047
Train Epoch: 83 [51200/60000 (85%)]	Loss: 0.000050

Test set: Average loss: 0.0215, Accuracy: 9953/10000 (100%)

Learning rate =  7.30977265128775e-05
Train Epoch: 84 [0/60000 (0%)]	Loss: 0.000219
Train Epoch: 84 [12800/60000 (21%)]	Loss: 0.000021
Train Epoch: 84 [25600/60000 (43%)]	Loss: 0.000008
Train Epoch: 84 [38400/60000 (64%)]	Loss: 0.000002
Train Epoch: 84 [51200/60000 (85%)]	Loss: 0.000028

Test set: Average loss: 0.0241, Accuracy: 9955/10000 (100%)

Learning rate =  6.94428401872336e-05
Train Epoch: 85 [0/60000 (0%)]	Loss: 0.000029
Train Epoch: 85 [12800/60000 (21%)]	Loss: 0.000294
Train Epoch: 85 [25600/60000 (43%)]	Loss: 0.000004
Train Epoch: 85 [38400/60000 (64%)]	Loss: 0.000005
Train Epoch: 85 [51200/60000 (85%)]	Loss: 0.000589

Test set: Average loss: 0.0257, Accuracy: 9948/10000 (99%)

Learning rate =  6.597069817787194e-05
Train Epoch: 86 [0/60000 (0%)]	Loss: 0.000031
Train Epoch: 86 [12800/60000 (21%)]	Loss: 0.000133
Train Epoch: 86 [25600/60000 (43%)]	Loss: 0.000006
Train Epoch: 86 [38400/60000 (64%)]	Loss: 0.000016
Train Epoch: 86 [51200/60000 (85%)]	Loss: 0.000381

Test set: Average loss: 0.0224, Accuracy: 9958/10000 (100%)

Learning rate =  6.597069817787194e-05
Train Epoch: 87 [0/60000 (0%)]	Loss: 0.000001
Train Epoch: 87 [12800/60000 (21%)]	Loss: 0.000274
Train Epoch: 87 [25600/60000 (43%)]	Loss: 0.000029
Train Epoch: 87 [38400/60000 (64%)]	Loss: 0.000026
Train Epoch: 87 [51200/60000 (85%)]	Loss: 0.000078

Test set: Average loss: 0.0272, Accuracy: 9950/10000 (100%)

Learning rate =  6.267216326897833e-05
Train Epoch: 88 [0/60000 (0%)]	Loss: 0.000000
Train Epoch: 88 [12800/60000 (21%)]	Loss: 0.000000
Train Epoch: 88 [25600/60000 (43%)]	Loss: 0.000000
Train Epoch: 88 [38400/60000 (64%)]	Loss: 0.000370
Train Epoch: 88 [51200/60000 (85%)]	Loss: 0.000021

Test set: Average loss: 0.0257, Accuracy: 9952/10000 (100%)

Learning rate =  5.953855510552941e-05
Train Epoch: 89 [0/60000 (0%)]	Loss: 0.000001
Train Epoch: 89 [12800/60000 (21%)]	Loss: 0.000001
Train Epoch: 89 [25600/60000 (43%)]	Loss: 0.000817
Train Epoch: 89 [38400/60000 (64%)]	Loss: 0.000001
Train Epoch: 89 [51200/60000 (85%)]	Loss: 0.009510

Test set: Average loss: 0.0251, Accuracy: 9953/10000 (100%)

Learning rate =  5.953855510552941e-05
Train Epoch: 90 [0/60000 (0%)]	Loss: 0.000008
Train Epoch: 90 [12800/60000 (21%)]	Loss: 0.008030
Train Epoch: 90 [25600/60000 (43%)]	Loss: 0.000086
Train Epoch: 90 [38400/60000 (64%)]	Loss: 0.000602
Train Epoch: 90 [51200/60000 (85%)]	Loss: 0.000012

Test set: Average loss: 0.0242, Accuracy: 9957/10000 (100%)

Learning rate =  5.656162735025293e-05
Train Epoch: 91 [0/60000 (0%)]	Loss: 0.000002
Train Epoch: 91 [12800/60000 (21%)]	Loss: 0.000448
Train Epoch: 91 [25600/60000 (43%)]	Loss: 0.000006
Train Epoch: 91 [38400/60000 (64%)]	Loss: 0.000000
Train Epoch: 91 [51200/60000 (85%)]	Loss: 0.000017

Test set: Average loss: 0.0259, Accuracy: 9949/10000 (99%)

Learning rate =  5.656162735025293e-05
Train Epoch: 92 [0/60000 (0%)]	Loss: 0.000387
Train Epoch: 92 [12800/60000 (21%)]	Loss: 0.000036
Train Epoch: 92 [25600/60000 (43%)]	Loss: 0.000009
Train Epoch: 92 [38400/60000 (64%)]	Loss: 0.000001
Train Epoch: 92 [51200/60000 (85%)]	Loss: 0.000010

Test set: Average loss: 0.0214, Accuracy: 9959/10000 (100%)

Learning rate =  5.373354598274029e-05
Train Epoch: 93 [0/60000 (0%)]	Loss: 0.000002
Train Epoch: 93 [12800/60000 (21%)]	Loss: 0.000002
Train Epoch: 93 [25600/60000 (43%)]	Loss: 0.000102
Train Epoch: 93 [38400/60000 (64%)]	Loss: 0.000000
Train Epoch: 93 [51200/60000 (85%)]	Loss: 0.000112

Test set: Average loss: 0.0283, Accuracy: 9947/10000 (99%)

Learning rate =  5.1046868683603266e-05
Train Epoch: 94 [0/60000 (0%)]	Loss: 0.000001
Train Epoch: 94 [12800/60000 (21%)]	Loss: 0.010220
Train Epoch: 94 [25600/60000 (43%)]	Loss: 0.000007
Train Epoch: 94 [38400/60000 (64%)]	Loss: 0.000022
Train Epoch: 94 [51200/60000 (85%)]	Loss: 0.000005

Test set: Average loss: 0.0249, Accuracy: 9960/10000 (100%)

Learning rate =  5.1046868683603266e-05
Train Epoch: 95 [0/60000 (0%)]	Loss: 0.000000
Train Epoch: 95 [12800/60000 (21%)]	Loss: 0.000012
Train Epoch: 95 [25600/60000 (43%)]	Loss: 0.000001
Train Epoch: 95 [38400/60000 (64%)]	Loss: 0.000003
Train Epoch: 95 [51200/60000 (85%)]	Loss: 0.000041

Test set: Average loss: 0.0258, Accuracy: 9950/10000 (100%)

Learning rate =  4.8494525249423105e-05
Train Epoch: 96 [0/60000 (0%)]	Loss: 0.003687
Train Epoch: 96 [12800/60000 (21%)]	Loss: 0.000007
Train Epoch: 96 [25600/60000 (43%)]	Loss: 0.000001
Train Epoch: 96 [38400/60000 (64%)]	Loss: 0.000000
Train Epoch: 96 [51200/60000 (85%)]	Loss: 0.000339

Test set: Average loss: 0.0229, Accuracy: 9950/10000 (100%)

Learning rate =  4.6069798986951947e-05
Train Epoch: 97 [0/60000 (0%)]	Loss: 0.000005
Train Epoch: 97 [12800/60000 (21%)]	Loss: 0.000071
Train Epoch: 97 [25600/60000 (43%)]	Loss: 0.001949
Train Epoch: 97 [38400/60000 (64%)]	Loss: 0.000001
Train Epoch: 97 [51200/60000 (85%)]	Loss: 0.000006

Test set: Average loss: 0.0282, Accuracy: 9947/10000 (99%)

Learning rate =  4.6069798986951947e-05
Train Epoch: 98 [0/60000 (0%)]	Loss: 0.000000
Train Epoch: 98 [12800/60000 (21%)]	Loss: 0.000002
Train Epoch: 98 [25600/60000 (43%)]	Loss: 0.000081
Train Epoch: 98 [38400/60000 (64%)]	Loss: 0.000000
Train Epoch: 98 [51200/60000 (85%)]	Loss: 0.000000

Test set: Average loss: 0.0231, Accuracy: 9953/10000 (100%)

Learning rate =  4.3766309037604346e-05
Train Epoch: 99 [0/60000 (0%)]	Loss: 0.000113
Train Epoch: 99 [12800/60000 (21%)]	Loss: 0.000007
Train Epoch: 99 [25600/60000 (43%)]	Loss: 0.000006
Train Epoch: 99 [38400/60000 (64%)]	Loss: 0.000010
Train Epoch: 99 [51200/60000 (85%)]	Loss: 0.000027

Test set: Average loss: 0.0211, Accuracy: 9957/10000 (100%)

Total time taken: 7074 seconds
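The manual adjust_learning_rate above multiplies the base rate of 0.001 by 0.95 whenever `epoch // 1.616` increases (roughly every other epoch). A minimal, unexecuted sketch of an approximately equivalent schedule using PyTorch's built-in `torch.optim.lr_scheduler.StepLR`; `alt_model`, `alt_optimizer` and `scheduler` are names introduced only for this sketch, and the step_size/gamma are chosen to approximate, not reproduce, the run above.
In [ ]:
# Hypothetical alternative: let a built-in scheduler handle the ~0.95 decay every 2 epochs.
from torch.optim.lr_scheduler import StepLR

alt_model = Net().to(device)
alt_optimizer = torch.optim.Adam(alt_model.parameters(), lr=0.001)
scheduler = StepLR(alt_optimizer, step_size=2, gamma=0.95)

for epoch in range(1, 100):
    train(alt_model, device, train_dataloader, alt_optimizer, epoch)
    test(alt_model, device, test_dataloader)
    scheduler.step()  # step once per epoch, after the optimizer updates
    print('Learning rate =', scheduler.get_last_lr()[0])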
In [90]:
import matplotlib.pyplot as plt
plt.plot(learning_rate, '-')
plt.xlabel('epoch')
plt.ylabel('learning rate')
plt.title('Learning rate schedule');
In [91]:
import matplotlib.pyplot as plt
plt.plot(losses_1, '-')
plt.xlabel('logging step (every 100 batches)')
plt.ylabel('training loss')
plt.title('Training loss');
In [92]:
import matplotlib.pyplot as plt
plt.plot(losses_2, '-')
plt.xlabel('logging step (every 100 batches)')
plt.ylabel('progress within epoch (%)')
plt.title('Epoch progress at each logged step');
In [93]:
import matplotlib.pyplot as plt
plt.plot(avg_loss, '-')
plt.xlabel('epoch')
plt.ylabel('average test loss')
plt.title('Average test loss per epoch');
In [94]:
import matplotlib.pyplot as plt
plt.plot(accuracy, '-')
plt.xlabel('epoch')
plt.ylabel('test accuracy (%)')
plt.title('Test accuracy per epoch');
In [97]:
torch.save(model, '1_model_MNIST_3579_Adam_0.95_100e.pth')
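`torch.save(model, ...)` above pickles the whole module object. A minimal, unexecuted sketch of reloading it, plus the commonly preferred state_dict round trip; the `*_state_dict.pth` filename is hypothetical.
In [ ]:
# Hypothetical: reload the pickled model (requires the Net class to be defined/importable).
reloaded = torch.load('1_model_MNIST_3579_Adam_0.95_100e.pth', map_location=device)
reloaded.eval()

# Often-preferred alternative: save and load only the weights.
torch.save(model.state_dict(), '1_model_MNIST_3579_Adam_0.95_100e_state_dict.pth')
fresh = Net().to(device)
fresh.load_state_dict(torch.load('1_model_MNIST_3579_Adam_0.95_100e_state_dict.pth', map_location=device))
fresh.eval()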
In [ ]: