
The CIFAR-10 dataset

The CIFAR-10 and CIFAR-100 datasets are labeled subsets of the 80 Million Tiny Images dataset, collected by Alex Krizhevsky, Vinod Nair, and Geoffrey Hinton. The CIFAR-10 dataset consists of 60000 32x32 colour images in 10 classes, with 6000 images per class. There are 50000 training images and 10000 test images.

The dataset is divided into five training batches and one test batch, each with 10000 images. The test batch contains exactly 1000 randomly-selected images from each class. The training batches contain the remaining images in random order, but some training batches may contain more images from one class than another. Between them, the training batches contain exactly 5000 images from each class.
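For reference, each batch is a Python pickle file. Below is a minimal sketch of reading one training batch by hand, assuming the archive has already been extracted to a local cifar-10-batches-py/ directory (the torchvision loader used later automates all of this):

import pickle

def unpickle(file):
    # Each batch is a dict with b'data' (uint8, shape (10000, 3072)) and b'labels'.
    with open(file, 'rb') as fo:
        return pickle.load(fo, encoding='bytes')

batch = unpickle('cifar-10-batches-py/data_batch_1')
images = batch[b'data'].reshape(-1, 3, 32, 32).transpose(0, 2, 3, 1)  # NCHW -> NHWC
labels = batch[b'labels']
print(images.shape, len(labels))  # (10000, 32, 32, 3) 10000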

Here are the classes in the dataset, as well as 10 random images from each:

[Image: 10 random example images from each of the 10 classes - airplane, automobile, bird, cat, deer, dog, frog, horse, ship, truck.]

The classes are completely mutually exclusive. There is no overlap between automobiles and trucks. "Automobile" includes sedans, SUVs, things of that sort. "Truck" includes only big trucks. Neither includes pickup trucks.
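Since the image grid is not reproduced here, a small sketch that plots one random example per class from the raw (un-normalized) training set; matplotlib is assumed to be available:

import numpy as np
import matplotlib.pyplot as plt
from torchvision import datasets

raw = datasets.CIFAR10(root='~/.pytorch/CIFAR10', train=True, download=True)
targets = np.array(raw.targets)
fig, axes = plt.subplots(1, 10, figsize=(15, 2))
for cls, ax in enumerate(axes):
    idx = np.random.choice(np.where(targets == cls)[0])
    ax.imshow(raw.data[idx])            # raw.data is uint8 with shape (50000, 32, 32, 3)
    ax.set_title(raw.classes[cls], fontsize=8)
    ax.axis('off')
plt.show()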

VGG16

# Setting seeds to try to ensure reproducible results - this is not guaranteed across PyTorch releases.
import torch
torch.manual_seed(0)
torch.backends.cudnn.deterministic = True
torch.backends.cudnn.benchmark = False
import torch.nn as nn
import torch.nn.functional as F
from torchvision import datasets, transforms, models
from torch.utils.data import DataLoader
print(torch.__version__)
1.13.0+cu116
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
device
device(type='cuda')

Data preprocessing (Resize and normalize)

# ImageNet channel statistics: VGG16 was pretrained with these, so we
# normalize CIFAR-10 the same way.
mean = [0.485, 0.456, 0.406]
std = [0.229, 0.224, 0.225]

transform = transforms.Compose([
    transforms.Resize((224, 224)),            # VGG16 expects 224x224 inputs
    transforms.ToTensor(),                    # HWC uint8 [0, 255] -> CHW float [0, 1]
    transforms.Normalize(mean=mean, std=std)
])

trainset = datasets.CIFAR10(root='~/.pytorch/CIFAR10', train=True, download=True, transform=transform)
testset = datasets.CIFAR10(root='~/.pytorch/CIFAR10', train=False, download=True, transform=transform)

trainloader = DataLoader(trainset, batch_size=64, shuffle=True)
testloader = DataLoader(testset, batch_size=64, shuffle=False)
Downloading https://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz to /root/.pytorch/CIFAR10/cifar-10-python.tar.gz
Extracting /root/.pytorch/CIFAR10/cifar-10-python.tar.gz to /root/.pytorch/CIFAR10
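As an aside, if you want to track generalization during training without touching the test set, you could carve a validation split out of the training data. A minimal sketch with random_split (the 45000/5000 split and the train_part/val_part names are our own choices; the run below keeps the full trainloader):

from torch.utils.data import random_split

train_part, val_part = random_split(trainset, [45000, 5000])
trainloader_part = DataLoader(train_part, batch_size=64, shuffle=True)
valloader = DataLoader(val_part, batch_size=64, shuffle=False)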
for images, labels in trainloader:
  print(images.size(), labels.size())
  break
torch.Size([64, 3, 224, 224]) torch.Size([64])
# Load VGG16 with ImageNet-pretrained weights. The old pretrained=True flag is
# deprecated since torchvision 0.13 in favour of the weights enum.
model = models.vgg16(weights=models.VGG16_Weights.IMAGENET1K_V1)
model
Downloading: "https://download.pytorch.org/models/vgg16-397923af.pth" to /root/.cache/torch/hub/checkpoints/vgg16-397923af.pth
VGG(
  (features): Sequential(
    (0): Conv2d(3, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
    (1): ReLU(inplace=True)
    (2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
    (3): ReLU(inplace=True)
    (4): MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False)
    (5): Conv2d(64, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
    (6): ReLU(inplace=True)
    (7): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
    (8): ReLU(inplace=True)
    (9): MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False)
    (10): Conv2d(128, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
    (11): ReLU(inplace=True)
    (12): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
    (13): ReLU(inplace=True)
    (14): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
    (15): ReLU(inplace=True)
    (16): MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False)
    (17): Conv2d(256, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
    (18): ReLU(inplace=True)
    (19): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
    (20): ReLU(inplace=True)
    (21): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
    (22): ReLU(inplace=True)
    (23): MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False)
    (24): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
    (25): ReLU(inplace=True)
    (26): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
    (27): ReLU(inplace=True)
    (28): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
    (29): ReLU(inplace=True)
    (30): MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False)
  )
  (avgpool): AdaptiveAvgPool2d(output_size=(7, 7))
  (classifier): Sequential(
    (0): Linear(in_features=25088, out_features=4096, bias=True)
    (1): ReLU(inplace=True)
    (2): Dropout(p=0.5, inplace=False)
    (3): Linear(in_features=4096, out_features=4096, bias=True)
    (4): ReLU(inplace=True)
    (5): Dropout(p=0.5, inplace=False)
    (6): Linear(in_features=4096, out_features=1000, bias=True)
  )
)
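The printout shows the three top-level blocks torchvision exposes: features, avgpool, and classifier. Before modifying anything, a quick sketch to count the parameters (VGG16 has on the order of 138 million):

total = sum(p.numel() for p in model.parameters())
print(f'{total:,} parameters')  # roughly 138 million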

Transfer learning - Fixed Feature Extractor

model.features
Sequential(
  (0): Conv2d(3, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
  (1): ReLU(inplace=True)
  (2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
  (3): ReLU(inplace=True)
  (4): MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False)
  (5): Conv2d(64, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
  (6): ReLU(inplace=True)
  (7): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
  (8): ReLU(inplace=True)
  (9): MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False)
  (10): Conv2d(128, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
  (11): ReLU(inplace=True)
  (12): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
  (13): ReLU(inplace=True)
  (14): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
  (15): ReLU(inplace=True)
  (16): MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False)
  (17): Conv2d(256, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
  (18): ReLU(inplace=True)
  (19): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
  (20): ReLU(inplace=True)
  (21): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
  (22): ReLU(inplace=True)
  (23): MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False)
  (24): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
  (25): ReLU(inplace=True)
  (26): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
  (27): ReLU(inplace=True)
  (28): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
  (29): ReLU(inplace=True)
  (30): MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False)
)
model.classifier
Sequential(
  (0): Linear(in_features=25088, out_features=4096, bias=True)
  (1): ReLU(inplace=True)
  (2): Dropout(p=0.5, inplace=False)
  (3): Linear(in_features=4096, out_features=4096, bias=True)
  (4): ReLU(inplace=True)
  (5): Dropout(p=0.5, inplace=False)
  (6): Linear(in_features=4096, out_features=1000, bias=True)
)

Now that we have the pretrained model, we adapt it to our data. To use the network as a fixed feature extractor, we freeze every existing layer so that gradients do not propagate through them, and then replace and retrain only the final fully connected layer of the classifier.

# Freeze all existing parameters; only layers added afterwards will be trained.
for param in model.parameters():
  param.requires_grad = False
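A quick check that the freeze took effect; at this point no parameter should require gradients:

trainable = sum(p.numel() for p in model.parameters() if p.requires_grad)
print(trainable)  # expected: 0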

Here we replace the last fully connected layer, because we have 10 classes instead of 1000, and append a LogSoftmax so the network outputs log-probabilities suitable for nn.NLLLoss.

model.classifier[-1] = nn.Sequential(
    nn.Linear(in_features=4096, out_features=10),
    nn.LogSoftmax(dim=1)
)
model.classifier
Sequential(
  (0): Linear(in_features=25088, out_features=4096, bias=True)
  (1): ReLU(inplace=True)
  (2): Dropout(p=0.5, inplace=False)
  (3): Linear(in_features=4096, out_features=4096, bias=True)
  (4): ReLU(inplace=True)
  (5): Dropout(p=0.5, inplace=False)
  (6): Sequential(
    (0): Linear(in_features=4096, out_features=10, bias=True)
    (1): LogSoftmax(dim=1)
  )
)
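As a sanity check of the new head (assuming one batch from trainloader): the model now outputs 10 log-probabilities per image, so exponentiating each row should sum to roughly 1:

images, _ = next(iter(trainloader))
with torch.no_grad():
    logps = model.to(device)(images.to(device))
print(logps.shape)                  # torch.Size([64, 10])
print(logps.exp().sum(dim=1)[:3])   # each entry close to 1.0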

Training the Fixed Feature Extractor

from torch.optim import Adam

model = model.to(device)
# Only the new classifier head still requires gradients, so only it is updated.
optimizer = Adam(filter(lambda p: p.requires_grad, model.parameters()))
criterion = nn.NLLLoss()
num_epochs = 1

for e in range(num_epochs):
  cum_epoch_loss = 0

  for batch, (images, labels) in enumerate(trainloader, 1):
    images = images.to(device)
    labels = labels.to(device)

    optimizer.zero_grad()
    logps = model(images)
    loss = criterion(logps, labels)
    loss.backward()
    optimizer.step()

    cum_epoch_loss += loss.item()
    print(f'Epoch({e}/{num_epochs}) : Batch number({batch}/{len(trainloader)})  Batch loss : {loss.item()}')

  print(f'Training loss : {cum_epoch_loss/len(trainloader)}')
Epoch(0/1) : Batch number(1/782)  Batch loss : 2.4472618103027344
Epoch(0/1) : Batch number(2/782)  Batch loss : 2.231184482574463
Epoch(0/1) : Batch number(3/782)  Batch loss : 2.0071535110473633
Epoch(0/1) : Batch number(4/782)  Batch loss : 1.9421484470367432
Epoch(0/1) : Batch number(5/782)  Batch loss : 1.9430814981460571
Epoch(0/1) : Batch number(6/782)  Batch loss : 1.6382205486297607
Epoch(0/1) : Batch number(7/782)  Batch loss : 1.5217958688735962
Epoch(0/1) : Batch number(8/782)  Batch loss : 1.3577253818511963
Epoch(0/1) : Batch number(9/782)  Batch loss : 1.4438300132751465
Epoch(0/1) : Batch number(10/782)  Batch loss : 1.2072449922561646
[... batch-loss output for batches 11-758 omitted ...]
Epoch(0/1 : Batch number(759/782)  Batch loss : 0.2927672564983368
Epoch(0/1 : Batch number(760/782)  Batch loss : 0.4794183373451233
Epoch(0/1 : Batch number(761/782)  Batch loss : 0.4898393452167511
Epoch(0/1 : Batch number(762/782)  Batch loss : 0.768716037273407
Epoch(0/1 : Batch number(763/782)  Batch loss : 0.5085397362709045
Epoch(0/1 : Batch number(764/782)  Batch loss : 0.662692666053772
Epoch(0/1 : Batch number(765/782)  Batch loss : 0.9856228828430176
Epoch(0/1 : Batch number(766/782)  Batch loss : 0.5568333864212036
Epoch(0/1 : Batch number(767/782)  Batch loss : 0.5665826201438904
Epoch(0/1 : Batch number(768/782)  Batch loss : 0.44551926851272583
Epoch(0/1 : Batch number(769/782)  Batch loss : 0.5424309968948364
Epoch(0/1 : Batch number(770/782)  Batch loss : 0.48389899730682373
Epoch(0/1 : Batch number(771/782)  Batch loss : 0.4647360146045685
Epoch(0/1 : Batch number(772/782)  Batch loss : 0.7771672010421753
Epoch(0/1 : Batch number(773/782)  Batch loss : 0.6462194919586182
Epoch(0/1 : Batch number(774/782)  Batch loss : 0.5444169044494629
Epoch(0/1 : Batch number(775/782)  Batch loss : 0.6214494705200195
Epoch(0/1 : Batch number(776/782)  Batch loss : 0.4268353581428528
Epoch(0/1 : Batch number(777/782)  Batch loss : 0.49654093384742737
Epoch(0/1 : Batch number(778/782)  Batch loss : 0.27706897258758545
Epoch(0/1 : Batch number(779/782)  Batch loss : 0.4966617822647095
Epoch(0/1 : Batch number(780/782)  Batch loss : 0.5388930439949036
Epoch(0/1 : Batch number(781/782)  Batch loss : 0.5077332258224487
Epoch(0/1 : Batch number(782/782)  Batch loss : 0.8724468350410461
Training loss : 0.6482589438443294

Evaluating the network and viewing images

import matplotlib.pyplot as plt
%matplotlib inline
import numpy as np

For evaluation, we move the model back to the CPU, because the test images we feed it here are loaded on the CPU.
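
An alternative worth noting (not what this notebook does) is to leave the model on the GPU and move each batch onto it instead; a minimal sketch, assuming the `device` object defined earlier:

# Sketch: move the data to the model's device rather than the model to the CPU
model.to(device)
model.eval()

with torch.no_grad():
  images, labels = next(iter(testloader))
  images, labels = images.to(device), labels.to(device)
  logps = model(images)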

CIFAR10_classes = ['plane', 'car', 'bird', 'cat',
           'deer', 'dog', 'frog', 'horse', 'ship', 'truck']

# Move the model back to the CPU and switch to evaluation mode
model.to('cpu')

model.eval()

with torch.no_grad():
  images, labels = next(iter(testloader))
  logps = model(images)          # log-probabilities (the classifier ends in LogSoftmax)

  output = torch.exp(logps)      # convert log-probabilities to probabilities
  print(output)
tensor([[1.2113e-03, 5.7369e-03, 1.9268e-03, 8.9940e-01, 2.0054e-04, 3.5575e-02,
         4.7963e-02, 5.1541e-03, 2.6829e-03, 1.4812e-04],
        [8.5297e-03, 3.4591e-01, 1.8998e-05, 3.8195e-05, 2.3665e-06, 2.4522e-06,
         2.7087e-06, 2.0283e-06, 6.4424e-01, 1.2575e-03],
        [3.0444e-04, 2.0631e-03, 5.6353e-07, 2.5822e-07, 9.0606e-08, 1.5962e-08,
         7.6774e-09, 2.2511e-07, 9.9761e-01, 1.8954e-05],
        [2.5504e-01, 2.0685e-03, 7.3005e-02, 7.5947e-02, 1.3378e-03, 1.6631e-03,
         1.2541e-03, 1.2839e-03, 5.8759e-01, 8.1071e-04],
        [3.3044e-07, 2.0871e-07, 4.2308e-03, 4.2265e-04, 1.4117e-04, 1.4808e-05,
         9.9519e-01, 1.1944e-07, 1.3658e-07, 3.1089e-08],
        [6.8892e-06, 1.9955e-06, 1.2071e-04, 1.0398e-01, 2.2045e-02, 1.4370e-01,
         7.2937e-01, 7.3934e-04, 1.0501e-06, 2.7530e-05],
        [4.5535e-05, 9.9389e-01, 8.8168e-07, 4.6423e-05, 9.2625e-08, 3.4999e-05,
         4.9127e-06, 2.5228e-05, 1.2084e-05, 5.9362e-03],
        [3.3074e-02, 5.9544e-03, 5.4569e-02, 5.5880e-02, 1.8693e-02, 5.8131e-03,
         7.8216e-01, 7.6402e-03, 2.0490e-02, 1.5727e-02],
        [3.8806e-05, 3.0470e-06, 6.9235e-03, 9.7171e-01, 4.7381e-03, 1.3387e-02,
         1.7680e-03, 1.3218e-03, 1.0466e-04, 2.4489e-06],
        [1.7079e-05, 9.5583e-01, 5.5334e-06, 1.7630e-04, 2.2357e-05, 1.5181e-04,
         1.4299e-04, 5.2525e-05, 1.0801e-04, 4.3497e-02],
        [9.6833e-01, 2.0272e-05, 1.1045e-03, 6.9936e-03, 9.6624e-03, 2.0077e-03,
         1.1925e-04, 3.6287e-03, 7.8965e-03, 2.3908e-04],
        [2.1112e-08, 2.0599e-03, 5.1397e-10, 5.1849e-09, 2.0936e-10, 3.4549e-09,
         1.0410e-09, 1.2440e-09, 4.8472e-07, 9.9794e-01],
        [1.1723e-04, 3.9254e-05, 1.3782e-03, 2.5092e-01, 4.1142e-03, 5.9271e-01,
         3.8721e-03, 1.4678e-01, 4.0515e-05, 2.4059e-05],
        [1.9583e-04, 1.0761e-03, 1.4763e-02, 3.7750e-01, 6.8518e-03, 8.6262e-02,
         4.4195e-03, 5.0815e-01, 2.5906e-04, 5.3065e-04],
        [4.3737e-06, 1.3153e-03, 2.6249e-08, 3.0749e-07, 1.1680e-07, 1.4266e-07,
         3.9740e-09, 4.3181e-07, 1.2185e-05, 9.9867e-01],
        [2.2010e-02, 1.4526e-02, 2.4091e-02, 4.1219e-04, 1.5758e-04, 2.2269e-03,
         7.6534e-02, 1.6532e-04, 8.5980e-01, 7.5958e-05],
        [1.5717e-05, 1.1077e-06, 1.4269e-04, 5.6820e-02, 2.6723e-06, 9.4277e-01,
         6.3551e-05, 1.7937e-04, 1.0929e-06, 1.0279e-06],
        [1.8752e-04, 1.7066e-02, 6.4567e-04, 1.4506e-03, 1.6871e-03, 6.9808e-02,
         4.1117e-03, 9.0160e-01, 1.8181e-04, 3.2636e-03],
        [7.0071e-05, 1.6759e-04, 1.2385e-06, 6.2300e-07, 1.1085e-05, 1.3833e-07,
         1.8829e-08, 2.7852e-06, 9.9913e-01, 6.1592e-04],
        [6.0862e-05, 1.2807e-05, 3.8328e-03, 2.5106e-03, 1.2724e-03, 5.1355e-05,
         9.9219e-01, 2.7570e-06, 6.5231e-05, 2.4937e-06],
        [1.1551e-01, 6.6639e-03, 1.0507e-02, 1.2734e-01, 3.6779e-02, 2.7788e-02,
         5.0599e-04, 6.5892e-01, 1.3684e-02, 2.3124e-03],
        [9.8006e-01, 3.9568e-04, 3.8290e-03, 1.3180e-02, 3.3637e-04, 2.0682e-04,
         4.8279e-04, 1.1028e-04, 1.3642e-03, 2.9690e-05],
        [5.8600e-02, 2.7094e-04, 3.7615e-02, 3.9952e-02, 7.9403e-01, 3.0085e-03,
         2.2774e-02, 4.3271e-03, 3.9365e-02, 5.5878e-05],
        [1.6183e-04, 4.5932e-01, 8.6388e-07, 8.6435e-06, 4.3777e-07, 4.2037e-05,
         1.3920e-07, 6.7257e-06, 1.1230e-04, 5.4034e-01],
        [7.1349e-06, 1.5963e-05, 7.2371e-04, 3.4586e-02, 2.3288e-02, 9.3947e-01,
         5.0553e-04, 1.2967e-03, 1.0013e-04, 4.9019e-06],
        [2.5509e-03, 1.1047e-04, 7.0559e-01, 7.4033e-02, 4.0030e-02, 6.9699e-02,
         3.6068e-04, 1.0694e-01, 6.0867e-04, 7.2859e-05],
        [3.0123e-03, 4.2318e-04, 3.0418e-02, 6.5703e-01, 2.0195e-01, 4.2107e-02,
         5.0389e-02, 9.2009e-03, 2.7002e-03, 2.7683e-03],
        [7.6662e-01, 1.2839e-04, 1.5014e-01, 9.8554e-03, 1.9794e-02, 5.0792e-04,
         1.2039e-02, 2.9692e-02, 1.0860e-02, 3.6212e-04],
        [7.7395e-06, 1.0913e-03, 7.8448e-08, 2.9837e-06, 9.7440e-07, 2.1490e-07,
         6.0153e-08, 1.2489e-05, 5.5653e-06, 9.9888e-01],
        [2.0567e-05, 3.9808e-07, 1.7153e-03, 6.9518e-03, 3.0810e-03, 4.6633e-04,
         9.8769e-01, 6.2077e-05, 1.4530e-05, 4.9431e-07],
        [3.0752e-04, 2.0826e-04, 1.1048e-02, 2.3267e-01, 3.4816e-03, 2.0712e-03,
         7.4914e-01, 3.5714e-04, 2.8915e-05, 6.7787e-04],
        [3.0671e-06, 6.2482e-06, 1.3099e-03, 1.0619e-02, 1.9649e-03, 9.8235e-01,
         2.8405e-04, 3.4460e-03, 9.4144e-06, 2.5145e-06],
        [5.9625e-04, 1.5031e-03, 5.3364e-02, 1.9927e-02, 8.8758e-01, 1.1956e-02,
         1.8595e-02, 5.6304e-03, 4.3343e-04, 4.1228e-04],
        [1.6451e-05, 9.4223e-04, 5.8692e-03, 3.6749e-01, 3.4628e-02, 5.4836e-01,
         4.1956e-02, 6.6866e-04, 2.1670e-05, 5.0091e-05],
        [1.1482e-03, 4.1007e-04, 7.2318e-07, 7.3082e-07, 3.3509e-06, 2.1242e-07,
         1.2835e-07, 2.8839e-06, 1.1168e-03, 9.9732e-01],
        [6.1386e-03, 2.9135e-02, 9.7623e-02, 1.7043e-01, 5.3014e-04, 6.2670e-01,
         5.8838e-03, 4.9938e-02, 1.2032e-02, 1.5872e-03],
        [1.8895e-05, 2.8103e-08, 4.7548e-04, 2.3157e-03, 8.0929e-01, 8.5699e-02,
         1.4071e-05, 1.0218e-01, 2.3894e-06, 4.0097e-08],
        [2.4627e-04, 7.4863e-01, 4.0947e-07, 1.0164e-05, 2.2963e-06, 1.1840e-06,
         6.9362e-07, 8.7570e-06, 2.7528e-05, 2.5107e-01],
        [5.1783e-05, 3.3416e-03, 1.2340e-05, 6.7170e-04, 5.5872e-06, 1.9402e-03,
         1.6461e-06, 4.6060e-04, 2.7317e-03, 9.9078e-01],
        [8.5486e-09, 1.0791e-07, 4.3318e-07, 9.5189e-05, 6.6346e-08, 9.9990e-01,
         4.6205e-06, 5.7553e-07, 7.0833e-08, 2.7484e-08],
        [2.9557e-01, 7.8143e-03, 4.2952e-01, 8.8177e-03, 1.6584e-01, 2.1555e-03,
         1.4575e-02, 6.9546e-02, 5.7538e-03, 4.0314e-04],
        [2.0323e-04, 8.8010e-05, 4.8932e-03, 3.0158e-03, 4.7919e-03, 1.6690e-04,
         9.8651e-01, 2.2682e-04, 2.9328e-05, 7.0730e-05],
        [3.4228e-05, 1.3270e-04, 1.2915e-03, 1.0655e-01, 1.5613e-04, 8.8213e-01,
         4.2506e-05, 9.5806e-03, 3.5692e-05, 4.6112e-05],
        [2.1061e-04, 1.9460e-04, 1.3581e-02, 3.5368e-02, 3.1734e-03, 3.8354e-02,
         9.0839e-01, 8.5377e-05, 4.1548e-04, 2.3025e-04],
        [6.4589e-01, 1.8619e-01, 3.5772e-03, 1.1029e-04, 1.6149e-03, 7.9385e-05,
         1.0845e-03, 1.0573e-03, 7.9282e-02, 8.1113e-02],
        [4.9646e-05, 9.1150e-04, 3.4474e-08, 1.9303e-07, 2.3459e-07, 5.9032e-08,
         1.0633e-09, 1.4386e-05, 1.7949e-03, 9.9723e-01],
        [3.2627e-06, 4.1760e-04, 1.1080e-03, 7.5293e-01, 6.9407e-03, 2.3458e-01,
         8.1675e-04, 3.0074e-03, 1.1061e-04, 9.1619e-05],
        [1.7746e-02, 7.8845e-02, 1.0965e-04, 4.5630e-04, 1.6869e-05, 1.3913e-05,
         2.5238e-05, 1.3167e-04, 4.4753e-02, 8.5790e-01],
        [2.0448e-06, 6.3058e-07, 8.9893e-04, 7.4903e-05, 1.5401e-01, 1.3653e-03,
         6.9978e-06, 8.4364e-01, 1.2190e-06, 1.4792e-07],
        [2.2668e-04, 6.6189e-05, 5.1452e-02, 1.4459e-03, 1.0852e-02, 4.9669e-03,
         9.3086e-01, 3.3382e-05, 1.0147e-04, 4.6010e-07],
        [1.7992e-02, 6.1505e-05, 8.6350e-06, 4.0776e-06, 2.8062e-05, 1.7884e-07,
         2.1865e-07, 8.0336e-06, 2.4396e-03, 9.7946e-01],
        [1.8371e-03, 3.1564e-06, 1.1384e-04, 3.0063e-06, 3.1845e-05, 4.6281e-06,
         5.3200e-06, 1.7263e-05, 9.9780e-01, 1.8088e-04],
        [7.2186e-01, 1.9629e-04, 9.8230e-04, 9.6987e-03, 1.0193e-03, 5.0362e-03,
         1.5239e-02, 1.6316e-04, 2.3508e-01, 1.0719e-02],
        [7.6611e-06, 7.5238e-05, 1.1091e-02, 9.3578e-01, 1.7459e-03, 1.4525e-02,
         3.6238e-02, 2.0789e-04, 8.1826e-05, 2.4313e-04],
        [1.9646e-03, 9.0668e-07, 1.0852e-06, 3.8957e-08, 2.3115e-08, 1.0111e-08,
         2.9286e-09, 1.1960e-08, 9.9803e-01, 4.0649e-08],
        [1.7023e-04, 2.1604e-06, 8.2619e-05, 5.4277e-05, 1.0247e-04, 3.9206e-05,
         1.3518e-08, 2.0571e-05, 9.9953e-01, 3.2908e-06],
        [1.8501e-04, 5.0424e-04, 1.4459e-04, 1.5345e-01, 2.0608e-03, 1.1433e-01,
         1.1869e-05, 7.2499e-01, 8.7002e-05, 4.2341e-03],
        [3.4869e-02, 1.3949e-02, 3.4844e-02, 7.5717e-01, 6.8734e-02, 3.0900e-02,
         2.7396e-02, 1.2221e-02, 1.7675e-02, 2.2405e-03],
        [2.2636e-03, 2.1945e-04, 2.0568e-03, 5.4138e-01, 6.2347e-02, 3.3461e-01,
         7.7418e-03, 4.0857e-02, 7.2149e-03, 1.3162e-03],
        [3.2231e-02, 9.7227e-03, 1.3649e-02, 3.9683e-01, 3.3608e-02, 3.5756e-01,
         1.2094e-01, 1.2010e-02, 1.2767e-02, 1.0682e-02],
        [3.3931e-06, 5.1018e-07, 8.4840e-05, 1.0965e-05, 1.9850e-01, 9.8649e-05,
         2.3373e-06, 8.0130e-01, 2.5734e-07, 4.0048e-08],
        [2.1274e-05, 2.9416e-05, 1.7914e-02, 7.6104e-01, 1.7628e-03, 1.9561e-01,
         6.5628e-03, 1.6969e-02, 5.7681e-05, 2.8008e-05],
        [2.4290e-02, 6.3221e-03, 3.5828e-02, 1.2811e-02, 3.9836e-03, 1.1816e-03,
         9.1090e-01, 1.7085e-03, 1.9586e-03, 1.0150e-03],
        [9.9146e-04, 2.7040e-02, 5.4741e-03, 9.2928e-01, 4.4170e-04, 1.2461e-02,
         4.1073e-04, 8.9370e-05, 1.6100e-02, 7.7069e-03]])
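
Because the classifier ends in a LogSoftmax layer, torch.exp turns the log-probabilities back into softmax probabilities, so each row above should sum to 1. A quick sanity check on the tensors already in scope:

# Each image's ten class probabilities should sum to (numerically) 1
print(output.sum(dim=1))
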
CIFAR10_classes = ['plane', 'car', 'bird', 'cat',
           'deer', 'dog', 'frog', 'horse', 'ship', 'truck']


model.to('cpu')

model.eval()

with torch.no_grad():
  images, labels = next(iter(testloader))
  logps = model(images)

  output = torch.exp(logps)

  pred = torch.argmax(output, 1)  # index of the most probable class for each image

model.train()  # back to training mode; returns the model, so its summary is printed below
VGG(
  (features): Sequential(
    (0): Conv2d(3, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
    (1): ReLU(inplace=True)
    (2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
    (3): ReLU(inplace=True)
    (4): MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False)
    (5): Conv2d(64, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
    (6): ReLU(inplace=True)
    (7): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
    (8): ReLU(inplace=True)
    (9): MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False)
    (10): Conv2d(128, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
    (11): ReLU(inplace=True)
    (12): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
    (13): ReLU(inplace=True)
    (14): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
    (15): ReLU(inplace=True)
    (16): MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False)
    (17): Conv2d(256, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
    (18): ReLU(inplace=True)
    (19): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
    (20): ReLU(inplace=True)
    (21): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
    (22): ReLU(inplace=True)
    (23): MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False)
    (24): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
    (25): ReLU(inplace=True)
    (26): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
    (27): ReLU(inplace=True)
    (28): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
    (29): ReLU(inplace=True)
    (30): MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False)
  )
  (avgpool): AdaptiveAvgPool2d(output_size=(7, 7))
  (classifier): Sequential(
    (0): Linear(in_features=25088, out_features=4096, bias=True)
    (1): ReLU(inplace=True)
    (2): Dropout(p=0.5, inplace=False)
    (3): Linear(in_features=4096, out_features=4096, bias=True)
    (4): ReLU(inplace=True)
    (5): Dropout(p=0.5, inplace=False)
    (6): Sequential(
      (0): Linear(in_features=4096, out_features=10, bias=True)
      (1): LogSoftmax(dim=1)
    )
  )
)
test_image_id=2

def denormalize(tensor):
  # Invert the normalization z = (x - mean) / std, i.e. x = z * std + mean
  tensor = tensor*std + mean
  return tensor

def show_img(img):
  img = img.numpy().transpose((1,2,0))  # CHW -> HWC for matplotlib
  img = denormalize(img)
  img = np.clip(img,0,1)
  plt.imshow(img)

def get_CIFAR10_class(id):
  CIFAR10_classes = ['plane', 'car', 'bird', 'cat',
           'deer', 'dog', 'frog', 'horse', 'ship', 'truck']
  return CIFAR10_classes[id]

show_img(images[test_image_id])
print(get_CIFAR10_class(pred[test_image_id]))
ship

png
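
To eyeball more than one prediction at a time, a small grid can be sketched on top of the helpers above (the choice of 8 images and the figure size are arbitrary):

# Sketch: show the first 8 test images with their predicted classes
fig = plt.figure(figsize=(12, 6))
for i in range(8):
  ax = fig.add_subplot(2, 4, i + 1)
  show_img(images[i])
  ax.set_title(get_CIFAR10_class(pred[i]))
  ax.axis('off')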

Viewing images & normalization - Under the hood

pred
tensor([3, 8, 8, 8, 6, 6, 1, 6, 3, 1, 0, 9, 5, 7, 9, 8, 5, 7, 8, 6, 7, 0, 4, 9,
        5, 2, 3, 0, 9, 6, 6, 5, 4, 5, 9, 5, 4, 1, 9, 5, 2, 6, 5, 6, 0, 9, 3, 9,
        7, 6, 9, 8, 0, 3, 8, 8, 7, 3, 3, 3, 7, 3, 6, 3])
images.size()
torch.Size([64, 3, 224, 224])
images[0].size()
torch.Size([3, 224, 224])
np.transpose(images[test_image_id],(1,2,0))
tensor([[[ 0.5878,  1.2906,  2.0648],
         [ 0.5878,  1.2906,  2.0648],
         [ 0.5878,  1.2906,  2.0648],
         ...,
         [ 1.9578,  2.1835,  2.4831],
         [ 1.9578,  2.1835,  2.4831],
         [ 1.9578,  2.1835,  2.4831]],

        [[ 0.5878,  1.2906,  2.0648],
         [ 0.5878,  1.2906,  2.0648],
         [ 0.5878,  1.2906,  2.0648],
         ...,
         [ 1.9578,  2.1835,  2.4831],
         [ 1.9578,  2.1835,  2.4831],
         [ 1.9578,  2.1835,  2.4831]],

        [[ 0.5878,  1.2906,  2.0648],
         [ 0.5878,  1.2906,  2.0648],
         [ 0.5878,  1.2906,  2.0648],
         ...,
         [ 1.9578,  2.1835,  2.4831],
         [ 1.9578,  2.1835,  2.4831],
         [ 1.9578,  2.1835,  2.4831]],

        ...,

        [[-1.6384, -1.3179, -0.9853],
         [-1.6384, -1.3179, -0.9853],
         [-1.6384, -1.3179, -0.9853],
         ...,
         [-1.9980, -1.8957, -1.6824],
         [-1.9980, -1.8957, -1.6824],
         [-1.9980, -1.8957, -1.6824]],

        [[-1.6384, -1.3179, -0.9853],
         [-1.6384, -1.3179, -0.9853],
         [-1.6384, -1.3179, -0.9853],
         ...,
         [-1.9980, -1.8957, -1.6824],
         [-1.9980, -1.8957, -1.6824],
         [-1.9980, -1.8957, -1.6824]],

        [[-1.6384, -1.3179, -0.9853],
         [-1.6384, -1.3179, -0.9853],
         [-1.6384, -1.3179, -0.9853],
         ...,
         [-1.9980, -1.8957, -1.6824],
         [-1.9980, -1.8957, -1.6824],
         [-1.9980, -1.8957, -1.6824]]])
plt.imshow(np.clip(np.transpose(images[test_image_id],(1,2,0)),0,1))
<matplotlib.image.AxesImage at 0x7f90fbc0a1c0>

png

# The z-score is defined for the normalization step:
#   z = (x - mean) / std
# The clipped image above looks washed out because the normalized values fall
# outside [0, 1]; denormalization inverts the z-score: x = z * std + mean.
def denormalize(tensor):
  tensor = tensor*std + mean
  return tensor
def show_img(img):
  img = img.numpy().transpose((1,2,0))  # CHW -> HWC for matplotlib
  img = denormalize(img)
  img = np.clip(img,0,1)
  plt.imshow(img)
show_img(images[test_image_id])

png
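
The same inversion can also be expressed as a torchvision Normalize transform with flipped parameters: applying Normalize(mean=-mean/std, std=1/std) to z computes (z + mean/std) * std = z * std + mean. A sketch, applied to the CHW tensor before any transpose:

# Sketch: undo transforms.Normalize with a second Normalize
inv_normalize = transforms.Normalize(
  mean=[-m / s for m, s in zip(mean, std)],  # -mean/std per channel
  std=[1 / s for s in std])                  # 1/std per channel

restored = inv_normalize(images[test_image_id])
plt.imshow(np.clip(restored.numpy().transpose((1, 2, 0)), 0, 1))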

def get_CIFAR10_class(id):
  CIFAR10_classes = ['plane', 'car', 'bird', 'cat',
           'deer', 'dog', 'frog', 'horse', 'ship', 'truck']
  return CIFAR10_classes[id]
get_CIFAR10_class(pred[test_image_id])
'ship'

Accuracy of the model

model.eval()
with torch.no_grad():
    num_correct = 0
    total = 0

    # Inspect the raw probabilities for the first test batch only
    for images, labels in testloader:

        logps = model(images)
        output = torch.exp(logps)
        print(output)

        break
tensor([[1.2113e-03, 5.7369e-03, 1.9268e-03, 8.9940e-01, 2.0054e-04, 3.5575e-02,
         4.7963e-02, 5.1541e-03, 2.6829e-03, 1.4812e-04],
        [8.5297e-03, 3.4591e-01, 1.8998e-05, 3.8195e-05, 2.3665e-06, 2.4522e-06,
         2.7087e-06, 2.0283e-06, 6.4424e-01, 1.2575e-03],
        [3.0444e-04, 2.0631e-03, 5.6353e-07, 2.5822e-07, 9.0606e-08, 1.5962e-08,
         7.6774e-09, 2.2511e-07, 9.9761e-01, 1.8954e-05],
        [2.5504e-01, 2.0685e-03, 7.3005e-02, 7.5947e-02, 1.3378e-03, 1.6631e-03,
         1.2541e-03, 1.2839e-03, 5.8759e-01, 8.1071e-04],
        [3.3044e-07, 2.0871e-07, 4.2308e-03, 4.2265e-04, 1.4117e-04, 1.4808e-05,
         9.9519e-01, 1.1944e-07, 1.3658e-07, 3.1089e-08],
        [6.8892e-06, 1.9955e-06, 1.2071e-04, 1.0398e-01, 2.2045e-02, 1.4370e-01,
         7.2937e-01, 7.3934e-04, 1.0501e-06, 2.7530e-05],
        [4.5535e-05, 9.9389e-01, 8.8168e-07, 4.6423e-05, 9.2625e-08, 3.4999e-05,
         4.9127e-06, 2.5228e-05, 1.2084e-05, 5.9362e-03],
        [3.3074e-02, 5.9544e-03, 5.4569e-02, 5.5880e-02, 1.8693e-02, 5.8131e-03,
         7.8216e-01, 7.6402e-03, 2.0490e-02, 1.5727e-02],
        [3.8806e-05, 3.0470e-06, 6.9235e-03, 9.7171e-01, 4.7381e-03, 1.3387e-02,
         1.7680e-03, 1.3218e-03, 1.0466e-04, 2.4489e-06],
        [1.7079e-05, 9.5583e-01, 5.5334e-06, 1.7630e-04, 2.2357e-05, 1.5181e-04,
         1.4299e-04, 5.2525e-05, 1.0801e-04, 4.3497e-02],
        [9.6833e-01, 2.0272e-05, 1.1045e-03, 6.9936e-03, 9.6624e-03, 2.0077e-03,
         1.1925e-04, 3.6287e-03, 7.8965e-03, 2.3908e-04],
        [2.1112e-08, 2.0599e-03, 5.1397e-10, 5.1849e-09, 2.0936e-10, 3.4549e-09,
         1.0410e-09, 1.2440e-09, 4.8472e-07, 9.9794e-01],
        [1.1723e-04, 3.9254e-05, 1.3782e-03, 2.5092e-01, 4.1142e-03, 5.9271e-01,
         3.8721e-03, 1.4678e-01, 4.0515e-05, 2.4059e-05],
        [1.9583e-04, 1.0761e-03, 1.4763e-02, 3.7750e-01, 6.8518e-03, 8.6262e-02,
         4.4195e-03, 5.0815e-01, 2.5906e-04, 5.3065e-04],
        [4.3737e-06, 1.3153e-03, 2.6249e-08, 3.0749e-07, 1.1680e-07, 1.4266e-07,
         3.9740e-09, 4.3181e-07, 1.2185e-05, 9.9867e-01],
        [2.2010e-02, 1.4526e-02, 2.4091e-02, 4.1219e-04, 1.5758e-04, 2.2269e-03,
         7.6534e-02, 1.6532e-04, 8.5980e-01, 7.5958e-05],
        [1.5717e-05, 1.1077e-06, 1.4269e-04, 5.6820e-02, 2.6723e-06, 9.4277e-01,
         6.3551e-05, 1.7937e-04, 1.0929e-06, 1.0279e-06],
        [1.8752e-04, 1.7066e-02, 6.4567e-04, 1.4506e-03, 1.6871e-03, 6.9808e-02,
         4.1117e-03, 9.0160e-01, 1.8181e-04, 3.2636e-03],
        [7.0071e-05, 1.6759e-04, 1.2385e-06, 6.2300e-07, 1.1085e-05, 1.3833e-07,
         1.8829e-08, 2.7852e-06, 9.9913e-01, 6.1592e-04],
        [6.0862e-05, 1.2807e-05, 3.8328e-03, 2.5106e-03, 1.2724e-03, 5.1355e-05,
         9.9219e-01, 2.7570e-06, 6.5231e-05, 2.4937e-06],
        [1.1551e-01, 6.6639e-03, 1.0507e-02, 1.2734e-01, 3.6779e-02, 2.7788e-02,
         5.0599e-04, 6.5892e-01, 1.3684e-02, 2.3124e-03],
        [9.8006e-01, 3.9568e-04, 3.8290e-03, 1.3180e-02, 3.3637e-04, 2.0682e-04,
         4.8279e-04, 1.1028e-04, 1.3642e-03, 2.9690e-05],
        [5.8600e-02, 2.7094e-04, 3.7615e-02, 3.9952e-02, 7.9403e-01, 3.0085e-03,
         2.2774e-02, 4.3271e-03, 3.9365e-02, 5.5878e-05],
        [1.6183e-04, 4.5932e-01, 8.6388e-07, 8.6435e-06, 4.3777e-07, 4.2037e-05,
         1.3920e-07, 6.7257e-06, 1.1230e-04, 5.4034e-01],
        [7.1349e-06, 1.5963e-05, 7.2371e-04, 3.4586e-02, 2.3288e-02, 9.3947e-01,
         5.0553e-04, 1.2967e-03, 1.0013e-04, 4.9019e-06],
        [2.5509e-03, 1.1047e-04, 7.0559e-01, 7.4033e-02, 4.0030e-02, 6.9699e-02,
         3.6068e-04, 1.0694e-01, 6.0867e-04, 7.2859e-05],
        [3.0123e-03, 4.2318e-04, 3.0418e-02, 6.5703e-01, 2.0195e-01, 4.2107e-02,
         5.0389e-02, 9.2009e-03, 2.7002e-03, 2.7683e-03],
        [7.6662e-01, 1.2839e-04, 1.5014e-01, 9.8554e-03, 1.9794e-02, 5.0792e-04,
         1.2039e-02, 2.9692e-02, 1.0860e-02, 3.6212e-04],
        [7.7395e-06, 1.0913e-03, 7.8448e-08, 2.9837e-06, 9.7440e-07, 2.1490e-07,
         6.0153e-08, 1.2489e-05, 5.5653e-06, 9.9888e-01],
        [2.0567e-05, 3.9808e-07, 1.7153e-03, 6.9518e-03, 3.0810e-03, 4.6633e-04,
         9.8769e-01, 6.2077e-05, 1.4530e-05, 4.9431e-07],
        [3.0752e-04, 2.0826e-04, 1.1048e-02, 2.3267e-01, 3.4816e-03, 2.0712e-03,
         7.4914e-01, 3.5714e-04, 2.8915e-05, 6.7787e-04],
        [3.0671e-06, 6.2482e-06, 1.3099e-03, 1.0619e-02, 1.9649e-03, 9.8235e-01,
         2.8405e-04, 3.4460e-03, 9.4144e-06, 2.5145e-06],
        [5.9625e-04, 1.5031e-03, 5.3364e-02, 1.9927e-02, 8.8758e-01, 1.1956e-02,
         1.8595e-02, 5.6304e-03, 4.3343e-04, 4.1228e-04],
        [1.6451e-05, 9.4223e-04, 5.8692e-03, 3.6749e-01, 3.4628e-02, 5.4836e-01,
         4.1956e-02, 6.6866e-04, 2.1670e-05, 5.0091e-05],
        [1.1482e-03, 4.1007e-04, 7.2318e-07, 7.3082e-07, 3.3509e-06, 2.1242e-07,
         1.2835e-07, 2.8839e-06, 1.1168e-03, 9.9732e-01],
        [6.1386e-03, 2.9135e-02, 9.7623e-02, 1.7043e-01, 5.3014e-04, 6.2670e-01,
         5.8838e-03, 4.9938e-02, 1.2032e-02, 1.5872e-03],
        [1.8895e-05, 2.8103e-08, 4.7548e-04, 2.3157e-03, 8.0929e-01, 8.5699e-02,
         1.4071e-05, 1.0218e-01, 2.3894e-06, 4.0097e-08],
        [2.4627e-04, 7.4863e-01, 4.0947e-07, 1.0164e-05, 2.2963e-06, 1.1840e-06,
         6.9362e-07, 8.7570e-06, 2.7528e-05, 2.5107e-01],
        [5.1783e-05, 3.3416e-03, 1.2340e-05, 6.7170e-04, 5.5872e-06, 1.9402e-03,
         1.6461e-06, 4.6060e-04, 2.7317e-03, 9.9078e-01],
        [8.5486e-09, 1.0791e-07, 4.3318e-07, 9.5189e-05, 6.6346e-08, 9.9990e-01,
         4.6205e-06, 5.7553e-07, 7.0833e-08, 2.7484e-08],
        [2.9557e-01, 7.8143e-03, 4.2952e-01, 8.8177e-03, 1.6584e-01, 2.1555e-03,
         1.4575e-02, 6.9546e-02, 5.7538e-03, 4.0314e-04],
        [2.0323e-04, 8.8010e-05, 4.8932e-03, 3.0158e-03, 4.7919e-03, 1.6690e-04,
         9.8651e-01, 2.2682e-04, 2.9328e-05, 7.0730e-05],
        [3.4228e-05, 1.3270e-04, 1.2915e-03, 1.0655e-01, 1.5613e-04, 8.8213e-01,
         4.2506e-05, 9.5806e-03, 3.5692e-05, 4.6112e-05],
        [2.1061e-04, 1.9460e-04, 1.3581e-02, 3.5368e-02, 3.1734e-03, 3.8354e-02,
         9.0839e-01, 8.5377e-05, 4.1548e-04, 2.3025e-04],
        [6.4589e-01, 1.8619e-01, 3.5772e-03, 1.1029e-04, 1.6149e-03, 7.9385e-05,
         1.0845e-03, 1.0573e-03, 7.9282e-02, 8.1113e-02],
        [4.9646e-05, 9.1150e-04, 3.4474e-08, 1.9303e-07, 2.3459e-07, 5.9032e-08,
         1.0633e-09, 1.4386e-05, 1.7949e-03, 9.9723e-01],
        [3.2627e-06, 4.1760e-04, 1.1080e-03, 7.5293e-01, 6.9407e-03, 2.3458e-01,
         8.1675e-04, 3.0074e-03, 1.1061e-04, 9.1619e-05],
        [1.7746e-02, 7.8845e-02, 1.0965e-04, 4.5630e-04, 1.6869e-05, 1.3913e-05,
         2.5238e-05, 1.3167e-04, 4.4753e-02, 8.5790e-01],
        [2.0448e-06, 6.3058e-07, 8.9893e-04, 7.4903e-05, 1.5401e-01, 1.3653e-03,
         6.9978e-06, 8.4364e-01, 1.2190e-06, 1.4792e-07],
        [2.2668e-04, 6.6189e-05, 5.1452e-02, 1.4459e-03, 1.0852e-02, 4.9669e-03,
         9.3086e-01, 3.3382e-05, 1.0147e-04, 4.6010e-07],
        [1.7992e-02, 6.1505e-05, 8.6350e-06, 4.0776e-06, 2.8062e-05, 1.7884e-07,
         2.1865e-07, 8.0336e-06, 2.4396e-03, 9.7946e-01],
        [1.8371e-03, 3.1564e-06, 1.1384e-04, 3.0063e-06, 3.1845e-05, 4.6281e-06,
         5.3200e-06, 1.7263e-05, 9.9780e-01, 1.8088e-04],
        [7.2186e-01, 1.9629e-04, 9.8230e-04, 9.6987e-03, 1.0193e-03, 5.0362e-03,
         1.5239e-02, 1.6316e-04, 2.3508e-01, 1.0719e-02],
        [7.6611e-06, 7.5238e-05, 1.1091e-02, 9.3578e-01, 1.7459e-03, 1.4525e-02,
         3.6238e-02, 2.0789e-04, 8.1826e-05, 2.4313e-04],
        [1.9646e-03, 9.0668e-07, 1.0852e-06, 3.8957e-08, 2.3115e-08, 1.0111e-08,
         2.9286e-09, 1.1960e-08, 9.9803e-01, 4.0649e-08],
        [1.7023e-04, 2.1604e-06, 8.2619e-05, 5.4277e-05, 1.0247e-04, 3.9206e-05,
         1.3518e-08, 2.0571e-05, 9.9953e-01, 3.2908e-06],
        [1.8501e-04, 5.0424e-04, 1.4459e-04, 1.5345e-01, 2.0608e-03, 1.1433e-01,
         1.1869e-05, 7.2499e-01, 8.7002e-05, 4.2341e-03],
        [3.4869e-02, 1.3949e-02, 3.4844e-02, 7.5717e-01, 6.8734e-02, 3.0900e-02,
         2.7396e-02, 1.2221e-02, 1.7675e-02, 2.2405e-03],
        [2.2636e-03, 2.1945e-04, 2.0568e-03, 5.4138e-01, 6.2347e-02, 3.3461e-01,
         7.7418e-03, 4.0857e-02, 7.2149e-03, 1.3162e-03],
        [3.2231e-02, 9.7227e-03, 1.3649e-02, 3.9683e-01, 3.3608e-02, 3.5756e-01,
         1.2094e-01, 1.2010e-02, 1.2767e-02, 1.0682e-02],
        [3.3931e-06, 5.1018e-07, 8.4840e-05, 1.0965e-05, 1.9850e-01, 9.8649e-05,
         2.3373e-06, 8.0130e-01, 2.5734e-07, 4.0048e-08],
        [2.1274e-05, 2.9416e-05, 1.7914e-02, 7.6104e-01, 1.7628e-03, 1.9561e-01,
         6.5628e-03, 1.6969e-02, 5.7681e-05, 2.8008e-05],
        [2.4290e-02, 6.3221e-03, 3.5828e-02, 1.2811e-02, 3.9836e-03, 1.1816e-03,
         9.1090e-01, 1.7085e-03, 1.9586e-03, 1.0150e-03],
        [9.9146e-04, 2.7040e-02, 5.4741e-03, 9.2928e-01, 4.4170e-04, 1.2461e-02,
         4.1073e-04, 8.9370e-05, 1.6100e-02, 7.7069e-03]])
pred,labels
(tensor([3, 8, 8, 8, 6, 6, 1, 6, 3, 1, 0, 9, 5, 7, 9, 8, 5, 7, 8, 6, 7, 0, 4, 9,
         5, 2, 3, 0, 9, 6, 6, 5, 4, 5, 9, 5, 4, 1, 9, 5, 2, 6, 5, 6, 0, 9, 3, 9,
         7, 6, 9, 8, 0, 3, 8, 8, 7, 3, 3, 3, 7, 3, 6, 3]),
 tensor([3, 8, 8, 0, 6, 6, 1, 6, 3, 1, 0, 9, 5, 7, 9, 8, 5, 7, 8, 6, 7, 0, 4, 9,
         5, 2, 4, 0, 9, 6, 6, 5, 4, 5, 9, 2, 4, 1, 9, 5, 4, 6, 5, 6, 0, 9, 3, 9,
         7, 6, 9, 8, 0, 3, 8, 8, 7, 7, 4, 6, 7, 3, 6, 3]))
pred == labels
tensor([ True,  True,  True,  True,  True, False,  True,  True,  True,  True,
        False,  True,  True, False,  True,  True,  True,  True,  True, False,
         True,  True,  True, False,  True,  True,  True,  True,  True,  True,
         True, False,  True,  True,  True,  True,  True, False,  True,  True,
         True,  True,  True,  True,  True,  True, False, False, False, False,
         True,  True, False, False,  True,  True,  True, False,  True,  True,
        False,  True,  True,  True])
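
For this single batch, the boolean tensor converts straight into an accuracy figure; a quick sketch:

# Fraction of correct predictions in the first test batch
print((pred == labels).sum().item() / labels.size(0))
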
model.eval()
with torch.no_grad():
    # Running totals for the accuracy estimate
    num_correct = 0
    total = 0

    for batch, (images, labels) in enumerate(testloader,1):

        logps = model(images)
        output = torch.exp(logps)

        pred = torch.argmax(output, 1)
        total += labels.size(0)
        num_correct += (pred == labels).sum().item()
        print(f'Batch ({batch}/{len(testloader)})')

        # Stop early: scoring all 157 batches on the CPU is slow, so we
        # estimate accuracy from the first 5 batches (320 images)
        if batch == 5:
          break

    print(f'Accuracy of the model on {total} test images: {num_correct * 100 / total}% ')
Batch (1/157)
Batch (2/157)
Batch (3/157)
Batch (4/157)
Batch (5/157)
Accuracy of the model on 320 test images: 79.375%
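
Dropping the early break scores the full test set, and per-class accuracy is a cheap extension while looping; a sketch (expect it to be slow, since the model is still on the CPU):

# Sketch: full-test-set accuracy, overall and per class
model.eval()
class_correct = [0] * 10
class_total = [0] * 10

with torch.no_grad():
  for images, labels in testloader:
    pred = torch.argmax(model(images), 1)
    for p, l in zip(pred, labels):
      class_correct[l] += int(p == l)
      class_total[l] += 1

for name, c, t in zip(CIFAR10_classes, class_correct, class_total):
  print(f'{name}: {100 * c / t:.1f}%')
print(f'Overall: {100 * sum(class_correct) / sum(class_total):.1f}%')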