import torch
import torchvision
import torchvision.transforms as transforms
import torch.nn as nn
import torch.nn.functional as F
import matplotlib.pyplot as plt
import numpy as np
import time
if torch.cuda.is_available():
device = "cuda"
print(f"CUDA Version: {torch.version.cuda}")
print(torch.cuda.get_device_name(0))
else:
device ="cpu"
CUDA Version: 12.1 NVIDIA GeForce RTX 4080 Laptop GPU
Data Loading & Augmentation Pipeline¶
Implementation Details¶
Strategic Over-sampling: Uses a WeightedRandomSampler (1.3x weight for Cats/Dogs) to force higher exposure to difficult class boundaries
Hybrid Augmentation: Combines standard geometry tricks (RandomCrop, HorizontalFlip) with RandAugment, which automatically applies a random sequence of complex distortions (like solarization or rotation) to prevent the model from memorizing the tiny dataset. Uses RandomErasing to mask out random sections of the image, forcing the network to learn robust classification based on partial features
classes = ('plane', 'car', 'bird', 'cat',
'deer', 'dog', 'frog', 'horse', 'ship', 'truck')
class_weights = [1.0, 1.0, 1.0, 1.3, 1.0, 1.3, 1.0, 1.0, 1.0, 1.0] # Indices 3 and 5 are Cat and Dog
# add some augmentation to the training data including Cutout and RandAugment
transform_train = transforms.Compose([
transforms.RandomCrop(32, padding=4),
transforms.RandomHorizontalFlip(),
transforms.RandAugment(num_ops=2, magnitude=9),
transforms.ToTensor(),
transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
transforms.RandomErasing(p=0.5, scale=(0.02, 0.33), ratio=(0.3, 3.3), value=0)
])
transform_test = transforms.Compose(
[transforms.ToTensor(),
transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010))])
batch_size = 32
trainset = torchvision.datasets.CIFAR10(root='./data', train=True,
download=True, transform=transform_train)
sample_weights = [class_weights[label] for _, label in trainset]
sampler = torch.utils.data.WeightedRandomSampler(
weights=sample_weights,
num_samples=len(trainset),
replacement=True
)
trainloader = torch.utils.data.DataLoader(
trainset, batch_size=batch_size, sampler=sampler,
num_workers=2, persistent_workers=True, pin_memory=True
)
testset = torchvision.datasets.CIFAR10(root='./data', train=False,
download=True, transform=transform_test)
testloader = torch.utils.data.DataLoader(testset, batch_size=64,
shuffle=False, num_workers=0)
Files already downloaded and verified Files already downloaded and verified
The Model and Its Weight Initialisation¶
import torch
import torch.nn as nn
import torch.nn.functional as F
class BlurPool(nn.Module):
def __init__(self, channels, stride=2):
super(BlurPool, self).__init__()
f = torch.tensor([1, 2, 1], dtype=torch.float32)
kernel = f[:, None] * f[None, :]
kernel = kernel / kernel.sum()
self.register_buffer('filter', kernel.expand(channels, 1, 3, 3))
self.stride = stride
self.groups = channels
def forward(self, x):
return F.conv2d(x, self.filter, stride=self.stride, padding=1, groups=self.groups)
class CNN(nn.Module):
def __init__(self):
super(CNN, self).__init__()
self.act = nn.LeakyReLU(negative_slope=0.1, inplace=True)
# --- STAGE 1: Enhanced Local Detail ---
self.b1_1 = nn.Conv2d(3, 16, kernel_size=3, padding=1)
self.b1_2 = nn.Conv2d(16, 16, kernel_size=3, padding=1)
self.b2_3x3 = nn.Conv2d(3, 8, kernel_size=3, padding=1)
self.b2_5x5 = nn.Conv2d(3, 8, kernel_size=5, padding=2)
self.b3_pre = nn.Conv2d(3, 4, kernel_size=1)
self.b3 = nn.Conv2d(4, 4, kernel_size=7, padding=3)
self.compress1 = nn.Conv2d(36, 32, kernel_size=1, bias=False)
self.bn1 = nn.BatchNorm2d(32)
self.local_refine = nn.Conv2d(32, 32, kernel_size=3, padding=1, groups=32, bias=False)
self.refine_bn = nn.BatchNorm2d(32)
nn.init.constant_(self.refine_bn.weight, 0)
self.proj1 = nn.Conv2d(32, 64, kernel_size=1, bias=False)
self.cnn11 = nn.Conv2d(32, 64, kernel_size=3, padding=1)
self.downsample1 = nn.Sequential(
BlurPool(64, stride=2),
nn.Conv2d(64, 64, kernel_size=1, bias=False)
)
# --- STAGE 2: Feature Refinement ---
self.cnn20 = nn.Conv2d(64, 64, kernel_size=3, padding=1)
self.cnn21 = nn.Conv2d(64, 64, kernel_size=3, padding=2, dilation=2, bias=False)
self.se_map2 = nn.Sequential(
nn.AdaptiveAvgPool2d(1),
nn.Conv2d(128, 32, 1),
nn.ReLU(inplace=True),
nn.Conv2d(32, 128, 1),
nn.Sigmoid()
)
self.compress2 = nn.Conv2d(128, 128, kernel_size=1, bias=False)
self.bn2 = nn.BatchNorm2d(128)
self.proj2 = nn.Conv2d(128, 256, kernel_size=1, bias=False)
self.cnn22 = nn.Conv2d(128, 256, kernel_size=3, padding=1)
self.downsample2 = nn.Sequential(
BlurPool(256, stride=2),
nn.Conv2d(256, 256, kernel_size=1, bias=False)
)
# --- STAGE 3: High-Level Concepts ---
self.cnn3 = nn.Conv2d(256, 256, kernel_size=3, padding=1)
self.cnn30 = nn.Conv2d(256, 256, kernel_size=5, padding=2)
self.compress3 = nn.Conv2d(512, 256, kernel_size=1, bias=False)
self.bn3 = nn.BatchNorm2d(256)
self.proj3 = nn.Conv2d(256, 512, kernel_size=1, bias=False)
self.cnn33 = nn.Conv2d(256, 512, kernel_size=3, padding=1)
# --- DUAL SE ATTENTION ---
self.se_spatial = nn.Sequential(
nn.AdaptiveAvgPool2d(1),
nn.Conv2d(512, 128, 1),
nn.ReLU(inplace=True),
nn.Conv2d(128, 512, 1),
nn.Sigmoid()
)
self.se_pooled = nn.Sequential(
nn.Linear(1024, 256),
nn.ReLU(inplace=True),
nn.Linear(256, 1024),
nn.Hardsigmoid()
)
self.pool_attn = nn.Sequential(
nn.Conv2d(512, 1, kernel_size=1),
nn.Sigmoid()
)
# --- HEAD ---
self.fc1 = nn.Linear(1024, 512)
self.bn_fc = nn.BatchNorm1d(512)
self.drop = nn.Dropout(0.2)
self.fc2 = nn.Linear(512, 10)
def forward(self, x):
# STAGE 1
b1 = self.act(self.b1_1(x))
b1 = self.b1_2(b1)
b2_3 = self.b2_3x3(x)
b2_5 = self.b2_5x5(x)
b3 = self.act(self.b3_pre(x))
b3 = self.b3(b3)
x = torch.cat([b1, b2_3, b2_5, b3], dim=1)
x_pre = self.act(self.bn1(self.compress1(x)))
x = x_pre + self.local_refine(x_pre)
x = self.refine_bn(x)
x = self.act(self.cnn11(x) + self.proj1(x))
x = self.downsample1(x)
# STAGE 2
x_cat = torch.cat([self.cnn20(x), self.cnn21(x)], dim=1)
x = self.act(self.bn2(self.compress2(x_cat)))
x = x * self.se_map2(x)
res = self.proj2(x)
x = self.act(self.cnn22(x) + res)
x = self.downsample2(x)
# STAGE 3
x = torch.cat([self.cnn3(x), self.cnn30(x)], dim=1)
x = self.act(self.bn3(self.compress3(x)))
res = self.proj3(x)
x = self.act(self.cnn33(x) + res)
# DUAL SE ATTENTION
x = x * self.se_spatial(x)
w = self.pool_attn(x)
x_attn = (x * w).sum(dim=(2,3)) / (w.sum(dim=(2,3)) + 1e-6)
max_p = F.adaptive_max_pool2d(x, 1).flatten(1)
pooled = torch.cat([x_attn, max_p], dim=1)
x = pooled * self.se_pooled(pooled)
# Head
x = self.fc1(x)
x = self.bn_fc(x)
x = F.leaky_relu(x, negative_slope=0.03)
x = self.drop(x)
x = self.fc2(x)
return x
net = CNN()
num_params = sum(p.numel() for p in net.parameters())
print(num_params)
# Kaiming (He) Initialization
def init_weights(m):
if isinstance(m, nn.Conv2d) or isinstance(m, nn.Linear):
nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='leaky_relu', a=0.1)
if m.bias is not None:
nn.init.constant_(m.bias, 0)
elif isinstance(m, nn.BatchNorm2d) or isinstance(m, nn.BatchNorm1d):
nn.init.constant_(m.weight, 1)
nn.init.constant_(m.bias, 0)
net.apply(init_weights)
net.to(device)
5383167
CNN(
(act): LeakyReLU(negative_slope=0.1, inplace=True)
(b1_1): Conv2d(3, 16, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
(b1_2): Conv2d(16, 16, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
(b2_3x3): Conv2d(3, 8, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
(b2_5x5): Conv2d(3, 8, kernel_size=(5, 5), stride=(1, 1), padding=(2, 2))
(b3_pre): Conv2d(3, 4, kernel_size=(1, 1), stride=(1, 1))
(b3): Conv2d(4, 4, kernel_size=(7, 7), stride=(1, 1), padding=(3, 3))
(compress1): Conv2d(36, 32, kernel_size=(1, 1), stride=(1, 1), bias=False)
(bn1): BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(local_refine): Conv2d(32, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), groups=32, bias=False)
(refine_bn): BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(proj1): Conv2d(32, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)
(cnn11): Conv2d(32, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
(downsample1): Sequential(
(0): BlurPool()
(1): Conv2d(64, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)
)
(cnn20): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
(cnn21): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(2, 2), dilation=(2, 2), bias=False)
(se_map2): Sequential(
(0): AdaptiveAvgPool2d(output_size=1)
(1): Conv2d(128, 32, kernel_size=(1, 1), stride=(1, 1))
(2): ReLU(inplace=True)
(3): Conv2d(32, 128, kernel_size=(1, 1), stride=(1, 1))
(4): Sigmoid()
)
(compress2): Conv2d(128, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)
(bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(proj2): Conv2d(128, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
(cnn22): Conv2d(128, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
(downsample2): Sequential(
(0): BlurPool()
(1): Conv2d(256, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
)
(cnn3): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
(cnn30): Conv2d(256, 256, kernel_size=(5, 5), stride=(1, 1), padding=(2, 2))
(compress3): Conv2d(512, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)
(bn3): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(proj3): Conv2d(256, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)
(cnn33): Conv2d(256, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
(se_spatial): Sequential(
(0): AdaptiveAvgPool2d(output_size=1)
(1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1))
(2): ReLU(inplace=True)
(3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1))
(4): Sigmoid()
)
(se_pooled): Sequential(
(0): Linear(in_features=1024, out_features=256, bias=True)
(1): ReLU(inplace=True)
(2): Linear(in_features=256, out_features=1024, bias=True)
(3): Hardsigmoid()
)
(pool_attn): Sequential(
(0): Conv2d(512, 1, kernel_size=(1, 1), stride=(1, 1))
(1): Sigmoid()
)
(fc1): Linear(in_features=1024, out_features=512, bias=True)
(bn_fc): BatchNorm1d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(drop): Dropout(p=0.2, inplace=False)
(fc2): Linear(in_features=512, out_features=10, bias=True)
)
Training¶
Advanced Augmentation - Mixup: Blends pairs of images and their labels during the forward pass. This forces the model to predict across "interpolated" distributions, making the loss calculation far more complex but the resulting decision boundaries much more robust.
Label Smoothing (0.05): Softens the target distribution to prevent overconfidence, tuned specifically to complement the Mixup logic.
Notice that Training Accuracy < Validation Accuracy - This is by design. The training set is intentionally made "nasty" through aggressive augmentation (Mixup + RandAugment + Erasing). If the model can navigate this artificial difficulty, the clean validation set becomes easy. This effectively eliminates the risk of overfitting.
import torch.backends.cudnn as cudnn
torch.manual_seed(137)
import torch.optim as optim
cudnn.benchmark = True
best_acc = 0.0
train_loss_history = []
val_loss_history = []
val_acc_history = []
num_epochs = 900
warmup_epochs = 10
criterion = nn.CrossEntropyLoss(label_smoothing=0.05)
optimizer = optim.SGD(net.parameters(), lr=0.01, momentum=0.9, weight_decay=5e-4)
scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, T_max=num_epochs)
def mixup_data(x, y, alpha=1.0):
if alpha > 0:
lam = torch.distributions.Beta(alpha, alpha).sample().item()
else:
lam = 1
batch_size = x.size()[0]
index = torch.randperm(batch_size).to(x.device)
mixed_x = lam * x + (1 - lam) * x[index, :]
y_a, y_b = y, y[index]
return mixed_x, y_a, y_b, lam
def mixup_criterion(criterion, pred, y_a, y_b, lam):
return lam * criterion(pred, y_a) + (1 - lam) * criterion(pred, y_b)
for epoch in range(num_epochs):
net.train()
running_loss = 0.0
train_correct = 0.0 # Changed to float for Mixup partial credit
train_total = 0
for i, data in enumerate(trainloader, 0):
inputs, labels = data
inputs, labels = inputs.to(device), labels.to(device)
optimizer.zero_grad()
if epoch >= warmup_epochs:
inputs, targets_a, targets_b, lam = mixup_data(inputs, labels, alpha=0.4)
outputs = net(inputs)
loss = mixup_criterion(criterion, outputs, targets_a, targets_b, lam)
else:
outputs = net(inputs)
loss = criterion(outputs, labels)
loss.backward()
optimizer.step()
# Statistics
running_loss += loss.item()
_, predicted = outputs.max(1)
train_total += labels.size(0)
if epoch >= warmup_epochs:
# Partial credit for Mixup
train_correct += (lam * predicted.eq(targets_a).sum().item() +
(1 - lam) * predicted.eq(targets_b).sum().item())
else:
train_correct += predicted.eq(labels).sum().item()
# --- END OF EPOCH LOGGING ---
epoch_loss = running_loss / len(trainloader)
epoch_acc = 100. * train_correct / train_total
train_loss_history.append(epoch_loss)
# --- Validation Section (Run once per epoch) ---
net.eval()
test_loss, correct = 0, 0
size = len(testloader.dataset)
num_batches = len(testloader)
with torch.no_grad():
for X, y in testloader:
X, y = X.to(device), y.to(device)
pred = net(X)
test_loss += criterion(pred, y).item()
correct += (pred.argmax(1) == y).type(torch.float).sum().item()
val_loss_avg = test_loss / num_batches
val_acc = 100 * correct / size
val_loss_history.append(val_loss_avg)
val_acc_history.append(val_acc)
print(f'Epoch [{epoch+1}/{num_epochs}] '
f'Loss: {epoch_loss:.3f} | Acc: {epoch_acc:.2f}% | '
f'Val Acc: {val_acc:.2f}% | LR: {scheduler.get_last_lr()[0]:.5f}')
# Update scheduler
scheduler.step()
if val_acc > best_acc:
best_acc = val_acc
torch.save(net.state_dict(), 'best_model_wide.pth')
print(f"--> New Best Val Acc: {best_acc:.2f}% (Saved!)")
Epoch [1/900] Loss: 2.942 | Acc: 15.75% | Val Acc: 25.72% | LR: 0.01000 --> New Best Val Acc: 25.72% (Saved!) Epoch [2/900] Loss: 2.128 | Acc: 23.73% | Val Acc: 34.59% | LR: 0.01000 --> New Best Val Acc: 34.59% (Saved!) Epoch [3/900] Loss: 1.942 | Acc: 29.65% | Val Acc: 36.62% | LR: 0.01000 --> New Best Val Acc: 36.62% (Saved!) Epoch [4/900] Loss: 1.819 | Acc: 35.41% | Val Acc: 47.00% | LR: 0.01000 --> New Best Val Acc: 47.00% (Saved!) Epoch [5/900] Loss: 1.688 | Acc: 41.64% | Val Acc: 54.07% | LR: 0.01000 --> New Best Val Acc: 54.07% (Saved!) Epoch [6/900] Loss: 1.567 | Acc: 47.32% | Val Acc: 59.69% | LR: 0.01000 --> New Best Val Acc: 59.69% (Saved!) Epoch [7/900] Loss: 1.453 | Acc: 52.57% | Val Acc: 62.64% | LR: 0.01000 --> New Best Val Acc: 62.64% (Saved!) Epoch [8/900] Loss: 1.371 | Acc: 55.91% | Val Acc: 66.90% | LR: 0.01000 --> New Best Val Acc: 66.90% (Saved!) Epoch [9/900] Loss: 1.301 | Acc: 59.05% | Val Acc: 68.81% | LR: 0.01000 --> New Best Val Acc: 68.81% (Saved!) Epoch [10/900] Loss: 1.253 | Acc: 61.18% | Val Acc: 72.66% | LR: 0.01000 --> New Best Val Acc: 72.66% (Saved!) Epoch [11/900] Loss: 1.535 | Acc: 53.62% | Val Acc: 75.45% | LR: 0.01000 --> New Best Val Acc: 75.45% (Saved!) Epoch [12/900] Loss: 1.495 | Acc: 55.53% | Val Acc: 75.34% | LR: 0.01000 Epoch [13/900] Loss: 1.489 | Acc: 55.75% | Val Acc: 77.47% | LR: 0.01000 --> New Best Val Acc: 77.47% (Saved!) Epoch [14/900] Loss: 1.454 | Acc: 57.57% | Val Acc: 78.22% | LR: 0.00999 --> New Best Val Acc: 78.22% (Saved!) Epoch [15/900] Loss: 1.454 | Acc: 57.54% | Val Acc: 78.80% | LR: 0.00999 --> New Best Val Acc: 78.80% (Saved!) Epoch [16/900] Loss: 1.444 | Acc: 57.99% | Val Acc: 79.35% | LR: 0.00999 --> New Best Val Acc: 79.35% (Saved!) Epoch [17/900] Loss: 1.404 | Acc: 59.72% | Val Acc: 78.94% | LR: 0.00999 Epoch [18/900] Loss: 1.408 | Acc: 59.55% | Val Acc: 78.09% | LR: 0.00999 Epoch [19/900] Loss: 1.404 | Acc: 59.47% | Val Acc: 78.93% | LR: 0.00999 Epoch [20/900] Loss: 1.388 | Acc: 60.52% | Val Acc: 80.60% | LR: 0.00999 --> New Best Val Acc: 80.60% (Saved!) Epoch [21/900] Loss: 1.378 | Acc: 61.11% | Val Acc: 81.77% | LR: 0.00999 --> New Best Val Acc: 81.77% (Saved!) Epoch [22/900] Loss: 1.389 | Acc: 60.63% | Val Acc: 80.74% | LR: 0.00999 Epoch [23/900] Loss: 1.375 | Acc: 61.16% | Val Acc: 81.51% | LR: 0.00999 Epoch [24/900] Loss: 1.371 | Acc: 61.29% | Val Acc: 82.53% | LR: 0.00998 --> New Best Val Acc: 82.53% (Saved!) Epoch [25/900] Loss: 1.364 | Acc: 61.57% | Val Acc: 76.36% | LR: 0.00998 Epoch [26/900] Loss: 1.343 | Acc: 62.60% | Val Acc: 80.41% | LR: 0.00998 Epoch [27/900] Loss: 1.344 | Acc: 62.66% | Val Acc: 83.33% | LR: 0.00998 --> New Best Val Acc: 83.33% (Saved!) Epoch [28/900] Loss: 1.344 | Acc: 62.70% | Val Acc: 79.70% | LR: 0.00998 Epoch [29/900] Loss: 1.339 | Acc: 62.86% | Val Acc: 82.81% | LR: 0.00998 Epoch [30/900] Loss: 1.338 | Acc: 63.00% | Val Acc: 82.81% | LR: 0.00997 Epoch [31/900] Loss: 1.340 | Acc: 62.82% | Val Acc: 82.76% | LR: 0.00997 Epoch [32/900] Loss: 1.321 | Acc: 63.71% | Val Acc: 82.14% | LR: 0.00997 Epoch [33/900] Loss: 1.326 | Acc: 63.64% | Val Acc: 83.81% | LR: 0.00997 --> New Best Val Acc: 83.81% (Saved!) Epoch [34/900] Loss: 1.309 | Acc: 64.26% | Val Acc: 83.77% | LR: 0.00997 Epoch [35/900] Loss: 1.309 | Acc: 64.10% | Val Acc: 84.57% | LR: 0.00996 --> New Best Val Acc: 84.57% (Saved!) Epoch [36/900] Loss: 1.303 | Acc: 64.41% | Val Acc: 83.97% | LR: 0.00996 Epoch [37/900] Loss: 1.313 | Acc: 64.20% | Val Acc: 84.63% | LR: 0.00996 --> New Best Val Acc: 84.63% (Saved!) Epoch [38/900] Loss: 1.296 | Acc: 64.78% | Val Acc: 83.45% | LR: 0.00996 Epoch [39/900] Loss: 1.303 | Acc: 64.59% | Val Acc: 84.18% | LR: 0.00996 Epoch [40/900] Loss: 1.315 | Acc: 64.12% | Val Acc: 84.09% | LR: 0.00995 Epoch [41/900] Loss: 1.311 | Acc: 64.27% | Val Acc: 81.99% | LR: 0.00995 Epoch [42/900] Loss: 1.312 | Acc: 64.24% | Val Acc: 82.70% | LR: 0.00995 Epoch [43/900] Loss: 1.304 | Acc: 64.50% | Val Acc: 84.43% | LR: 0.00995 Epoch [44/900] Loss: 1.298 | Acc: 64.88% | Val Acc: 83.04% | LR: 0.00994 Epoch [45/900] Loss: 1.295 | Acc: 64.75% | Val Acc: 83.57% | LR: 0.00994 Epoch [46/900] Loss: 1.304 | Acc: 64.50% | Val Acc: 86.08% | LR: 0.00994 --> New Best Val Acc: 86.08% (Saved!) Epoch [47/900] Loss: 1.303 | Acc: 64.53% | Val Acc: 84.33% | LR: 0.00994 Epoch [48/900] Loss: 1.293 | Acc: 65.01% | Val Acc: 84.76% | LR: 0.00993 Epoch [49/900] Loss: 1.286 | Acc: 65.21% | Val Acc: 85.38% | LR: 0.00993 Epoch [50/900] Loss: 1.292 | Acc: 65.36% | Val Acc: 84.97% | LR: 0.00993 Epoch [51/900] Loss: 1.275 | Acc: 65.77% | Val Acc: 85.29% | LR: 0.00992 Epoch [52/900] Loss: 1.286 | Acc: 65.27% | Val Acc: 83.92% | LR: 0.00992 Epoch [53/900] Loss: 1.285 | Acc: 65.57% | Val Acc: 85.70% | LR: 0.00992 Epoch [54/900] Loss: 1.278 | Acc: 65.64% | Val Acc: 85.49% | LR: 0.00991 Epoch [55/900] Loss: 1.274 | Acc: 65.76% | Val Acc: 83.73% | LR: 0.00991 Epoch [56/900] Loss: 1.261 | Acc: 66.27% | Val Acc: 85.80% | LR: 0.00991 Epoch [57/900] Loss: 1.275 | Acc: 65.88% | Val Acc: 85.09% | LR: 0.00990 Epoch [58/900] Loss: 1.264 | Acc: 66.40% | Val Acc: 85.60% | LR: 0.00990 Epoch [59/900] Loss: 1.261 | Acc: 66.47% | Val Acc: 86.99% | LR: 0.00990 --> New Best Val Acc: 86.99% (Saved!) Epoch [60/900] Loss: 1.281 | Acc: 65.68% | Val Acc: 85.83% | LR: 0.00989 Epoch [61/900] Loss: 1.279 | Acc: 65.84% | Val Acc: 85.74% | LR: 0.00989 Epoch [62/900] Loss: 1.268 | Acc: 66.26% | Val Acc: 86.29% | LR: 0.00989 Epoch [63/900] Loss: 1.272 | Acc: 66.05% | Val Acc: 86.40% | LR: 0.00988 Epoch [64/900] Loss: 1.278 | Acc: 65.78% | Val Acc: 85.85% | LR: 0.00988 Epoch [65/900] Loss: 1.262 | Acc: 66.29% | Val Acc: 86.70% | LR: 0.00988 Epoch [66/900] Loss: 1.263 | Acc: 66.40% | Val Acc: 85.87% | LR: 0.00987 Epoch [67/900] Loss: 1.256 | Acc: 66.80% | Val Acc: 86.40% | LR: 0.00987 Epoch [68/900] Loss: 1.261 | Acc: 66.27% | Val Acc: 87.26% | LR: 0.00986 --> New Best Val Acc: 87.26% (Saved!) Epoch [69/900] Loss: 1.254 | Acc: 66.60% | Val Acc: 86.99% | LR: 0.00986 Epoch [70/900] Loss: 1.250 | Acc: 66.87% | Val Acc: 86.74% | LR: 0.00986 Epoch [71/900] Loss: 1.277 | Acc: 66.06% | Val Acc: 86.24% | LR: 0.00985 Epoch [72/900] Loss: 1.280 | Acc: 65.64% | Val Acc: 87.14% | LR: 0.00985 Epoch [73/900] Loss: 1.258 | Acc: 66.54% | Val Acc: 84.86% | LR: 0.00984 Epoch [74/900] Loss: 1.253 | Acc: 67.04% | Val Acc: 86.90% | LR: 0.00984 Epoch [75/900] Loss: 1.233 | Acc: 67.48% | Val Acc: 85.84% | LR: 0.00983 Epoch [76/900] Loss: 1.273 | Acc: 66.21% | Val Acc: 85.18% | LR: 0.00983 Epoch [77/900] Loss: 1.260 | Acc: 66.42% | Val Acc: 85.88% | LR: 0.00983 Epoch [78/900] Loss: 1.239 | Acc: 67.29% | Val Acc: 83.86% | LR: 0.00982 Epoch [79/900] Loss: 1.258 | Acc: 66.70% | Val Acc: 87.76% | LR: 0.00982 --> New Best Val Acc: 87.76% (Saved!) Epoch [80/900] Loss: 1.242 | Acc: 67.35% | Val Acc: 86.34% | LR: 0.00981 Epoch [81/900] Loss: 1.257 | Acc: 66.74% | Val Acc: 85.28% | LR: 0.00981 Epoch [82/900] Loss: 1.250 | Acc: 67.21% | Val Acc: 85.51% | LR: 0.00980 Epoch [83/900] Loss: 1.231 | Acc: 67.78% | Val Acc: 86.02% | LR: 0.00980 Epoch [84/900] Loss: 1.250 | Acc: 67.01% | Val Acc: 87.39% | LR: 0.00979 Epoch [85/900] Loss: 1.249 | Acc: 67.03% | Val Acc: 86.93% | LR: 0.00979 Epoch [86/900] Loss: 1.243 | Acc: 67.19% | Val Acc: 85.77% | LR: 0.00978 Epoch [87/900] Loss: 1.242 | Acc: 67.21% | Val Acc: 85.46% | LR: 0.00978 Epoch [88/900] Loss: 1.249 | Acc: 66.53% | Val Acc: 85.28% | LR: 0.00977 Epoch [89/900] Loss: 1.230 | Acc: 67.25% | Val Acc: 85.86% | LR: 0.00977 Epoch [90/900] Loss: 1.218 | Acc: 67.38% | Val Acc: 85.64% | LR: 0.00976 Epoch [91/900] Loss: 1.232 | Acc: 67.21% | Val Acc: 84.48% | LR: 0.00976 Epoch [92/900] Loss: 1.244 | Acc: 66.43% | Val Acc: 87.82% | LR: 0.00975 --> New Best Val Acc: 87.82% (Saved!) Epoch [93/900] Loss: 1.228 | Acc: 67.00% | Val Acc: 87.30% | LR: 0.00974 Epoch [94/900] Loss: 1.245 | Acc: 66.38% | Val Acc: 84.99% | LR: 0.00974 Epoch [95/900] Loss: 1.224 | Acc: 67.20% | Val Acc: 87.12% | LR: 0.00973 Epoch [96/900] Loss: 1.214 | Acc: 67.50% | Val Acc: 85.65% | LR: 0.00973 Epoch [97/900] Loss: 1.213 | Acc: 67.54% | Val Acc: 87.32% | LR: 0.00972 Epoch [98/900] Loss: 1.210 | Acc: 67.75% | Val Acc: 87.84% | LR: 0.00972 --> New Best Val Acc: 87.84% (Saved!) Epoch [99/900] Loss: 1.236 | Acc: 66.95% | Val Acc: 86.92% | LR: 0.00971 Epoch [100/900] Loss: 1.213 | Acc: 67.51% | Val Acc: 86.92% | LR: 0.00970 Epoch [101/900] Loss: 1.221 | Acc: 67.27% | Val Acc: 87.78% | LR: 0.00970 Epoch [102/900] Loss: 1.224 | Acc: 66.96% | Val Acc: 87.08% | LR: 0.00969 Epoch [103/900] Loss: 1.215 | Acc: 67.41% | Val Acc: 87.35% | LR: 0.00969 Epoch [104/900] Loss: 1.221 | Acc: 67.05% | Val Acc: 85.93% | LR: 0.00968 Epoch [105/900] Loss: 1.214 | Acc: 67.50% | Val Acc: 85.66% | LR: 0.00967 Epoch [106/900] Loss: 1.215 | Acc: 67.51% | Val Acc: 85.99% | LR: 0.00967 Epoch [107/900] Loss: 1.201 | Acc: 68.04% | Val Acc: 87.49% | LR: 0.00966 Epoch [108/900] Loss: 1.220 | Acc: 67.40% | Val Acc: 87.08% | LR: 0.00966 Epoch [109/900] Loss: 1.222 | Acc: 67.17% | Val Acc: 87.26% | LR: 0.00965 Epoch [110/900] Loss: 1.211 | Acc: 67.71% | Val Acc: 86.49% | LR: 0.00964 Epoch [111/900] Loss: 1.219 | Acc: 67.64% | Val Acc: 86.71% | LR: 0.00964 Epoch [112/900] Loss: 1.186 | Acc: 68.52% | Val Acc: 87.90% | LR: 0.00963 --> New Best Val Acc: 87.90% (Saved!) Epoch [113/900] Loss: 1.206 | Acc: 67.70% | Val Acc: 88.10% | LR: 0.00962 --> New Best Val Acc: 88.10% (Saved!) Epoch [114/900] Loss: 1.215 | Acc: 67.48% | Val Acc: 87.99% | LR: 0.00962 Epoch [115/900] Loss: 1.202 | Acc: 67.90% | Val Acc: 86.92% | LR: 0.00961 Epoch [116/900] Loss: 1.192 | Acc: 68.22% | Val Acc: 88.76% | LR: 0.00960 --> New Best Val Acc: 88.76% (Saved!) Epoch [117/900] Loss: 1.208 | Acc: 68.05% | Val Acc: 87.07% | LR: 0.00960 Epoch [118/900] Loss: 1.194 | Acc: 68.17% | Val Acc: 87.92% | LR: 0.00959 Epoch [119/900] Loss: 1.198 | Acc: 68.26% | Val Acc: 84.37% | LR: 0.00958 Epoch [120/900] Loss: 1.213 | Acc: 67.50% | Val Acc: 88.92% | LR: 0.00957 --> New Best Val Acc: 88.92% (Saved!) Epoch [121/900] Loss: 1.207 | Acc: 67.91% | Val Acc: 87.05% | LR: 0.00957 Epoch [122/900] Loss: 1.222 | Acc: 67.39% | Val Acc: 88.39% | LR: 0.00956 Epoch [123/900] Loss: 1.207 | Acc: 67.78% | Val Acc: 85.57% | LR: 0.00955 Epoch [124/900] Loss: 1.214 | Acc: 67.67% | Val Acc: 83.76% | LR: 0.00955 Epoch [125/900] Loss: 1.214 | Acc: 67.67% | Val Acc: 88.14% | LR: 0.00954 Epoch [126/900] Loss: 1.210 | Acc: 67.76% | Val Acc: 88.01% | LR: 0.00953 Epoch [127/900] Loss: 1.208 | Acc: 67.79% | Val Acc: 87.83% | LR: 0.00952 Epoch [128/900] Loss: 1.207 | Acc: 68.06% | Val Acc: 87.22% | LR: 0.00952 Epoch [129/900] Loss: 1.201 | Acc: 68.01% | Val Acc: 87.78% | LR: 0.00951 Epoch [130/900] Loss: 1.214 | Acc: 67.44% | Val Acc: 85.70% | LR: 0.00950 Epoch [131/900] Loss: 1.206 | Acc: 67.77% | Val Acc: 87.16% | LR: 0.00949 Epoch [132/900] Loss: 1.199 | Acc: 68.08% | Val Acc: 87.69% | LR: 0.00949 Epoch [133/900] Loss: 1.198 | Acc: 68.31% | Val Acc: 87.66% | LR: 0.00948 Epoch [134/900] Loss: 1.197 | Acc: 68.15% | Val Acc: 87.56% | LR: 0.00947 Epoch [135/900] Loss: 1.190 | Acc: 68.32% | Val Acc: 86.81% | LR: 0.00946 Epoch [136/900] Loss: 1.203 | Acc: 68.07% | Val Acc: 87.71% | LR: 0.00946 Epoch [137/900] Loss: 1.199 | Acc: 68.15% | Val Acc: 87.51% | LR: 0.00945 Epoch [138/900] Loss: 1.213 | Acc: 67.88% | Val Acc: 87.97% | LR: 0.00944 Epoch [139/900] Loss: 1.202 | Acc: 67.96% | Val Acc: 87.08% | LR: 0.00943 Epoch [140/900] Loss: 1.205 | Acc: 67.73% | Val Acc: 85.92% | LR: 0.00942 Epoch [141/900] Loss: 1.207 | Acc: 67.91% | Val Acc: 87.68% | LR: 0.00941 Epoch [142/900] Loss: 1.184 | Acc: 68.76% | Val Acc: 87.83% | LR: 0.00941 Epoch [143/900] Loss: 1.184 | Acc: 68.74% | Val Acc: 86.20% | LR: 0.00940 Epoch [144/900] Loss: 1.202 | Acc: 67.97% | Val Acc: 87.93% | LR: 0.00939 Epoch [145/900] Loss: 1.181 | Acc: 68.98% | Val Acc: 88.62% | LR: 0.00938 Epoch [146/900] Loss: 1.207 | Acc: 67.86% | Val Acc: 87.41% | LR: 0.00937 Epoch [147/900] Loss: 1.184 | Acc: 68.77% | Val Acc: 87.21% | LR: 0.00936 Epoch [148/900] Loss: 1.198 | Acc: 67.98% | Val Acc: 88.98% | LR: 0.00936 --> New Best Val Acc: 88.98% (Saved!) Epoch [149/900] Loss: 1.180 | Acc: 68.67% | Val Acc: 87.95% | LR: 0.00935 Epoch [150/900] Loss: 1.210 | Acc: 67.78% | Val Acc: 88.70% | LR: 0.00934 Epoch [151/900] Loss: 1.180 | Acc: 68.81% | Val Acc: 86.76% | LR: 0.00933 Epoch [152/900] Loss: 1.203 | Acc: 67.93% | Val Acc: 86.96% | LR: 0.00932 Epoch [153/900] Loss: 1.195 | Acc: 68.51% | Val Acc: 86.87% | LR: 0.00931 Epoch [154/900] Loss: 1.208 | Acc: 67.82% | Val Acc: 85.36% | LR: 0.00930 Epoch [155/900] Loss: 1.193 | Acc: 68.42% | Val Acc: 87.70% | LR: 0.00929 Epoch [156/900] Loss: 1.221 | Acc: 67.54% | Val Acc: 87.14% | LR: 0.00929 Epoch [157/900] Loss: 1.193 | Acc: 68.46% | Val Acc: 87.77% | LR: 0.00928 Epoch [158/900] Loss: 1.199 | Acc: 68.02% | Val Acc: 88.72% | LR: 0.00927 Epoch [159/900] Loss: 1.202 | Acc: 67.96% | Val Acc: 88.12% | LR: 0.00926 Epoch [160/900] Loss: 1.190 | Acc: 68.43% | Val Acc: 88.31% | LR: 0.00925 Epoch [161/900] Loss: 1.192 | Acc: 68.51% | Val Acc: 87.57% | LR: 0.00924 Epoch [162/900] Loss: 1.189 | Acc: 68.56% | Val Acc: 87.91% | LR: 0.00923 Epoch [163/900] Loss: 1.209 | Acc: 67.94% | Val Acc: 87.18% | LR: 0.00922 Epoch [164/900] Loss: 1.209 | Acc: 67.67% | Val Acc: 88.65% | LR: 0.00921 Epoch [165/900] Loss: 1.197 | Acc: 68.23% | Val Acc: 86.48% | LR: 0.00920 Epoch [166/900] Loss: 1.207 | Acc: 68.08% | Val Acc: 87.27% | LR: 0.00919 Epoch [167/900] Loss: 1.199 | Acc: 68.32% | Val Acc: 87.13% | LR: 0.00918 Epoch [168/900] Loss: 1.199 | Acc: 68.20% | Val Acc: 86.81% | LR: 0.00917 Epoch [169/900] Loss: 1.205 | Acc: 67.79% | Val Acc: 86.22% | LR: 0.00916 Epoch [170/900] Loss: 1.208 | Acc: 67.68% | Val Acc: 87.86% | LR: 0.00915 Epoch [171/900] Loss: 1.184 | Acc: 68.90% | Val Acc: 87.87% | LR: 0.00915 Epoch [172/900] Loss: 1.188 | Acc: 68.47% | Val Acc: 89.10% | LR: 0.00914 --> New Best Val Acc: 89.10% (Saved!) Epoch [173/900] Loss: 1.195 | Acc: 68.27% | Val Acc: 87.68% | LR: 0.00913 Epoch [174/900] Loss: 1.200 | Acc: 68.37% | Val Acc: 88.56% | LR: 0.00912 Epoch [175/900] Loss: 1.190 | Acc: 68.54% | Val Acc: 88.63% | LR: 0.00911 Epoch [176/900] Loss: 1.198 | Acc: 68.13% | Val Acc: 87.13% | LR: 0.00910 Epoch [177/900] Loss: 1.202 | Acc: 67.84% | Val Acc: 88.04% | LR: 0.00909 Epoch [178/900] Loss: 1.189 | Acc: 68.70% | Val Acc: 87.89% | LR: 0.00908 Epoch [179/900] Loss: 1.195 | Acc: 68.22% | Val Acc: 87.10% | LR: 0.00907 Epoch [180/900] Loss: 1.194 | Acc: 68.29% | Val Acc: 84.95% | LR: 0.00906 Epoch [181/900] Loss: 1.181 | Acc: 68.97% | Val Acc: 87.93% | LR: 0.00905 Epoch [182/900] Loss: 1.198 | Acc: 68.31% | Val Acc: 86.21% | LR: 0.00903 Epoch [183/900] Loss: 1.190 | Acc: 68.43% | Val Acc: 87.68% | LR: 0.00902 Epoch [184/900] Loss: 1.185 | Acc: 68.77% | Val Acc: 88.27% | LR: 0.00901 Epoch [185/900] Loss: 1.200 | Acc: 68.40% | Val Acc: 87.78% | LR: 0.00900 Epoch [186/900] Loss: 1.181 | Acc: 68.88% | Val Acc: 89.30% | LR: 0.00899 --> New Best Val Acc: 89.30% (Saved!) Epoch [187/900] Loss: 1.185 | Acc: 68.74% | Val Acc: 87.69% | LR: 0.00898 Epoch [188/900] Loss: 1.184 | Acc: 68.75% | Val Acc: 89.16% | LR: 0.00897 Epoch [189/900] Loss: 1.184 | Acc: 68.75% | Val Acc: 85.81% | LR: 0.00896 Epoch [190/900] Loss: 1.196 | Acc: 68.24% | Val Acc: 88.59% | LR: 0.00895 Epoch [191/900] Loss: 1.190 | Acc: 68.57% | Val Acc: 88.88% | LR: 0.00894 Epoch [192/900] Loss: 1.190 | Acc: 68.65% | Val Acc: 87.46% | LR: 0.00893 Epoch [193/900] Loss: 1.180 | Acc: 68.94% | Val Acc: 86.59% | LR: 0.00892 Epoch [194/900] Loss: 1.178 | Acc: 69.10% | Val Acc: 88.45% | LR: 0.00891 Epoch [195/900] Loss: 1.189 | Acc: 68.79% | Val Acc: 84.66% | LR: 0.00890 Epoch [196/900] Loss: 1.190 | Acc: 68.75% | Val Acc: 88.15% | LR: 0.00889 Epoch [197/900] Loss: 1.189 | Acc: 68.60% | Val Acc: 88.00% | LR: 0.00887 Epoch [198/900] Loss: 1.175 | Acc: 69.37% | Val Acc: 88.61% | LR: 0.00886 Epoch [199/900] Loss: 1.199 | Acc: 68.20% | Val Acc: 88.19% | LR: 0.00885 Epoch [200/900] Loss: 1.164 | Acc: 69.60% | Val Acc: 87.53% | LR: 0.00884 Epoch [201/900] Loss: 1.180 | Acc: 69.01% | Val Acc: 88.88% | LR: 0.00883 Epoch [202/900] Loss: 1.179 | Acc: 69.04% | Val Acc: 88.48% | LR: 0.00882 Epoch [203/900] Loss: 1.182 | Acc: 69.02% | Val Acc: 87.17% | LR: 0.00881 Epoch [204/900] Loss: 1.179 | Acc: 68.98% | Val Acc: 87.74% | LR: 0.00880 Epoch [205/900] Loss: 1.167 | Acc: 69.40% | Val Acc: 88.24% | LR: 0.00878 Epoch [206/900] Loss: 1.177 | Acc: 69.39% | Val Acc: 88.78% | LR: 0.00877 Epoch [207/900] Loss: 1.196 | Acc: 68.39% | Val Acc: 87.00% | LR: 0.00876 Epoch [208/900] Loss: 1.174 | Acc: 69.29% | Val Acc: 87.79% | LR: 0.00875 Epoch [209/900] Loss: 1.162 | Acc: 69.73% | Val Acc: 87.99% | LR: 0.00874 Epoch [210/900] Loss: 1.178 | Acc: 69.04% | Val Acc: 88.51% | LR: 0.00873 Epoch [211/900] Loss: 1.179 | Acc: 69.04% | Val Acc: 87.93% | LR: 0.00872 Epoch [212/900] Loss: 1.176 | Acc: 69.23% | Val Acc: 87.10% | LR: 0.00870 Epoch [213/900] Loss: 1.169 | Acc: 69.34% | Val Acc: 88.78% | LR: 0.00869 Epoch [214/900] Loss: 1.179 | Acc: 69.16% | Val Acc: 88.87% | LR: 0.00868 Epoch [215/900] Loss: 1.179 | Acc: 68.97% | Val Acc: 89.96% | LR: 0.00867 --> New Best Val Acc: 89.96% (Saved!) Epoch [216/900] Loss: 1.186 | Acc: 68.76% | Val Acc: 87.85% | LR: 0.00866 Epoch [217/900] Loss: 1.167 | Acc: 69.39% | Val Acc: 88.31% | LR: 0.00864 Epoch [218/900] Loss: 1.162 | Acc: 69.59% | Val Acc: 86.90% | LR: 0.00863 Epoch [219/900] Loss: 1.177 | Acc: 69.04% | Val Acc: 86.90% | LR: 0.00862 Epoch [220/900] Loss: 1.174 | Acc: 69.34% | Val Acc: 89.27% | LR: 0.00861 Epoch [221/900] Loss: 1.176 | Acc: 69.14% | Val Acc: 87.20% | LR: 0.00860 Epoch [222/900] Loss: 1.174 | Acc: 69.15% | Val Acc: 89.10% | LR: 0.00858 Epoch [223/900] Loss: 1.168 | Acc: 69.36% | Val Acc: 88.74% | LR: 0.00857 Epoch [224/900] Loss: 1.169 | Acc: 69.68% | Val Acc: 87.92% | LR: 0.00856 Epoch [225/900] Loss: 1.177 | Acc: 69.21% | Val Acc: 89.23% | LR: 0.00855 Epoch [226/900] Loss: 1.186 | Acc: 68.65% | Val Acc: 88.83% | LR: 0.00854 Epoch [227/900] Loss: 1.170 | Acc: 69.41% | Val Acc: 88.72% | LR: 0.00852 Epoch [228/900] Loss: 1.181 | Acc: 69.02% | Val Acc: 88.98% | LR: 0.00851 Epoch [229/900] Loss: 1.175 | Acc: 69.26% | Val Acc: 89.39% | LR: 0.00850 Epoch [230/900] Loss: 1.176 | Acc: 69.22% | Val Acc: 88.89% | LR: 0.00849 Epoch [231/900] Loss: 1.154 | Acc: 70.19% | Val Acc: 89.60% | LR: 0.00847 Epoch [232/900] Loss: 1.171 | Acc: 69.33% | Val Acc: 87.75% | LR: 0.00846 Epoch [233/900] Loss: 1.176 | Acc: 69.06% | Val Acc: 88.79% | LR: 0.00845 Epoch [234/900] Loss: 1.161 | Acc: 69.83% | Val Acc: 88.82% | LR: 0.00844 Epoch [235/900] Loss: 1.176 | Acc: 69.42% | Val Acc: 88.55% | LR: 0.00842 Epoch [236/900] Loss: 1.180 | Acc: 68.89% | Val Acc: 89.20% | LR: 0.00841 Epoch [237/900] Loss: 1.175 | Acc: 69.04% | Val Acc: 88.39% | LR: 0.00840 Epoch [238/900] Loss: 1.186 | Acc: 68.69% | Val Acc: 87.83% | LR: 0.00838 Epoch [239/900] Loss: 1.170 | Acc: 69.28% | Val Acc: 89.28% | LR: 0.00837 Epoch [240/900] Loss: 1.168 | Acc: 69.43% | Val Acc: 88.29% | LR: 0.00836 Epoch [241/900] Loss: 1.177 | Acc: 69.34% | Val Acc: 87.41% | LR: 0.00835 Epoch [242/900] Loss: 1.192 | Acc: 68.30% | Val Acc: 88.04% | LR: 0.00833 Epoch [243/900] Loss: 1.158 | Acc: 69.94% | Val Acc: 86.81% | LR: 0.00832 Epoch [244/900] Loss: 1.174 | Acc: 69.33% | Val Acc: 88.40% | LR: 0.00831 Epoch [245/900] Loss: 1.170 | Acc: 69.30% | Val Acc: 88.61% | LR: 0.00829 Epoch [246/900] Loss: 1.168 | Acc: 69.55% | Val Acc: 87.00% | LR: 0.00828 Epoch [247/900] Loss: 1.187 | Acc: 68.82% | Val Acc: 89.44% | LR: 0.00827 Epoch [248/900] Loss: 1.168 | Acc: 69.49% | Val Acc: 88.82% | LR: 0.00825 Epoch [249/900] Loss: 1.164 | Acc: 69.48% | Val Acc: 88.83% | LR: 0.00824 Epoch [250/900] Loss: 1.175 | Acc: 69.08% | Val Acc: 86.75% | LR: 0.00823 Epoch [251/900] Loss: 1.146 | Acc: 70.28% | Val Acc: 88.71% | LR: 0.00821 Epoch [252/900] Loss: 1.183 | Acc: 68.97% | Val Acc: 88.51% | LR: 0.00820 Epoch [253/900] Loss: 1.162 | Acc: 69.63% | Val Acc: 88.78% | LR: 0.00819 Epoch [254/900] Loss: 1.174 | Acc: 69.49% | Val Acc: 85.17% | LR: 0.00817 Epoch [255/900] Loss: 1.176 | Acc: 69.32% | Val Acc: 89.64% | LR: 0.00816 Epoch [256/900] Loss: 1.146 | Acc: 70.22% | Val Acc: 88.30% | LR: 0.00815 Epoch [257/900] Loss: 1.172 | Acc: 69.27% | Val Acc: 88.81% | LR: 0.00813 Epoch [258/900] Loss: 1.166 | Acc: 69.53% | Val Acc: 88.81% | LR: 0.00812 Epoch [259/900] Loss: 1.168 | Acc: 69.48% | Val Acc: 86.80% | LR: 0.00811 Epoch [260/900] Loss: 1.150 | Acc: 70.11% | Val Acc: 88.40% | LR: 0.00809 Epoch [261/900] Loss: 1.153 | Acc: 70.22% | Val Acc: 89.21% | LR: 0.00808 Epoch [262/900] Loss: 1.182 | Acc: 68.91% | Val Acc: 88.81% | LR: 0.00806 Epoch [263/900] Loss: 1.188 | Acc: 68.72% | Val Acc: 88.82% | LR: 0.00805 Epoch [264/900] Loss: 1.164 | Acc: 69.66% | Val Acc: 89.04% | LR: 0.00804 Epoch [265/900] Loss: 1.196 | Acc: 68.60% | Val Acc: 89.61% | LR: 0.00802 Epoch [266/900] Loss: 1.140 | Acc: 70.68% | Val Acc: 88.01% | LR: 0.00801 Epoch [267/900] Loss: 1.166 | Acc: 69.57% | Val Acc: 90.04% | LR: 0.00800 --> New Best Val Acc: 90.04% (Saved!) Epoch [268/900] Loss: 1.162 | Acc: 69.90% | Val Acc: 89.37% | LR: 0.00798 Epoch [269/900] Loss: 1.170 | Acc: 69.43% | Val Acc: 87.89% | LR: 0.00797 Epoch [270/900] Loss: 1.160 | Acc: 69.70% | Val Acc: 89.01% | LR: 0.00795 Epoch [271/900] Loss: 1.160 | Acc: 69.84% | Val Acc: 89.60% | LR: 0.00794 Epoch [272/900] Loss: 1.165 | Acc: 69.79% | Val Acc: 88.33% | LR: 0.00792 Epoch [273/900] Loss: 1.164 | Acc: 69.60% | Val Acc: 89.07% | LR: 0.00791 Epoch [274/900] Loss: 1.152 | Acc: 69.99% | Val Acc: 88.72% | LR: 0.00790 Epoch [275/900] Loss: 1.154 | Acc: 69.79% | Val Acc: 89.32% | LR: 0.00788 Epoch [276/900] Loss: 1.176 | Acc: 69.32% | Val Acc: 88.09% | LR: 0.00787 Epoch [277/900] Loss: 1.167 | Acc: 69.63% | Val Acc: 89.35% | LR: 0.00785 Epoch [278/900] Loss: 1.162 | Acc: 69.52% | Val Acc: 89.75% | LR: 0.00784 Epoch [279/900] Loss: 1.143 | Acc: 70.34% | Val Acc: 86.78% | LR: 0.00782 Epoch [280/900] Loss: 1.162 | Acc: 69.74% | Val Acc: 88.22% | LR: 0.00781 Epoch [281/900] Loss: 1.154 | Acc: 70.19% | Val Acc: 89.70% | LR: 0.00780 Epoch [282/900] Loss: 1.145 | Acc: 70.65% | Val Acc: 89.02% | LR: 0.00778 Epoch [283/900] Loss: 1.165 | Acc: 69.84% | Val Acc: 89.01% | LR: 0.00777 Epoch [284/900] Loss: 1.153 | Acc: 70.21% | Val Acc: 89.09% | LR: 0.00775 Epoch [285/900] Loss: 1.146 | Acc: 70.38% | Val Acc: 88.81% | LR: 0.00774 Epoch [286/900] Loss: 1.155 | Acc: 69.95% | Val Acc: 89.58% | LR: 0.00772 Epoch [287/900] Loss: 1.148 | Acc: 70.38% | Val Acc: 87.94% | LR: 0.00771 Epoch [288/900] Loss: 1.153 | Acc: 70.21% | Val Acc: 87.83% | LR: 0.00769 Epoch [289/900] Loss: 1.164 | Acc: 69.66% | Val Acc: 89.18% | LR: 0.00768 Epoch [290/900] Loss: 1.153 | Acc: 70.09% | Val Acc: 89.44% | LR: 0.00766 Epoch [291/900] Loss: 1.143 | Acc: 70.31% | Val Acc: 89.30% | LR: 0.00765 Epoch [292/900] Loss: 1.162 | Acc: 69.76% | Val Acc: 88.35% | LR: 0.00763 Epoch [293/900] Loss: 1.155 | Acc: 70.00% | Val Acc: 89.62% | LR: 0.00762 Epoch [294/900] Loss: 1.168 | Acc: 69.83% | Val Acc: 89.10% | LR: 0.00761 Epoch [295/900] Loss: 1.161 | Acc: 69.86% | Val Acc: 90.08% | LR: 0.00759 --> New Best Val Acc: 90.08% (Saved!) Epoch [296/900] Loss: 1.152 | Acc: 69.98% | Val Acc: 89.04% | LR: 0.00758 Epoch [297/900] Loss: 1.164 | Acc: 69.75% | Val Acc: 88.91% | LR: 0.00756 Epoch [298/900] Loss: 1.166 | Acc: 69.65% | Val Acc: 89.78% | LR: 0.00755 Epoch [299/900] Loss: 1.157 | Acc: 69.84% | Val Acc: 90.71% | LR: 0.00753 --> New Best Val Acc: 90.71% (Saved!) Epoch [300/900] Loss: 1.141 | Acc: 70.74% | Val Acc: 89.10% | LR: 0.00752 Epoch [301/900] Loss: 1.160 | Acc: 69.85% | Val Acc: 89.57% | LR: 0.00750 Epoch [302/900] Loss: 1.147 | Acc: 70.48% | Val Acc: 89.92% | LR: 0.00748 Epoch [303/900] Loss: 1.141 | Acc: 70.47% | Val Acc: 88.90% | LR: 0.00747 Epoch [304/900] Loss: 1.148 | Acc: 70.39% | Val Acc: 89.53% | LR: 0.00745 Epoch [305/900] Loss: 1.153 | Acc: 70.26% | Val Acc: 87.73% | LR: 0.00744 Epoch [306/900] Loss: 1.157 | Acc: 69.92% | Val Acc: 89.14% | LR: 0.00742 Epoch [307/900] Loss: 1.149 | Acc: 70.27% | Val Acc: 88.71% | LR: 0.00741 Epoch [308/900] Loss: 1.157 | Acc: 69.68% | Val Acc: 86.63% | LR: 0.00739 Epoch [309/900] Loss: 1.157 | Acc: 70.04% | Val Acc: 89.12% | LR: 0.00738 Epoch [310/900] Loss: 1.170 | Acc: 69.62% | Val Acc: 89.61% | LR: 0.00736 Epoch [311/900] Loss: 1.150 | Acc: 70.48% | Val Acc: 90.45% | LR: 0.00735 Epoch [312/900] Loss: 1.154 | Acc: 70.20% | Val Acc: 89.05% | LR: 0.00733 Epoch [313/900] Loss: 1.127 | Acc: 71.38% | Val Acc: 88.82% | LR: 0.00732 Epoch [314/900] Loss: 1.136 | Acc: 70.89% | Val Acc: 90.42% | LR: 0.00730 Epoch [315/900] Loss: 1.151 | Acc: 70.22% | Val Acc: 89.39% | LR: 0.00729 Epoch [316/900] Loss: 1.157 | Acc: 70.04% | Val Acc: 88.27% | LR: 0.00727 Epoch [317/900] Loss: 1.145 | Acc: 70.40% | Val Acc: 89.00% | LR: 0.00725 Epoch [318/900] Loss: 1.156 | Acc: 70.09% | Val Acc: 89.35% | LR: 0.00724 Epoch [319/900] Loss: 1.142 | Acc: 70.67% | Val Acc: 88.40% | LR: 0.00722 Epoch [320/900] Loss: 1.149 | Acc: 70.31% | Val Acc: 90.03% | LR: 0.00721 Epoch [321/900] Loss: 1.134 | Acc: 70.94% | Val Acc: 89.33% | LR: 0.00719 Epoch [322/900] Loss: 1.161 | Acc: 69.78% | Val Acc: 88.77% | LR: 0.00718 Epoch [323/900] Loss: 1.143 | Acc: 70.70% | Val Acc: 90.20% | LR: 0.00716 Epoch [324/900] Loss: 1.155 | Acc: 70.11% | Val Acc: 90.56% | LR: 0.00714 Epoch [325/900] Loss: 1.146 | Acc: 70.64% | Val Acc: 87.63% | LR: 0.00713 Epoch [326/900] Loss: 1.146 | Acc: 70.44% | Val Acc: 87.47% | LR: 0.00711 Epoch [327/900] Loss: 1.145 | Acc: 70.43% | Val Acc: 89.13% | LR: 0.00710 Epoch [328/900] Loss: 1.160 | Acc: 69.85% | Val Acc: 89.50% | LR: 0.00708 Epoch [329/900] Loss: 1.141 | Acc: 70.75% | Val Acc: 88.87% | LR: 0.00707 Epoch [330/900] Loss: 1.149 | Acc: 70.45% | Val Acc: 90.00% | LR: 0.00705 Epoch [331/900] Loss: 1.148 | Acc: 70.36% | Val Acc: 90.14% | LR: 0.00703 Epoch [332/900] Loss: 1.135 | Acc: 70.86% | Val Acc: 90.21% | LR: 0.00702 Epoch [333/900] Loss: 1.122 | Acc: 71.47% | Val Acc: 89.52% | LR: 0.00700 Epoch [334/900] Loss: 1.156 | Acc: 69.90% | Val Acc: 89.99% | LR: 0.00699 Epoch [335/900] Loss: 1.146 | Acc: 70.44% | Val Acc: 89.50% | LR: 0.00697 Epoch [336/900] Loss: 1.143 | Acc: 70.52% | Val Acc: 88.82% | LR: 0.00695 Epoch [337/900] Loss: 1.138 | Acc: 70.78% | Val Acc: 88.28% | LR: 0.00694 Epoch [338/900] Loss: 1.113 | Acc: 71.78% | Val Acc: 89.34% | LR: 0.00692 Epoch [339/900] Loss: 1.142 | Acc: 70.80% | Val Acc: 89.49% | LR: 0.00691 Epoch [340/900] Loss: 1.142 | Acc: 70.70% | Val Acc: 89.93% | LR: 0.00689 Epoch [341/900] Loss: 1.141 | Acc: 70.84% | Val Acc: 90.59% | LR: 0.00687 Epoch [342/900] Loss: 1.130 | Acc: 71.06% | Val Acc: 89.15% | LR: 0.00686 Epoch [343/900] Loss: 1.149 | Acc: 70.49% | Val Acc: 89.59% | LR: 0.00684 Epoch [344/900] Loss: 1.136 | Acc: 70.84% | Val Acc: 89.75% | LR: 0.00682 Epoch [345/900] Loss: 1.123 | Acc: 71.18% | Val Acc: 90.06% | LR: 0.00681 Epoch [346/900] Loss: 1.140 | Acc: 70.55% | Val Acc: 89.24% | LR: 0.00679 Epoch [347/900] Loss: 1.126 | Acc: 71.15% | Val Acc: 89.44% | LR: 0.00678 Epoch [348/900] Loss: 1.137 | Acc: 70.88% | Val Acc: 89.50% | LR: 0.00676 Epoch [349/900] Loss: 1.145 | Acc: 70.49% | Val Acc: 90.91% | LR: 0.00674 --> New Best Val Acc: 90.91% (Saved!) Epoch [350/900] Loss: 1.147 | Acc: 70.47% | Val Acc: 90.20% | LR: 0.00673 Epoch [351/900] Loss: 1.136 | Acc: 70.97% | Val Acc: 90.21% | LR: 0.00671 Epoch [352/900] Loss: 1.151 | Acc: 70.43% | Val Acc: 90.68% | LR: 0.00669 Epoch [353/900] Loss: 1.128 | Acc: 71.06% | Val Acc: 89.76% | LR: 0.00668 Epoch [354/900] Loss: 1.122 | Acc: 71.33% | Val Acc: 90.88% | LR: 0.00666 Epoch [355/900] Loss: 1.137 | Acc: 70.85% | Val Acc: 89.74% | LR: 0.00664 Epoch [356/900] Loss: 1.149 | Acc: 70.45% | Val Acc: 90.09% | LR: 0.00663 Epoch [357/900] Loss: 1.135 | Acc: 70.83% | Val Acc: 89.08% | LR: 0.00661 Epoch [358/900] Loss: 1.142 | Acc: 70.72% | Val Acc: 89.56% | LR: 0.00659 Epoch [359/900] Loss: 1.130 | Acc: 71.13% | Val Acc: 89.08% | LR: 0.00658 Epoch [360/900] Loss: 1.136 | Acc: 71.15% | Val Acc: 88.16% | LR: 0.00656 Epoch [361/900] Loss: 1.122 | Acc: 71.53% | Val Acc: 90.50% | LR: 0.00655 Epoch [362/900] Loss: 1.139 | Acc: 70.77% | Val Acc: 89.08% | LR: 0.00653 Epoch [363/900] Loss: 1.133 | Acc: 70.96% | Val Acc: 89.92% | LR: 0.00651 Epoch [364/900] Loss: 1.146 | Acc: 70.49% | Val Acc: 89.01% | LR: 0.00650 Epoch [365/900] Loss: 1.132 | Acc: 70.98% | Val Acc: 90.52% | LR: 0.00648 Epoch [366/900] Loss: 1.148 | Acc: 70.50% | Val Acc: 88.64% | LR: 0.00646 Epoch [367/900] Loss: 1.135 | Acc: 70.88% | Val Acc: 90.29% | LR: 0.00645 Epoch [368/900] Loss: 1.132 | Acc: 71.03% | Val Acc: 90.31% | LR: 0.00643 Epoch [369/900] Loss: 1.127 | Acc: 71.22% | Val Acc: 90.13% | LR: 0.00641 Epoch [370/900] Loss: 1.117 | Acc: 71.73% | Val Acc: 89.20% | LR: 0.00639 Epoch [371/900] Loss: 1.133 | Acc: 70.99% | Val Acc: 89.43% | LR: 0.00638 Epoch [372/900] Loss: 1.146 | Acc: 70.63% | Val Acc: 87.91% | LR: 0.00636 Epoch [373/900] Loss: 1.156 | Acc: 69.94% | Val Acc: 89.51% | LR: 0.00634 Epoch [374/900] Loss: 1.120 | Acc: 71.23% | Val Acc: 89.87% | LR: 0.00633 Epoch [375/900] Loss: 1.129 | Acc: 71.29% | Val Acc: 90.38% | LR: 0.00631 Epoch [376/900] Loss: 1.139 | Acc: 70.74% | Val Acc: 90.24% | LR: 0.00629 Epoch [377/900] Loss: 1.143 | Acc: 70.62% | Val Acc: 89.62% | LR: 0.00628 Epoch [378/900] Loss: 1.118 | Acc: 71.51% | Val Acc: 90.32% | LR: 0.00626 Epoch [379/900] Loss: 1.120 | Acc: 71.52% | Val Acc: 89.81% | LR: 0.00624 Epoch [380/900] Loss: 1.126 | Acc: 71.51% | Val Acc: 89.93% | LR: 0.00623 Epoch [381/900] Loss: 1.145 | Acc: 70.37% | Val Acc: 90.06% | LR: 0.00621 Epoch [382/900] Loss: 1.133 | Acc: 71.04% | Val Acc: 90.31% | LR: 0.00619 Epoch [383/900] Loss: 1.151 | Acc: 70.35% | Val Acc: 90.02% | LR: 0.00618 Epoch [384/900] Loss: 1.129 | Acc: 71.12% | Val Acc: 89.44% | LR: 0.00616 Epoch [385/900] Loss: 1.123 | Acc: 71.29% | Val Acc: 90.06% | LR: 0.00614 Epoch [386/900] Loss: 1.145 | Acc: 70.66% | Val Acc: 90.02% | LR: 0.00612 Epoch [387/900] Loss: 1.124 | Acc: 71.62% | Val Acc: 90.26% | LR: 0.00611 Epoch [388/900] Loss: 1.113 | Acc: 71.92% | Val Acc: 89.44% | LR: 0.00609 Epoch [389/900] Loss: 1.127 | Acc: 71.34% | Val Acc: 89.61% | LR: 0.00607 Epoch [390/900] Loss: 1.127 | Acc: 71.25% | Val Acc: 89.69% | LR: 0.00606 Epoch [391/900] Loss: 1.144 | Acc: 70.64% | Val Acc: 90.26% | LR: 0.00604 Epoch [392/900] Loss: 1.125 | Acc: 71.29% | Val Acc: 90.70% | LR: 0.00602 Epoch [393/900] Loss: 1.117 | Acc: 71.64% | Val Acc: 90.09% | LR: 0.00601 Epoch [394/900] Loss: 1.119 | Acc: 71.54% | Val Acc: 90.64% | LR: 0.00599 Epoch [395/900] Loss: 1.113 | Acc: 71.87% | Val Acc: 88.39% | LR: 0.00597 Epoch [396/900] Loss: 1.135 | Acc: 71.06% | Val Acc: 89.96% | LR: 0.00595 Epoch [397/900] Loss: 1.118 | Acc: 71.81% | Val Acc: 90.88% | LR: 0.00594 Epoch [398/900] Loss: 1.121 | Acc: 71.47% | Val Acc: 90.27% | LR: 0.00592 Epoch [399/900] Loss: 1.114 | Acc: 71.73% | Val Acc: 91.05% | LR: 0.00590 --> New Best Val Acc: 91.05% (Saved!) Epoch [400/900] Loss: 1.132 | Acc: 71.05% | Val Acc: 89.35% | LR: 0.00589 Epoch [401/900] Loss: 1.127 | Acc: 71.33% | Val Acc: 89.55% | LR: 0.00587 Epoch [402/900] Loss: 1.120 | Acc: 71.43% | Val Acc: 90.31% | LR: 0.00585 Epoch [403/900] Loss: 1.110 | Acc: 72.07% | Val Acc: 90.38% | LR: 0.00583 Epoch [404/900] Loss: 1.114 | Acc: 71.88% | Val Acc: 89.49% | LR: 0.00582 Epoch [405/900] Loss: 1.121 | Acc: 71.43% | Val Acc: 89.93% | LR: 0.00580 Epoch [406/900] Loss: 1.111 | Acc: 72.00% | Val Acc: 90.50% | LR: 0.00578 Epoch [407/900] Loss: 1.125 | Acc: 71.27% | Val Acc: 89.97% | LR: 0.00576 Epoch [408/900] Loss: 1.130 | Acc: 71.35% | Val Acc: 89.98% | LR: 0.00575 Epoch [409/900] Loss: 1.116 | Acc: 71.83% | Val Acc: 90.37% | LR: 0.00573 Epoch [410/900] Loss: 1.110 | Acc: 71.89% | Val Acc: 90.87% | LR: 0.00571 Epoch [411/900] Loss: 1.127 | Acc: 71.32% | Val Acc: 91.03% | LR: 0.00570 Epoch [412/900] Loss: 1.115 | Acc: 71.85% | Val Acc: 90.56% | LR: 0.00568 Epoch [413/900] Loss: 1.109 | Acc: 71.92% | Val Acc: 89.08% | LR: 0.00566 Epoch [414/900] Loss: 1.141 | Acc: 70.67% | Val Acc: 91.13% | LR: 0.00564 --> New Best Val Acc: 91.13% (Saved!) Epoch [415/900] Loss: 1.116 | Acc: 71.79% | Val Acc: 90.65% | LR: 0.00563 Epoch [416/900] Loss: 1.118 | Acc: 71.54% | Val Acc: 90.58% | LR: 0.00561 Epoch [417/900] Loss: 1.115 | Acc: 71.72% | Val Acc: 90.69% | LR: 0.00559 Epoch [418/900] Loss: 1.092 | Acc: 72.65% | Val Acc: 91.44% | LR: 0.00557 --> New Best Val Acc: 91.44% (Saved!) Epoch [419/900] Loss: 1.111 | Acc: 71.92% | Val Acc: 90.41% | LR: 0.00556 Epoch [420/900] Loss: 1.142 | Acc: 70.80% | Val Acc: 90.38% | LR: 0.00554 Epoch [421/900] Loss: 1.106 | Acc: 72.05% | Val Acc: 90.97% | LR: 0.00552 Epoch [422/900] Loss: 1.119 | Acc: 71.63% | Val Acc: 90.43% | LR: 0.00551 Epoch [423/900] Loss: 1.098 | Acc: 72.28% | Val Acc: 90.50% | LR: 0.00549 Epoch [424/900] Loss: 1.119 | Acc: 71.60% | Val Acc: 90.63% | LR: 0.00547 Epoch [425/900] Loss: 1.108 | Acc: 72.20% | Val Acc: 90.51% | LR: 0.00545 Epoch [426/900] Loss: 1.094 | Acc: 72.31% | Val Acc: 90.83% | LR: 0.00544 Epoch [427/900] Loss: 1.114 | Acc: 71.85% | Val Acc: 90.22% | LR: 0.00542 Epoch [428/900] Loss: 1.122 | Acc: 71.46% | Val Acc: 88.70% | LR: 0.00540 Epoch [429/900] Loss: 1.110 | Acc: 71.98% | Val Acc: 90.94% | LR: 0.00538 Epoch [430/900] Loss: 1.113 | Acc: 72.04% | Val Acc: 90.98% | LR: 0.00537 Epoch [431/900] Loss: 1.123 | Acc: 71.43% | Val Acc: 90.88% | LR: 0.00535 Epoch [432/900] Loss: 1.111 | Acc: 72.06% | Val Acc: 91.14% | LR: 0.00533 Epoch [433/900] Loss: 1.128 | Acc: 71.29% | Val Acc: 90.98% | LR: 0.00531 Epoch [434/900] Loss: 1.089 | Acc: 72.81% | Val Acc: 90.06% | LR: 0.00530 Epoch [435/900] Loss: 1.101 | Acc: 72.44% | Val Acc: 91.09% | LR: 0.00528 Epoch [436/900] Loss: 1.113 | Acc: 71.65% | Val Acc: 90.84% | LR: 0.00526 Epoch [437/900] Loss: 1.108 | Acc: 71.86% | Val Acc: 89.77% | LR: 0.00524 Epoch [438/900] Loss: 1.105 | Acc: 72.14% | Val Acc: 90.97% | LR: 0.00523 Epoch [439/900] Loss: 1.103 | Acc: 72.28% | Val Acc: 91.22% | LR: 0.00521 Epoch [440/900] Loss: 1.117 | Acc: 71.56% | Val Acc: 91.18% | LR: 0.00519 Epoch [441/900] Loss: 1.106 | Acc: 72.22% | Val Acc: 90.15% | LR: 0.00517 Epoch [442/900] Loss: 1.099 | Acc: 72.30% | Val Acc: 91.10% | LR: 0.00516 Epoch [443/900] Loss: 1.105 | Acc: 72.26% | Val Acc: 91.15% | LR: 0.00514 Epoch [444/900] Loss: 1.115 | Acc: 71.87% | Val Acc: 91.15% | LR: 0.00512 Epoch [445/900] Loss: 1.093 | Acc: 72.85% | Val Acc: 91.02% | LR: 0.00510 Epoch [446/900] Loss: 1.105 | Acc: 72.27% | Val Acc: 89.09% | LR: 0.00509 Epoch [447/900] Loss: 1.088 | Acc: 73.04% | Val Acc: 90.65% | LR: 0.00507 Epoch [448/900] Loss: 1.108 | Acc: 72.13% | Val Acc: 91.21% | LR: 0.00505 Epoch [449/900] Loss: 1.090 | Acc: 73.02% | Val Acc: 90.58% | LR: 0.00503 Epoch [450/900] Loss: 1.095 | Acc: 72.65% | Val Acc: 90.70% | LR: 0.00502 Epoch [451/900] Loss: 1.096 | Acc: 72.76% | Val Acc: 90.77% | LR: 0.00500 Epoch [452/900] Loss: 1.095 | Acc: 72.46% | Val Acc: 91.34% | LR: 0.00498 Epoch [453/900] Loss: 1.116 | Acc: 71.46% | Val Acc: 91.70% | LR: 0.00497 --> New Best Val Acc: 91.70% (Saved!) Epoch [454/900] Loss: 1.105 | Acc: 72.34% | Val Acc: 92.01% | LR: 0.00495 --> New Best Val Acc: 92.01% (Saved!) Epoch [455/900] Loss: 1.102 | Acc: 72.39% | Val Acc: 90.39% | LR: 0.00493 Epoch [456/900] Loss: 1.109 | Acc: 72.06% | Val Acc: 91.41% | LR: 0.00491 Epoch [457/900] Loss: 1.112 | Acc: 71.77% | Val Acc: 91.53% | LR: 0.00490 Epoch [458/900] Loss: 1.094 | Acc: 72.41% | Val Acc: 90.37% | LR: 0.00488 Epoch [459/900] Loss: 1.095 | Acc: 72.62% | Val Acc: 89.38% | LR: 0.00486 Epoch [460/900] Loss: 1.116 | Acc: 71.88% | Val Acc: 90.47% | LR: 0.00484 Epoch [461/900] Loss: 1.095 | Acc: 72.83% | Val Acc: 90.71% | LR: 0.00483 Epoch [462/900] Loss: 1.093 | Acc: 72.78% | Val Acc: 90.87% | LR: 0.00481 Epoch [463/900] Loss: 1.086 | Acc: 72.93% | Val Acc: 91.45% | LR: 0.00479 Epoch [464/900] Loss: 1.097 | Acc: 72.54% | Val Acc: 91.05% | LR: 0.00477 Epoch [465/900] Loss: 1.101 | Acc: 72.65% | Val Acc: 91.07% | LR: 0.00476 Epoch [466/900] Loss: 1.089 | Acc: 72.81% | Val Acc: 90.26% | LR: 0.00474 Epoch [467/900] Loss: 1.096 | Acc: 72.55% | Val Acc: 90.97% | LR: 0.00472 Epoch [468/900] Loss: 1.113 | Acc: 71.87% | Val Acc: 91.76% | LR: 0.00470 Epoch [469/900] Loss: 1.098 | Acc: 72.39% | Val Acc: 90.72% | LR: 0.00469 Epoch [470/900] Loss: 1.095 | Acc: 72.57% | Val Acc: 90.77% | LR: 0.00467 Epoch [471/900] Loss: 1.106 | Acc: 72.36% | Val Acc: 91.44% | LR: 0.00465 Epoch [472/900] Loss: 1.089 | Acc: 72.82% | Val Acc: 91.66% | LR: 0.00463 Epoch [473/900] Loss: 1.081 | Acc: 72.99% | Val Acc: 90.90% | LR: 0.00462 Epoch [474/900] Loss: 1.102 | Acc: 72.54% | Val Acc: 89.61% | LR: 0.00460 Epoch [475/900] Loss: 1.077 | Acc: 73.25% | Val Acc: 91.03% | LR: 0.00458 Epoch [476/900] Loss: 1.081 | Acc: 73.23% | Val Acc: 91.66% | LR: 0.00456 Epoch [477/900] Loss: 1.092 | Acc: 73.01% | Val Acc: 91.15% | LR: 0.00455 Epoch [478/900] Loss: 1.107 | Acc: 72.23% | Val Acc: 91.49% | LR: 0.00453 Epoch [479/900] Loss: 1.097 | Acc: 72.63% | Val Acc: 90.87% | LR: 0.00451 Epoch [480/900] Loss: 1.093 | Acc: 72.72% | Val Acc: 91.61% | LR: 0.00449 Epoch [481/900] Loss: 1.101 | Acc: 72.54% | Val Acc: 91.08% | LR: 0.00448 Epoch [482/900] Loss: 1.079 | Acc: 73.31% | Val Acc: 90.68% | LR: 0.00446 Epoch [483/900] Loss: 1.099 | Acc: 72.54% | Val Acc: 91.88% | LR: 0.00444 Epoch [484/900] Loss: 1.095 | Acc: 72.61% | Val Acc: 91.14% | LR: 0.00443 Epoch [485/900] Loss: 1.102 | Acc: 72.50% | Val Acc: 91.83% | LR: 0.00441 Epoch [486/900] Loss: 1.092 | Acc: 72.93% | Val Acc: 91.20% | LR: 0.00439 Epoch [487/900] Loss: 1.082 | Acc: 72.96% | Val Acc: 91.62% | LR: 0.00437 Epoch [488/900] Loss: 1.109 | Acc: 72.11% | Val Acc: 91.86% | LR: 0.00436 Epoch [489/900] Loss: 1.085 | Acc: 73.10% | Val Acc: 90.84% | LR: 0.00434 Epoch [490/900] Loss: 1.073 | Acc: 73.45% | Val Acc: 91.70% | LR: 0.00432 Epoch [491/900] Loss: 1.087 | Acc: 73.04% | Val Acc: 90.97% | LR: 0.00430 Epoch [492/900] Loss: 1.079 | Acc: 73.30% | Val Acc: 90.54% | LR: 0.00429 Epoch [493/900] Loss: 1.098 | Acc: 72.62% | Val Acc: 91.18% | LR: 0.00427 Epoch [494/900] Loss: 1.087 | Acc: 72.93% | Val Acc: 91.52% | LR: 0.00425 Epoch [495/900] Loss: 1.068 | Acc: 73.63% | Val Acc: 90.80% | LR: 0.00424 Epoch [496/900] Loss: 1.080 | Acc: 73.36% | Val Acc: 90.35% | LR: 0.00422 Epoch [497/900] Loss: 1.078 | Acc: 73.39% | Val Acc: 90.89% | LR: 0.00420 Epoch [498/900] Loss: 1.074 | Acc: 73.58% | Val Acc: 91.32% | LR: 0.00418 Epoch [499/900] Loss: 1.085 | Acc: 73.13% | Val Acc: 91.48% | LR: 0.00417 Epoch [500/900] Loss: 1.067 | Acc: 73.75% | Val Acc: 91.46% | LR: 0.00415 Epoch [501/900] Loss: 1.065 | Acc: 73.82% | Val Acc: 91.71% | LR: 0.00413 Epoch [502/900] Loss: 1.079 | Acc: 73.36% | Val Acc: 91.30% | LR: 0.00411 Epoch [503/900] Loss: 1.062 | Acc: 73.73% | Val Acc: 91.80% | LR: 0.00410 Epoch [504/900] Loss: 1.067 | Acc: 73.82% | Val Acc: 91.65% | LR: 0.00408 Epoch [505/900] Loss: 1.090 | Acc: 73.01% | Val Acc: 91.46% | LR: 0.00406 Epoch [506/900] Loss: 1.076 | Acc: 73.56% | Val Acc: 90.94% | LR: 0.00405 Epoch [507/900] Loss: 1.082 | Acc: 72.96% | Val Acc: 91.87% | LR: 0.00403 Epoch [508/900] Loss: 1.094 | Acc: 72.74% | Val Acc: 91.01% | LR: 0.00401 Epoch [509/900] Loss: 1.069 | Acc: 73.60% | Val Acc: 91.13% | LR: 0.00399 Epoch [510/900] Loss: 1.082 | Acc: 73.15% | Val Acc: 91.87% | LR: 0.00398 Epoch [511/900] Loss: 1.065 | Acc: 73.85% | Val Acc: 92.31% | LR: 0.00396 --> New Best Val Acc: 92.31% (Saved!) Epoch [512/900] Loss: 1.084 | Acc: 73.06% | Val Acc: 92.00% | LR: 0.00394 Epoch [513/900] Loss: 1.077 | Acc: 73.51% | Val Acc: 91.63% | LR: 0.00393 Epoch [514/900] Loss: 1.078 | Acc: 73.22% | Val Acc: 91.55% | LR: 0.00391 Epoch [515/900] Loss: 1.082 | Acc: 73.18% | Val Acc: 90.69% | LR: 0.00389 Epoch [516/900] Loss: 1.074 | Acc: 73.47% | Val Acc: 92.07% | LR: 0.00388 Epoch [517/900] Loss: 1.058 | Acc: 74.17% | Val Acc: 92.05% | LR: 0.00386 Epoch [518/900] Loss: 1.077 | Acc: 73.28% | Val Acc: 91.64% | LR: 0.00384 Epoch [519/900] Loss: 1.080 | Acc: 73.23% | Val Acc: 92.46% | LR: 0.00382 --> New Best Val Acc: 92.46% (Saved!) Epoch [520/900] Loss: 1.089 | Acc: 73.16% | Val Acc: 91.96% | LR: 0.00381 Epoch [521/900] Loss: 1.067 | Acc: 73.94% | Val Acc: 90.90% | LR: 0.00379 Epoch [522/900] Loss: 1.087 | Acc: 72.95% | Val Acc: 91.79% | LR: 0.00377 Epoch [523/900] Loss: 1.066 | Acc: 74.03% | Val Acc: 91.42% | LR: 0.00376 Epoch [524/900] Loss: 1.064 | Acc: 73.99% | Val Acc: 92.03% | LR: 0.00374 Epoch [525/900] Loss: 1.064 | Acc: 73.80% | Val Acc: 91.83% | LR: 0.00372 Epoch [526/900] Loss: 1.070 | Acc: 73.73% | Val Acc: 91.99% | LR: 0.00371 Epoch [527/900] Loss: 1.089 | Acc: 72.99% | Val Acc: 90.88% | LR: 0.00369 Epoch [528/900] Loss: 1.064 | Acc: 73.74% | Val Acc: 91.49% | LR: 0.00367 Epoch [529/900] Loss: 1.076 | Acc: 73.37% | Val Acc: 91.40% | LR: 0.00366 Epoch [530/900] Loss: 1.060 | Acc: 74.08% | Val Acc: 92.05% | LR: 0.00364 Epoch [531/900] Loss: 1.074 | Acc: 73.45% | Val Acc: 92.50% | LR: 0.00362 --> New Best Val Acc: 92.50% (Saved!) Epoch [532/900] Loss: 1.071 | Acc: 73.71% | Val Acc: 92.05% | LR: 0.00361 Epoch [533/900] Loss: 1.072 | Acc: 73.49% | Val Acc: 92.09% | LR: 0.00359 Epoch [534/900] Loss: 1.051 | Acc: 74.48% | Val Acc: 92.20% | LR: 0.00357 Epoch [535/900] Loss: 1.067 | Acc: 73.68% | Val Acc: 91.83% | LR: 0.00355 Epoch [536/900] Loss: 1.059 | Acc: 74.02% | Val Acc: 91.64% | LR: 0.00354 Epoch [537/900] Loss: 1.058 | Acc: 74.31% | Val Acc: 92.00% | LR: 0.00352 Epoch [538/900] Loss: 1.055 | Acc: 74.16% | Val Acc: 91.83% | LR: 0.00350 Epoch [539/900] Loss: 1.074 | Acc: 73.47% | Val Acc: 91.82% | LR: 0.00349 Epoch [540/900] Loss: 1.067 | Acc: 73.89% | Val Acc: 91.51% | LR: 0.00347 Epoch [541/900] Loss: 1.049 | Acc: 74.56% | Val Acc: 92.14% | LR: 0.00345 Epoch [542/900] Loss: 1.066 | Acc: 73.90% | Val Acc: 92.36% | LR: 0.00344 Epoch [543/900] Loss: 1.055 | Acc: 74.30% | Val Acc: 91.96% | LR: 0.00342 Epoch [544/900] Loss: 1.055 | Acc: 74.10% | Val Acc: 91.80% | LR: 0.00341 Epoch [545/900] Loss: 1.058 | Acc: 74.30% | Val Acc: 91.94% | LR: 0.00339 Epoch [546/900] Loss: 1.073 | Acc: 73.43% | Val Acc: 92.45% | LR: 0.00337 Epoch [547/900] Loss: 1.055 | Acc: 74.27% | Val Acc: 92.06% | LR: 0.00336 Epoch [548/900] Loss: 1.073 | Acc: 73.54% | Val Acc: 92.69% | LR: 0.00334 --> New Best Val Acc: 92.69% (Saved!) Epoch [549/900] Loss: 1.060 | Acc: 73.99% | Val Acc: 92.61% | LR: 0.00332 Epoch [550/900] Loss: 1.051 | Acc: 74.35% | Val Acc: 92.19% | LR: 0.00331 Epoch [551/900] Loss: 1.058 | Acc: 74.31% | Val Acc: 91.07% | LR: 0.00329 Epoch [552/900] Loss: 1.073 | Acc: 73.74% | Val Acc: 92.34% | LR: 0.00327 Epoch [553/900] Loss: 1.069 | Acc: 73.89% | Val Acc: 91.97% | LR: 0.00326 Epoch [554/900] Loss: 1.058 | Acc: 74.09% | Val Acc: 91.98% | LR: 0.00324 Epoch [555/900] Loss: 1.063 | Acc: 73.88% | Val Acc: 91.85% | LR: 0.00322 Epoch [556/900] Loss: 1.053 | Acc: 74.24% | Val Acc: 92.77% | LR: 0.00321 --> New Best Val Acc: 92.77% (Saved!) Epoch [557/900] Loss: 1.056 | Acc: 74.19% | Val Acc: 92.32% | LR: 0.00319 Epoch [558/900] Loss: 1.054 | Acc: 74.08% | Val Acc: 91.89% | LR: 0.00318 Epoch [559/900] Loss: 1.052 | Acc: 74.32% | Val Acc: 91.48% | LR: 0.00316 Epoch [560/900] Loss: 1.056 | Acc: 74.42% | Val Acc: 91.90% | LR: 0.00314 Epoch [561/900] Loss: 1.050 | Acc: 74.44% | Val Acc: 92.18% | LR: 0.00313 Epoch [562/900] Loss: 1.054 | Acc: 74.32% | Val Acc: 92.11% | LR: 0.00311 Epoch [563/900] Loss: 1.087 | Acc: 73.16% | Val Acc: 92.12% | LR: 0.00309 Epoch [564/900] Loss: 1.061 | Acc: 73.99% | Val Acc: 91.88% | LR: 0.00308 Epoch [565/900] Loss: 1.038 | Acc: 74.91% | Val Acc: 92.89% | LR: 0.00306 --> New Best Val Acc: 92.89% (Saved!) Epoch [566/900] Loss: 1.047 | Acc: 74.50% | Val Acc: 92.84% | LR: 0.00305 Epoch [567/900] Loss: 1.038 | Acc: 74.85% | Val Acc: 92.82% | LR: 0.00303 Epoch [568/900] Loss: 1.063 | Acc: 73.89% | Val Acc: 92.20% | LR: 0.00301 Epoch [569/900] Loss: 1.053 | Acc: 74.34% | Val Acc: 92.22% | LR: 0.00300 Epoch [570/900] Loss: 1.051 | Acc: 74.61% | Val Acc: 91.78% | LR: 0.00298 Epoch [571/900] Loss: 1.059 | Acc: 74.21% | Val Acc: 92.73% | LR: 0.00297 Epoch [572/900] Loss: 1.058 | Acc: 74.08% | Val Acc: 92.34% | LR: 0.00295 Epoch [573/900] Loss: 1.047 | Acc: 74.54% | Val Acc: 92.02% | LR: 0.00293 Epoch [574/900] Loss: 1.057 | Acc: 74.42% | Val Acc: 92.83% | LR: 0.00292 Epoch [575/900] Loss: 1.021 | Acc: 75.73% | Val Acc: 93.04% | LR: 0.00290 --> New Best Val Acc: 93.04% (Saved!) Epoch [576/900] Loss: 1.058 | Acc: 74.27% | Val Acc: 92.80% | LR: 0.00289 Epoch [577/900] Loss: 1.050 | Acc: 74.33% | Val Acc: 92.69% | LR: 0.00287 Epoch [578/900] Loss: 1.050 | Acc: 74.49% | Val Acc: 92.40% | LR: 0.00286 Epoch [579/900] Loss: 1.044 | Acc: 74.76% | Val Acc: 92.06% | LR: 0.00284 Epoch [580/900] Loss: 1.054 | Acc: 74.18% | Val Acc: 92.81% | LR: 0.00282 Epoch [581/900] Loss: 1.044 | Acc: 74.86% | Val Acc: 92.15% | LR: 0.00281 Epoch [582/900] Loss: 1.037 | Acc: 74.98% | Val Acc: 92.89% | LR: 0.00279 Epoch [583/900] Loss: 1.012 | Acc: 76.04% | Val Acc: 91.75% | LR: 0.00278 Epoch [584/900] Loss: 1.049 | Acc: 74.65% | Val Acc: 92.33% | LR: 0.00276 Epoch [585/900] Loss: 1.052 | Acc: 74.57% | Val Acc: 92.69% | LR: 0.00275 Epoch [586/900] Loss: 1.024 | Acc: 75.47% | Val Acc: 92.86% | LR: 0.00273 Epoch [587/900] Loss: 1.045 | Acc: 74.67% | Val Acc: 92.77% | LR: 0.00271 Epoch [588/900] Loss: 1.051 | Acc: 74.35% | Val Acc: 92.76% | LR: 0.00270 Epoch [589/900] Loss: 1.048 | Acc: 74.63% | Val Acc: 92.56% | LR: 0.00268 Epoch [590/900] Loss: 1.053 | Acc: 74.58% | Val Acc: 92.19% | LR: 0.00267 Epoch [591/900] Loss: 1.056 | Acc: 74.36% | Val Acc: 93.19% | LR: 0.00265 --> New Best Val Acc: 93.19% (Saved!) Epoch [592/900] Loss: 1.039 | Acc: 74.94% | Val Acc: 92.76% | LR: 0.00264 Epoch [593/900] Loss: 1.053 | Acc: 74.41% | Val Acc: 92.48% | LR: 0.00262 Epoch [594/900] Loss: 1.033 | Acc: 75.08% | Val Acc: 92.06% | LR: 0.00261 Epoch [595/900] Loss: 1.051 | Acc: 74.49% | Val Acc: 93.22% | LR: 0.00259 --> New Best Val Acc: 93.22% (Saved!) Epoch [596/900] Loss: 1.052 | Acc: 74.52% | Val Acc: 92.59% | LR: 0.00258 Epoch [597/900] Loss: 1.040 | Acc: 74.84% | Val Acc: 92.73% | LR: 0.00256 Epoch [598/900] Loss: 1.038 | Acc: 75.07% | Val Acc: 92.90% | LR: 0.00255 Epoch [599/900] Loss: 1.029 | Acc: 75.33% | Val Acc: 91.90% | LR: 0.00253 Epoch [600/900] Loss: 1.048 | Acc: 74.69% | Val Acc: 92.55% | LR: 0.00252 Epoch [601/900] Loss: 1.047 | Acc: 74.77% | Val Acc: 92.58% | LR: 0.00250 Epoch [602/900] Loss: 1.043 | Acc: 74.87% | Val Acc: 93.00% | LR: 0.00248 Epoch [603/900] Loss: 1.030 | Acc: 75.19% | Val Acc: 92.50% | LR: 0.00247 Epoch [604/900] Loss: 1.032 | Acc: 75.32% | Val Acc: 92.81% | LR: 0.00245 Epoch [605/900] Loss: 1.016 | Acc: 75.81% | Val Acc: 92.82% | LR: 0.00244 Epoch [606/900] Loss: 1.032 | Acc: 75.34% | Val Acc: 92.87% | LR: 0.00242 Epoch [607/900] Loss: 1.029 | Acc: 75.36% | Val Acc: 92.49% | LR: 0.00241 Epoch [608/900] Loss: 1.012 | Acc: 76.20% | Val Acc: 92.65% | LR: 0.00239 Epoch [609/900] Loss: 1.035 | Acc: 75.20% | Val Acc: 92.32% | LR: 0.00238 Epoch [610/900] Loss: 1.015 | Acc: 75.79% | Val Acc: 92.65% | LR: 0.00237 Epoch [611/900] Loss: 1.026 | Acc: 75.39% | Val Acc: 93.30% | LR: 0.00235 --> New Best Val Acc: 93.30% (Saved!) Epoch [612/900] Loss: 1.048 | Acc: 74.72% | Val Acc: 93.39% | LR: 0.00234 --> New Best Val Acc: 93.39% (Saved!) Epoch [613/900] Loss: 1.029 | Acc: 75.30% | Val Acc: 93.60% | LR: 0.00232 --> New Best Val Acc: 93.60% (Saved!) Epoch [614/900] Loss: 1.021 | Acc: 75.49% | Val Acc: 92.28% | LR: 0.00231 Epoch [615/900] Loss: 1.033 | Acc: 75.19% | Val Acc: 93.12% | LR: 0.00229 Epoch [616/900] Loss: 1.026 | Acc: 75.62% | Val Acc: 93.03% | LR: 0.00228 Epoch [617/900] Loss: 1.030 | Acc: 75.26% | Val Acc: 92.62% | LR: 0.00226 Epoch [618/900] Loss: 1.021 | Acc: 75.68% | Val Acc: 92.89% | LR: 0.00225 Epoch [619/900] Loss: 1.016 | Acc: 75.87% | Val Acc: 92.80% | LR: 0.00223 Epoch [620/900] Loss: 1.031 | Acc: 75.37% | Val Acc: 93.01% | LR: 0.00222 Epoch [621/900] Loss: 1.032 | Acc: 75.47% | Val Acc: 92.53% | LR: 0.00220 Epoch [622/900] Loss: 1.012 | Acc: 75.89% | Val Acc: 93.37% | LR: 0.00219 Epoch [623/900] Loss: 1.000 | Acc: 76.55% | Val Acc: 93.18% | LR: 0.00218 Epoch [624/900] Loss: 1.033 | Acc: 75.10% | Val Acc: 92.76% | LR: 0.00216 Epoch [625/900] Loss: 1.013 | Acc: 76.02% | Val Acc: 93.21% | LR: 0.00215 Epoch [626/900] Loss: 1.015 | Acc: 76.00% | Val Acc: 93.20% | LR: 0.00213 Epoch [627/900] Loss: 1.024 | Acc: 75.67% | Val Acc: 93.06% | LR: 0.00212 Epoch [628/900] Loss: 1.012 | Acc: 76.05% | Val Acc: 92.62% | LR: 0.00210 Epoch [629/900] Loss: 1.007 | Acc: 76.21% | Val Acc: 91.80% | LR: 0.00209 Epoch [630/900] Loss: 1.011 | Acc: 76.07% | Val Acc: 93.36% | LR: 0.00208 Epoch [631/900] Loss: 1.027 | Acc: 75.24% | Val Acc: 93.69% | LR: 0.00206 --> New Best Val Acc: 93.69% (Saved!) Epoch [632/900] Loss: 1.003 | Acc: 76.51% | Val Acc: 93.16% | LR: 0.00205 Epoch [633/900] Loss: 1.022 | Acc: 75.74% | Val Acc: 93.13% | LR: 0.00203 Epoch [634/900] Loss: 1.006 | Acc: 76.31% | Val Acc: 93.45% | LR: 0.00202 Epoch [635/900] Loss: 1.027 | Acc: 75.56% | Val Acc: 93.35% | LR: 0.00200 Epoch [636/900] Loss: 1.019 | Acc: 75.87% | Val Acc: 93.29% | LR: 0.00199 Epoch [637/900] Loss: 0.993 | Acc: 76.94% | Val Acc: 93.47% | LR: 0.00198 Epoch [638/900] Loss: 1.022 | Acc: 75.77% | Val Acc: 93.15% | LR: 0.00196 Epoch [639/900] Loss: 1.007 | Acc: 76.15% | Val Acc: 93.41% | LR: 0.00195 Epoch [640/900] Loss: 1.010 | Acc: 76.19% | Val Acc: 92.98% | LR: 0.00194 Epoch [641/900] Loss: 1.018 | Acc: 75.73% | Val Acc: 93.36% | LR: 0.00192 Epoch [642/900] Loss: 1.013 | Acc: 75.99% | Val Acc: 93.04% | LR: 0.00191 Epoch [643/900] Loss: 1.009 | Acc: 76.10% | Val Acc: 93.34% | LR: 0.00189 Epoch [644/900] Loss: 1.004 | Acc: 76.36% | Val Acc: 93.13% | LR: 0.00188 Epoch [645/900] Loss: 1.018 | Acc: 75.75% | Val Acc: 93.12% | LR: 0.00187 Epoch [646/900] Loss: 1.012 | Acc: 75.98% | Val Acc: 92.84% | LR: 0.00185 Epoch [647/900] Loss: 1.028 | Acc: 75.33% | Val Acc: 92.96% | LR: 0.00184 Epoch [648/900] Loss: 1.010 | Acc: 76.12% | Val Acc: 92.99% | LR: 0.00183 Epoch [649/900] Loss: 1.005 | Acc: 76.51% | Val Acc: 93.41% | LR: 0.00181 Epoch [650/900] Loss: 1.012 | Acc: 76.07% | Val Acc: 93.64% | LR: 0.00180 Epoch [651/900] Loss: 0.985 | Acc: 77.11% | Val Acc: 92.86% | LR: 0.00179 Epoch [652/900] Loss: 0.990 | Acc: 76.86% | Val Acc: 93.05% | LR: 0.00177 Epoch [653/900] Loss: 0.997 | Acc: 76.64% | Val Acc: 93.48% | LR: 0.00176 Epoch [654/900] Loss: 0.983 | Acc: 77.18% | Val Acc: 93.80% | LR: 0.00175 --> New Best Val Acc: 93.80% (Saved!) Epoch [655/900] Loss: 0.999 | Acc: 76.81% | Val Acc: 93.57% | LR: 0.00173 Epoch [656/900] Loss: 0.995 | Acc: 76.90% | Val Acc: 93.26% | LR: 0.00172 Epoch [657/900] Loss: 0.995 | Acc: 76.83% | Val Acc: 93.35% | LR: 0.00171 Epoch [658/900] Loss: 0.981 | Acc: 77.20% | Val Acc: 92.96% | LR: 0.00169 Epoch [659/900] Loss: 0.993 | Acc: 76.72% | Val Acc: 93.21% | LR: 0.00168 Epoch [660/900] Loss: 1.000 | Acc: 76.33% | Val Acc: 93.25% | LR: 0.00167 Epoch [661/900] Loss: 0.991 | Acc: 76.74% | Val Acc: 93.02% | LR: 0.00165 Epoch [662/900] Loss: 0.985 | Acc: 77.11% | Val Acc: 93.41% | LR: 0.00164 Epoch [663/900] Loss: 0.998 | Acc: 76.80% | Val Acc: 93.02% | LR: 0.00163 Epoch [664/900] Loss: 0.998 | Acc: 76.65% | Val Acc: 93.07% | LR: 0.00162 Epoch [665/900] Loss: 0.999 | Acc: 76.55% | Val Acc: 93.23% | LR: 0.00160 Epoch [666/900] Loss: 0.997 | Acc: 76.71% | Val Acc: 93.72% | LR: 0.00159 Epoch [667/900] Loss: 0.995 | Acc: 76.82% | Val Acc: 93.76% | LR: 0.00158 Epoch [668/900] Loss: 0.996 | Acc: 76.79% | Val Acc: 93.34% | LR: 0.00156 Epoch [669/900] Loss: 0.991 | Acc: 77.01% | Val Acc: 93.45% | LR: 0.00155 Epoch [670/900] Loss: 0.991 | Acc: 76.70% | Val Acc: 93.20% | LR: 0.00154 Epoch [671/900] Loss: 0.985 | Acc: 77.13% | Val Acc: 94.03% | LR: 0.00153 --> New Best Val Acc: 94.03% (Saved!) Epoch [672/900] Loss: 0.990 | Acc: 76.84% | Val Acc: 93.96% | LR: 0.00151 Epoch [673/900] Loss: 0.999 | Acc: 76.62% | Val Acc: 93.77% | LR: 0.00150 Epoch [674/900] Loss: 0.987 | Acc: 77.19% | Val Acc: 93.43% | LR: 0.00149 Epoch [675/900] Loss: 0.999 | Acc: 76.58% | Val Acc: 93.76% | LR: 0.00148 Epoch [676/900] Loss: 0.986 | Acc: 77.02% | Val Acc: 93.87% | LR: 0.00146 Epoch [677/900] Loss: 0.970 | Acc: 77.83% | Val Acc: 93.88% | LR: 0.00145 Epoch [678/900] Loss: 0.973 | Acc: 77.77% | Val Acc: 93.93% | LR: 0.00144 Epoch [679/900] Loss: 0.974 | Acc: 77.81% | Val Acc: 93.67% | LR: 0.00143 Epoch [680/900] Loss: 0.974 | Acc: 77.59% | Val Acc: 93.82% | LR: 0.00142 Epoch [681/900] Loss: 0.985 | Acc: 77.22% | Val Acc: 93.99% | LR: 0.00140 Epoch [682/900] Loss: 0.988 | Acc: 77.01% | Val Acc: 93.83% | LR: 0.00139 Epoch [683/900] Loss: 0.965 | Acc: 78.08% | Val Acc: 93.86% | LR: 0.00138 Epoch [684/900] Loss: 0.998 | Acc: 76.52% | Val Acc: 93.52% | LR: 0.00137 Epoch [685/900] Loss: 0.967 | Acc: 77.94% | Val Acc: 93.80% | LR: 0.00136 Epoch [686/900] Loss: 0.994 | Acc: 76.84% | Val Acc: 93.35% | LR: 0.00134 Epoch [687/900] Loss: 0.999 | Acc: 76.64% | Val Acc: 94.07% | LR: 0.00133 --> New Best Val Acc: 94.07% (Saved!) Epoch [688/900] Loss: 0.988 | Acc: 77.03% | Val Acc: 93.74% | LR: 0.00132 Epoch [689/900] Loss: 0.966 | Acc: 77.96% | Val Acc: 93.58% | LR: 0.00131 Epoch [690/900] Loss: 0.998 | Acc: 76.89% | Val Acc: 93.88% | LR: 0.00130 Epoch [691/900] Loss: 0.976 | Acc: 77.65% | Val Acc: 93.96% | LR: 0.00128 Epoch [692/900] Loss: 0.975 | Acc: 77.38% | Val Acc: 93.61% | LR: 0.00127 Epoch [693/900] Loss: 0.975 | Acc: 77.45% | Val Acc: 93.89% | LR: 0.00126 Epoch [694/900] Loss: 0.962 | Acc: 78.07% | Val Acc: 94.03% | LR: 0.00125 Epoch [695/900] Loss: 0.970 | Acc: 77.79% | Val Acc: 93.64% | LR: 0.00124 Epoch [696/900] Loss: 0.986 | Acc: 77.16% | Val Acc: 94.06% | LR: 0.00123 Epoch [697/900] Loss: 0.967 | Acc: 77.92% | Val Acc: 93.97% | LR: 0.00122 Epoch [698/900] Loss: 0.967 | Acc: 77.90% | Val Acc: 93.95% | LR: 0.00120 Epoch [699/900] Loss: 0.973 | Acc: 77.51% | Val Acc: 94.11% | LR: 0.00119 --> New Best Val Acc: 94.11% (Saved!) Epoch [700/900] Loss: 0.969 | Acc: 77.65% | Val Acc: 93.83% | LR: 0.00118 Epoch [701/900] Loss: 0.973 | Acc: 77.58% | Val Acc: 93.92% | LR: 0.00117 Epoch [702/900] Loss: 0.971 | Acc: 77.81% | Val Acc: 93.71% | LR: 0.00116 Epoch [703/900] Loss: 0.968 | Acc: 78.01% | Val Acc: 94.04% | LR: 0.00115 Epoch [704/900] Loss: 0.949 | Acc: 78.44% | Val Acc: 93.63% | LR: 0.00114 Epoch [705/900] Loss: 0.974 | Acc: 77.83% | Val Acc: 94.37% | LR: 0.00113 --> New Best Val Acc: 94.37% (Saved!) Epoch [706/900] Loss: 0.975 | Acc: 77.71% | Val Acc: 93.79% | LR: 0.00111 Epoch [707/900] Loss: 0.975 | Acc: 77.46% | Val Acc: 93.82% | LR: 0.00110 Epoch [708/900] Loss: 0.959 | Acc: 78.04% | Val Acc: 93.97% | LR: 0.00109 Epoch [709/900] Loss: 0.960 | Acc: 78.18% | Val Acc: 94.02% | LR: 0.00108 Epoch [710/900] Loss: 0.981 | Acc: 77.44% | Val Acc: 94.30% | LR: 0.00107 Epoch [711/900] Loss: 0.953 | Acc: 78.42% | Val Acc: 93.98% | LR: 0.00106 Epoch [712/900] Loss: 0.963 | Acc: 77.99% | Val Acc: 94.00% | LR: 0.00105 Epoch [713/900] Loss: 0.943 | Acc: 78.63% | Val Acc: 93.98% | LR: 0.00104 Epoch [714/900] Loss: 0.954 | Acc: 78.43% | Val Acc: 94.26% | LR: 0.00103 Epoch [715/900] Loss: 0.967 | Acc: 78.01% | Val Acc: 93.67% | LR: 0.00102 Epoch [716/900] Loss: 0.980 | Acc: 77.46% | Val Acc: 94.36% | LR: 0.00101 Epoch [717/900] Loss: 0.965 | Acc: 77.91% | Val Acc: 94.16% | LR: 0.00100 Epoch [718/900] Loss: 0.962 | Acc: 77.94% | Val Acc: 94.07% | LR: 0.00099 Epoch [719/900] Loss: 0.950 | Acc: 78.50% | Val Acc: 94.21% | LR: 0.00098 Epoch [720/900] Loss: 0.968 | Acc: 77.84% | Val Acc: 94.16% | LR: 0.00097 Epoch [721/900] Loss: 0.955 | Acc: 78.30% | Val Acc: 94.15% | LR: 0.00095 Epoch [722/900] Loss: 0.940 | Acc: 79.03% | Val Acc: 94.29% | LR: 0.00094 Epoch [723/900] Loss: 0.955 | Acc: 78.29% | Val Acc: 94.15% | LR: 0.00093 Epoch [724/900] Loss: 0.970 | Acc: 77.72% | Val Acc: 94.03% | LR: 0.00092 Epoch [725/900] Loss: 0.955 | Acc: 78.24% | Val Acc: 94.48% | LR: 0.00091 --> New Best Val Acc: 94.48% (Saved!) Epoch [726/900] Loss: 0.957 | Acc: 78.40% | Val Acc: 94.24% | LR: 0.00090 Epoch [727/900] Loss: 0.952 | Acc: 78.42% | Val Acc: 94.38% | LR: 0.00089 Epoch [728/900] Loss: 0.961 | Acc: 78.08% | Val Acc: 94.38% | LR: 0.00088 Epoch [729/900] Loss: 0.954 | Acc: 78.65% | Val Acc: 94.50% | LR: 0.00087 --> New Best Val Acc: 94.50% (Saved!) Epoch [730/900] Loss: 0.968 | Acc: 77.86% | Val Acc: 94.13% | LR: 0.00086 Epoch [731/900] Loss: 0.952 | Acc: 78.62% | Val Acc: 94.53% | LR: 0.00085 --> New Best Val Acc: 94.53% (Saved!) Epoch [732/900] Loss: 0.940 | Acc: 78.79% | Val Acc: 94.61% | LR: 0.00085 --> New Best Val Acc: 94.61% (Saved!) Epoch [733/900] Loss: 0.947 | Acc: 78.47% | Val Acc: 94.47% | LR: 0.00084 Epoch [734/900] Loss: 0.942 | Acc: 78.74% | Val Acc: 94.36% | LR: 0.00083 Epoch [735/900] Loss: 0.948 | Acc: 78.70% | Val Acc: 94.06% | LR: 0.00082 Epoch [736/900] Loss: 0.948 | Acc: 78.60% | Val Acc: 94.26% | LR: 0.00081 Epoch [737/900] Loss: 0.942 | Acc: 78.88% | Val Acc: 94.60% | LR: 0.00080 Epoch [738/900] Loss: 0.961 | Acc: 78.15% | Val Acc: 94.37% | LR: 0.00079 Epoch [739/900] Loss: 0.936 | Acc: 79.09% | Val Acc: 94.26% | LR: 0.00078 Epoch [740/900] Loss: 0.930 | Acc: 79.32% | Val Acc: 94.12% | LR: 0.00077 Epoch [741/900] Loss: 0.953 | Acc: 78.35% | Val Acc: 94.56% | LR: 0.00076 Epoch [742/900] Loss: 0.938 | Acc: 78.89% | Val Acc: 94.23% | LR: 0.00075 Epoch [743/900] Loss: 0.943 | Acc: 78.81% | Val Acc: 94.36% | LR: 0.00074 Epoch [744/900] Loss: 0.920 | Acc: 79.61% | Val Acc: 94.36% | LR: 0.00073 Epoch [745/900] Loss: 0.942 | Acc: 78.86% | Val Acc: 94.09% | LR: 0.00072 Epoch [746/900] Loss: 0.946 | Acc: 78.66% | Val Acc: 94.72% | LR: 0.00071 --> New Best Val Acc: 94.72% (Saved!) Epoch [747/900] Loss: 0.927 | Acc: 79.27% | Val Acc: 94.88% | LR: 0.00071 --> New Best Val Acc: 94.88% (Saved!) Epoch [748/900] Loss: 0.945 | Acc: 78.79% | Val Acc: 94.73% | LR: 0.00070 Epoch [749/900] Loss: 0.942 | Acc: 78.65% | Val Acc: 94.51% | LR: 0.00069 Epoch [750/900] Loss: 0.936 | Acc: 79.13% | Val Acc: 94.67% | LR: 0.00068 Epoch [751/900] Loss: 0.917 | Acc: 79.57% | Val Acc: 95.01% | LR: 0.00067 --> New Best Val Acc: 95.01% (Saved!) Epoch [752/900] Loss: 0.938 | Acc: 79.04% | Val Acc: 94.60% | LR: 0.00066 Epoch [753/900] Loss: 0.920 | Acc: 79.60% | Val Acc: 94.74% | LR: 0.00065 Epoch [754/900] Loss: 0.933 | Acc: 79.10% | Val Acc: 94.46% | LR: 0.00064 Epoch [755/900] Loss: 0.931 | Acc: 79.15% | Val Acc: 94.65% | LR: 0.00064 Epoch [756/900] Loss: 0.947 | Acc: 78.60% | Val Acc: 94.70% | LR: 0.00063 Epoch [757/900] Loss: 0.938 | Acc: 79.02% | Val Acc: 94.52% | LR: 0.00062 Epoch [758/900] Loss: 0.921 | Acc: 79.69% | Val Acc: 94.52% | LR: 0.00061 Epoch [759/900] Loss: 0.921 | Acc: 79.62% | Val Acc: 94.53% | LR: 0.00060 Epoch [760/900] Loss: 0.911 | Acc: 80.08% | Val Acc: 94.59% | LR: 0.00059 Epoch [761/900] Loss: 0.926 | Acc: 79.54% | Val Acc: 94.53% | LR: 0.00059 Epoch [762/900] Loss: 0.929 | Acc: 79.53% | Val Acc: 94.78% | LR: 0.00058 Epoch [763/900] Loss: 0.904 | Acc: 80.16% | Val Acc: 94.69% | LR: 0.00057 Epoch [764/900] Loss: 0.936 | Acc: 78.94% | Val Acc: 94.50% | LR: 0.00056 Epoch [765/900] Loss: 0.937 | Acc: 78.94% | Val Acc: 94.49% | LR: 0.00055 Epoch [766/900] Loss: 0.927 | Acc: 79.51% | Val Acc: 94.90% | LR: 0.00054 Epoch [767/900] Loss: 0.924 | Acc: 79.51% | Val Acc: 94.83% | LR: 0.00054 Epoch [768/900] Loss: 0.927 | Acc: 79.26% | Val Acc: 94.98% | LR: 0.00053 Epoch [769/900] Loss: 0.939 | Acc: 78.85% | Val Acc: 94.74% | LR: 0.00052 Epoch [770/900] Loss: 0.918 | Acc: 79.70% | Val Acc: 94.82% | LR: 0.00051 Epoch [771/900] Loss: 0.909 | Acc: 79.98% | Val Acc: 94.85% | LR: 0.00051 Epoch [772/900] Loss: 0.915 | Acc: 79.82% | Val Acc: 94.82% | LR: 0.00050 Epoch [773/900] Loss: 0.913 | Acc: 79.90% | Val Acc: 95.06% | LR: 0.00049 --> New Best Val Acc: 95.06% (Saved!) Epoch [774/900] Loss: 0.916 | Acc: 79.85% | Val Acc: 94.97% | LR: 0.00048 Epoch [775/900] Loss: 0.909 | Acc: 79.94% | Val Acc: 94.91% | LR: 0.00048 Epoch [776/900] Loss: 0.903 | Acc: 80.11% | Val Acc: 94.84% | LR: 0.00047 Epoch [777/900] Loss: 0.916 | Acc: 79.99% | Val Acc: 94.97% | LR: 0.00046 Epoch [778/900] Loss: 0.914 | Acc: 79.86% | Val Acc: 94.71% | LR: 0.00045 Epoch [779/900] Loss: 0.919 | Acc: 79.67% | Val Acc: 94.83% | LR: 0.00045 Epoch [780/900] Loss: 0.924 | Acc: 79.33% | Val Acc: 94.65% | LR: 0.00044 Epoch [781/900] Loss: 0.938 | Acc: 78.80% | Val Acc: 94.69% | LR: 0.00043 Epoch [782/900] Loss: 0.919 | Acc: 79.56% | Val Acc: 95.09% | LR: 0.00043 --> New Best Val Acc: 95.09% (Saved!) Epoch [783/900] Loss: 0.903 | Acc: 80.47% | Val Acc: 95.11% | LR: 0.00042 --> New Best Val Acc: 95.11% (Saved!) Epoch [784/900] Loss: 0.921 | Acc: 79.52% | Val Acc: 94.76% | LR: 0.00041 Epoch [785/900] Loss: 0.911 | Acc: 79.94% | Val Acc: 95.27% | LR: 0.00040 --> New Best Val Acc: 95.27% (Saved!) Epoch [786/900] Loss: 0.903 | Acc: 79.98% | Val Acc: 95.33% | LR: 0.00040 --> New Best Val Acc: 95.33% (Saved!) Epoch [787/900] Loss: 0.905 | Acc: 80.04% | Val Acc: 95.14% | LR: 0.00039 Epoch [788/900] Loss: 0.922 | Acc: 79.43% | Val Acc: 94.94% | LR: 0.00038 Epoch [789/900] Loss: 0.914 | Acc: 79.85% | Val Acc: 95.06% | LR: 0.00038 Epoch [790/900] Loss: 0.900 | Acc: 80.45% | Val Acc: 94.94% | LR: 0.00037 Epoch [791/900] Loss: 0.909 | Acc: 80.05% | Val Acc: 94.87% | LR: 0.00036 Epoch [792/900] Loss: 0.921 | Acc: 79.74% | Val Acc: 95.11% | LR: 0.00036 Epoch [793/900] Loss: 0.908 | Acc: 80.20% | Val Acc: 95.04% | LR: 0.00035 Epoch [794/900] Loss: 0.911 | Acc: 79.96% | Val Acc: 95.03% | LR: 0.00034 Epoch [795/900] Loss: 0.909 | Acc: 79.94% | Val Acc: 95.09% | LR: 0.00034 Epoch [796/900] Loss: 0.909 | Acc: 79.96% | Val Acc: 94.93% | LR: 0.00033 Epoch [797/900] Loss: 0.910 | Acc: 79.93% | Val Acc: 95.26% | LR: 0.00033 Epoch [798/900] Loss: 0.890 | Acc: 80.65% | Val Acc: 95.28% | LR: 0.00032 Epoch [799/900] Loss: 0.901 | Acc: 80.41% | Val Acc: 95.28% | LR: 0.00031 Epoch [800/900] Loss: 0.924 | Acc: 79.30% | Val Acc: 95.31% | LR: 0.00031 Epoch [801/900] Loss: 0.905 | Acc: 80.21% | Val Acc: 95.12% | LR: 0.00030 Epoch [802/900] Loss: 0.909 | Acc: 80.13% | Val Acc: 95.24% | LR: 0.00030 Epoch [803/900] Loss: 0.905 | Acc: 80.05% | Val Acc: 95.13% | LR: 0.00029 Epoch [804/900] Loss: 0.902 | Acc: 80.29% | Val Acc: 95.09% | LR: 0.00028 Epoch [805/900] Loss: 0.919 | Acc: 79.72% | Val Acc: 95.28% | LR: 0.00028 Epoch [806/900] Loss: 0.896 | Acc: 80.56% | Val Acc: 95.43% | LR: 0.00027 --> New Best Val Acc: 95.43% (Saved!) Epoch [807/900] Loss: 0.899 | Acc: 80.42% | Val Acc: 95.37% | LR: 0.00027 Epoch [808/900] Loss: 0.903 | Acc: 80.23% | Val Acc: 95.19% | LR: 0.00026 Epoch [809/900] Loss: 0.900 | Acc: 80.18% | Val Acc: 95.59% | LR: 0.00026 --> New Best Val Acc: 95.59% (Saved!) Epoch [810/900] Loss: 0.910 | Acc: 80.02% | Val Acc: 95.37% | LR: 0.00025 Epoch [811/900] Loss: 0.888 | Acc: 80.78% | Val Acc: 95.43% | LR: 0.00024 Epoch [812/900] Loss: 0.885 | Acc: 80.93% | Val Acc: 95.37% | LR: 0.00024 Epoch [813/900] Loss: 0.894 | Acc: 80.70% | Val Acc: 95.41% | LR: 0.00023 Epoch [814/900] Loss: 0.913 | Acc: 79.86% | Val Acc: 95.32% | LR: 0.00023 Epoch [815/900] Loss: 0.912 | Acc: 79.78% | Val Acc: 95.59% | LR: 0.00022 Epoch [816/900] Loss: 0.910 | Acc: 80.05% | Val Acc: 95.39% | LR: 0.00022 Epoch [817/900] Loss: 0.882 | Acc: 81.30% | Val Acc: 95.45% | LR: 0.00021 Epoch [818/900] Loss: 0.888 | Acc: 80.93% | Val Acc: 95.57% | LR: 0.00021 Epoch [819/900] Loss: 0.888 | Acc: 80.84% | Val Acc: 95.56% | LR: 0.00020 Epoch [820/900] Loss: 0.891 | Acc: 80.69% | Val Acc: 95.50% | LR: 0.00020 Epoch [821/900] Loss: 0.889 | Acc: 80.99% | Val Acc: 95.48% | LR: 0.00019 Epoch [822/900] Loss: 0.894 | Acc: 80.44% | Val Acc: 95.55% | LR: 0.00019 Epoch [823/900] Loss: 0.875 | Acc: 81.44% | Val Acc: 95.59% | LR: 0.00018 Epoch [824/900] Loss: 0.897 | Acc: 80.51% | Val Acc: 95.36% | LR: 0.00018 Epoch [825/900] Loss: 0.901 | Acc: 80.25% | Val Acc: 95.45% | LR: 0.00017 Epoch [826/900] Loss: 0.891 | Acc: 80.66% | Val Acc: 95.76% | LR: 0.00017 --> New Best Val Acc: 95.76% (Saved!) Epoch [827/900] Loss: 0.890 | Acc: 80.58% | Val Acc: 95.39% | LR: 0.00017 Epoch [828/900] Loss: 0.897 | Acc: 80.43% | Val Acc: 95.31% | LR: 0.00016 Epoch [829/900] Loss: 0.886 | Acc: 80.76% | Val Acc: 95.56% | LR: 0.00016 Epoch [830/900] Loss: 0.883 | Acc: 80.99% | Val Acc: 95.44% | LR: 0.00015 Epoch [831/900] Loss: 0.893 | Acc: 80.70% | Val Acc: 95.55% | LR: 0.00015 Epoch [832/900] Loss: 0.912 | Acc: 79.78% | Val Acc: 95.70% | LR: 0.00014 Epoch [833/900] Loss: 0.880 | Acc: 81.12% | Val Acc: 95.40% | LR: 0.00014 Epoch [834/900] Loss: 0.877 | Acc: 81.04% | Val Acc: 95.47% | LR: 0.00014 Epoch [835/900] Loss: 0.866 | Acc: 81.58% | Val Acc: 95.58% | LR: 0.00013 Epoch [836/900] Loss: 0.883 | Acc: 80.74% | Val Acc: 95.57% | LR: 0.00013 Epoch [837/900] Loss: 0.883 | Acc: 81.11% | Val Acc: 95.68% | LR: 0.00012 Epoch [838/900] Loss: 0.877 | Acc: 81.28% | Val Acc: 95.80% | LR: 0.00012 --> New Best Val Acc: 95.80% (Saved!) Epoch [839/900] Loss: 0.868 | Acc: 81.49% | Val Acc: 95.47% | LR: 0.00012 Epoch [840/900] Loss: 0.900 | Acc: 80.20% | Val Acc: 95.58% | LR: 0.00011 Epoch [841/900] Loss: 0.873 | Acc: 81.38% | Val Acc: 95.50% | LR: 0.00011 Epoch [842/900] Loss: 0.884 | Acc: 80.81% | Val Acc: 95.70% | LR: 0.00011 Epoch [843/900] Loss: 0.912 | Acc: 79.80% | Val Acc: 95.57% | LR: 0.00010 Epoch [844/900] Loss: 0.883 | Acc: 80.78% | Val Acc: 95.61% | LR: 0.00010 Epoch [845/900] Loss: 0.893 | Acc: 80.63% | Val Acc: 95.67% | LR: 0.00010 Epoch [846/900] Loss: 0.877 | Acc: 81.28% | Val Acc: 95.66% | LR: 0.00009 Epoch [847/900] Loss: 0.886 | Acc: 80.86% | Val Acc: 95.71% | LR: 0.00009 Epoch [848/900] Loss: 0.876 | Acc: 81.26% | Val Acc: 95.79% | LR: 0.00009 Epoch [849/900] Loss: 0.865 | Acc: 81.44% | Val Acc: 95.59% | LR: 0.00008 Epoch [850/900] Loss: 0.890 | Acc: 80.52% | Val Acc: 95.48% | LR: 0.00008 Epoch [851/900] Loss: 0.870 | Acc: 81.61% | Val Acc: 95.78% | LR: 0.00008 Epoch [852/900] Loss: 0.870 | Acc: 81.56% | Val Acc: 95.75% | LR: 0.00007 Epoch [853/900] Loss: 0.893 | Acc: 80.43% | Val Acc: 95.75% | LR: 0.00007 Epoch [854/900] Loss: 0.865 | Acc: 81.67% | Val Acc: 95.85% | LR: 0.00007 --> New Best Val Acc: 95.85% (Saved!) Epoch [855/900] Loss: 0.868 | Acc: 81.45% | Val Acc: 95.78% | LR: 0.00006 Epoch [856/900] Loss: 0.864 | Acc: 81.66% | Val Acc: 95.68% | LR: 0.00006 Epoch [857/900] Loss: 0.880 | Acc: 81.10% | Val Acc: 95.71% | LR: 0.00006 Epoch [858/900] Loss: 0.878 | Acc: 81.08% | Val Acc: 95.67% | LR: 0.00006 Epoch [859/900] Loss: 0.879 | Acc: 81.14% | Val Acc: 95.69% | LR: 0.00005 Epoch [860/900] Loss: 0.889 | Acc: 80.76% | Val Acc: 95.64% | LR: 0.00005 Epoch [861/900] Loss: 0.854 | Acc: 81.85% | Val Acc: 95.77% | LR: 0.00005 Epoch [862/900] Loss: 0.865 | Acc: 81.67% | Val Acc: 95.74% | LR: 0.00005 Epoch [863/900] Loss: 0.876 | Acc: 81.05% | Val Acc: 95.77% | LR: 0.00004 Epoch [864/900] Loss: 0.864 | Acc: 81.72% | Val Acc: 95.87% | LR: 0.00004 --> New Best Val Acc: 95.87% (Saved!) Epoch [865/900] Loss: 0.875 | Acc: 81.17% | Val Acc: 95.80% | LR: 0.00004 Epoch [866/900] Loss: 0.892 | Acc: 80.60% | Val Acc: 95.85% | LR: 0.00004 Epoch [867/900] Loss: 0.864 | Acc: 81.62% | Val Acc: 95.74% | LR: 0.00004 Epoch [868/900] Loss: 0.872 | Acc: 81.19% | Val Acc: 95.88% | LR: 0.00003 --> New Best Val Acc: 95.88% (Saved!) Epoch [869/900] Loss: 0.873 | Acc: 81.10% | Val Acc: 95.75% | LR: 0.00003 Epoch [870/900] Loss: 0.886 | Acc: 80.54% | Val Acc: 95.77% | LR: 0.00003 Epoch [871/900] Loss: 0.890 | Acc: 80.53% | Val Acc: 95.76% | LR: 0.00003 Epoch [872/900] Loss: 0.861 | Acc: 81.85% | Val Acc: 95.77% | LR: 0.00003 Epoch [873/900] Loss: 0.860 | Acc: 81.64% | Val Acc: 95.69% | LR: 0.00002 Epoch [874/900] Loss: 0.888 | Acc: 80.41% | Val Acc: 95.83% | LR: 0.00002 Epoch [875/900] Loss: 0.881 | Acc: 80.95% | Val Acc: 95.93% | LR: 0.00002 --> New Best Val Acc: 95.93% (Saved!) Epoch [876/900] Loss: 0.856 | Acc: 81.92% | Val Acc: 95.82% | LR: 0.00002 Epoch [877/900] Loss: 0.865 | Acc: 81.37% | Val Acc: 95.84% | LR: 0.00002 Epoch [878/900] Loss: 0.878 | Acc: 81.09% | Val Acc: 96.01% | LR: 0.00002 --> New Best Val Acc: 96.01% (Saved!) Epoch [879/900] Loss: 0.871 | Acc: 81.43% | Val Acc: 95.77% | LR: 0.00001 Epoch [880/900] Loss: 0.876 | Acc: 81.25% | Val Acc: 95.80% | LR: 0.00001 Epoch [881/900] Loss: 0.882 | Acc: 80.93% | Val Acc: 95.87% | LR: 0.00001 Epoch [882/900] Loss: 0.885 | Acc: 80.87% | Val Acc: 95.66% | LR: 0.00001 Epoch [883/900] Loss: 0.871 | Acc: 81.13% | Val Acc: 95.82% | LR: 0.00001 Epoch [884/900] Loss: 0.862 | Acc: 81.75% | Val Acc: 95.82% | LR: 0.00001 Epoch [885/900] Loss: 0.873 | Acc: 81.30% | Val Acc: 95.78% | LR: 0.00001 Epoch [886/900] Loss: 0.866 | Acc: 81.69% | Val Acc: 95.86% | LR: 0.00001 Epoch [887/900] Loss: 0.873 | Acc: 81.08% | Val Acc: 95.89% | LR: 0.00001 Epoch [888/900] Loss: 0.848 | Acc: 82.26% | Val Acc: 95.85% | LR: 0.00001 Epoch [889/900] Loss: 0.887 | Acc: 80.57% | Val Acc: 95.80% | LR: 0.00000 Epoch [890/900] Loss: 0.883 | Acc: 80.61% | Val Acc: 95.82% | LR: 0.00000 Epoch [891/900] Loss: 0.866 | Acc: 81.73% | Val Acc: 95.79% | LR: 0.00000 Epoch [892/900] Loss: 0.884 | Acc: 80.82% | Val Acc: 95.82% | LR: 0.00000 Epoch [893/900] Loss: 0.869 | Acc: 81.36% | Val Acc: 95.80% | LR: 0.00000 Epoch [894/900] Loss: 0.878 | Acc: 81.01% | Val Acc: 95.81% | LR: 0.00000 Epoch [895/900] Loss: 0.861 | Acc: 81.67% | Val Acc: 95.93% | LR: 0.00000 Epoch [896/900] Loss: 0.873 | Acc: 81.23% | Val Acc: 95.83% | LR: 0.00000 Epoch [897/900] Loss: 0.884 | Acc: 80.79% | Val Acc: 95.94% | LR: 0.00000 Epoch [898/900] Loss: 0.877 | Acc: 80.99% | Val Acc: 95.90% | LR: 0.00000 Epoch [899/900] Loss: 0.887 | Acc: 80.61% | Val Acc: 95.81% | LR: 0.00000 Epoch [900/900] Loss: 0.865 | Acc: 81.45% | Val Acc: 95.89% | LR: 0.00000
plt.clf()
plt.plot(train_loss_history, label="train")
plt.plot(val_loss_history, label="valid")
plt.legend(loc="upper right")
plt.xlabel("Epochs")
plt.ylabel("Loss")
Text(0, 0.5, 'Loss')
Results & Evaluation¶
This section evaluates the high-capacity version of the custom architecture (5.3M parameters). By increasing the convolution window size and extending the training to 900 epochs, the model achieves state-of-the-art performance for its size.
Verified Accuracy: The model reaches an elite 96.01%, placing it in the same performance bracket as EfficientNet-B0 (~ 5.3M params) and ResNet-18 (~ 11.7M params).
Efficiency Win: This architecture matches ResNet-level accuracy while utilizing less than half the parameter count (5.3M vs 11.7M), demonstrating the effectiveness of the bespoke design and the 900-epoch training regimen.
final_net = CNN().to(device)
try:
final_net.load_state_dict(torch.load('best_model_wide.pth'))
print("Successfully loaded the Best Model weights!")
except FileNotFoundError:
print("Weight file not found. Ensure 'best_model.pth' is in the current directory.")
final_net.eval()
all_preds = []
all_labels = []
with torch.no_grad():
for X, y in testloader:
X, y = X.to(device), y.to(device)
outputs = final_net(X)
all_preds.extend(outputs.argmax(1).cpu().numpy())
all_labels.extend(y.cpu().numpy())
from sklearn.metrics import confusion_matrix, classification_report
import numpy as np
accuracy = np.mean(np.array(all_preds) == np.array(all_labels))
print(f"\nFINAL VERIFIED ACCURACY: {accuracy*100:.2f}%")
print("\nDetailed Report:")
print(classification_report(all_labels, all_preds, target_names=trainset.classes))
C:\Users\alexa\AppData\Local\Temp\ipykernel_11956\4058561540.py:3: FutureWarning: You are using `torch.load` with `weights_only=False` (the current default value), which uses the default pickle module implicitly. It is possible to construct malicious pickle data which will execute arbitrary code during unpickling (See https://github.com/pytorch/pytorch/blob/main/SECURITY.md#untrusted-models for more details). In a future release, the default value for `weights_only` will be flipped to `True`. This limits the functions that could be executed during unpickling. Arbitrary objects will no longer be allowed to be loaded via this mode unless they are explicitly allowlisted by the user via `torch.serialization.add_safe_globals`. We recommend you start setting `weights_only=True` for any use case where you don't have full control of the loaded file. Please open an issue on GitHub for any issues related to this experimental feature.
final_net.load_state_dict(torch.load('best_model_wide.pth'))
Successfully loaded the Best Model weights!
FINAL VERIFIED ACCURACY: 96.01%
Detailed Report:
precision recall f1-score support
airplane 0.97 0.97 0.97 1000
automobile 0.97 0.99 0.98 1000
bird 0.94 0.95 0.95 1000
cat 0.92 0.90 0.91 1000
deer 0.98 0.96 0.97 1000
dog 0.91 0.94 0.92 1000
frog 0.97 0.97 0.97 1000
horse 0.99 0.98 0.98 1000
ship 0.97 0.98 0.98 1000
truck 0.98 0.96 0.97 1000
accuracy 0.96 10000
macro avg 0.96 0.96 0.96 10000
weighted avg 0.96 0.96 0.96 10000
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn.metrics import confusion_matrix
def plot_confusion_matrix(model, device, test_loader, classes):
model.eval()
all_preds = []
all_labels = []
with torch.no_grad():
for images, labels in test_loader:
images, labels = images.to(device), labels.to(device)
outputs = model(images)
_, preds = torch.max(outputs, 1)
all_preds.extend(preds.cpu().numpy())
all_labels.extend(labels.cpu().numpy())
# Compute confusion matrix
cm = confusion_matrix(all_labels, all_preds)
# Plotting
plt.figure(figsize=(10, 8))
sns.heatmap(cm, annot=True, fmt='d', cmap='Blues',
xticklabels=classes, yticklabels=classes)
plt.xlabel('Predicted Labels')
plt.ylabel('True Labels')
plt.title('Confusion Matrix: Where is the model failing?')
plt.show()
return cm
classes = ('plane', 'car', 'bird', 'cat', 'deer', 'dog', 'frog', 'horse', 'ship', 'truck')
cm = plot_confusion_matrix(final_net, device, testloader, classes)
print(cm)
[[966 2 7 1 0 0 1 1 15 7] [ 0 988 1 0 0 0 0 0 3 8] [ 8 0 951 12 5 13 7 1 2 1] [ 5 1 11 901 8 58 9 4 1 2] [ 0 0 11 12 963 6 5 2 1 0] [ 3 0 5 42 7 935 2 4 2 0] [ 2 1 12 6 1 4 972 1 0 1] [ 4 0 4 4 3 6 0 979 0 0] [ 7 4 3 1 0 0 0 0 983 2] [ 4 22 2 1 0 1 1 0 6 963]]