A Sample of a Tensorial Convolutional Neural Network
A tensorial CNN is constructed by replacing convolutional kernels with tensor cores. Here is a tensor-ring example showing how to use a TR-based model with tednet.
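To make this concrete, below is a minimal, library-free sketch of the tensor-ring (TR) format (for illustration only; it is not tednet's internal implementation). A TR decomposition stores a d-way tensor as a cycle of small cores G_k of shape (r_k, n_k, r_{k+1}) with r_{d+1} = r_1; contracting the cores and tracing over the boundary rank reconstructs the full tensor.

import torch

def tr_reconstruct(cores):
    """Contract TR cores G_k of shape (r_k, n_k, r_{k+1}), with the last
    rank wrapping around to the first, back into the full tensor."""
    out = cores[0]  # (r1, n1, r2)
    for core in cores[1:]:
        # (r1, N, r) x (r, n, r') -> (r1, N * n, r')
        out = torch.einsum('aib,bjc->aijc', out, core)
        out = out.reshape(out.shape[0], -1, out.shape[-1])
    # Close the ring: trace over the boundary rank r1.
    full = torch.einsum('aia->i', out)
    return full.reshape([c.shape[1] for c in cores])

# Toy example: rebuild a 4 x 5 x 6 tensor from ring cores of ranks (2, 3, 4).
cores = [torch.randn(2, 4, 3), torch.randn(3, 5, 4), torch.randn(4, 6, 2)]
print(tr_reconstruct(cores).shape)  # torch.Size([4, 5, 6])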
[1]:
from managpu import GpuManager

# Reserve one GPU, chosen by free memory, for this process.
my_gpu = GpuManager()
my_gpu.set_by_memory(1)
import random
import tednet as tdt
import tednet.tnn.tensor_ring as tr
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torchvision import datasets, transforms
No GPU Util Limit!
Sorted by memory:
GPU Index: 1 GPU FreeMemory: 11176 MB GPU Util: 0%
GPU Index: 2 GPU FreeMemory: 11176 MB GPU Util: 0%
GPU Index: 4 GPU FreeMemory: 11176 MB GPU Util: 0%
GPU Index: 0 GPU FreeMemory: 6133 MB GPU Util: 74%
GPU Index: 3 GPU FreeMemory: 1109 MB GPU Util: 100%
GPU Index: 5 GPU FreeMemory: 1109 MB GPU Util: 100%
GPU Index: 6 GPU FreeMemory: 1109 MB GPU Util: 100%
GPU Index: 7 GPU FreeMemory: 1109 MB GPU Util: 0%
Qualified GPU Index is: [1]
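managpu is only a convenience for grabbing a free GPU on a shared server. If it is unavailable, a plain fallback is to pin a device by hand; a minimal sketch, assuming GPU index 1 as selected above:

import os
import torch

# Pin the process to GPU 1 before any CUDA initialization;
# torch then sees it as device 0.
os.environ["CUDA_VISIBLE_DEVICES"] = "1"
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")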
Set up the basic environment
[2]:
use_cuda = torch.cuda.is_available()
device = torch.device("cuda" if use_cuda else "cpu")

# Fix every random seed for reproducibility.
seed = 233
random.seed(seed)
np.random.seed(seed)
torch.manual_seed(seed)

if use_cuda:
    torch.cuda.manual_seed_all(seed)
    # cudnn.benchmark=True trades determinism for speed; keep it off
    # so the deterministic flag below actually takes effect.
    torch.backends.cudnn.benchmark = False
    torch.backends.cudnn.deterministic = True
Set up the data loaders
[3]:
kwargs = {'num_workers': 1, 'pin_memory': True} if use_cuda else {}
train_loader = torch.utils.data.DataLoader(
datasets.MNIST('./data', train=True, download=True,
transform=transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.1307,), (0.3081,))
])),
batch_size=128, shuffle=True, **kwargs)
test_loader = torch.utils.data.DataLoader(
datasets.MNIST('./data', train=False, transform=transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.1307,), (0.3081,))
])),
batch_size=256, shuffle=True, **kwargs)
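As an optional sanity check, pulling one batch confirms the normalized MNIST shapes match the batch size set above:

images, labels = next(iter(train_loader))
print(images.shape, labels.shape)  # torch.Size([128, 1, 28, 28]) torch.Size([128])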
Define the training and testing processes
[4]:
def train(model, device, train_loader, optimizer, epoch, log_interval=200):
model.train()
for batch_idx, (data, target) in enumerate(train_loader):
data, target = data.to(device), target.to(device)
optimizer.zero_grad()
output = model(data)
loss = F.cross_entropy(output, target)
loss.backward()
optimizer.step()
if batch_idx % log_interval == 0:
print('Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}'.format(
epoch, batch_idx * len(data), len(train_loader.dataset),
100. * batch_idx / len(train_loader), loss.item()))
def test(model, device, test_loader):
model.eval()
test_loss = 0
correct = 0
with torch.no_grad():
for data, target in test_loader:
data, target = data.to(device), target.to(device)
output = model(data)
test_loss += F.cross_entropy(output, target, reduction='sum').item() # sum up batch loss
pred = output.argmax(dim=1, keepdim=True) # get the index of the max log-probability
correct += pred.eq(target.view_as(pred)).sum().item()
test_loss /= len(test_loader.dataset)
print('\nTest set: Average loss: {:.4f}, Accuracy: {}/{} ({:.0f}%)\n'.format(
test_loss, correct, len(test_loader.dataset),
100. * correct / len(test_loader.dataset)))
Begin training
[5]:
# Define a TR-LeNet5 with 10 output classes and tensor-ring ranks [6, 6, 6, 6]
model = tr.TRLeNet5(10, [6, 6, 6, 6])
model.to(device)

optimizer = optim.SGD(model.parameters(), lr=2e-2, momentum=0.9, weight_decay=5e-4)
for epoch in range(20):
train(model, device, train_loader, optimizer, epoch)
test(model, device, test_loader)
compression_ration is: 0.3968253968253968
compression_ration is: 14.17233560090703
compression_ration is: 241.54589371980677
compression_ration is: 2.867383512544803
Train Epoch: 0 [0/60000 (0%)] Loss: 2.633792
Train Epoch: 0 [25600/60000 (43%)] Loss: 0.109367
Train Epoch: 0 [51200/60000 (85%)] Loss: 0.133933
Test set: Average loss: 0.0756, Accuracy: 9751/10000 (98%)
Train Epoch: 1 [0/60000 (0%)] Loss: 0.074946
Train Epoch: 1 [25600/60000 (43%)] Loss: 0.039371
Train Epoch: 1 [51200/60000 (85%)] Loss: 0.029103
Test set: Average loss: 0.0691, Accuracy: 9782/10000 (98%)
Train Epoch: 2 [0/60000 (0%)] Loss: 0.113578
Train Epoch: 2 [25600/60000 (43%)] Loss: 0.099431
Train Epoch: 2 [51200/60000 (85%)] Loss: 0.084437
Test set: Average loss: 0.0544, Accuracy: 9826/10000 (98%)
Train Epoch: 3 [0/60000 (0%)] Loss: 0.130137
Train Epoch: 3 [25600/60000 (43%)] Loss: 0.083295
Train Epoch: 3 [51200/60000 (85%)] Loss: 0.021406
Test set: Average loss: 0.0608, Accuracy: 9799/10000 (98%)
Train Epoch: 4 [0/60000 (0%)] Loss: 0.044310
Train Epoch: 4 [25600/60000 (43%)] Loss: 0.025041
Train Epoch: 4 [51200/60000 (85%)] Loss: 0.017827
Test set: Average loss: 0.0446, Accuracy: 9861/10000 (99%)
Train Epoch: 5 [0/60000 (0%)] Loss: 0.035976
Train Epoch: 5 [25600/60000 (43%)] Loss: 0.130144
Train Epoch: 5 [51200/60000 (85%)] Loss: 0.066351
Test set: Average loss: 0.0457, Accuracy: 9854/10000 (99%)
Train Epoch: 6 [0/60000 (0%)] Loss: 0.071825
Train Epoch: 6 [25600/60000 (43%)] Loss: 0.031684
Train Epoch: 6 [51200/60000 (85%)] Loss: 0.049287
Test set: Average loss: 0.0444, Accuracy: 9854/10000 (99%)
Train Epoch: 7 [0/60000 (0%)] Loss: 0.074904
Train Epoch: 7 [25600/60000 (43%)] Loss: 0.083052
Train Epoch: 7 [51200/60000 (85%)] Loss: 0.021132
Test set: Average loss: 0.0397, Accuracy: 9880/10000 (99%)
Train Epoch: 8 [0/60000 (0%)] Loss: 0.020113
Train Epoch: 8 [25600/60000 (43%)] Loss: 0.022854
Train Epoch: 8 [51200/60000 (85%)] Loss: 0.008770
Test set: Average loss: 0.0424, Accuracy: 9866/10000 (99%)
Train Epoch: 9 [0/60000 (0%)] Loss: 0.007447
Train Epoch: 9 [25600/60000 (43%)] Loss: 0.095077
Train Epoch: 9 [51200/60000 (85%)] Loss: 0.018731
Test set: Average loss: 0.0339, Accuracy: 9896/10000 (99%)
Train Epoch: 10 [0/60000 (0%)] Loss: 0.025279
Train Epoch: 10 [25600/60000 (43%)] Loss: 0.038482
Train Epoch: 10 [51200/60000 (85%)] Loss: 0.043692
Test set: Average loss: 0.0391, Accuracy: 9882/10000 (99%)
Train Epoch: 11 [0/60000 (0%)] Loss: 0.022135
Train Epoch: 11 [25600/60000 (43%)] Loss: 0.008357
Train Epoch: 11 [51200/60000 (85%)] Loss: 0.031139
Test set: Average loss: 0.0380, Accuracy: 9882/10000 (99%)
Train Epoch: 12 [0/60000 (0%)] Loss: 0.004145
Train Epoch: 12 [25600/60000 (43%)] Loss: 0.024185
Train Epoch: 12 [51200/60000 (85%)] Loss: 0.030595
Test set: Average loss: 0.0354, Accuracy: 9887/10000 (99%)
Train Epoch: 13 [0/60000 (0%)] Loss: 0.013407
Train Epoch: 13 [25600/60000 (43%)] Loss: 0.008846
Train Epoch: 13 [51200/60000 (85%)] Loss: 0.061894
Test set: Average loss: 0.0380, Accuracy: 9867/10000 (99%)
Train Epoch: 14 [0/60000 (0%)] Loss: 0.017808
Train Epoch: 14 [25600/60000 (43%)] Loss: 0.002656
Train Epoch: 14 [51200/60000 (85%)] Loss: 0.013447
Test set: Average loss: 0.0354, Accuracy: 9887/10000 (99%)
Train Epoch: 15 [0/60000 (0%)] Loss: 0.009893
Train Epoch: 15 [25600/60000 (43%)] Loss: 0.081577
Train Epoch: 15 [51200/60000 (85%)] Loss: 0.018266
Test set: Average loss: 0.0326, Accuracy: 9893/10000 (99%)
Train Epoch: 16 [0/60000 (0%)] Loss: 0.011158
Train Epoch: 16 [25600/60000 (43%)] Loss: 0.004466
Train Epoch: 16 [51200/60000 (85%)] Loss: 0.034247
Test set: Average loss: 0.0343, Accuracy: 9891/10000 (99%)
Train Epoch: 17 [0/60000 (0%)] Loss: 0.030956
Train Epoch: 17 [25600/60000 (43%)] Loss: 0.010426
Train Epoch: 17 [51200/60000 (85%)] Loss: 0.061093
Test set: Average loss: 0.0315, Accuracy: 9897/10000 (99%)
Train Epoch: 18 [0/60000 (0%)] Loss: 0.017390
Train Epoch: 18 [25600/60000 (43%)] Loss: 0.023027
Train Epoch: 18 [51200/60000 (85%)] Loss: 0.029767
Test set: Average loss: 0.0332, Accuracy: 9888/10000 (99%)
Train Epoch: 19 [0/60000 (0%)] Loss: 0.034303
Train Epoch: 19 [25600/60000 (43%)] Loss: 0.003748
Train Epoch: 19 [51200/60000 (85%)] Loss: 0.026581
Test set: Average loss: 0.0307, Accuracy: 9898/10000 (99%)
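The compression_ration lines printed when the model was built (the spelling comes from tednet itself) report the per-layer ratio of dense to TR parameter counts; a value below 1, as for the tiny first convolution, means tensorizing that layer actually adds parameters. A rough end-to-end check of the same bookkeeping is sketched below, assuming a plain-PyTorch LeNet-5 with the classic MNIST layer sizes as the dense reference (this reference is hypothetical and may not mirror TRLeNet5 exactly):

import torch.nn as nn

# Hypothetical dense LeNet-5 reference for MNIST (28x28 inputs).
dense_lenet5 = nn.Sequential(
    nn.Conv2d(1, 20, 5), nn.ReLU(), nn.MaxPool2d(2),
    nn.Conv2d(20, 50, 5), nn.ReLU(), nn.MaxPool2d(2),
    nn.Flatten(),
    nn.Linear(50 * 4 * 4, 500), nn.ReLU(),
    nn.Linear(500, 10),
)

# Compare total parameter counts of the trained TR model and the dense reference.
tr_params = sum(p.numel() for p in model.parameters())
dense_params = sum(p.numel() for p in dense_lenet5.parameters())
print(f"TR params: {tr_params}, dense params: {dense_params}, "
      f"overall ratio: {dense_params / tr_params:.1f}x")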