Sucran
9/14/2017 - 11:00 AM

PyTorch Basic Operations

Tensors.py --- tensor initialization
NumpyBridge.py --- converting a torch Tensor to a NumPy array and vice versa is a breeze. The torch Tensor and NumPy array share their underlying memory locations, so changing one will change the other.
Autograd.py --- some examples of autograd with Variable and Function

# numpy -----> torch

import numpy as np
import torch

c = np.ones(5)
d = torch.from_numpy(c)   # d shares memory with c
np.add(c, 1, out=c)       # modify c in place
print(c)                  # the change is visible in c ...
print(d)                  # ... and in d, since they share storage

# torch ----> numpy

a = torch.ones(5)
print(type(a))   # a torch tensor type
b = a.numpy()    # b shares memory with a
print(type(b))   # numpy.ndarray
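
# The memory sharing also holds in this direction: an in-place add on the
# torch Tensor shows up in the NumPy array. (A minimal sketch, not part of
# the original NumpyBridge.py; the variable names are my own.)

a = torch.ones(5)
b = a.numpy()
a.add_(1)    # in-place add on the torch Tensor
print(a)     # every value is now 2
print(b)     # the NumPy array reflects the same change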

# autograd with Variable

from torch.autograd import Variable

x = Variable(torch.ones(3), requires_grad=True)
y = x * 2
while y.data.norm() < 1000:
    y = y * 2
print(y)

grad = torch.FloatTensor([0.1, 1.0, 100.0])  # gradient to multiply into x.grad; must match y's shape
y.backward(grad)   # y is not a scalar, so backward() needs a gradient argument
print(x.grad)
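
# When the output is a single scalar, backward() needs no explicit gradient
# argument. (A minimal sketch, not part of the original Autograd.py; the
# variable names are my own.)

a = Variable(torch.ones(2, 2), requires_grad=True)
out = (a * 3).sum()   # scalar output
out.backward()        # no gradient argument needed for a scalar
print(a.grad)         # d(out)/d(a) is 3 for every element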

# tensor initialization (Tensors.py)

x = torch.Tensor(5, 3)   # constructs an uninitialized 5x3 tensor
print(x)

#-2.9226e-26  1.5549e-41  1.5885e+14
# 0.0000e+00  7.0065e-45  0.0000e+00
# 7.0065e-45  0.0000e+00  4.4842e-44
# 0.0000e+00  4.6243e-44  0.0000e+00
# 1.5810e+14  0.0000e+00  1.6196e+14
#[torch.FloatTensor of size 5x3]
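
# torch.Tensor(5, 3) allocates memory without initializing it, which is why
# the values above look arbitrary. For deterministic contents, explicit
# constructors can be used. (A small sketch, not part of the original
# Tensors.py.)

z = torch.zeros(5, 3)   # 5x3 tensor filled with zeros
o = torch.ones(5, 3)    # 5x3 tensor filled with ones
print(z)
print(o)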

x = torch.rand(5, 3)
print(x)

#0.8168  0.4588  0.8139
# 0.7271  0.3067  0.2826
# 0.1570  0.2931  0.3173
# 0.8638  0.6364  0.6177
# 0.2296  0.1411  0.1117
#[torch.FloatTensor of size 5x3]

print(x.size())
# torch.Size([5, 3])
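
# torch.Size is in fact a tuple, so it supports tuple operations such as
# unpacking. (A small sketch, not part of the original Tensors.py.)

rows, cols = x.size()
print(rows)   # 5
print(cols)   # 3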