stack
>>> y = torch.tensor([1, 2])
>>> torch.stack([y, y], dim=0)
tensor([[1, 2],
        [1, 2]])
>>> torch.stack([y, y], dim=1)
tensor([[1, 1],
        [2, 2]])
torch.bmm
>>> batch1 = torch.randn(10, 3, 4)
>>> batch2 = torch.randn(10, 4, 5)
>>> res = torch.bmm(batch1, batch2)
>>> res.size()
torch.Size([10, 3, 5])
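For 3-D inputs, torch.matmul (the @ operator) performs the same batched multiplication, so the two are interchangeable here; a small sketch:

import torch

batch1 = torch.randn(10, 3, 4)
batch2 = torch.randn(10, 4, 5)

# bmm multiplies matching matrices per batch: (b, n, m) x (b, m, p) -> (b, n, p)
res_bmm = torch.bmm(batch1, batch2)

# torch.matmul batches automatically; for 3-D inputs it matches bmm
res_matmul = batch1 @ batch2

print(res_bmm.shape)                        # torch.Size([10, 3, 5])
print(torch.allclose(res_bmm, res_matmul))  # True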
unsqueeze / squeeze
# assuming query: (batch, hidden) and key: (batch, seq_len, hidden) -> energy: (batch, seq_len)
energy = torch.bmm(query.unsqueeze(1), key.transpose(1, 2)).squeeze(dim=1)
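A self-contained sketch of that line with made-up shapes; query and key here are hypothetical stand-ins for whatever the surrounding model provides:

import torch

batch, seq_len, hidden = 4, 7, 16
query = torch.randn(batch, hidden)           # one query vector per example
key = torch.randn(batch, seq_len, hidden)    # seq_len key vectors per example

q = query.unsqueeze(1)                       # (batch, 1, hidden)
k_t = key.transpose(1, 2)                    # (batch, hidden, seq_len)
energy = torch.bmm(q, k_t).squeeze(dim=1)    # (batch, 1, seq_len) -> (batch, seq_len)

print(energy.shape)                          # torch.Size([4, 7])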
softmax
>>> torch.softmax(torch.Tensor([[1, 1, 8], [2, 2, 2]]), dim=1)
tensor([[0.0009, 0.0009, 0.9982],
        [0.3333, 0.3333, 0.3333]])
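The dim argument decides which axis is normalized; a small sketch contrasting dim=1 (rows, as above) with dim=0 (columns):

import torch

x = torch.tensor([[1., 1., 8.],
                  [2., 2., 2.]])

print(torch.softmax(x, dim=1))   # each row sums to 1 (the output shown above)
print(torch.softmax(x, dim=0))   # each column sums to 1 instead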
---cat---
>>> x = torch.randn(2, 3)
>>> x
tensor([[ 0.6580, -1.0969, -0.4614],
        [-0.1034, -0.5790,  0.1497]])
>>> torch.cat((x, x, x), 0)
tensor([[ 0.6580, -1.0969, -0.4614],
        [-0.1034, -0.5790,  0.1497],
        [ 0.6580, -1.0969, -0.4614],
        [-0.1034, -0.5790,  0.1497],
        [ 0.6580, -1.0969, -0.4614],
        [-0.1034, -0.5790,  0.1497]])
>>> torch.cat((x, x, x), 1)
tensor([[ 0.6580, -1.0969, -0.4614,  0.6580, -1.0969, -0.4614,  0.6580,
         -1.0969, -0.4614],
        [-0.1034, -0.5790,  0.1497, -0.1034, -0.5790,  0.1497, -0.1034,
         -0.5790,  0.1497]])
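cat only requires the non-concatenated dimensions to match; the size along the cat dimension itself may differ. A quick sketch with made-up shapes:

import torch

a = torch.randn(2, 3)
b = torch.randn(5, 3)

# dim 0 sizes may differ (2 and 5); all other dims must match
print(torch.cat((a, b), dim=0).shape)   # torch.Size([7, 3])

# torch.cat((a, b), dim=1) would raise an error: dim 0 sizes 2 and 5 don't match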
---zip---
>>> a = [1, 2]
>>> b = ['a', 'b']
>>> for x in zip(a, b):
...     print(x)
...
(1, 'a')
(2, 'b')
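Two zip details worth remembering: it stops at the shortest input, and zip(*pairs) undoes the pairing. A quick sketch:

a = [1, 2, 3]
b = ['a', 'b']

print(list(zip(a, b)))         # [(1, 'a'), (2, 'b')] -- stops at the shorter list

nums, chars = zip(*zip(a, b))  # zip(*pairs) "unzips" back into separate tuples
print(nums, chars)             # (1, 2) ('a', 'b')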
---max---
>>> x = torch.tensor([[1, 2], [9, 4], [6, 78]])
>>> torch.max(x, dim=1)
(tensor([ 2,  9, 78]), tensor([1, 0, 1]))
>>> torch.max(torch.Tensor([[1, 2], [3, 4]]), dim=1)
(tensor([2., 4.]), tensor([1, 1]))
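torch.max with a dim returns a (values, indices) pair, so it can be unpacked directly; torch.argmax gives only the indices. A quick sketch:

import torch

x = torch.tensor([[1., 2.], [3., 4.]])

values, indices = torch.max(x, dim=1)   # unpack the (values, indices) pair
print(values)                           # tensor([2., 4.])
print(indices)                          # tensor([1, 1])

print(torch.argmax(x, dim=1))           # tensor([1, 1]) -- indices only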