transpose(), adjoint(), mT, mH, view(), reshape() in PyTorch

*My post explains permute() and movedim().

transpose() can swap two dimensions of a 0D or more D tensor, and adjoint(), mT or mH can transpose the last two dimensions of a 2D or more D tensor, without losing data, as shown below:
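
*As a quick sketch of the dimension requirements (assuming current PyTorch behavior; the main 3D example follows):

import torch

torch.tensor(7).transpose(0, 0)
# tensor(7). A 0D tensor is returned unchanged.
torch.tensor([0, 1, 2]).transpose(0, 0)
# tensor([0, 1, 2]). A 1D tensor is returned unchanged.
# torch.tensor([0, 1, 2]).mT raises RuntimeError because mT, mH and
# adjoint() need at least a 2D tensor.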

import torch

my_tensor = torch.tensor([[[0, 1, 2], [3, 4, 5]],
[[6, 7, 8], [9, 10, 11]],
[[12, 13, 14], [15, 16, 17]],
[[18, 19, 20], [21, 22, 23]]])
# The size is [4, 2, 3].
torch.transpose(my_tensor, 0, 0)
my_tensor.transpose(0, 0)
torch.transpose(my_tensor, 1, 1)
my_tensor.transpose(1, 1)
torch.transpose(my_tensor, 2, 2)
my_tensor.transpose(2, 2)
torch.transpose(my_tensor, 1, -2)
my_tensor.transpose(1, -2)
torch.transpose(my_tensor, -2, 1)
my_tensor.transpose(-2, 1)
torch.transpose(my_tensor, 2, -1)
my_tensor.transpose(2, -1)
torch.transpose(my_tensor, -1, 2)
my_tensor.transpose(-1, 2)
torch.transpose(my_tensor, -1, -1)
my_tensor.transpose(-1, -1)
torch.transpose(my_tensor, -2, -2)
my_tensor.transpose(-2, -2)
torch.transpose(my_tensor, -3, -3)
my_tensor.transpose(-3, -3)
# Each call above swaps a dimension with itself (negative indices count
# from the end), so the tensor is returned unchanged.
# tensor([[[0, 1, 2], [3, 4, 5]],
# [[6, 7, 8], [9, 10, 11]],
# [[12, 13, 14], [15, 16, 17]],
# [[18, 19, 20], [21, 22, 23]]])
# The size is [4, 2, 3].

torch.transpose(my_tensor, 0, 1)
my_tensor.transpose(0, 1)
torch.transpose(my_tensor, 1, 0)
my_tensor.transpose(1, 0)
torch.transpose(my_tensor, 0, -2)
my_tensor.transpose(0, -2)
torch.transpose(my_tensor, -2, 0)
my_tensor.transpose(-2, 0)
# Each call above swaps dimension 0 and dimension 1 (-2 also refers to
# dimension 1 here).
# tensor([[[0, 1, 2], [6, 7, 8], [12, 13, 14], [18, 19, 20]],
# [[3, 4, 5], [ 9, 10, 11], [15, 16, 17], [21, 22, 23]]])
# The size is [2, 4, 3].

torch.transpose(my_tensor, 0, 2)
my_tensor.transpose(0, 2)
torch.transpose(my_tensor, 2, 0)
my_tensor.transpose(2, 0)
torch.transpose(my_tensor, 0, -1)
my_tensor.transpose(0, -1)
torch.transpose(my_tensor, -1, 0)
my_tensor.transpose(-1, 0)
# Each call above swaps dimension 0 and dimension 2 (-1 also refers to
# dimension 2 here).
# tensor([[[0, 6, 12, 18], [3, 9, 15, 21]],
# [[1, 7, 13, 19], [4, 10, 16, 22]],
# [[2, 8, 14, 20], [5, 11, 17, 23]]])
# The size is [3, 2, 4].

torch.transpose(my_tensor, 1, 2)
my_tensor.transpose(1, 2)
torch.transpose(my_tensor, 2, 1)
my_tensor.transpose(2, 1)
torch.transpose(my_tensor, 1, -1)
my_tensor.transpose(1, -1)
torch.transpose(my_tensor, -1, 1)
my_tensor.transpose(-1, 1)
torch.transpose(my_tensor, -1, -2)
my_tensor.transpose(-1, -2)
torch.transpose(my_tensor, -2, -1)
my_tensor.transpose(-2, -1)
torch.transpose(my_tensor, 2, -2)
my_tensor.transpose(2, -2)
# Each call above swaps dimension 1 and dimension 2; adjoint(), mT and mH
# below transpose the last two dimensions, which is the same swap here.
torch.adjoint(my_tensor)
my_tensor.adjoint()
my_tensor.mT
my_tensor.mH
# tensor([[[0, 3], [1, 4], [2, 5]],
# [[6, 9], [7, 10], [8, 11]],
# [[12, 15], [13, 16], [14, 17]],
# [[18, 21], [19, 22], [20, 23]]])
# The size is [4, 3, 2].

*Memos:

transpose() and adjoint() can be used both from torch and from a tensor.
With torch.transpose(), the 2nd and 3rd arguments are the two dimensions to swap.
With Tensor.transpose(), the 1st and 2nd arguments are the two dimensions to swap. Negative dimensions count from the end.

transpose(), swapaxes() and swapdims() are the same because swapaxes() and swapdims() are aliases of transpose().
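
*For example, a quick sketch (rebuilding the same values with torch.arange()) showing that the aliases return the same result:

import torch

my_tensor = torch.arange(24).reshape(4, 2, 3)
my_tensor.transpose(1, 2)
torch.swapaxes(my_tensor, 1, 2)
torch.swapdims(my_tensor, 1, 2)
my_tensor.swapaxes(1, 2)
my_tensor.swapdims(1, 2)
# All five calls return the same tensor whose size is [4, 3, 2].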

mT and mH can be used only from a tensor, not from torch.

adjoint(), mT and mH have only one behavior: they always transpose the last two dimensions and take no dimension arguments. adjoint() and mH also take the complex conjugate, which makes no difference for a real tensor like the one above, so all four results above are identical.
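
*A small sketch with a 2x2 complex tensor showing the difference between mT and mH/adjoint():

import torch

my_complex = torch.tensor([[1+2j, 3-4j],
                           [5+6j, 7-8j]])
my_complex.mT # Transposes only.
# tensor([[1.+2.j, 5.+6.j],
#         [3.-4.j, 7.-8.j]])
my_complex.mH # Transposes and conjugates, the same as my_complex.adjoint().
# tensor([[1.-2.j, 5.-6.j],
#         [3.+4.j, 7.+8.j]])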

view() or reshape() can reshape a 0D or more D tensor without losing data by setting the desired size, as shown below:
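
*A minimal sketch of the 0D edge case first, assuming current PyTorch behavior (the main examples below use a 3D tensor):

import torch

torch.tensor(7).reshape(1)
# tensor([7]). The size is [1].
torch.tensor(7).view(-1)
# tensor([7]). The size is [1].
torch.tensor([7]).reshape(())
# tensor(7). The size is [].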

import torch

my_tensor = torch.tensor([[[0, 1, 2], [3, 4, 5]],
[[6, 7, 8], [9, 10, 11]],
[[12, 13, 14], [15, 16, 17]],
[[18, 19, 20], [21, 22, 23]]])
# The size is [4, 2, 3].
my_tensor.view(24)
my_tensor.view(-1)
my_tensor.view((24,))
my_tensor.view((-1,))
my_tensor.reshape(24)
my_tensor.reshape(-1)
my_tensor.reshape((24,))
my_tensor.reshape((-1,))
torch.reshape(my_tensor, (24,))
torch.reshape(my_tensor, (-1,))
# tensor([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11,
# 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23])
# The size is [24].

my_tensor.view(2, 12)
my_tensor.view(-1, 12)
my_tensor.view((2, 12))
my_tensor.view((-1, 12))
my_tensor.reshape(2, 12)
my_tensor.reshape(-1, 12)
my_tensor.reshape((2, 12))
my_tensor.reshape((-1, 12))
torch.reshape(my_tensor, (2, 12))
torch.reshape(my_tensor, (-1, 12))
# tensor([[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11],
# [12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23]])
# The size is [2, 12].

my_tensor.view(3, 8)
my_tensor.view(-1, 8)
my_tensor.view((3, 8))
my_tensor.view((-1, 8))
my_tensor.reshape(3, 8)
my_tensor.reshape(-1, 8)
my_tensor.reshape((3, 8))
my_tensor.reshape((-1, 8))
torch.reshape(my_tensor, (3, 8))
torch.reshape(my_tensor, (-1, 8))
# tensor([[0, 1, 2, 3, 4, 5, 6, 7],
# [8, 9, 10, 11, 12, 13, 14, 15],
# [16, 17, 18, 19, 20, 21, 22, 23]])
# The size is [3, 8].

my_tensor.view(4, 6)
my_tensor.view(-1, 6)
my_tensor.view((4, 6))
my_tensor.view((-1, 6))
my_tensor.reshape(4, 6)
my_tensor.reshape(-1, 6)
my_tensor.reshape((4, 6))
my_tensor.reshape((-1, 6))
torch.reshape(my_tensor, (4, 6))
torch.reshape(my_tensor, (-1, 6))
# tensor([[0, 1, 2, 3, 4, 5],
# [6, 7, 8, 9, 10, 11],
# [12, 13, 14, 15, 16, 17],
# [18, 19, 20, 21, 22, 23]])
# The size is [4, 6].

my_tensor.view(6, 4)
my_tensor.view(-1, 4)
my_tensor.view((6, 4))
my_tensor.view((-1, 4))
my_tensor.reshape(6, 4)
my_tensor.reshape(-1, 4)
my_tensor.reshape((6, 4))
my_tensor.reshape((-1, 4))
torch.reshape(my_tensor, (6, 4))
torch.reshape(my_tensor, (-1, 4))
# tensor([[0, 1, 2, 3],
# [4, 5, 6, 7],
# [8, 9, 10, 11],
# [12, 13, 14, 15],
# [16, 17, 18, 19],
# [20, 21, 22, 23]])
# The size is [6, 4].

my_tensor.view(8, 3)
my_tensor.view(-1, 3)
my_tensor.view((8, 3))
my_tensor.view((-1, 3))
my_tensor.reshape(8, 3)
my_tensor.reshape(-1, 3)
my_tensor.reshape((8, 3))
my_tensor.reshape((-1, 3))
torch.reshape(my_tensor, (8, 3))
torch.reshape(my_tensor, (-1, 3))
# tensor([[0, 1, 2],
# [3, 4, 5],
# [6, 7, 8],
# [9, 10, 11],
# [12, 13, 14],
# [15, 16, 17],
# [18, 19, 20],
# [21, 22, 23]])
# The size is [8, 3].

my_tensor.view(12, 2)
my_tensor.view(-1, 2)
my_tensor.view((12, 2))
my_tensor.view((-1, 2))
my_tensor.reshape(12, 2)
my_tensor.reshape(-1, 2)
my_tensor.reshape((12, 2))
my_tensor.reshape((-1, 2))
torch.reshape(my_tensor, (12, 2))
torch.reshape(my_tensor, (-1, 2))
# tensor([[0, 1],
# [2, 3],
# [4, 5],
# [6, 7],
# [8, 9],
# [10, 11],
# [12, 13],
# [14, 15],
# [16, 17],
# [18, 19],
# [20, 21],
# [22, 23]])
# The size is [12, 2].

my_tensor.view(24, 1)
my_tensor.view(-1, 1)
my_tensor.view((24, 1))
my_tensor.view((-1, 1))
my_tensor.reshape(24, 1)
my_tensor.reshape(-1, 1)
my_tensor.reshape((24, 1))
my_tensor.reshape((-1, 1))
torch.reshape(my_tensor, (24, 1))
torch.reshape(my_tensor, (-1, 1))
# tensor([[0],
# [1],
# [2],
# [3],
# [4],
# [5],
# [6],
# [7],
# [8],
# [9],
# [10],
# [11],
# [12],
# [13],
# [14],
# [15],
# [16],
# [17],
# [18],
# [19],
# [20],
# [21],
# [22],
# [23]])
# The size is [24, 1].
etc.

my_tensor.view(2, 3, 4)
my_tensor.view(-1, 3, 4)
my_tensor.view((2, 3, 4))
my_tensor.view((-1, 3, 4))
my_tensor.reshape(2, 3, 4)
my_tensor.reshape(-1, 3, 4)
my_tensor.reshape((2, 3, 4))
my_tensor.reshape((-1, 3, 4))
torch.reshape(my_tensor, (2, 3, 4))
torch.reshape(my_tensor, (-1, 3, 4))
# tensor([[[0, 1, 2, 3], [4, 5, 6, 7], [8, 9, 10, 11]],
# [[12, 13, 14, 15], [16, 17, 18, 19], [20, 21, 22, 23]]])
# The size is [2, 3, 4].
etc.

my_tensor.view(3, 2, 2, 2)
my_tensor.view(-1, 2, 2, 2)
my_tensor.view((3, 2, 2, 2))
my_tensor.view((-1, 2, 2, 2))
my_tensor.reshape(3, 2, 2, 2)
my_tensor.reshape(-1, 2, 2, 2)
my_tensor.reshape((3, 2, 2, 2))
my_tensor.reshape((-1, 2, 2, 2))
torch.reshape(my_tensor, (3, 2, 2, 2))
torch.reshape(my_tensor, (-1, 2, 2, 2))
# tensor([[[[0, 1], [2, 3]],
# [[4, 5], [6, 7]]],
# [[[8, 9], [10, 11]],
# [[12, 13], [14, 15]]],
# [[[16, 17], [18, 19]],
# [[20, 21], [22, 23]]]])
# The size is [3, 2, 2, 2].
etc.

*Memos:

Setting -1 for one of the numbers lets that dimension's size be inferred automatically, so you don't need to work it out yourself. *Only one of the numbers can be -1, and it can be in any position, not only the 1st.
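
*For example, a minimal sketch with the same 24 elements:

import torch

my_tensor = torch.arange(24)
my_tensor.reshape(4, -1)
# The size is [4, 6]. The -1 is inferred as 6.
my_tensor.reshape(-1, 6)
# The size is [4, 6]. The -1 is inferred as 4.
my_tensor.view(2, -1, 4)
# The size is [2, 3, 4]. The -1 can be in any position.
# my_tensor.reshape(-1, -1) raises RuntimeError because only one
# dimension can be inferred.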

view() can be used only from a tensor, not from torch, while reshape() can be used both from torch and from a tensor.

view() never copies data and only works when the requested size is compatible with the tensor's memory layout, while reshape() returns a view when it can and otherwise creates a copy, taking more memory, so view() can be lighter and faster than reshape().
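
*A small sketch of the difference, using a non-contiguous tensor produced by transpose():

import torch

base = torch.arange(24).reshape(4, 2, 3)
base.view(24).data_ptr() == base.data_ptr()
# True. view() returns a view sharing the same memory.
swapped = base.transpose(1, 2) # A non-contiguous view whose size is [4, 3, 2].
swapped.reshape(24).data_ptr() == swapped.data_ptr()
# False. reshape() had to copy the data here.
# swapped.view(24) raises RuntimeError because the memory layout isn't
# compatible; use reshape() or contiguous().view() instead.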
