Super Kai (Kazuya Ito)

tile in PyTorch

Buy Me a Coffee

tile() can get a 1D or higher-D tensor of zero or more repeated elements from a 0D or higher-D tensor of zero or more elements, as shown below:

*Memos:

  • tile() can be used with torch or a tensor.
  • The 1st argument with torch is input (Required-Type: tensor of int, float, complex or bool); with a tensor, the tensor itself is used as input.
  • The 2nd argument with torch or the 1st and subsequent arguments with a tensor are dims (Required-Type: tuple of int, list of int or size()). *Memos:
    • If at least one dimension is 0, an empty tensor is returned.
    • dims= mustn't be used when one or more dimensions are passed as separate arguments rather than as a tuple or list.
import torch

my_tensor = torch.tensor([7, 4, 5])

torch.tile(input=my_tensor, dims=(0,))
my_tensor.tile(dims=(0,))
my_tensor.tile(0,)
torch.tile(input=my_tensor, dims=torch.tensor([]).size())
# tensor([], dtype=torch.int64)

torch.tile(input=my_tensor, dims=())
torch.tile(input=my_tensor, dims=(1,))
torch.tile(input=my_tensor, dims=torch.tensor(8).size())
torch.tile(input=my_tensor, dims=torch.tensor([8]).size())
# tensor([7, 4, 5])

torch.tile(input=my_tensor, dims=(2,))
torch.tile(input=my_tensor, dims=torch.tensor([8, 3]).size())
# tensor([7, 4, 5, 7, 4, 5])

torch.tile(input=my_tensor, dims=(3,))
torch.tile(input=my_tensor, dims=torch.tensor([8, 3, 6]).size())
# tensor([7, 4, 5, 7, 4, 5, 7, 4, 5])
etc.

torch.tile(input=my_tensor, dims=(1, 1))
torch.tile(input=my_tensor, dims=torch.tensor([[8]]).size())
# tensor([[7, 4, 5]])

torch.tile(input=my_tensor, dims=(1, 2))
torch.tile(input=my_tensor, dims=torch.tensor([[8, 3]]).size())
# tensor([[7, 4, 5, 7, 4, 5]])

torch.tile(input=my_tensor, dims=(1, 3))
torch.tile(input=my_tensor, dims=torch.tensor([[8, 2, 4]]).size())
# tensor([[7, 4, 5, 7, 4, 5, 7, 4, 5]])
etc.

torch.tile(input=my_tensor, dims=(2, 1))
torch.tile(input=my_tensor, dims=torch.tensor([[8], [2]]).size())
# tensor([[7, 4, 5],
#         [7, 4, 5]])

torch.tile(input=my_tensor, dims=(2, 2))
torch.tile(input=my_tensor, dims=torch.tensor([[8, 2], [4, 0]]).size())
# tensor([[7, 4, 5, 7, 4, 5],
#         [7, 4, 5, 7, 4, 5]])

torch.tile(input=my_tensor, dims=(2, 3))
torch.tile(input=my_tensor, dims=torch.tensor([[8, 2, 4], [0, 7, 9]]).size())
# tensor([[7, 4, 5, 7, 4, 5, 7, 4, 5],
#         [7, 4, 5, 7, 4, 5, 7, 4, 5]])
etc.

torch.tile(input=my_tensor, dims=(3, 1))
torch.tile(input=my_tensor, dims=torch.tensor([[8], [2], [4]]).size())
# tensor([[7, 4, 5],
#         [7, 4, 5],
#         [7, 4, 5]])
etc.

torch.tile(input=my_tensor, dims=(1, 1, 1))
torch.tile(input=my_tensor, dims=torch.tensor([[[8]]]).size())
# tensor([[[7, 4, 5]]])
etc.

torch.tile(input=my_tensor, dims=(3, 2, 1))
# tensor([[[7, 4, 5], [7, 4, 5]],
#         [[7, 4, 5], [7, 4, 5]],
#         [[7, 4, 5], [7, 4, 5]]])

torch.tile(input=my_tensor, dims=(1, 0, 1))
# tensor([], size=(1, 0, 3), dtype=torch.int64)
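
# Per the PyTorch docs, if dims has more entries than input.ndim, input is
# treated as if it had leading size-1 dimensions, so for the 1D my_tensor
# above, dims=(2, 3) tiles a virtual shape (1, 3). If dims has fewer entries
# than input.ndim, dims is padded with leading 1s instead.
torch.equal(torch.tile(input=my_tensor, dims=(2, 3)),
            torch.tile(input=my_tensor[None, :], dims=(2, 3)))
# True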

my_tensor = torch.tensor([3., 5., 1.])

torch.tile(input=my_tensor, dims=(2,))
# tensor([3., 5., 1., 3., 5., 1.])

my_tensor = torch.tensor([3.+0.j, 5.+0.j, 1.+0.j])

torch.tile(input=my_tensor, dims=(2,))
# tensor([3.+0.j, 5.+0.j, 1.+0.j, 3.+0.j, 5.+0.j, 1.+0.j])

my_tensor = torch.tensor([True, False, True])

torch.tile(input=my_tensor, dims=(2,))
# tensor([True, False, True, True, False, True])
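
# tile() is analogous to NumPy's np.tile. It also overlaps with Tensor.repeat(),
# but repeat() requires at least as many repeat dimensions as the tensor has,
# while tile() pads a shorter dims with leading 1s. For the 1D bool my_tensor
# above, tile(2, 2) and repeat(2, 2) agree:
torch.equal(my_tensor.tile(2, 2), my_tensor.repeat(2, 2))
# True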

my_tensor = torch.tensor([[3, 5, 1],
                          [6, 0, 5]])
torch.tile(input=my_tensor, dims=())
torch.tile(input=my_tensor, dims=(1,))
torch.tile(input=my_tensor, dims=torch.tensor(8).size())
torch.tile(input=my_tensor, dims=torch.tensor([8]).size())
# tensor([[3, 5, 1],
#         [6, 0, 5]])

torch.tile(input=my_tensor, dims=(2,))
torch.tile(input=my_tensor, dims=torch.tensor([8, 2]).size())
# tensor([[3, 5, 1, 3, 5, 1],
#         [6, 0, 5, 6, 0, 5]])

torch.tile(input=my_tensor, dims=(3,))
torch.tile(input=my_tensor, dims=torch.tensor([8, 2, 4]).size())
# tensor([[3, 5, 1, 3, 5, 1, 3, 5, 1],
#         [6, 0, 5, 6, 0, 5, 6, 0, 5]])
etc.

torch.tile(input=my_tensor, dims=(1, 1))
torch.tile(input=my_tensor, dims=torch.tensor([[8]]).size())
# tensor([[3, 5, 1],
#         [6, 0, 5]])

torch.tile(input=my_tensor, dims=(1, 2))
torch.tile(input=my_tensor, dims=torch.tensor([[8, 2]]).size())
# tensor([[3, 5, 1, 3, 5, 1],
#         [6, 0, 5, 6, 0, 5]])

torch.tile(input=my_tensor, dims=(1, 3))
torch.tile(input=my_tensor, dims=torch.tensor([[8, 2, 4]]).size())
# tensor([[3, 5, 1, 3, 5, 1, 3, 5, 1],
#         [6, 0, 5, 6, 0, 5, 6, 0, 5]])
etc.

torch.tile(input=my_tensor, dims=(2, 1))
torch.tile(input=my_tensor, dims=torch.tensor([[8], [2]]).size())
# tensor([[3, 5, 1],
#         [6, 0, 5],
#         [3, 5, 1],
#         [6, 0, 5]])

torch.tile(input=my_tensor, dims=(2, 2))
torch.tile(input=my_tensor, dims=torch.tensor([[8, 2], [4, 0]]).size())
# tensor([[3, 5, 1, 3, 5, 1],
#         [6, 0, 5, 6, 0, 5],
#         [3, 5, 1, 3, 5, 1],
#         [6, 0, 5, 6, 0, 5]])

torch.tile(input=my_tensor, dims=(2, 3))
torch.tile(input=my_tensor, dims=torch.tensor([[8, 2, 4], [0, 7, 9]]).size())
# tensor([[3, 5, 1, 3, 5, 1, 3, 5, 1],
#         [6, 0, 5, 6, 0, 5, 6, 0, 5],
#         [3, 5, 1, 3, 5, 1, 3, 5, 1],
#         [6, 0, 5, 6, 0, 5, 6, 0, 5]])
etc.

torch.tile(input=my_tensor, dims=(3, 1))
torch.tile(input=my_tensor, dims=torch.tensor([[8], [2], [4]]).size())
# tensor([[3, 5, 1],
#         [6, 0, 5],
#         [3, 5, 1],
#         [6, 0, 5],
#         [3, 5, 1],
#         [6, 0, 5]])
etc.

torch.tile(input=my_tensor, dims=(1, 1, 1))
torch.tile(input=my_tensor, dims=torch.tensor([[[8]]]).size())
# tensor([[[3, 5, 1],
#          [6, 0, 5]]])
etc.
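A minimal usage sketch (assuming a hypothetical 2x2 pattern tensor): tile() can repeat a small 2D pattern along both dimensions, e.g. to build a checkerboard-like tensor.

import torch

pattern = torch.tensor([[0, 1],
                        [1, 0]])  # hypothetical 2x2 pattern

torch.tile(input=pattern, dims=(2, 2))
# tensor([[0, 1, 0, 1],
#         [1, 0, 1, 0],
#         [0, 1, 0, 1],
#         [1, 0, 1, 0]])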
