Super Kai (Kazuya Ito)


vstack() and dstack() in PyTorch



vstack() can get the 2D or more D vertically (row-wisely) stacked tensor of zero or more elements from one or more 0D or more D tensors of zero or more elements as shown below:

*Memos:

  • vstack() can be used with torch but not with a tensor.
  • The 1st argument with torch is tensors (Required-Type: tuple or list of tensor of int, float, complex or bool). *Basically, the sizes of the tensors must be the same except in the 1st dimension.
  • There is an out argument with torch (Optional-Type: tensor). *Memos:
    • out= must be used.
    • My post explains the out argument.
    • A short sketch using out= follows the examples below.
  • row_stack() is the alias of vstack().
import torch

tensor1 = torch.tensor(2)
tensor2 = torch.tensor(7)
tensor3 = torch.tensor(4)

torch.vstack(tensors=(tensor1, tensor2, tensor3))
# tensor([[2], [7], [4]])

tensor1 = torch.tensor([2, 7, 4])
tensor2 = torch.tensor([8, 3, 2])
tensor3 = torch.tensor([5, 0, 8])

torch.vstack(tensors=(tensor1, tensor2, tensor3))
# tensor([[2, 7, 4], [8, 3, 2], [5, 0, 8]])

tensor1 = torch.tensor([[2, 7, 4], [8, 3, 2]])
tensor2 = torch.tensor([[5, 0, 8], [3, 6, 1]])
tensor3 = torch.tensor([[9, 4, 7], [1, 0, 5]])

torch.vstack(tensors=(tensor1, tensor2, tensor3))
# tensor([[2, 7, 4],
#         [8, 3, 2],
#         [5, 0, 8],
#         [3, 6, 1],
#         [9, 4, 7],
#         [1, 0, 5]])

tensor1 = torch.tensor([[2., 7., 4.], [8., 3., 2.]])
tensor2 = torch.tensor([[5., 0., 8.], [3., 6., 1.]])
tensor3 = torch.tensor([[9., 4., 7.], [1., 0., 5.]])

torch.vstack(tensors=(tensor1, tensor2, tensor3))
# tensor([[2., 7., 4.],
#         [8., 3., 2.],
#         [5., 0., 8.],
#         [3., 6., 1.],
#         [9., 4., 7.],
#         [1., 0., 5.]])

tensor1 = torch.tensor([[2.+0.j, 7.+0.j, 4.+0.j],
                        [8.+0.j, 3.+0.j, 2.+0.j]])
tensor2 = torch.tensor([[5.+0.j, 0.+0.j, 8.+0.j],
                        [3.+0.j, 6.+0.j, 1.+0.j]])
tensor3 = torch.tensor([[9.+0.j, 4.+0.j, 7.+0.j],
                        [1.+0.j, 0.+0.j, 5.+0.j]])
torch.vstack(tensors=(tensor1, tensor2, tensor3))
# tensor([[2.+0.j, 7.+0.j, 4.+0.j],
#         [8.+0.j, 3.+0.j, 2.+0.j],
#         [5.+0.j, 0.+0.j, 8.+0.j],
#         [3.+0.j, 6.+0.j, 1.+0.j],
#         [9.+0.j, 4.+0.j, 7.+0.j],
#         [1.+0.j, 0.+0.j, 5.+0.j]])

tensor1 = torch.tensor([[True, False, True], [False, True, False]])
tensor2 = torch.tensor([[False, True, False], [True, False, True]])
tensor3 = torch.tensor([[True, False, True], [False, True, False]])

torch.vstack(tensors=(tensor1, tensor2, tensor3))
# tensor([[True, False, True],
#         [False, True, False],
#         [False, True, False],
#         [True, False, True],
#         [True, False, True],
#         [False, True, False]])

tensor1 = torch.tensor([[]])
tensor2 = torch.tensor([])
tensor3 = torch.tensor([[]])

torch.vstack(tensors=(tensor1, tensor2, tensor3))
# tensor([], size=(3, 0))
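A minimal sketch, not from the examples above, of the out= argument and the row_stack() alias with vstack(); the preallocated shape and dtype here are only illustrative. It also shows that vstack() is equivalent to cat() along dim 0 after atleast_2d():

import torch

tensor1 = torch.tensor([2, 7, 4])
tensor2 = torch.tensor([8, 3, 2])

# Preallocate a tensor with the expected shape and dtype for out=.
result = torch.empty(2, 3, dtype=torch.int64)

torch.vstack(tensors=(tensor1, tensor2), out=result)
# result is tensor([[2, 7, 4], [8, 3, 2]])

# row_stack() is the alias of vstack(), so it gives the same result.
torch.row_stack(tensors=(tensor1, tensor2))
# tensor([[2, 7, 4], [8, 3, 2]])

# vstack() is equivalent to cat() along dim 0 after atleast_2d().
torch.cat(torch.atleast_2d(tensor1, tensor2), dim=0)
# tensor([[2, 7, 4], [8, 3, 2]])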

dstack() can get the 3D or more D depth-wisely stacked tensor of zero or more elements from one or more 0D or more D tensors of zero or more elements as shown below:

*Memos:

  • dstack() can be used with torch but not with a tensor.
  • The 1st argument with torch is tensors (Required-Type: tuple or list of tensor of int, float, complex or bool). *Basically, the sizes of the tensors must be the same except in the 3rd dimension.
  • There is an out argument with torch (Optional-Type: tensor). *Memos:
    • out= must be used.
    • My post explains the out argument.
    • A short sketch using out= follows the examples below.
import torch

tensor1 = torch.tensor(2)
tensor2 = torch.tensor(7)
tensor3 = torch.tensor(4)

torch.dstack(tensors=(tensor1, tensor2, tensor3))
# tensor([[[2, 7, 4]]])

tensor1 = torch.tensor([2, 7, 4])
tensor2 = torch.tensor([8, 3, 2])
tensor3 = torch.tensor([5, 0, 8])

torch.dstack(tensors=(tensor1, tensor2, tensor3))
# tensor([[[2, 8, 5], [7, 3, 0], [4, 2, 8]]])

tensor1 = torch.tensor([[2, 7, 4], [8, 3, 2]])
tensor2 = torch.tensor([[5, 0, 8], [3, 6, 1]])
tensor3 = torch.tensor([[9, 4, 7], [1, 0, 5]])

torch.dstack(tensors=(tensor1, tensor2, tensor3))
# tensor([[[2, 5, 9], [7, 0, 4], [4, 8, 7]],
#         [[8, 3, 1], [3, 6, 0], [2, 1, 5]]])

tensor1 = torch.tensor([[2., 7., 4.], [8., 3., 2.]])
tensor2 = torch.tensor([[5., 0., 8.], [3., 6., 1.]])
tensor3 = torch.tensor([[9., 4., 7.], [1., 0., 5.]])

torch.dstack(tensors=(tensor1, tensor2, tensor3))
# tensor([[[2., 5., 9.], [7., 0., 4.], [4., 8., 7.]],
#         [[8., 3., 1.], [3., 6., 0.], [2., 1., 5.]]])

tensor1 = torch.tensor([[2.+0.j, 7.+0.j, 4.+0.j],
                        [8.+0.j, 3.+0.j, 2.+0.j]])
tensor2 = torch.tensor([[5.+0.j, 0.+0.j, 8.+0.j],
                        [3.+0.j, 6.+0.j, 1.+0.j]])
tensor3 = torch.tensor([[9.+0.j, 4.+0.j, 7.+0.j],
                        [1.+0.j, 0.+0.j, 5.+0.j]])
torch.dstack(tensors=(tensor1, tensor2, tensor3))
# tensor([[[2.+0.j, 5.+0.j, 9.+0.j],
#          [7.+0.j, 0.+0.j, 4.+0.j],
#          [4.+0.j, 8.+0.j, 7.+0.j]],
#         [[8.+0.j, 3.+0.j, 1.+0.j],
#          [3.+0.j, 6.+0.j, 0.+0.j],
#          [2.+0.j, 1.+0.j, 5.+0.j]]])

tensor1 = torch.tensor([[True, False, True], [False, True, False]])
tensor2 = torch.tensor([[False, True, False], [True, False, True]])
tensor3 = torch.tensor([[True, False, True], [False, True, False]])

torch.dstack(tensors=(tensor1, tensor2, tensor3))
# tensor([[[True, False, True],
#          [False, True, False],
#          [True, False, True]],
#         [[False, True, False],
#          [True, False, True],
#          [False, True, False]]])

tensor1 = torch.tensor([[]])
tensor2 = torch.tensor([])
tensor3 = torch.tensor([[]])

torch.dstack(tensors=(tensor1, tensor2, tensor3))
# tensor([], size=(1, 0, 3))
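Likewise, a minimal sketch, not from the examples above, of the out= argument with dstack(); the preallocated shape and dtype here are only illustrative. It also shows that dstack() is equivalent to cat() along dim 2 after atleast_3d():

import torch

tensor1 = torch.tensor([2, 7, 4])
tensor2 = torch.tensor([8, 3, 2])

# Preallocate a tensor with the expected shape and dtype for out=.
result = torch.empty(1, 3, 2, dtype=torch.int64)

torch.dstack(tensors=(tensor1, tensor2), out=result)
# result is tensor([[[2, 8], [7, 3], [4, 2]]])

# dstack() is equivalent to cat() along dim 2 after atleast_3d().
torch.cat(torch.atleast_3d(tensor1, tensor2), dim=2)
# tensor([[[2, 8], [7, 3], [4, 2]]])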
