# Pytorch框架的学习与使用_day02

2019/06/06 15:29

torch.cat(inputs, dimension = 0) -> Tensor

torch.cat可以看作torch.split和torch.chunk的反操作

inputs(sequence of Tensors) - 可以是任意相同Tensor类型的Python序列

dimension(int, optional) - 沿着此维连接张量序列

>>> x = torch.randn(2, 3)
>>> x
tensor([[ 0.8240, -0.6438,  0.1779],
[ 1.8698, -0.3803,  0.0556]])
>>> torch.cat((x, x, x), 0)
tensor([[ 0.8240, -0.6438,  0.1779],
[ 1.8698, -0.3803,  0.0556],
[ 0.8240, -0.6438,  0.1779],
[ 1.8698, -0.3803,  0.0556],
[ 0.8240, -0.6438,  0.1779],
[ 1.8698, -0.3803,  0.0556]])  # size 6*3 沿第0维连接张量
>>> torch.cat((x, x, x), 1)
tensor([[ 0.8240, -0.6438,  0.1779,  0.8240, -0.6438,  0.1779,  0.8240, -0.6438,
0.1779],
[ 1.8698, -0.3803,  0.0556,  1.8698, -0.3803,  0.0556,  1.8698, -0.3803,
0.0556]]) # size 2*9 沿第1维连接张量

torch.chunk

torch.chunk(tensor, chunks, dim = 0)

tensor - 待分块的输入张量

chunks(int) - 分块的个数

dim(int) - 沿着此维度进行分块

>>> x
tensor([[ 0.8240, -0.6438,  0.1779],
[ 1.8698, -0.3803,  0.0556]])


>>> torch.chunk(x, 2, 0)
(tensor([[ 0.8240, -0.6438, 0.1779]]), tensor([[ 1.8698, -0.3803, 0.0556]]))

>>> torch.chunk(x, 2, 1)
(tensor([[ 0.8240, -0.6438],
[ 1.8698, -0.3803]]), tensor([[0.1779],
[0.0556]]))

torch.gather

torch.gather(input, dim, index, out = None) ->Tensor

out[i][j][k] = tensor[index[i][j][k]][j][k] #dim = 0
out[i][j][k] = tensor[i][index[i][j][k]][k] #dim = 1
out[i][j][k] = tensor[i][j][index[i][j][k]] #dim = 2

input(Tensor) - 源张量

dim(int) - 索引的轴

index(Long Tensor) - 聚合元素的下标

out(Tensor, optional) - 目标张量

>>> t = torch.Tensor([[1, 2], [3, 4]])
>>> torch.gather(t, 1, torch.LongTensor([[0, 0], [1,0]]))
tensor([[1., 1.],
[4., 3.]])

torch.index_select

torch.index_select(input, dim, index, out = None) -->Tensor

input(Tensor) - 源张量

dim(int) - 索引的轴

index(Long Tensor) - 包含索引下标的一维张量

out(Tensor, optional) - 目标张量

>>> x = torch.randn(3, 4)
>>> x
tensor([[-0.1374, -0.6321,  0.8015, -2.0121],
[-0.8106,  1.0078, -0.7167,  0.5915],
[-0.8627,  0.5883,  0.9542,  0.1841]])
>>> indices = torch.LongTensor([0, 2])
>>> torch.index_select(x, 0, indices)    # dim 0 按行索引（选出第0行和第2行）
tensor([[-0.1374, -0.6321,  0.8015, -2.0121],
[-0.8627,  0.5883,  0.9542,  0.1841]])
>>> torch.index_select(x, 1, indices)    # dim 1 按列索引（选出第0列和第2列）
tensor([[-0.1374,  0.8015],
[-0.8106, -0.7167],
[-0.8627,  0.9542]])

torch.nonzero

torch.nonzero(input, out = None)  --> Tensor

input(Tensor) - 源张量

out(LongTensor, optional) - 包含索引值的结果张量

>>> torch.nonzero(torch.Tensor([1, 1, 1, 0, 1]))
tensor([[0],
[1],
[2],
[4]])   # 4×1

torch.split

torch.split(tensor, split_size, dim = 0)

tensor(Tensor) - 待分割张量

split_size(int) - 单个分块的形状大小

dim(int) - 沿着此维度进行分割

torch.squeeze

如果input的形状为（A×1×B），那么 squeeze(input, 0) 将会保持张量不变，只有用 squeeze(input, 1) ，形状才会变成（A×B）。

input(Tensor) - 源张量

dim(int, optional) - 如果给定，则input只会在给定维度挤压

out(Tensor, optional) - 目标张量

>>> x = torch.zeros(2, 1, 2, 1, 2)
>>> x.size()
torch.Size([2, 1, 2, 1, 2])
>>> y = torch.squeeze(x)
>>> y.size()
torch.Size([2, 2, 2])
>>> y = torch.squeeze(x, 0)
>>> y.size()
torch.Size([2, 1, 2, 1, 2])
>>> y = torch.squeeze(x, 1)
>>> y.size()
torch.Size([2, 2, 1, 2])

torch.stack

torch.stack(sequence, dim = 0)

sequence(Sequence) - 待连接的张量序列

dim(int) - 插入的维度，必须介于0与待连接张量的维数之间（含边界）

torch.t

torch.t(input, out = None) --> Tensor

input（Tensor）- 输入张量

out(Tensor, optional) - 结果张量

>>> x = torch.randn(2, 3)
>>> x
tensor([[ 0.5121, -0.5057, -0.5253],
[ 0.0310,  0.8405, -0.7914]])
>>> torch.t(x)
tensor([[ 0.5121,  0.0310],
[-0.5057,  0.8405],
[-0.5253, -0.7914]])

torch.transpose

torch.transpose(input, dim0, dim1, out = None) --> Tensor

torch.unbind

torch.unbind(tensor, dim = 0)

tensor(Tensor) - 输入张量

dim(int) - 删除的维度

>>> x = torch.randn(2, 3)
>>> x
tensor([[-0.1249, -0.3148,  1.8918],
[ 1.9341, -2.3968, -1.0895]])
>>> torch.unbind(x, dim = 1)
(tensor([-0.1249,  1.9341]), tensor([-0.3148, -2.3968]), tensor([ 1.8918, -1.0895]))

torch.unsqueeze

torch.unsqueeze(input, dim, out = None)

input(Tensor) - 输入张量

dim(int) - 插入维度的索引

out(Tensor, optional) - 结果张量

>>> x = torch.tensor([1, 2, 3, 4])
>>> torch.unsqueeze(x, 0)
tensor([[1, 2, 3, 4]])
>>> torch.unsqueeze(x, 1)
tensor([[1],
[2],
[3],
[4]])

0
0 收藏

0 评论
0 收藏
0