Defining common tensors
All-zeros matrix
>>> torch.zeros([2, 4], dtype=torch.int32)
tensor([[ 0, 0, 0, 0],
        [ 0, 0, 0, 0]], dtype=torch.int32)
All-ones matrix
>>> cuda0 = torch.device('cuda:0')
>>> torch.ones([2, 4], dtype=torch.float64, device=cuda0)
tensor([[ 1.0000, 1.0000, 1.0000, 1.0000],
        [ 1.0000, 1.0000, 1.0000, 1.0000]], dtype=torch.float64, device='cuda:0')
Identity matrix
>>> torch.eye(3)
tensor([[ 1., 0., 0.],
        [ 0., 1., 0.],
        [ 0., 0., 1.]])
Getting tensor information
t = torch.randn(3, 4, 5)
print(t.type())  # data type
print(t.size())  # tensor shape
print(t.dim())   # number of dimensions
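A few related attributes are worth knowing alongside these (a small addition using standard tensor attributes):
print(t.shape)    # same as t.size()
print(t.dtype)    # element dtype, e.g. torch.float32
print(t.device)   # device the tensor lives on, e.g. cpu or cuda:0
print(t.numel())  # total number of elements (3 * 4 * 5 = 60)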
Converting between tensors and NumPy arrays
nda = tensor.cpu().numpy()         # tensor -> NumPy array (the tensor is moved to CPU first)
t = torch.from_numpy(nda).float()  # NumPy array -> tensor
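Note that torch.from_numpy() and Tensor.numpy() share memory with the underlying CPU array, so in-place changes are visible on both sides; calling .float() as above returns a converted copy and breaks that sharing. A minimal sketch (the names arr and shared are just for illustration):
import numpy as np
import torch

arr = np.zeros(3)
shared = torch.from_numpy(arr)   # shares memory with arr
arr[0] = 7
print(shared)                    # tensor([7., 0., 0.], dtype=torch.float64)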
Common tensor operations
Reshaping
Tensor.view
>>> d = torch.eye(4)
>>> d
tensor([[1., 0., 0., 0.],
        [0., 1., 0., 0.],
        [0., 0., 1., 0.],
        [0., 0., 0., 1.]])
>>> d.view(2, 8)
tensor([[1., 0., 0., 0., 0., 1., 0., 0.],
        [0., 0., 1., 0., 0., 0., 0., 1.]])
>>> d
tensor([[1., 0., 0., 0.],
        [0., 1., 0., 0.],
        [0., 0., 1., 0.],
        [0., 0., 0., 1.]])
reshape
>>> d.reshape(2,8)
tensor([[1., 0., 0., 0., 0., 1., 0., 0.],
        [0., 0., 1., 0., 0., 0., 0., 1.]])
>>> d
tensor([[1., 0., 0., 0.],
        [0., 1., 0., 0.],
        [0., 0., 1., 0.],
        [0., 0., 0., 1.]])
>>> torch.reshape(d, (2, 8))
tensor([[1., 0., 0., 0., 0., 1., 0., 0.],
        [0., 0., 1., 0., 0., 0., 0., 1.]])
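The practical difference between the two: view() only works on contiguously stored tensors and always returns a view, while reshape() returns a view when possible and otherwise silently copies. A minimal sketch of the case where they diverge (e is just an illustrative name):
>>> e = torch.arange(6).view(2, 3).t()   # the transpose makes e non-contiguous
>>> e.reshape(6)                         # works (copies here); e.view(6) would raise a RuntimeError
tensor([0, 3, 1, 4, 2, 5])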
resize_
Adjusts the tensor in place to the given shape. Unlike view()/reshape(), resize_() can drop elements (as below) or add uninitialized ones.
>>> x = torch.Tensor([[1, 2], [3, 4], [5, 6]])
>>> x.resize_(2, 2)
tensor([[1., 2.],
        [3., 4.]])
Expanding and reducing dimensions
expand()
x = torch.tensor([[1], [2], [3]])
x.size()
x.expand(3, 4)
x.expand(-1, 4) # -1 means not changing the size of that dimension
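expand() only broadcasts size-1 dimensions and returns a view without copying data, whereas repeat() (covered below) materializes a real copy. A small sketch of the distinction (y and z are illustrative names):
y = x.expand(3, 4)           # view: no new memory is allocated
z = x.repeat(1, 4)           # same values, but an actual copy of shape (3, 4)
print(y.size(), z.size())    # torch.Size([3, 4]) torch.Size([3, 4])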
Concatenation: torch.cat()
>>> d = torch.eye(2)
>>> d
tensor([[1., 0.],
        [0., 1.]])
>>> torch.cat([d, d], dim=0)
tensor([[1., 0.],
        [0., 1.],
        [1., 0.],
        [0., 1.]])
>>> torch.cat([d, d], dim=1)
tensor([[1., 0., 1., 0.],
        [0., 1., 0., 1.]])
Stacking: torch.stack()
>>> a = torch.IntTensor([[1, 2, 3], [11, 22, 33]])
>>> b = torch.IntTensor([[4, 5, 6], [44, 55, 66]])
>>> c = torch.stack([a, b], 0)  # stack along dim=0
>>> c.size()
torch.Size([2, 2, 3])
>>> c
tensor([[[ 1,  2,  3],
         [11, 22, 33]],
        [[ 4,  5,  6],
         [44, 55, 66]]], dtype=torch.int32)
>>> c = torch.stack([a, b], 1)  # stack along dim=1
>>> c.size()
torch.Size([2, 2, 3])
>>> c
tensor([[[ 1,  2,  3],
         [ 4,  5,  6]],
        [[11, 22, 33],
         [44, 55, 66]]], dtype=torch.int32)
>>> c = torch.stack([a, b], 2)  # stack along dim=2
>>> c.size()
torch.Size([2, 3, 2])
>>> c
tensor([[[ 1,  4],
         [ 2,  5],
         [ 3,  6]],
        [[11, 44],
         [22, 55],
         [33, 66]]], dtype=torch.int32)
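For comparison with cat(): stack() requires tensors of the same shape and inserts a new dimension, while cat() joins along an existing one. A quick shape check with the same a and b:
>>> torch.stack([a, b], 0).size()   # a new leading dimension is created
torch.Size([2, 2, 3])
>>> torch.cat([a, b], 0).size()     # the existing dimension simply grows
torch.Size([4, 3])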
Removing size-1 dimensions: squeeze()
The code below is adapted from a blog post:
>>> import torch
>>> x = torch.randn(3, 1, 4, 1, 2)
>>> a = x.squeeze(dim=1)           # method form: remove dimension 1 (size 1)
>>> print(a.shape)
torch.Size([3, 4, 1, 2])
>>> b = torch.squeeze(x, dim=1)    # function form: remove dimension 1 (size 1)
>>> print(b.shape)
torch.Size([3, 4, 1, 2])
>>> c = torch.squeeze(x, dim=3)    # remove dimension 3 (size 1)
>>> print(c.shape)
torch.Size([3, 1, 4, 2])
>>> d = torch.squeeze(x)           # with no dim given, all size-1 dimensions are removed
>>> print(d.shape)
torch.Size([3, 4, 2])
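Note that squeeze(dim) only removes the given dimension when its size is 1; otherwise the shape is returned unchanged:
>>> torch.squeeze(x, dim=0).shape   # dim 0 has size 3, so nothing is removed
torch.Size([3, 1, 4, 1, 2])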
Adding a dimension: unsqueeze()
>>> x = torch.randn(3,1,4)
>>> x.size()
torch.Size([3, 1, 4])
>>> e = torch.unsqueeze(x, dim=0)
>>> e.size()
torch.Size([1, 3, 1, 4])
>>> e = torch.unsqueeze(x, dim=3)
>>> e.size()
torch.Size([3, 1, 4, 1])
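unsqueeze() also accepts negative indices, counted from the end:
>>> torch.unsqueeze(x, dim=-1).size()   # same as dim=3 here
torch.Size([3, 1, 4, 1])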
Repeating: repeat()
>>> x = torch.Tensor([1, 2, 3])
>>> x.size()
torch.Size([3])
>>> x.repeat(2, 2)
tensor([[1., 2., 3., 1., 2., 3.],
        [1., 2., 3., 1., 2., 3.]])
>>> x.repeat(3, 2)
tensor([[1., 2., 3., 1., 2., 3.],
        [1., 2., 3., 1., 2., 3.],
        [1., 2., 3., 1., 2., 3.]])
Narrowing: narrow()
narrow(dimension, start, length)
Returns a narrowed view of the tensor. The axis to operate on is given by dimension; the slice kept starts at index start and spans length elements, i.e. [start, start + length).
>>> x = torch.Tensor([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
>>> x.size()
torch.Size([3, 3])
>>> x.narrow(1, 0, 2)
tensor([[1., 2.],
        [4., 5.],
        [7., 8.]])
>>> x.narrow(1, 0, 1)
tensor([[1.],
        [4.],
        [7.]])
>>> x.narrow(1, 0, 2)
tensor([[1., 2.],
        [4., 5.],
        [7., 8.]])
>>> x.narrow(1, 0, 3)
tensor([[1., 2., 3.],
        [4., 5., 6.],
        [7., 8., 9.]])
>>> x.narrow(0, 1, 2)
tensor([[4., 5., 6.],
        [7., 8., 9.]])
>>> x.narrow(0, 0, 1)
tensor([[1., 2., 3.]])
>>> x.narrow(0, 0, 3)
tensor([[1., 2., 3.],
        [4., 5., 6.],
        [7., 8., 9.]])
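narrow() returns a view that shares storage with the original tensor, so writing through it changes the original. A minimal sketch (y is an illustrative name):
>>> y = x.narrow(0, 0, 1)
>>> y[0, 0] = 100
>>> x
tensor([[100.,   2.,   3.],
        [  4.,   5.,   6.],
        [  7.,   8.,   9.]])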
Shuffling
Shuffle along a given axis
>>> d = torch.eye(4)
>>> d[torch.randperm(d.size(0))]   # shuffle along the first dimension (rows)
tensor([[0., 1., 0., 0.],
        [1., 0., 0., 0.],
        [0., 0., 0., 1.],
        [0., 0., 1., 0.]])
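The same indexing trick shuffles any other axis; for example, to shuffle the columns while keeping the rows fixed (shuffled_cols is an illustrative name):
shuffled_cols = d[:, torch.randperm(d.size(1))]   # shuffle along the second dimension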
Transpose (1): transpose()
transpose(dim0, dim1) swaps exactly two dimensions; for a 2-D matrix this gives the ordinary transpose.
>>> x = torch.randn(2, 2)
>>> x
tensor([[ 0.3500, -1.1154],
        [ 0.1040, -0.8325]])
>>> x.transpose(0, 1)
tensor([[ 0.3500,  0.1040],
        [-1.1154, -0.8325]])
Transpose (2): permute()
permute() can reorder all the dimensions of a higher-dimensional tensor at once.
>>> x = torch.randn(2, 2)
>>> x
tensor([[ 0.3500, -1.1154],
        [ 0.1040, -0.8325]])
>>> x.permute(1, 0)
tensor([[ 0.3500,  0.1040],
        [-1.1154, -0.8325]])
>>> x = torch.randn(2, 2, 3)
>>> x.size()
torch.Size([2, 2, 3])
>>> x
tensor([[[ 0.3758, -0.1668, -0.1701],
         [ 0.5966, -2.2902, -0.0383]],
        [[ 1.1659, -0.1195,  1.0433],
         [ 1.3034, -0.8000,  0.2944]]])
>>> x.permute(2, 0, 1)
tensor([[[ 0.3758,  0.5966],
         [ 1.1659,  1.3034]],
        [[-0.1668, -2.2902],
         [-0.1195, -0.8000]],
        [[-0.1701, -0.0383],
         [ 1.0433,  0.2944]]])
Horizontal flip
t = torch.tensor([[[[-0.9522,  0.8250,  0.8554, -1.1466],
                    [ 0.0150, -0.7566, -0.5744, -0.1235],
                    [-0.3626,  1.3205, -0.6283, -0.0702],
                    [-0.1891,  0.2699, -0.5164, -1.1122]],
                   [[-0.3721,  3.0085,  0.8913, -1.5249],
                    [ 0.2988,  0.6518,  0.1627, -0.4917],
                    [-0.1809, -1.8154, -0.3222, -0.0755],
                    [ 0.5039, -1.4150, -0.3179, -0.0486]],
                   [[ 0.8193,  0.6153,  1.6996, -0.4988],
                    [ 1.8613, -0.3078,  0.9126, -0.9262],
                    [ 0.0682,  0.6573,  0.6817, -1.1404],
                    [-1.6896,  0.8261, -0.6210,  0.3598]]]])
>>> t[:, :, :, torch.arange(t.size(3) - 1, -1, -1).long()]
tensor([[[[-1.1466,  0.8554,  0.8250, -0.9522],
          [-0.1235, -0.5744, -0.7566,  0.0150],
          [-0.0702, -0.6283,  1.3205, -0.3626],
          [-1.1122, -0.5164,  0.2699, -0.1891]],
         [[-1.5249,  0.8913,  3.0085, -0.3721],
          [-0.4917,  0.1627,  0.6518,  0.2988],
          [-0.0755, -0.3222, -1.8154, -0.1809],
          [-0.0486, -0.3179, -1.4150,  0.5039]],
         [[-0.4988,  1.6996,  0.6153,  0.8193],
          [-0.9262,  0.9126, -0.3078,  1.8613],
          [-1.1404,  0.6817,  0.6573,  0.0682],
          [ 0.3598, -0.6210,  0.8261, -1.6896]]]])
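The same result can be obtained more directly with torch.flip, which reverses a tensor along the given dimensions (variable names here are illustrative):
flipped_w = torch.flip(t, dims=[3])   # horizontal flip, same as the indexing above
flipped_h = torch.flip(t, dims=[2])   # vertical flip along the height dimension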
Copying
# Operation           | New/Shared memory | Still in computation graph |
t.clone()           # | New               | Yes                        |
t.detach()          # | Shared            | No                         |
t.detach().clone()  # | New               | No                         |
Testing equality
>>> print(d.equal(t))
False
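equal() returns a single bool and requires identical shapes and values. For element-wise comparison use eq() (or ==), and for floating-point comparisons with a tolerance use allclose(); a small sketch:
>>> a = torch.tensor([1., 2., 3.])
>>> b = torch.tensor([1., 2., 3.])
>>> a.equal(b)                     # exact match of shape and values
True
>>> torch.eq(a, b)                 # element-wise comparison
tensor([True, True, True])
>>> torch.allclose(a, b + 1e-9)    # comparison within floating-point tolerance
True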