赞
踩
**
**
In [40]: a = np.array([2,3.3]) In [41]: a Out[41]: array([2. , 3.3]) In [42]: b = torch.from_numpy(a) In [43]: b Out[43]: tensor([2.0000, 3.3000], dtype=torch.float64) In [44]: a = np.ones([2,3]) In [46]: b = torch.from_numpy(a) In [47]: b Out[47]: tensor([[1., 1., 1.], [1., 1., 1.]], dtype=torch.float64)
**
**
torch.tensor()#接收现有的数据(如List或numpy数组),根据数据内容推断dtype
torch.Tensor() torch.FloatTensor()#既可接收现有的数据,也可只接收数据的维度(shape),后者返回未初始化的tensor
In [48]: torch.tensor([2.,3.2])
Out[48]: tensor([2.0000, 3.2000])
In [50]: torch.FloatTensor([2.,3.2])
Out[50]: tensor([2.0000, 3.2000])
In [51]: torch.FloatTensor([[2.,3.2],[1.,22.3]])
Out[51]:
tensor([[ 2.0000, 3.2000],
[ 1.0000, 22.3000]])
**
**
Torch.empty()
Torch.FloatTensor(d1,d2,d3)
Torch.IntTensor(d1,d2,d3)
未初始化的tensor中是内存里的随机残留值(可能出现极大或极小的异常数值),一定要在后续步骤中写入数据后再使用
In [1]: import torch In [2]: torch.empty(1) Out[2]: tensor([-6.0475e+26]) In [3]: torch.Tensor(2,3) Out[3]: tensor([[-4.4539e+19, 4.5912e-41, -1.4528e+26], [ 6.2218e-43, 0.0000e+00, -0.0000e+00]]) In [4]: torch.FloatTensor(2,3) Out[4]: tensor([[ 0.0000e+00, 0.0000e+00, -1.4539e+26], [ 6.2218e-43, 0.0000e+00, -0.0000e+00]]) In [5]: torch.IntTensor(2,3) Out[5]: tensor([[ -535132572, 32764, -461452272], [ 444, 0, -2147483648]], dtype=torch.int32)
set default type
In [7]: torch.tensor([1.2,3]).type()
Out[7]: 'torch.FloatTensor'
In [8]: torch.set_default_tensor_type(torch.DoubleTensor)
In [9]: torch.tensor([1.2,3]).type()
Out[9]: 'torch.DoubleTensor'
**
torch.rand()#随机产生均匀分布,输入参数为shape
torch.rand_like(a)#将a.shape读出来后,送给rand函数
torch.randint(1,10,[3,3])#随机产生[1,10)区间的整数(含1不含10),其shape为(3,3)
torch.randn(3,3)#随机产生标准正态分布,输入参数为shape,均值为0,方差为1
**
In [10]: torch.rand(3,3)#随机产生0-1之间的数值,不包括1
Out[10]:
tensor([[0.5588, 0.6363, 0.8763],
[0.3796, 0.5534, 0.7435],
[0.3892, 0.7436, 0.8083]])
In [11]: a = torch.rand(3,3)
In [12]: torch.rand_like(a)#把a.shape读出来后,送给rand函数
Out[12]:
tensor([[0.6652, 0.6416, 0.6140],
[0.0278, 0.4688, 0.4863],
[0.7347, 0.9787, 0.1331]])
In [16]: a = torch.randint(1,10,[3,3])#随机产生[1,10)区间的整数(含1不含10),其shape为(3,3)
In [17]: a.shape
Out[17]: torch.Size([3, 3])
In [18]: a
Out[18]:
tensor([[4, 9, 3],
[4, 1, 4],
[1, 1, 8]])
In [19]: torch.randn(3,3)
Out[19]:
tensor([[ 1.1848, 0.0742, 0.9433],
[-0.5318, -0.9366, -0.0956],
[-0.3990, 0.0978, 1.6421]])
In [20]: torch.normal(mean=torch.full([10],0),std=torch.arange(1,0,-0.1))
#均值全为0,标准差(std,注意不是方差)为[1,0.9,...,0.1]
#第1个数据从均值0、标准差1的正态分布中采样,第2个数据从均值0、标准差0.9的正态分布中采样...第10个数据从均值0、标准差0.1的正态分布中采样
Out[20]:
tensor([-0.9845, 0.0048, 0.0205, -0.9091, -0.0573, 0.7027, 0.0539, 0.3360,
0.2018, 0.0567])
**
**
In [21]: torch.full([2,3],7)#2行3列张量
Out[21]:
tensor([[7., 7., 7.],
[7., 7., 7.]])
In [22]: torch.full([],7)#标量
Out[22]: tensor(7.)
In [23]: torch.full([1],7)#1维矢量
Out[23]: tensor([7.])
**
**
In [24]: torch.arange(0,10)#步长为1,从0到10,含0不含10
Out[24]: tensor([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
In [25]: torch.arange(0,10,2)#步长为2,从0到10,含0不含10
Out[25]: tensor([0, 2, 4, 6, 8])
In [26]: torch.range(0,10)#即将丢弃不再使用,建议不使用
D:\Anaconda3\Scripts\ipython:1: UserWarning: torch.range is deprecated in favor of torch.arange and will be removed in 0.5. Note that arange generates values in [start; end), not [start; end].
Out[26]: tensor([ 0., 1., 2., 3., 4., 5., 6., 7., 8., 9., 10.])
**
**
In [27]: torch.linspace(0,10,steps=4)#0-10,含0且含10,生成4个等间距的点(将区间3等分) Out[27]: tensor([ 0.0000, 3.3333, 6.6667, 10.0000]) In [28]: torch.linspace(0,10,steps=10)#0-10,含0且含10,生成10个等间距的点 Out[28]: tensor([ 0.0000, 1.1111, 2.2222, 3.3333, 4.4444, 5.5556, 6.6667, 7.7778, 8.8889, 10.0000]) In [29]: torch.linspace(0,11,steps=11)#0-11,含0且含11,生成11个等间距的点 Out[29]: tensor([ 0.0000, 1.1000, 2.2000, 3.3000, 4.4000, 5.5000, 6.6000, 7.7000, 8.8000, 9.9000, 11.0000]) In [30]: torch.linspace(0,10,steps=11)#0-10,含0且含10,生成11个等间距的点(将区间10等分) Out[30]: tensor([ 0., 1., 2., 3., 4., 5., 6., 7., 8., 9., 10.]) In [31]: torch.logspace(0,-1,steps=10)#基底默认为10,10的0次方到10的-1次方,指数上等间距取10个点,可通过base参数修改基底 Out[31]: tensor([1.0000, 0.7743, 0.5995, 0.4642, 0.3594, 0.2783, 0.2154, 0.1668, 0.1292, 0.1000]) In [32]: torch.logspace(0,1,steps=10)#基底默认为10,10的0次方到10的1次方,指数上等间距取10个点,可通过base参数修改基底 Out[32]: tensor([ 1.0000, 1.2915, 1.6681, 2.1544, 2.7826, 3.5938, 4.6416, 5.9948, 7.7426, 10.0000])
**
**
In [33]: torch.eye(3,3) Out[33]: tensor([[1., 0., 0.], [0., 1., 0.], [0., 0., 1.]]) In [34]: torch.zeros(3,3) Out[34]: tensor([[0., 0., 0.], [0., 0., 0.], [0., 0., 0.]]) In [35]: a = torch.ones(3,3) In [36]: torch.ones_like(a) Out[36]: tensor([[1., 1., 1.], [1., 1., 1.], [1., 1., 1.]])
**
**
torch.randperm(n)随机生成0~n-1的一个排列,常用作索引来随机打散(shuffle)数据;对多个tensor使用同一个排列索引,可以保持它们之间行的对应关系
In [11]: torch.randperm(10)
Out[11]: tensor([4, 8, 9, 3, 6, 1, 0, 7, 5, 2])
In [2]: a = torch.rand(2,3) In [3]: b = torch.rand(2,2) In [4]: idx = torch.randperm(2) In [5]: idx Out[5]: tensor([1, 0]) In [7]: a[idx] Out[7]: tensor([[0.1356, 0.8077, 0.3847], [0.4126, 0.8919, 0.8108]]) In [8]: a Out[8]: tensor([[0.4126, 0.8919, 0.8108], [0.1356, 0.8077, 0.3847]]) In [9]: b[idx] Out[9]: tensor([[0.3102, 0.5754], [0.6128, 0.0977]]) In [10]: b Out[10]: tensor([[0.6128, 0.0977], [0.3102, 0.5754]])
Copyright © 2003-2013 www.wpsshop.cn 版权所有,并保留所有权利。