import torch
import numpy as np
# Random tensor initialization: torch.rand samples from the uniform
# distribution on [0, 1), torch.randn from the standard normal distribution
x = torch.rand(4,3)
print(x)
y = torch.randn(4,3)
print(y)
tensor([[0.9480, 0.9501, 0.2717],
[0.8003, 0.0821, 0.6529],
[0.3265, 0.4726, 0.6464],
[0.9685, 0.5453, 0.2186]])
tensor([[-0.5172, -0.1762, -1.0094],
[ 0.1688, -1.6217, -0.8422],
[-0.4597, -0.5814, -1.3831],
[ 0.1718, 0.2061, 1.0907]])
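The *_like constructors build a new tensor with the same shape (and, by default, the same dtype and device) as an existing one. A minimal sketch reusing x and y from above (the names u and v are just for illustration):
# Same shape as x / y, filled from the uniform / standard normal distribution
u = torch.rand_like(x)
v = torch.randn_like(y)
print(u.shape, v.shape)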
# Initialize an all-zeros tensor with dtype torch.long
a = torch.zeros((4,4),dtype=torch.long)
print(a)
# Initialize an all-ones tensor (default dtype is torch.float32)
b = torch.ones(4,4)
print(b)
# Construct a tensor from a NumPy array (torch.tensor copies the data)
c = torch.tensor(np.ones((2,3),dtype='int32'))
print(c)
tensor([[0, 0, 0, 0],
[0, 0, 0, 0],
[0, 0, 0, 0],
[0, 0, 0, 0]])
tensor([[1., 1., 1., 1.],
[1., 1., 1., 1.],
[1., 1., 1., 1.],
[1., 1., 1., 1.]])
tensor([[1, 1, 1],
[1, 1, 1]], dtype=torch.int32)
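Note that torch.tensor(...) copies the NumPy data, while torch.from_numpy(...) shares memory with the array, so later changes to the array show up in the tensor. A small sketch to illustrate (variable names are just for illustration):
arr = np.ones((2,3), dtype='int32')
t_copy = torch.tensor(arr)        # independent copy of the data
t_shared = torch.from_numpy(arr)  # shares memory with arr
arr[0,0] = 7
print(t_copy[0,0])    # tensor(1, dtype=torch.int32) -- unaffected
print(t_shared[0,0])  # tensor(7, dtype=torch.int32) -- sees the change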
Common methods for constructing a Tensor:
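A non-exhaustive sketch of the usual constructors (the shapes and arguments below are only examples):
torch.Tensor(4,3)              # uninitialized tensor of the given size
torch.tensor([[1,2],[3,4]])    # tensor from existing data (list, tuple, ndarray)
torch.ones(2,3)                # all ones
torch.zeros(2,3)               # all zeros
torch.eye(3)                   # identity matrix
torch.arange(1,10,2)           # values from 1 to 9 with step 2
torch.linspace(0,1,5)          # 5 evenly spaced values from 0 to 1
torch.rand(2,3)                # uniform on [0, 1)
torch.randn(2,3)               # standard normal
torch.randperm(5)              # random permutation of 0..4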
# Basic tensor operations
# Addition
print(a+b)
# Methods ending in an underscore operate in place: add_ modifies a directly and returns it
y = a.add_(3)
print(y)
tensor([[1., 1., 1., 1.],
[1., 1., 1., 1.],
[1., 1., 1., 1.],
[1., 1., 1., 1.]])
tensor([[3, 3, 3, 3],
[3, 3, 3, 3],
[3, 3, 3, 3],
[3, 3, 3, 3]])
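Addition can also be written in function form, optionally with an out= tensor that receives the result. A minimal sketch reusing a and b from above (res is just an illustrative name):
print(torch.add(a, b))      # same result as a + b
res = torch.empty(4,4)
torch.add(a, b, out=res)    # write the sum into res
print(res)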
# Indexing (NumPy-style syntax)
x = torch.randn(4,4)
print(x)
# Second column
print(x[:,1])
# Second row
print(x[1,:])
tensor([[-0.0617, 2.3109, 0.0030, 0.6941],
[ 0.4677, -1.9160, 0.6614, -1.7743],
[-0.3349, 0.2371, 2.1070, -1.0076],
[ 0.3823, -1.2401, -0.3766, -1.0454]])
tensor([ 2.3109, -1.9160,  0.2371, -1.2401])
tensor([ 0.4677, -1.9160,  0.6614, -1.7743])
# Broadcasting
# When an element-wise operation is applied to two Tensors of different shapes, broadcasting may kick in:
# the elements are implicitly replicated so that both Tensors have the same shape, and the operation is then applied element-wise.
x = torch.arange(1,4).view(1,3)
print(x)
y = torch.arange(1,5).view(4,1)
print(y)
print(x+y)
tensor([[1, 2, 3]])
tensor([[1],
[2],
[3],
[4]])
tensor([[2, 3, 4],
[3, 4, 5],
[4, 5, 6],
[5, 6, 7]])
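Broadcasting aligns shapes from the trailing dimension: each pair of dimensions must either match or contain a 1 (or be missing), and the size-1 dimensions are expanded. Another small sketch (m and v are just illustrative names):
m = torch.ones(2,3)
v = torch.arange(3)   # shape (3,), broadcast across the rows of m
print(m + v)          # each row becomes [1., 2., 3.]
# Incompatible shapes, e.g. (2,3) and (4,), raise a RuntimeError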