# Pytorch學(xué)習(xí)筆記4:合并切割與基本運(yùn)算
#添加到學(xué)習(xí)筆記2末尾,直接運(yùn)行。代碼意義可以看注釋。
print('——————————合并與切割——————————')
# torch.cat concatenates along an EXISTING dimension:
# (4,32,8) + (5,32,8) on dim 0 -> (9,32,8); all other dims must match.
cat1 = torch.rand(4, 32, 8)
cat2 = torch.rand(5, 32, 8)
cat3 = torch.cat([cat1, cat2], dim=0)
print('tensor shape:', cat3.shape)
# torch.stack inserts a brand-NEW leading dimension:
# two (32,8) tensors stacked on dim 0 -> (2,32,8).
stack1 = torch.rand(32, 8)
stack2 = torch.rand(32, 8)
stack3 = torch.stack([stack1, stack2], dim=0)
print('tensor shape:', stack3.shape)
# split() divides a tensor either by an explicit list of lengths
# or by a single fixed chunk length.
split1 = torch.rand(6, 32, 8)
# Length list [1,2,3] on dim 0 -> three tensors of 1, 2 and 3 slices.
split2, split3, split4 = split1.split([1, 2, 3], dim=0)
for piece in (split2, split3, split4):
    print('tensor shape:', piece.shape)
# Fixed length 3 on dim 0 -> two tensors of 3 slices each.
split5, split6 = split1.split(3, dim=0)
for piece in (split5, split6):
    print('tensor shape:', piece.shape)
# chunk() divides by the desired NUMBER of pieces instead of piece length.
chunk1 = torch.rand(6, 32, 8)
chunk2, chunk3, chunk4 = chunk1.chunk(3, dim=0)
for piece in (chunk2, chunk3, chunk4):
    print('tensor shape:', piece.shape)
print('——————————合并與切割——————————')
print('——————————基本運(yùn)算——————————')
# Prefer the overloaded operators: + (add), - (sub), * (elementwise mul),
# / (div) and @ (matrix multiplication) — they dispatch to
# torch.add/sub/mul/div/matmul respectively.
calc1 = torch.rand(2, 2, 2)
calc2 = torch.rand(2, 2, 2)
# FIX: the original labels said 'tensor shape:' but the full tensors were
# printed, not their shapes — label corrected to 'tensor:'.
# The loop preserves the original print order (operands then result per op)
# and leaves calc3 bound to calc1 @ calc2 afterwards, as before.
for calc3 in (calc1 + calc2,
              calc1 - calc2,
              calc1 * calc2,
              calc1 / calc2,
              calc1 @ calc2):  # @ multiplies the last two dims as matrices
    print('tensor:', calc1)
    print('tensor:', calc2)
    print('tensor:', calc3)
# Linear-layer style dimensionality reduction: 784 -> 512 -> 64 -> 10.
# PyTorch convention stores weights as (out_features, in_features),
# hence the .t() before each matmul; a tensor with 3+ dims would need
# transpose() instead of .t().
xxx = torch.rand(4, 784)
www = torch.rand(512, 784)
www2 = torch.rand(64, 512)
www3 = torch.rand(10, 64)
ooo = xxx @ www.t()
ooo2 = ooo @ www2.t()
ooo3 = ooo2 @ www3.t()
for layer_out in (xxx, ooo, ooo2, ooo3):
    print('tensor shape:', layer_out.shape)
# Matrix multiplication on 4-D tensors: only the last two dimensions take
# part in the matmul; the leading dimensions act as batch dimensions.
aaaa = torch.rand(4, 3, 28, 64)
bbbb = torch.rand(4, 3, 64, 32)
cccc = torch.matmul(aaaa, bbbb)
print('tensor shape:', cccc.shape)
# A size-1 batch dimension broadcasts automatically:
# (4,1,64,32) expands to (4,3,64,32) before the matmul.
bbbb2 = torch.rand(4, 1, 64, 32)
cccc = torch.matmul(aaaa, bbbb2)
print('tensor shape:', cccc.shape)
# Elementwise power, root, exponential, logarithm and rounding helpers.
aaaaa = torch.full([2, 2], 3.0)
bbbbb = aaaaa ** 2           # square
print('tensor:', bbbbb)
bbbbb = aaaaa.sqrt()         # square root
print('tensor:', bbbbb)
bbbbb = aaaaa.rsqrt()        # reciprocal of the square root: 1/sqrt(x)
print('tensor:', bbbbb)
# FIX: torch.exp is the exponential e**x — the original comment mislabelled
# it as the natural logarithm.
exp1 = torch.exp(torch.ones(2, 2))
print('tensor:', exp1)
log1 = torch.log(exp1)       # natural logarithm, so log(e) == 1
print('tensor:', log1)
# Rounding family, demonstrated on values in [0, 1).
appx = torch.rand(2, 2)
floor = appx.floor()         # largest integers <= appx
ceil = appx.ceil()           # smallest integers >= appx
trunc = appx.trunc()         # integer part (truncate toward zero)
frac = appx.frac()           # fractional part, so trunc + frac == appx
# FIX: renamed from `round` to `rounded` — the original shadowed the
# builtin round().
rounded = appx.round()       # round to nearest integer
print('tensor:', appx)
print('tensor:', floor)
print('tensor:', ceil)
print('tensor:', trunc)
print('tensor:', frac)
print('tensor:', rounded)
# Value clamping: limit tensor entries to a range (a common trick for
# gradient clipping, hence the variable name).
grad = torch.rand(2, 3) * 15
for label, stat in (('max:', grad.max()),
                    ('median:', grad.median()),
                    ('min:', grad.min())):
    print(label, stat)
# clamp(min) raises everything below the lower bound up to it;
# clamp(min, max) restricts values to the closed interval [min, max].
print('clamp 10-:', grad.clamp(10))
print('clamp 0-10:', grad.clamp(0, 10))
print('——————————基本運(yùn)算——————————')