Introduction to the PyTorch library

A detailed explanation of PyTorch autograd and backward

https://zhuanlan.zhihu.com/p/83172023
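A minimal sketch of autograd and backward (my own example, not taken from the linked article): calling backward() on a scalar output accumulates d(output)/d(input) into each leaf tensor's .grad; for a non-scalar output, backward() needs a gradient argument (the vector in the vector-Jacobian product).

import torch

# y = x^2, so dy/dx = 2x; at x = 3 the gradient is 6
x = torch.tensor(3.0, requires_grad=True)
y = x ** 2
y.backward()
print(x.grad)  # tensor(6.)

# z is non-scalar, so backward() needs a gradient of the same shape
v = torch.tensor([1.0, 2.0], requires_grad=True)
z = v * 3
z.backward(torch.ones_like(z))
print(v.grad)  # tensor([3., 3.])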

Usage of PyTorch view

https://zhuanlan.zhihu.com/p/87856193
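A minimal sketch of view (my own example): view() reshapes a tensor without copying, so it requires contiguous memory and the result shares storage with the original; passing -1 lets PyTorch infer that dimension.

import torch

a = torch.arange(6)      # shape: [6]
b = a.view(2, 3)         # shape: [2, 3]
c = a.view(-1, 2)        # -1 is inferred: shape [3, 2]
print(b.shape, c.shape)  # torch.Size([2, 3]) torch.Size([3, 2])

# view shares memory with the original tensor
b[0, 0] = 100
print(a[0])              # tensor(100)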

How nn.Embedding works in PyTorch and how to use it

https://www.jianshu.com/p/63e7acc5e890
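A minimal sketch of the principle (my own example): nn.Embedding is a learnable lookup table, so calling it with an index tensor returns the corresponding rows of its weight matrix.

import torch
import torch.nn as nn

# Lookup table with 10 entries, each a 4-dimensional vector
embed = nn.Embedding(num_embeddings=10, embedding_dim=4)
idx = torch.LongTensor([1, 5])
# The forward pass is just row indexing into embed.weight
print(torch.equal(embed(idx), embed.weight[idx]))  # True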

Using GRU

https://zhuanlan.zhihu.com/p/37217734
https://www.jianshu.com/p/0c87b6ab0c4a

import torch
import torch.nn as nn

gru = nn.GRU(input_size=5, hidden_size=6,
             num_layers=2,         # number of stacked GRU layers
             batch_first=False,    # default; False: (seq, batch, feature), True: (batch, seq, feature)
             bidirectional=False,  # default; unidirectional
             )

# N    = batch size
# L    = sequence length
# D    = 2 if bidirectional=True else 1
# Hin  = input_size
# Hout = hidden_size


input_ = torch.randn(1, 3, 5)  # (L, N, Hin): (sequence length, batch size, input size)
h0 = torch.randn(2 * 1, 3, 6)  # (D*num_layers, N, Hout): (num directions * num layers, batch size, hidden size)

output, hn = gru(input_, h0)
# output: [1, 3, 6]  (L, N, D*Hout) = (1, 3, 1*6)
# hn:     [2, 3, 6]  (D*num_layers, N, Hout) = (1*2, 3, 6)

print(output.shape, hn.shape)
# torch.Size([1, 3, 6]) torch.Size([2, 3, 6])

Example 2
# Create a GRU with input_size=50 and hidden_size=50
gru = nn.GRU(input_size=50, hidden_size=50, batch_first=True)

# Create an Embedding with a vocabulary size of 3 and 50-dimensional word vectors.
embed = nn.Embedding(3, 50)

# Create a 2-D LongTensor of indices (index tensors are generally LongTensor)
x = torch.LongTensor([[0, 1, 2]]) # x.size() --> torch.Size([1, 3])

# Map the indices to vectors
x_embed = embed(x) # x_embed.size() --> torch.Size([1, 3, 50])
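
To complete the example, the embedded sequence can be fed straight into the GRU created above; with batch_first=True the input is (batch, seq, feature), so x_embed's shape of [1, 3, 50] is accepted as-is.

# Pass the embedded sequence through the GRU
out, hidden = gru(x_embed)
print(out.shape, hidden.shape)
# out:    torch.Size([1, 3, 50])   (N, L, D*Hout)
# hidden: torch.Size([1, 1, 50])   (D*num_layers, N, Hout)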