kigichang committed on
Commit
580baa9
·
verified ·
1 Parent(s): acff78f

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +151 -3
README.md CHANGED
@@ -1,3 +1,151 @@
1
- ---
2
- license: mit
3
- ---
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ license: mit
3
+ ---
4
+
5
+ # test rnn
6
+
7
+ The models in test rnn are for [Huggingface Candle PR#2542](https://github.com/huggingface/candle/pull/2542) as example test cases.
8
+
9
+ Test models follow the PyTorch [LSTM](https://pytorch.org/docs/stable/generated/torch.nn.LSTM.html) and [GRU](https://pytorch.org/docs/stable/generated/torch.nn.GRU.html) references.
10
+
11
+ Test models are generated by the following code:
12
+
13
+ - lstm_test.pt: A simple LSTM model with 1 layer.
14
+
15
+ ```python
16
+ import torch
17
+ import torch.nn as nn
18
+
19
+ rnn = nn.LSTM(10, 20, num_layers=1, batch_first=True)
20
+ input = torch.randn(5, 3, 10)
21
+ output, (hn, cn) = rnn(input)
22
+
23
+ state_dict = rnn.state_dict()
24
+ state_dict['input'] = input
25
+ state_dict['output'] = output.contiguous()
26
+ state_dict['hn'] = hn
27
+ state_dict['cn'] = cn
28
+ torch.save(state_dict, "lstm_test.pt")
29
+ ```
30
+
31
+ - gru_test.pt: A simple GRU model with 1 layer.
32
+
33
+ ```python
34
+ import torch
35
+ import torch.nn as nn
36
+
37
+ rnn = nn.GRU(10, 20, num_layers=1, batch_first=True)
38
+ input = torch.randn(5, 3, 10)
39
+ output, hn = rnn(input)
40
+
41
+ state_dict = rnn.state_dict()
42
+ state_dict['input'] = input
43
+ state_dict['output'] = output.contiguous()
44
+ state_dict['hn'] = hn
45
+ torch.save(state_dict, "gru_test.pt")
46
+ ```
47
+
48
+ - bi_lstm_test.pt: A bidirectional LSTM model with 1 layer.
49
+
50
+ ```python
51
+ import torch
52
+ import torch.nn as nn
53
+
54
+ rnn = nn.LSTM(10, 20, num_layers=1, bidirectional=True, batch_first=True)
55
+ input = torch.randn(5, 3, 10)
56
+ output, (hn, cn) = rnn(input)
57
+
58
+ state_dict = rnn.state_dict()
59
+ state_dict['input'] = input
60
+ state_dict['output'] = output.contiguous()
61
+ state_dict['hn'] = hn
62
+ state_dict['cn'] = cn
63
+ torch.save(state_dict, "bi_lstm_test.pt")
64
+ ```
65
+
66
+ - bi_gru_test.pt: A bidirectional GRU model with 1 layer.
67
+
68
+ ```python
69
+ import torch
70
+ import torch.nn as nn
71
+
72
+ rnn = nn.GRU(10, 20, num_layers=1, bidirectional=True, batch_first=True)
73
+ input = torch.randn(5, 3, 10)
74
+ output, hn = rnn(input)
75
+
76
+ state_dict = rnn.state_dict()
77
+ state_dict['input'] = input
78
+ state_dict['output'] = output.contiguous()
79
+ state_dict['hn'] = hn
80
+ torch.save(state_dict, "bi_gru_test.pt")
81
+ ```
82
+
83
+ - lstm_nlayer_test.pt: An LSTM model with 3 layers.
84
+
85
+ ```python
86
+ import torch
87
+ import torch.nn as nn
88
+
89
+ rnn = nn.LSTM(10, 20, num_layers=3, batch_first=True)
90
+ input = torch.randn(5, 3, 10)
91
+ output, (hn, cn) = rnn(input)
92
+
93
+ state_dict = rnn.state_dict()
94
+ state_dict['input'] = input
95
+ state_dict['output'] = output.contiguous()
96
+ state_dict['hn'] = hn
97
+ state_dict['cn'] = cn
98
+ torch.save(state_dict, "lstm_nlayer_test.pt")
99
+ ```
100
+
101
+ - bi_lstm_nlayer_test.pt: A bidirectional LSTM model with 3 layers.
102
+
103
+ ```python
104
+ import torch
105
+ import torch.nn as nn
106
+
107
+ rnn = nn.LSTM(10, 20, num_layers=3, bidirectional=True, batch_first=True)
108
+ input = torch.randn(5, 3, 10)
109
+ output, (hn, cn) = rnn(input)
110
+
111
+ state_dict = rnn.state_dict()
112
+ state_dict['input'] = input
113
+ state_dict['output'] = output.contiguous()
114
+ state_dict['hn'] = hn
115
+ state_dict['cn'] = cn
116
+ torch.save(state_dict, "bi_lstm_nlayer_test.pt")
117
+ ```
118
+
119
+ - gru_nlayer_test.pt: A GRU model with 3 layers.
120
+
121
+ ```python
122
+ import torch
123
+ import torch.nn as nn
124
+
125
+ rnn = nn.GRU(10, 20, num_layers=3, batch_first=True)
126
+ input = torch.randn(5, 3, 10)
127
+ output, hn = rnn(input)
128
+
129
+ state_dict = rnn.state_dict()
130
+ state_dict['input'] = input
131
+ state_dict['output'] = output.contiguous()
132
+ state_dict['hn'] = hn
133
+ torch.save(state_dict, "gru_nlayer_test.pt")
134
+ ```
135
+
136
+ - bi_gru_nlayer_test.pt: A bidirectional GRU model with 3 layers.
137
+
138
+ ```python
139
+ import torch
140
+ import torch.nn as nn
141
+
142
+ rnn = nn.GRU(10, 20, num_layers=3, bidirectional=True, batch_first=True)
143
+ input = torch.randn(5, 3, 10)
144
+ output, hn = rnn(input)
145
+
146
+ state_dict = rnn.state_dict()
147
+ state_dict['input'] = input
148
+ state_dict['output'] = output.contiguous()
149
+ state_dict['hn'] = hn
150
+ torch.save(state_dict, "bi_gru_nlayer_test.pt")
151
+ ```