-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path: lstm.py
More file actions
148 lines (113 loc) · 4.96 KB
/
lstm.py
File metadata and controls
148 lines (113 loc) · 4.96 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
import numpy as np
from lstm import LSTMCell
class LSTM:
    """Unrolled LSTM network: one LSTMCell per timestep plus a dense read-out.

    Gate matrices act on the concatenation [x_t; h_{t-1}], hence their shape
    (n_hidden, n_in + n_hidden); the dense layer maps the hidden state to
    n_out units.
    """

    def __init__(self, n_in, n_hidden, n_out, n_timestamps, weights=None):
        """Build the network, drawing random weights unless *weights* is given.

        weights, if supplied, must be a dict with keys Wi/bi, Wf/bf, Wo/bo,
        Wg/bg (gates) and Wd/bd (dense layer).
        """
        self.n_in = n_in
        self.n_hidden = n_hidden
        self.n_out = n_out
        self.n_timestamps = n_timestamps
        if weights is None:
            self.weights = {}
            # input, forget, output gates and the candidate ("g") transform
            self.weights["Wi"] = np.random.randn(n_hidden, n_in + n_hidden)
            self.weights["bi"] = np.random.randn(n_hidden, 1)
            self.weights["Wf"] = np.random.randn(n_hidden, n_in + n_hidden)
            self.weights["bf"] = np.random.randn(n_hidden, 1)
            self.weights["Wo"] = np.random.randn(n_hidden, n_in + n_hidden)
            self.weights["bo"] = np.random.randn(n_hidden, 1)
            self.weights["Wg"] = np.random.randn(n_hidden, n_in + n_hidden)
            self.weights["bg"] = np.random.randn(n_hidden, 1)
            # dense layer
            self.weights["Wd"] = np.random.randn(n_out, n_hidden)
            self.weights["bd"] = np.random.randn(n_out, 1)
        else:
            # BUG FIX: the original omitted this branch, so caller-supplied
            # weights were silently dropped and self.weights was never set.
            self.weights = weights
        self.model = [LSTMCell() for _ in range(self.n_timestamps)]

    def forward(self, x, y):
        """Run the whole sequence; return the mean per-timestep cost.

        Column t of x / y is the input / target for timestep t (assumes
        shapes (n_in, n_timestamps) and (n_out, n_timestamps) — TODO confirm
        against LSTMCell).
        """
        h = np.zeros((self.n_hidden, 1))
        c = np.zeros((self.n_hidden, 1))
        total_cost = 0
        for t in range(self.n_timestamps):
            lstm_cell = self.model[t]
            # x[:, [t]] keeps the column 2-D, matching the shapes of h and c
            h, c, _ = lstm_cell.forward(x[:, [t]], h, c, self.weights)
            total_cost += lstm_cell.cost(y[:, [t]])
        return total_cost / self.n_timestamps

    def backward(self):
        """Backprop through time; return weight gradients averaged over T."""
        T = self.n_timestamps
        dh_next = np.zeros((self.n_hidden, 1))
        dc_next = np.zeros((self.n_hidden, 1))
        gradients = {}
        # walk the unrolled cells from the last timestep back to the first
        for t in range(T - 1, -1, -1):
            partial_gradients, dh_next, dc_next = self.model[t].backward(
                dh_next, dc_next, self.weights)
            for k, v in partial_gradients.items():
                if k not in gradients:
                    gradients[k] = 0
                gradients[k] += v / T
        return gradients

    def train(self, x, y, alpha):
        """One SGD step (forward + backward + update); returns the cost."""
        cost = self.forward(x, y)
        gradients = self.backward()
        # update weights in place with learning rate alpha
        for weight in self.weights.keys():
            self.weights[weight] -= alpha * gradients[weight]
        return cost

    def predict(self, x, h, c):
        """Roll the network forward from state (h, c), collecting outputs.

        NOTE(review): the same x is fed to every timestep (unlike forward,
        which slices one column per step) — confirm this is intended.
        """
        y_preds = []
        for t in range(self.n_timestamps):
            lstm_cell = self.model[t]
            h, c, y_pred = lstm_cell.forward(x, h, c, self.weights)
            y_preds.append(y_pred)
        return y_preds
def __init__(self, n_in, n_hidden, n_out, n_timestamps, weights=None):
self.n_in = n_in
self.n_hidden = n_hidden
self.n_out = n_out
self.n_timestamps = n_timestamps
if weights is None:
self.weights = {}
self.weights["Wi"] = np.random.randn(n_hidden, n_in + n_hidden)
self.weights["bi"] = np.random.randn(n_hidden, 1)
self.weights["Wf"] = np.random.randn(n_hidden, n_in + n_hidden)
self.weights["bf"] = np.random.randn(n_hidden, 1)
self.weights["Wo"] = np.random.randn(n_hidden, n_in + n_hidden)
self.weights["bo"] = np.random.randn(n_hidden, 1)
self.weights["Wg"] = np.random.randn(n_hidden, n_in + n_hidden)
self.weights["bg"] = np.random.randn(n_hidden, 1)
# dense layer
self.weights["Wd"] = np.random.randn(n_out, n_hidden)
self.weights["bd"] = np.random.randn(n_out, 1)
else:
self.weights = weights
self.model = [ LSTMCell() for _ in range(self.n_timestamps)]
def forward(self, x, y):
h = np.zeros((self.n_hidden, 1))
c = np.zeros((self.n_hidden, 1))
total_cost = 0
for t in range(self.n_timestamps):
lstm_cell = self.model[t]
h, c, _ = lstm_cell.forward(x[:, [t]], h, c, self.weights)
total_cost += lstm_cell.cost(y[:, [t]])
return total_cost/self.n_timestamps
def backward(self):
T = self.n_timestamps
dh_next = np.zeros((self.n_hidden, 1))
dc_next = np.zeros((self.n_hidden, 1))
gradients = {}
for t in range(T-1, -1, -1):
partial_gradients, dh_next, dc_next = self.model[t].backward(dh_next, dc_next, self.weights)
for k, v in partial_gradients.items():
if k not in gradients:
gradients[k] = 0
gradients[k] += v/T
return gradients
def train(self, x, y, alpha):
cost = self.forward(x, y)
gradients = self.backward()
# update weights
for weight in self.weights.keys():
self.weights[weight] -= alpha * gradients[weight]
return cost
def predict(self, x, h, c):
y_preds = []
for t in range(self.n_timestamps):
lstm_cell = self.model[t]
h, c, y_pred = lstm_cell.forward(x, h, c, self.weights)
y_preds.append(y_pred)
return y_preds