-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathactivation.py
More file actions
38 lines (25 loc) · 808 Bytes
/
activation.py
File metadata and controls
38 lines (25 loc) · 808 Bytes
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
import numpy as np
class Activation:
    """Abstract interface for activation layers.

    Subclasses provide ``forward`` / ``backward`` and their own
    ``__init__``; every method here raises ``NotImplementedError``,
    including ``__init__``, so the base class itself cannot be
    instantiated.
    """

    def __init__(self):
        # Deliberately not constructible — subclasses define their own
        # __init__ and do not call super().__init__().
        raise NotImplementedError

    def forward(self, input_data):
        """Compute the activation of ``input_data``."""
        raise NotImplementedError

    def backward(self, gradient):
        """Propagate ``gradient`` back through the activation."""
        raise NotImplementedError
class ReLU(Activation):
    """Rectified linear unit: elementwise ``max(0, x)``."""

    def __init__(self):
        # Forward input is cached so backward() can mask the gradient.
        self.input = None

    def forward(self, input_data):
        """Return ``max(0, input_data)`` elementwise, caching the input."""
        self.input = input_data
        return np.maximum(input_data, 0)

    def backward(self, gradient):
        """Let the gradient through only where the cached input was > 0."""
        return gradient * (self.input > 0)
class Sigmoid(Activation):
    """Logistic sigmoid activation: sigma(x) = 1 / (1 + exp(-x))."""

    def __init__(self):
        # Forward output is cached because the derivative reuses it:
        # sigma'(x) = sigma(x) * (1 - sigma(x)).
        self.output = None

    def forward(self, input_data):
        """Return sigma(input_data) elementwise, caching the result.

        Uses the numerically stable formulation built on exp(-|x|):
        the naive 1/(1+exp(-x)) overflows np.exp for large-magnitude
        negative x (emitting a RuntimeWarning and relying on inf
        propagation to reach 0); exp(-|x|) stays within (0, 1].
        """
        x = np.asarray(input_data, dtype=float)
        z = np.exp(-np.abs(x))  # in (0, 1]; never overflows
        # sigma(x) = 1/(1+z) for x >= 0, and z/(1+z) for x < 0.
        self.output = np.where(x >= 0, 1.0 / (1.0 + z), z / (1.0 + z))
        return self.output

    def backward(self, gradient):
        """Chain rule through the sigmoid via the cached forward output."""
        return gradient * (self.output * (1 - self.output))