activations.py
from abc import ABC, abstractmethod

import numpy as np


class ActivationFunction(ABC):
    """Common interface: __call__ computes the activation, first_derivative its gradient."""

    @abstractmethod
    def __call__(self, x):
        """Apply the activation element-wise to x."""

    @abstractmethod
    def first_derivative(self, x):
        """Element-wise first derivative of the activation at x."""
class Linear(ActivationFunction):
    """Scaled identity activation: f(x) = c * x."""

    def __init__(self, c=None):
        super().__init__()
        # Default slope is 1; an explicitly passed c (including 0) is kept as given.
        self.c = 1 if c is None else c

    def __call__(self, x):
        return self.c * x

    def first_derivative(self, x):
        # Constant slope; broadcasts against x wherever an array is expected.
        return self.c
class ReLU(ActivationFunction):
    """Rectified linear unit: f(x) = max(0, x)."""

    def __call__(self, x):
        return np.where(x <= 0, 0, x)

    def first_derivative(self, x):
        # Convention: the derivative at x = 0 is taken to be 0.
        return np.where(x <= 0, 0, 1)
class Sigmoid(ActivationFunction):
    """Logistic sigmoid: f(x) = 1 / (1 + exp(-x))."""

    def __call__(self, x: np.ndarray):
        # Same function as exp(x) / (1 + exp(x)), but large positive x maps to 1
        # instead of overflowing to NaN.
        return 1.0 / (1.0 + np.exp(-x))

    def first_derivative(self, x):
        a = self(x)
        return a * (1 - a)
class Softmax(ActivationFunction):
    """Softmax over axis 0: maps each column to a probability distribution."""

    def __call__(self, x: np.ndarray):
        # Shifting by the column-wise maximum before exponentiating avoids overflow
        # and leaves the result unchanged.
        a = np.exp(x - np.max(x, axis=0, keepdims=True))
        return a / np.sum(a, axis=0, keepdims=True)

    def first_derivative(self, x):
        # The softmax Jacobian is not used directly here: paired with cross-entropy
        # loss, the combined gradient reduces to softmax(x) - target.
        raise NotImplementedError("Use Softmax activation only together with Cross-Entropy Loss")
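
# A minimal usage sketch, assuming the module is executed directly: it exercises the
# forward pass and first_derivative of each activation on a small example batch.
if __name__ == "__main__":
    x = np.array([[-2.0, 0.0, 3.0],
                  [1.0, 0.5, -1.0]])

    for act in (Linear(c=2), ReLU(), Sigmoid()):
        print(type(act).__name__)
        print(act(x))
        print(act.first_derivative(x))

    # Softmax normalises each column (axis=0) to a probability distribution.
    probs = Softmax()(x)
    print("Softmax column sums:", np.sum(probs, axis=0))  # expected: all ones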