"""
This modules contains a few useful activation functions.
You can read more about it here: https://en.wikipedia.org/wiki/Activation_function#Comparison_of_activation_functions
"""
import math


class ActivationFunction:
    # A simple class to hold an activation function and its derivative.
    def __init__(self, func, dfunc, range=(0, 0), use_x_vals=False):
        self.func = func
        self.dfunc = dfunc
        self.range = range
        self.use_x_vals = use_x_vals


# These could be turned into lambda functions, but they are kept as named
# module-level functions so that the pickle module can serialize them.
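# Derivative convention (inferred from the definitions below): when
# use_x_vals is False the derivative takes the activation's output
# (e.g. dsig expects y = sig(x)); when use_x_vals is True it takes the raw
# pre-activation input x, even though the parameter is still named y.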
def sig(x):
    return 1 / (1 + math.exp(-x))

def dsig(y):
    return y * (1 - y)

def tanh(x):
    return math.tanh(x)

def dtanh(y):
    return 1 - (y * y)

def arctan(x):
    return math.atan(x)

def darctan(y):
    return 1 / (y**2 + 1)

def softsign(x):
    return x / (1 + math.fabs(x))

def dsoftsign(y):
    return 1 / (math.pow((math.fabs(y) + 1), 2))

def relu(x):
    return max(0, x)

def drelu(y):
    return 0 if y < 0 else 1

def leaky_relu(x):
    return 0.01 * x if x < 0 else x

def dleaky_relu(y):
    return 0.01 if y < 0 else 1

def softplus(x):
    return math.log(1 + math.exp(x))

def dsoftplus(y):
    return 1 / (1 + math.exp(-y))

def gaussian(x):
    return math.exp(-1 * (x * x))

def dgaussian(y):
    return -2 * y * math.exp(-1 * (y * y))


SIGMOID = ActivationFunction(sig, dsig, (0, 1))
TANH = ActivationFunction(tanh, dtanh, range=(-1, 1))  # tanh is bounded on (-1, 1), like SIGMOID's (0, 1).
ARCTAN = ActivationFunction(arctan, darctan, range=(-math.pi/2, math.pi/2), use_x_vals=True)
SOFTSIGN = ActivationFunction(softsign, dsoftsign, range=(-1, 1), use_x_vals=True)
RELU = ActivationFunction(relu, drelu, use_x_vals=True)
LEAKY_RELU = ActivationFunction(leaky_relu, dleaky_relu, use_x_vals=True)
SOFTPLUS = ActivationFunction(softplus, dsoftplus, use_x_vals=True)
GAUSSIAN = ActivationFunction(gaussian, dgaussian, range=(0, 1), use_x_vals=True)
A_FUNCTIONS = {
    'sigmoid': SIGMOID,
    'tanh': TANH,
    'arctan': ARCTAN,
    'softsign': SOFTSIGN,
    'relu': RELU,
    'leaky_relu': LEAKY_RELU,
    'softplus': SOFTPLUS,
    'gaussian': GAUSSIAN
}
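

# Minimal usage sketch: looks each activation up in A_FUNCTIONS and uses
# use_x_vals to decide whether its derivative receives the raw input x or
# the activation output y (the sample input x = 0.5 is arbitrary).
if __name__ == '__main__':
    x = 0.5
    for name, af in A_FUNCTIONS.items():
        y = af.func(x)
        # Pick the argument the derivative expects under this convention.
        dy = af.dfunc(x if af.use_x_vals else y)
        print(f'{name}: f({x}) = {y:.4f}, derivative = {dy:.4f}')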