Skip to content

Commit 18e3477

Browse files
committed
relu
1 parent 5692b8f commit 18e3477

File tree

2 files changed

+38
-0
lines changed

2 files changed

+38
-0
lines changed

ch01/forward_and_backward.py

+18
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,24 @@ def backword(self, dout):
2323
# xに対して1.1という値の打ち消し微分を渡したいので。
2424
return dx, dy
2525

26+
class AddLayer:
    """Computational-graph node that adds two inputs.

    The forward pass caches both operands; the backward pass routes the
    upstream gradient to each input unchanged (d(x+y)/dx = d(x+y)/dy = 1).
    """

    def __init__(self):
        # Operands seen on the most recent forward() call.
        self.x = None
        self.y = None

    def forward(self, x, y):
        """Remember both operands and return their sum."""
        self.x, self.y = x, y
        return x + y

    # NOTE(review): 'backword' keeps the spelling used by the sibling layer
    # in this file so existing callers keep working.
    def backword(self, dout):
        """Return (dx, dy): the upstream gradient dout passed through to both inputs."""
        # Addition distributes the incoming gradient verbatim to each operand.
        return dout * 1, dout * 1
43+
2644
origin_price = 100
2745
buy_count = 2
2846
tax = 1.1

ch01/relu.py

+20
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,20 @@
1+
import numpy as np
2+
class ReluLayer:
    """ReLU activation layer: forward zeroes non-positive entries,
    backward blocks their gradient using the mask saved by forward()."""

    def __init__(self):
        # Boolean mask of the elements that were <= 0 on the last forward pass.
        self.mask = None

    def forward(self, x):
        """Return a copy of x with every non-positive element set to 0.

        x is assumed to be a numpy array (it must support .copy() and
        boolean-mask assignment).  Example:
            [[ 2. -0.6]       [[ 2.  0.]
             [ 1. -0.5]]  =>   [ 1.  0.]]
        """
        self.mask = (x <= 0)
        out = x.copy()
        # Zero out the masked (non-positive) elements.
        out[self.mask] = 0
        return out

    # dout is the gradient arriving from the next layer during backprop.
    # Added so the stored mask is actually used; spelling follows the
    # file's existing 'backword' convention (AddLayer.backword).
    def backword(self, dout):
        """Return dout with the gradient zeroed wherever the forward input was <= 0."""
        dx = dout.copy()
        dx[self.mask] = 0
        return dx
17+
18+
# Demo: push a small 2x2 batch through the ReLU layer.
x = np.array([[2.0, -0.6], [1.0, -0.5]])
relu_layer = ReluLayer()
# The result was previously discarded, so the demo printed nothing;
# show the rectified output (negatives become 0).
print(relu_layer.forward(x))

0 commit comments

Comments
 (0)