Commit af729f2 (1 parent: 63b569c)

Added gradcheck w.r.t. dx only, fixed layers
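Each layer's Backward returns the analytic gradient with respect to its inputs and, where the layer has parameters, with respect to those parameters as well. The new GradientCheck.py compares only the input gradient dx against a numerical estimate; the parameter gradients (db for Bias, dW for MatMul) are captured but not yet checked, hence "w.r.t. dx only".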

2 files changed (+49, -5 lines)

GradientCheck.py (+46)
@@ -0,0 +1,46 @@
+from NumericalGradient import *
+from Layers import *
+
+def main():
+    CheckAllLayers()
+
+def CheckAllLayers():
+    CheckBias()
+    CheckMatMul()
+    CheckReLU()
+    CheckSigmoid()
+
+def CheckBias():
+    bias = Bias(10)
+    test_input = np.random.randn(50, 10)
+    dout = np.random.randn(50, 10)
+    dx_num = numerical_gradient_layer(lambda x: bias.Forward(x), test_input, dout)
+    dx, db = bias.Backward(test_input, dout)
+    print('Bias dx error:', np.max(relative_error(dx, dx_num)))
+
+def CheckMatMul():
+    mm = MatMul(30, 10)
+    test_input = np.random.randn(50, 30)
+    dout = np.random.randn(50, 10)
+    dx_num = numerical_gradient_layer(lambda x: mm.Forward(x), test_input, dout)
+    dx, dW = mm.Backward(test_input, dout)
+    print('MatMul dx error:', np.max(relative_error(dx, dx_num)))
+
+def CheckReLU():
+    relu = ReLU()
+    test_input = np.random.randn(50, 30)
+    dout = np.random.randn(50, 30)
+    dx_num = numerical_gradient_layer(lambda x: relu.Forward(x), test_input, dout)
+    dx = relu.Backward(test_input, dout)
+    print('ReLU dx error:', np.max(relative_error(dx, dx_num)))
+
+def CheckSigmoid():
+    sig = Sigmoid()
+    test_input = np.random.randn(50, 30)
+    dout = np.random.randn(50, 30)
+    dx_num = numerical_gradient_layer(lambda x: sig.Forward(x), test_input, dout)
+    dx = sig.Backward(test_input, dout)
+    print('Sigmoid dx error:', np.max(relative_error(dx, dx_num)))
+
+
+main()
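The helpers numerical_gradient_layer and relative_error are imported from NumericalGradient.py, which this commit does not touch, so their implementations are not shown. A minimal sketch of what the check above relies on, assuming a centered-difference estimate contracted against dout and a max-scaled relative error (the step size h, the eps floor, and the exact error formula are assumptions, not the repository's code):

import numpy as np

def numerical_gradient_layer(f, x, dout, h=1e-5):
    # Centered differences: nudge each element of x by +/- h, rerun the
    # layer's forward pass, and contract the output change with dout.
    # Approximates dL/dx given dout = dL/d(output).
    grad = np.zeros_like(x)
    it = np.nditer(x, flags=['multi_index'], op_flags=['readwrite'])
    while not it.finished:
        ix = it.multi_index
        old = x[ix]
        x[ix] = old + h
        pos = f(x).copy()   # copy in case the layer returns a view of x
        x[ix] = old - h
        neg = f(x).copy()
        x[ix] = old         # restore the original value
        grad[ix] = np.sum((pos - neg) * dout) / (2.0 * h)
        it.iternext()
    return grad

def relative_error(a, b, eps=1e-12):
    # Elementwise |a - b| scaled by the combined magnitude; with float64
    # and h = 1e-5, max errors around 1e-8 or below count as a pass.
    return np.abs(a - b) / np.maximum(np.abs(a) + np.abs(b), eps)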

Layers.py (+3, -5)
@@ -35,20 +35,18 @@ def Backward(self, prev_inputs, dout):
 class ReLU(Layer):
 
     def Forward(self, inputs):
-        inputs[inputs < 0] = 0
-        return inputs
+        return inputs * (inputs > 0)
 
     def Backward(self, prev_inputs, dout):
-        dout[prev_inputs < 0] = 0.0
-        return dout
+        return dout * (prev_inputs >= 0)
 
 class Sigmoid(Layer):
 
     def Forward(self, inputs):
         return 1. / (1 + np.exp(-inputs))
 
     def Backward(self, prev_inputs, dout):
-        return (self.sigmoid(prev_inputs) * (1. - self.sigmoid(prev_inputs))) * dout
+        return (self.Forward(prev_inputs) * (1. - self.Forward(prev_inputs))) * dout
 
 class BinaryStochastic(Layer):
 
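The ReLU rewrite is more than cosmetic: the old Forward and Backward wrote into their argument arrays in place, so Forward corrupted the caller's cached input and Backward clobbered dout, and a numerical checker that reruns Forward on perturbed inputs would compare against damaged data. A minimal repro of the aliasing bug, with forward_old standing in for the pre-commit behavior:

import numpy as np

def forward_old(inputs):
    inputs[inputs < 0] = 0   # writes into the caller's array
    return inputs            # returns the same, now modified, array

x = np.array([-1.0, 2.0])
out = forward_old(x)
print(x)   # [0. 2.], the original input is gone, so any later
           # Backward(prev_inputs, dout) call sees the wrong values

The Sigmoid fix addresses a plain bug: self.sigmoid presumably does not exist on the class (the old line would raise AttributeError on the first backward pass), so the local gradient sigma(x) * (1 - sigma(x)) is now computed by reusing Forward. One remaining asymmetry worth noting: Forward masks with inputs > 0 while Backward masks with prev_inputs >= 0; the two subgradient conventions differ only at exactly zero, which random float inputs essentially never hit.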
