|
| 1 | +from NumericalGradient import * |
| 2 | +from Layers import * |
| 3 | + |
def main():
    """Entry point: run the numerical gradient check for every layer."""
    CheckAllLayers()
def CheckAllLayers():
    """Run each layer's gradient check in turn.

    Order matches the original: Bias, MatMul, ReLU, Sigmoid.
    """
    for check in (CheckBias, CheckMatMul, CheckReLU, CheckSigmoid):
        check()
def CheckBias():
    """Compare Bias.Backward's input gradient against a numerical estimate."""
    layer = Bias(10)
    x = np.random.randn(50, 10)
    upstream = np.random.randn(50, 10)
    # Numerical estimate of d(loss)/d(x) via the forward pass only.
    numeric_dx = numerical_gradient_layer(lambda v: layer.Forward(v), x, upstream)
    # Analytic gradient; the parameter gradient is unused here.
    # NOTE(review): db is never checked numerically — consider verifying it too.
    analytic_dx, _ = layer.Backward(x, upstream)
    print('Bias dx error:', np.max(relative_error(analytic_dx, numeric_dx)))
def CheckMatMul():
    """Compare MatMul.Backward's input gradient against a numerical estimate."""
    layer = MatMul(30, 10)
    x = np.random.randn(50, 30)
    upstream = np.random.randn(50, 10)
    # Numerical estimate of d(loss)/d(x) via the forward pass only.
    numeric_dx = numerical_gradient_layer(lambda v: layer.Forward(v), x, upstream)
    # Analytic gradient; the weight gradient is unused here.
    # NOTE(review): dW is never checked numerically — consider verifying it too.
    analytic_dx, _ = layer.Backward(x, upstream)
    print('MatMul dx error:', np.max(relative_error(analytic_dx, numeric_dx)))
def CheckReLU():
    """Compare ReLU.Backward's gradient against a numerical estimate."""
    layer = ReLU()
    x = np.random.randn(50, 30)
    upstream = np.random.randn(50, 30)
    numeric_dx = numerical_gradient_layer(lambda v: layer.Forward(v), x, upstream)
    analytic_dx = layer.Backward(x, upstream)
    error = relative_error(analytic_dx, numeric_dx)
    print('ReLU dx error:', np.max(error))
def CheckSigmoid():
    """Compare Sigmoid.Backward's gradient against a numerical estimate."""
    layer = Sigmoid()
    x = np.random.randn(50, 30)
    upstream = np.random.randn(50, 30)
    numeric_dx = numerical_gradient_layer(lambda v: layer.Forward(v), x, upstream)
    analytic_dx = layer.Backward(x, upstream)
    error = relative_error(analytic_dx, numeric_dx)
    print('Sigmoid dx error:', np.max(error))
# Guard the entry point so importing this module (e.g. to reuse the check
# functions) does not immediately run every gradient check as a side effect.
if __name__ == "__main__":
    main()
0 commit comments