
Commit 13b7d4c

Deep Neural Network Application
Cat classifier Week 4 - Course 1
1 parent 9d0ef62 commit 13b7d4c

30 files changed: +3458 −0 lines

Week4/Building your Deep Neural Network - Step by Step/Building+your+Deep+Neural+Network+-+Step+by+Step+v5.ipynb

Lines changed: 1523 additions & 0 deletions
Large diffs are not rendered by default.
Lines changed: 82 additions & 0 deletions
@@ -0,0 +1,82 @@
import numpy as np


def sigmoid(Z):
    """
    Implements the sigmoid activation in numpy.

    Arguments:
    Z -- numpy array of any shape

    Returns:
    A -- output of sigmoid(Z), same shape as Z
    cache -- returns Z as well, useful during backpropagation
    """

    A = 1 / (1 + np.exp(-Z))
    cache = Z

    return A, cache


def relu(Z):
    """
    Implement the RELU function.

    Arguments:
    Z -- output of the linear layer, of any shape

    Returns:
    A -- post-activation parameter, of the same shape as Z
    cache -- returns Z, stored for computing the backward pass efficiently
    """

    A = np.maximum(0, Z)

    assert (A.shape == Z.shape)

    cache = Z
    return A, cache


def relu_backward(dA, cache):
    """
    Implement the backward propagation for a single RELU unit.

    Arguments:
    dA -- post-activation gradient, of any shape
    cache -- 'Z' stored during the forward pass, used to compute backward propagation efficiently

    Returns:
    dZ -- Gradient of the cost with respect to Z
    """

    Z = cache
    dZ = np.array(dA, copy=True)  # copy dA so the incoming gradient is not modified in place

    # The ReLU derivative is 0 wherever Z <= 0, so set dZ to 0 there as well.
    dZ[Z <= 0] = 0

    assert (dZ.shape == Z.shape)

    return dZ


def sigmoid_backward(dA, cache):
    """
    Implement the backward propagation for a single SIGMOID unit.

    Arguments:
    dA -- post-activation gradient, of any shape
    cache -- 'Z' stored during the forward pass, used to compute backward propagation efficiently

    Returns:
    dZ -- Gradient of the cost with respect to Z
    """

    Z = cache

    s = 1 / (1 + np.exp(-Z))
    dZ = dA * s * (1 - s)

    assert (dZ.shape == Z.shape)

    return dZ
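
Below is a minimal usage sketch showing how these helpers are typically combined in the forward and backward passes of the assignment. The module name dnn_utils, the (3, 4) layer shape, and the random test values are illustrative assumptions and are not part of this commit.

import numpy as np
from dnn_utils import sigmoid, relu, relu_backward, sigmoid_backward  # assumed module name

np.random.seed(1)
Z = np.random.randn(3, 4)   # hypothetical pre-activation values for one layer

# Forward: each helper returns the activation A plus Z cached for backprop.
A_relu, cache_relu = relu(Z)
A_sig, cache_sig = sigmoid(Z)

# Backward: given dA (gradient of the cost w.r.t. the activation), recover dZ.
dA = np.random.randn(3, 4)
dZ_relu = relu_backward(dA, cache_relu)   # zeroed wherever Z <= 0
dZ_sig = sigmoid_backward(dA, cache_sig)  # dA * s * (1 - s), with s = sigmoid(Z)

assert dZ_relu.shape == Z.shape and dZ_sig.shape == Z.shape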
