diff --git a/Neural Networks and Deep Learning/Building your Deep Neural Network - Step by Step.ipynb b/Neural Networks and Deep Learning/Building your Deep Neural Network - Step by Step.ipynb
index 349f6c5..002d426 100755
--- a/Neural Networks and Deep Learning/Building your Deep Neural Network - Step by Step.ipynb
+++ b/Neural Networks and Deep Learning/Building your Deep Neural Network - Step by Step.ipynb
@@ -1231,15 +1231,13 @@
 "    \n",
 "    # Initializing the backpropagation\n",
 "    ### START CODE HERE ### (1 line of code)\n",
-"    dAL = dAL = - (np.divide(Y, AL) - np.divide(1 - Y, 1 - AL))\n",
+"    dAL = - (np.divide(Y, AL) - np.divide(1 - Y, 1 - AL))\n",
 "    ### END CODE HERE ###\n",
 "    \n",
-"    # Lth layer (SIGMOID -> LINEAR) gradients. Inputs: \"AL, Y, caches\". Outputs: \"grads[\"dAL\"], grads[\"dWL\"], grads[\"dbL\"]\n",
+"    # Lth layer (SIGMOID -> LINEAR) gradients. Inputs: \"dAL, current_cache\". Outputs: \"grads[\"dAL-1\"], grads[\"dWL\"], grads[\"dbL\"]\n",
 "    ### START CODE HERE ### (approx. 2 lines)\n",
 "    current_cache = caches[-1]\n",
-"    grads[\"dA\" + str(L)], grads[\"dW\" + str(L)], grads[\"db\" + str(L)] = linear_backward(sigmoid_backward(dAL, \n",
-"                                                                                       current_cache[1]), \n",
-"                                                                                       current_cache[0])\n",
+"    grads[\"dA\" + str(L-1)], grads[\"dW\" + str(L)], grads[\"db\" + str(L)] = linear_activation_backward(dAL, current_cache, activation=\"sigmoid\")\n",
 "    ### END CODE HERE ###\n",
 "    \n",
 "    for l in reversed(range(L-1)):\n",
@@ -1247,7 +1245,7 @@
-"        # Inputs: \"grads[\"dA\" + str(l + 2)], caches\". Outputs: \"grads[\"dA\" + str(l + 1)] , grads[\"dW\" + str(l + 1)] , grads[\"db\" + str(l + 1)] \n",
+"        # Inputs: \"grads[\"dA\" + str(l + 1)], current_cache\". Outputs: \"grads[\"dA\" + str(l)] , grads[\"dW\" + str(l + 1)] , grads[\"db\" + str(l + 1)] \n",
 "        ### START CODE HERE ### (approx. 5 lines)\n",
 "        current_cache = caches[l]\n",
-"        dA_prev_temp, dW_temp, db_temp = linear_backward(sigmoid_backward(dAL, caches[1]), caches[0])\n",
-"        grads[\"dA\" + str(l + 1)] = dA_prev_temp\n",
+"        dA_prev_temp, dW_temp, db_temp = linear_activation_backward(grads[\"dA\" + str(l + 1)], current_cache, activation=\"relu\")\n",
+"        grads[\"dA\" + str(l)] = dA_prev_temp\n",
 "        grads[\"dW\" + str(l + 1)] = dW_temp\n",
 "        grads[\"db\" + str(l + 1)] = db_temp\n",
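
For reference, here is the patched backward pass written out as a minimal, self-contained Python sketch, outside the notebook JSON. The signature linear_activation_backward(dA, cache, activation) returning (dA_prev, dW, db), and a cache layout of ((A_prev, W, b), Z), are assumed from the calls in the diff and the assignment's forward pass; the helper body below is an illustrative stand-in, not the notebook's graded implementation.

import numpy as np

def linear_activation_backward(dA, cache, activation):
    # Assumed cache layout from the forward pass: ((A_prev, W, b), Z).
    (A_prev, W, b), Z = cache
    if activation == "relu":
        dZ = dA * (Z > 0)                      # relu'(Z) = 1 where Z > 0, else 0
    elif activation == "sigmoid":
        s = 1 / (1 + np.exp(-Z))
        dZ = dA * s * (1 - s)                  # sigmoid'(Z) = s * (1 - s)
    m = A_prev.shape[1]                        # number of examples
    dW = dZ @ A_prev.T / m
    db = np.sum(dZ, axis=1, keepdims=True) / m
    dA_prev = W.T @ dZ
    return dA_prev, dW, db

def L_model_backward(AL, Y, caches):
    grads = {}
    L = len(caches)                            # number of layers
    Y = Y.reshape(AL.shape)

    # Initializing the backpropagation: derivative of the cross-entropy cost w.r.t. AL.
    dAL = - (np.divide(Y, AL) - np.divide(1 - Y, 1 - AL))

    # Lth layer (SIGMOID -> LINEAR): the dA output is the gradient flowing
    # back to layer L-1, so it is stored under key "dA" + str(L-1).
    current_cache = caches[-1]
    grads["dA" + str(L - 1)], grads["dW" + str(L)], grads["db" + str(L)] = \
        linear_activation_backward(dAL, current_cache, activation="sigmoid")

    # Layers L-1 down to 1 (RELU -> LINEAR): read the key written by the
    # layer above ("dA" + str(l + 1)), write the key for the layer below.
    for l in reversed(range(L - 1)):
        current_cache = caches[l]
        dA_prev_temp, dW_temp, db_temp = linear_activation_backward(
            grads["dA" + str(l + 1)], current_cache, activation="relu")
        grads["dA" + str(l)] = dA_prev_temp
        grads["dW" + str(l + 1)] = dW_temp
        grads["db" + str(l + 1)] = db_temp

    return grads

With this convention each loop iteration reads exactly the "dA" key written by the step above it, so the keys stay consistent for any number of layers.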