From 110de1488b389f6d915a8d77d5b373ee1073c167 Mon Sep 17 00:00:00 2001 From: udlbook <110402648+udlbook@users.noreply.github.com> Date: Tue, 31 Oct 2023 12:01:20 +0000 Subject: [PATCH] Created using Colaboratory --- Notebooks/Chap06/6_2_Gradient_Descent.ipynb | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/Notebooks/Chap06/6_2_Gradient_Descent.ipynb b/Notebooks/Chap06/6_2_Gradient_Descent.ipynb index 921b75b..276d8a3 100644 --- a/Notebooks/Chap06/6_2_Gradient_Descent.ipynb +++ b/Notebooks/Chap06/6_2_Gradient_Descent.ipynb @@ -4,7 +4,7 @@ "metadata": { "colab": { "provenance": [], - "authorship_tag": "ABX9TyM/FIXDTd6tZYs6WRzK00hB", + "authorship_tag": "ABX9TyN2N4cCnlIobOZXEjcwAvZ5", "include_colab_link": true }, "kernelspec": { @@ -301,7 +301,7 @@ { "cell_type": "markdown", "source": [ - "Now we are ready to perform gradient descent. We'll need to use our line search routine from part I, which I've reproduced here plus the helper function loss_function_1D that converts from a 2D problem to a 1D problem" + "Now we are ready to perform gradient descent. We'll need to use our line search routine from notebook 6.1, which I've reproduced here plus the helper function loss_function_1D that converts from a 2D problem to a 1D problem" ], "metadata": { "id": "5EIjMM9Fw2eT" @@ -375,9 +375,9 @@ "source": [ "def gradient_descent_step(phi, data, model):\n", " # TODO -- update Phi with the gradient descent step (equation 6.3)\n", - " # 1. Compute the gradient\n", - " # 2. Find the best step size alpha (use negative gradient as going downhill)\n", - " # 3. Update the parameters phi\n", + " # 1. Compute the gradient (you wrote this function above)\n", + " # 2. Find the best step size alpha using line search function (above) -- use negative gradient as going downhill\n", + " # 3. Update the parameters phi based on the gradient and the step size alpha.\n", "\n", " return phi" ],