From 5680e5d7f73ffc758dc7aa4c6ecb462012a11903 Mon Sep 17 00:00:00 2001
From: udlbook <110402648+udlbook@users.noreply.github.com>
Date: Tue, 10 Oct 2023 11:52:48 +0100
Subject: [PATCH] Created using Colaboratory

---
 Notebooks/Chap03/3_4_Activation_Functions.ipynb | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/Notebooks/Chap03/3_4_Activation_Functions.ipynb b/Notebooks/Chap03/3_4_Activation_Functions.ipynb
index d9277e2..b08449a 100644
--- a/Notebooks/Chap03/3_4_Activation_Functions.ipynb
+++ b/Notebooks/Chap03/3_4_Activation_Functions.ipynb
@@ -4,7 +4,7 @@
   "metadata": {
     "colab": {
       "provenance": [],
-      "authorship_tag": "ABX9TyOu5BvK3aFb7ZEQKG5vfOZ1",
+      "authorship_tag": "ABX9TyPmra+JD+dm2M3gCqx3bMak",
       "include_colab_link": true
     },
     "kernelspec": {
@@ -185,7 +185,7 @@
         "The ReLU isn't the only kind of activation function. For a long time, people used sigmoid functions. A logistic sigmoid function is defined by the equation\n",
         "\n",
         "\\begin{equation}\n",
-        "f[h] = \\frac{1}{1+\\exp{[-10 z ]}}\n",
+        "f[z] = \\frac{1}{1+\\exp{[-10 z ]}}\n",
         "\\end{equation}\n",
         "\n",
         "(Note that the factor of 10 is not standard -- but it allows us to plot on the same axes as the ReLU examples)"
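
For reference, here is a minimal sketch of the corrected equation from the second hunk, f[z] = 1 / (1 + exp(-10 z)), in plain NumPy/Matplotlib. This is an illustration only, not code from the notebook: the function name logistic_sigmoid and the plotting range are assumptions.

    # Sketch (assumed, not from the notebook): the scaled logistic sigmoid
    # from the corrected equation, f[z] = 1 / (1 + exp(-10 z)).
    import numpy as np
    import matplotlib.pyplot as plt

    def logistic_sigmoid(z):
        # The factor of 10 is non-standard; per the notebook text, it is
        # only there so the curve plots on the same axes as the ReLU examples.
        return 1.0 / (1.0 + np.exp(-10.0 * z))

    z = np.linspace(-1.0, 1.0, 100)  # assumed plotting range
    plt.plot(z, logistic_sigmoid(z))
    plt.xlabel('z')
    plt.ylabel('f[z]')
    plt.show()

The commit's substantive fix is visible in the equation: the left-hand side f[h] used a variable that appears nowhere on the right, so renaming it to f[z] makes the definition consistent with the exponent's argument.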