Created using Colaboratory

This commit is contained in:
udlbook
2023-10-10 11:52:48 +01:00
parent 0a5a97f55d
commit 5680e5d7f7

View File

@@ -4,7 +4,7 @@
"metadata": { "metadata": {
"colab": { "colab": {
"provenance": [], "provenance": [],
"authorship_tag": "ABX9TyOu5BvK3aFb7ZEQKG5vfOZ1", "authorship_tag": "ABX9TyPmra+JD+dm2M3gCqx3bMak",
"include_colab_link": true "include_colab_link": true
}, },
"kernelspec": { "kernelspec": {
@@ -185,7 +185,7 @@
"The ReLU isn't the only kind of activation function. For a long time, people used sigmoid functions. A logistic sigmoid function is defined by the equation\n", "The ReLU isn't the only kind of activation function. For a long time, people used sigmoid functions. A logistic sigmoid function is defined by the equation\n",
"\n", "\n",
"\\begin{equation}\n", "\\begin{equation}\n",
"f[h] = \\frac{1}{1+\\exp{[-10 z ]}}\n", "f[z] = \\frac{1}{1+\\exp{[-10 z ]}}\n",
"\\end{equation}\n", "\\end{equation}\n",
"\n", "\n",
"(Note that the factor of 10 is not standard -- but it allows us to plot on the same axes as the ReLU examples)" "(Note that the factor of 10 is not standard -- but it allows us to plot on the same axes as the ReLU examples)"