Created using Colaboratory

This commit is contained in:
udlbook
2024-03-25 15:00:01 -04:00
parent 1b53be1e08
commit b535a13d57

View File

@@ -4,7 +4,7 @@
"metadata": { "metadata": {
"colab": { "colab": {
"provenance": [], "provenance": [],
"authorship_tag": "ABX9TyNHLXFpiSnUzAbzhtOk+bxu", "authorship_tag": "ABX9TyOaATWBrwVMylV1akcKtHjt",
"include_colab_link": true "include_colab_link": true
}, },
"kernelspec": { "kernelspec": {
@@ -177,7 +177,7 @@
"data_in = np.random.normal(size=(1,n_data))\n", "data_in = np.random.normal(size=(1,n_data))\n",
"net_output, all_f, all_h = compute_network_output(data_in, all_weights, all_biases)\n", "net_output, all_f, all_h = compute_network_output(data_in, all_weights, all_biases)\n",
"\n", "\n",
"for layer in range(K):\n", "for layer in range(1,K+1):\n",
" print(\"Layer %d, std of hidden units = %3.3f\"%(layer, np.std(all_h[layer])))" " print(\"Layer %d, std of hidden units = %3.3f\"%(layer, np.std(all_h[layer])))"
], ],
"metadata": { "metadata": {
@@ -249,6 +249,9 @@
"\n", "\n",
"# Main backward pass routine\n", "# Main backward pass routine\n",
"def backward_pass(all_weights, all_biases, all_f, all_h, y):\n", "def backward_pass(all_weights, all_biases, all_f, all_h, y):\n",
" # Retrieve number of layers\n",
"    K = len(all_weights) - 1\n",
"\n",
" # We'll store the derivatives dl_dweights and dl_dbiases in lists as well\n", " # We'll store the derivatives dl_dweights and dl_dbiases in lists as well\n",
" all_dl_dweights = [None] * (K+1)\n", " all_dl_dweights = [None] * (K+1)\n",
" all_dl_dbiases = [None] * (K+1)\n", " all_dl_dbiases = [None] * (K+1)\n",