diff --git a/notebooks/Block_1/NN from scratch.ipynb b/notebooks/Block_1/NN from scratch.ipynb
new file mode 100644
index 0000000000000000000000000000000000000000..873a411c9bf99c350bc29d563036e9f06d716078
--- /dev/null
+++ b/notebooks/Block_1/NN from scratch.ipynb	
@@ -0,0 +1,102 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "bd546fce-edaa-4c2b-9de2-2d4b290d7efe",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import numpy as np\n",
+    "from numpy.random import randn\n",
+    "\n",
+    "# define the network\n",
+    "N, D_in, H, D_out = 64, 1000, 100, 10\n",
+    "x, y = randn(N, D_in), randn(N, D_out)\n",
+    "w1, w2 = randn(D_in, H), randn(H, D_out)\n",
+    "\n",
+    "for t in range(2000):\n",
+    "    # forward pass\n",
+    "    h = 1 / (1 + np.exp(-x.dot(w1)))          # sigmoid activation function\n",
+    "    y_pred = h.dot(w2)\n",
+    "    loss = np.square(y_pred - y).sum()\n",
+    "    print(t, loss)\n",
+    "    \n",
+    "    # calculate analytical gradients\n",
+    "    grad_y_pred = 2.0 * (y_pred - y)\n",
+    "    grad_w2 = h.T.dot(grad_y_pred)\n",
+    "    grad_h = grad_y_pred.dot(w2.T)\n",
+    "    grad_w1 = x.T.dot(grad_h * h * (1 - h))\n",
+    "    \n",
+    "    # update parameters of gradient descent\n",
+    "    w1 -= 1e-4 * grad_w1\n",
+    "    w2 -= 1e-4 * grad_w2    "
+   ]
+  },
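+  {
+   "cell_type": "markdown",
+   "id": "grad-check-md",
+   "metadata": {},
+   "source": [
+    "A minimal sanity check, not in the original notebook: compare the analytical gradients above with a centered finite-difference estimate at a few entries of `w1`. The helper `loss_fn`, the step size `eps`, and the sampled indices are illustrative assumptions, not values from the source."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "grad-check-code",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# gradient check (sketch): recompute the analytical gradient of w1 at the\n",
+    "# current weights and compare it with a numerical estimate\n",
+    "def loss_fn(w1, w2):\n",
+    "    h = 1 / (1 + np.exp(-x.dot(w1)))\n",
+    "    return np.square(h.dot(w2) - y).sum()\n",
+    "\n",
+    "h = 1 / (1 + np.exp(-x.dot(w1)))\n",
+    "grad_y_pred = 2.0 * (h.dot(w2) - y)\n",
+    "grad_w1 = x.T.dot(grad_y_pred.dot(w2.T) * h * (1 - h))\n",
+    "\n",
+    "eps = 1e-6                          # assumed finite-difference step\n",
+    "rng = np.random.default_rng(0)      # fixed seed so the probe is repeatable\n",
+    "for _ in range(5):                  # probe five random entries of w1\n",
+    "    i, j = rng.integers(D_in), rng.integers(H)\n",
+    "    w1_plus, w1_minus = w1.copy(), w1.copy()\n",
+    "    w1_plus[i, j] += eps\n",
+    "    w1_minus[i, j] -= eps\n",
+    "    numeric = (loss_fn(w1_plus, w2) - loss_fn(w1_minus, w2)) / (2 * eps)\n",
+    "    print(f'w1[{i},{j}]  analytic {grad_w1[i, j]:.6g}  numeric {numeric:.6g}')"
+   ]
+  }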
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3 (ipykernel)",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.9.7"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}