Machine Learning Kurs im Rahmen der Studierendentage im SS 2023
You can not select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

364 lines
59 KiB

2 years ago
2 years ago
2 years ago
2 years ago
2 years ago
2 years ago
2 years ago
2 years ago
2 years ago
2 years ago
2 years ago
2 years ago
2 years ago
2 years ago
2 years ago
2 years ago
2 years ago
2 years ago
  1. {
  2. "cells": [
  3. {
  4. "cell_type": "markdown",
  5. "id": "0b7d591a",
  6. "metadata": {},
  7. "source": [
  8. "\n",
  9. " An example of the minimizer usage in TensorFlow:\n",
  10. " the loss function is plotted, and the fitted result is shown as a line\n"
  11. ]
  12. },
  13. {
  14. "cell_type": "code",
  15. "execution_count": 1,
  16. "id": "270932f3",
  17. "metadata": {},
  18. "outputs": [
  19. {
  20. "name": "stderr",
  21. "output_type": "stream",
  22. "text": [
  23. "2023-04-11 16:44:42.299418: I tensorflow/core/platform/cpu_feature_guard.cc:193] This TensorFlow binary is optimized with oneAPI Deep Neural Network Library (oneDNN) to use the following CPU instructions in performance-critical operations: SSE4.1 SSE4.2 AVX AVX2 AVX512F AVX512_VNNI FMA\n",
  24. "To enable them in other operations, rebuild TensorFlow with the appropriate compiler flags.\n",
  25. "2023-04-11 16:44:42.360104: I tensorflow/core/util/port.cc:104] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`.\n"
  26. ]
  27. }
  28. ],
  29. "source": [
  30. "import numpy as np\n",
  31. "import matplotlib.pyplot as plt\n",
  32. "import tensorflow as tf"
  33. ]
  34. },
  35. {
  36. "cell_type": "code",
  37. "execution_count": 2,
  38. "id": "77cd99a8",
  39. "metadata": {},
  40. "outputs": [],
  41. "source": [
  42. "# Define the training data\n",
  43. "train_X = np.asarray([3.3,4.4,5.5,6.71,6.93,4.168,9.779,6.182,7.59,2.167,\n",
  44. " 7.042,10.791,5.313,7.997,5.654,9.27,3.1])\n",
  45. "train_Y = np.asarray([1.7,2.76,2.09,3.19,1.694,1.573,3.366,2.596,2.53,1.221,\n",
  46. " 2.827,3.465,1.65,2.904,2.42,2.94,1.3])\n"
  47. ]
  48. },
  49. {
  50. "cell_type": "markdown",
  51. "id": "f39cbcd9",
  52. "metadata": {},
  53. "source": [
  54. "The input to the model is represented by train_X.\n",
  55. "train_Y represents the target or truth values for the training data.\n",
  56. "The model will receive train_X and make predictions on the weights.\n",
  57. "The difference between these predictions and the actual target values\n",
  58. "train_Y will be used to update the weights and minimize the loss function."
  59. ]
  60. },
  61. {
  62. "cell_type": "code",
  63. "execution_count": 3,
  64. "id": "ed8449c3",
  65. "metadata": {},
  66. "outputs": [],
  67. "source": [
  68. "# Define the model to a simple linear regression with only one dense layer and\n",
  69. "# no activation function for the first layer all train_X points are input\n",
  70. "\n",
  71. "# model = tf.keras.models.Sequential([\n",
  72. "# tf.keras.layers.Dense(1, input_shape=[1])\n",
  73. "#])"
  74. ]
  75. },
  76. {
  77. "cell_type": "code",
  78. "execution_count": 4,
  79. "id": "71e072b4",
  80. "metadata": {},
  81. "outputs": [
  82. {
  83. "name": "stderr",
  84. "output_type": "stream",
  85. "text": [
  86. "2023-04-11 16:44:43.775141: I tensorflow/core/platform/cpu_feature_guard.cc:193] This TensorFlow binary is optimized with oneAPI Deep Neural Network Library (oneDNN) to use the following CPU instructions in performance-critical operations: SSE4.1 SSE4.2 AVX AVX2 AVX512F AVX512_VNNI FMA\n",
  87. "To enable them in other operations, rebuild TensorFlow with the appropriate compiler flags.\n"
  88. ]
  89. }
  90. ],
  91. "source": [
  92. "# This model has 2 dense layers the first with relu activation\n",
  93. "# and the 2nd layer has 1 output unit and uses the default\n",
  94. "# linear activation function.\n",
  95. "\n",
  96. "model = tf.keras.models.Sequential([\n",
  97. " tf.keras.layers.Dense(17, activation='relu',input_shape=[1]),\n",
  98. " tf.keras.layers.Dense(1)\n",
  99. "])"
  100. ]
  101. },
  102. {
  103. "cell_type": "code",
  104. "execution_count": 5,
  105. "id": "5fabf184",
  106. "metadata": {},
  107. "outputs": [],
  108. "source": [
  109. "# different optimizer methods can be enabled\n",
  110. "\n",
  111. "model.compile(optimizer=tf.keras.optimizers.Adam(0.01), loss='mean_squared_error')\n",
  112. "#model.compile(optimizer=tf.keras.optimizers.SGD(0.01), loss='mean_squared_error')\n",
  113. "#model.compile(optimizer=tf.keras.optimizers.Adagrad(learning_rate=0.01), loss='mean_squared_error')\n",
  114. "#model.compile(optimizer=tf.keras.optimizers.RMSprop(learning_rate=0.01), loss='mean_squared_error')\n",
  115. "#model.compile(optimizer=tf.keras.optimizers.Ftrl(learning_rate=0.015), loss='mean_squared_error')"
  116. ]
  117. },
  118. {
  119. "cell_type": "code",
  120. "execution_count": 6,
  121. "id": "22c4124f",
  122. "metadata": {},
  123. "outputs": [
  124. {
  125. "name": "stdout",
  126. "output_type": "stream",
  127. "text": [
  128. "Epoch 1/60\n",
  129. "1/1 [==============================] - 0s 396ms/step - loss: 15.2606\n",
  130. "Epoch 2/60\n",
  131. "1/1 [==============================] - 0s 4ms/step - loss: 12.7440\n",
  132. "Epoch 3/60\n",
  133. "1/1 [==============================] - 0s 3ms/step - loss: 10.4745\n",
  134. "Epoch 4/60\n",
  135. "1/1 [==============================] - 0s 3ms/step - loss: 8.4519\n",
  136. "Epoch 5/60\n",
  137. "1/1 [==============================] - 0s 3ms/step - loss: 6.6738\n",
  138. "Epoch 6/60\n",
  139. "1/1 [==============================] - 0s 3ms/step - loss: 5.1356\n",
  140. "Epoch 7/60\n",
  141. "1/1 [==============================] - 0s 4ms/step - loss: 3.8306\n",
  142. "Epoch 8/60\n",
  143. "1/1 [==============================] - 0s 3ms/step - loss: 2.7499\n",
  144. "Epoch 9/60\n",
  145. "1/1 [==============================] - 0s 3ms/step - loss: 1.8823\n",
  146. "Epoch 10/60\n",
  147. "1/1 [==============================] - 0s 3ms/step - loss: 1.2142\n",
  148. "Epoch 11/60\n",
  149. "1/1 [==============================] - 0s 3ms/step - loss: 0.7292\n",
  150. "Epoch 12/60\n",
  151. "1/1 [==============================] - 0s 4ms/step - loss: 0.4087\n",
  152. "Epoch 13/60\n",
  153. "1/1 [==============================] - 0s 3ms/step - loss: 0.2310\n",
  154. "Epoch 14/60\n",
  155. "1/1 [==============================] - 0s 4ms/step - loss: 0.1722\n",
  156. "Epoch 15/60\n",
  157. "1/1 [==============================] - 0s 3ms/step - loss: 0.2060\n",
  158. "Epoch 16/60\n",
  159. "1/1 [==============================] - 0s 3ms/step - loss: 0.3049\n",
  160. "Epoch 17/60\n",
  161. "1/1 [==============================] - 0s 2ms/step - loss: 0.4419\n",
  162. "Epoch 18/60\n",
  163. "1/1 [==============================] - 0s 3ms/step - loss: 0.5909\n",
  164. "Epoch 19/60\n",
  165. "1/1 [==============================] - 0s 3ms/step - loss: 0.7296\n",
  166. "Epoch 20/60\n",
  167. "1/1 [==============================] - 0s 4ms/step - loss: 0.8423\n",
  168. "Epoch 21/60\n",
  169. "1/1 [==============================] - 0s 3ms/step - loss: 0.9194\n",
  170. "Epoch 22/60\n",
  171. "1/1 [==============================] - 0s 3ms/step - loss: 0.9578\n",
  172. "Epoch 23/60\n",
  173. "1/1 [==============================] - 0s 3ms/step - loss: 0.9579\n",
  174. "Epoch 24/60\n",
  175. "1/1 [==============================] - 0s 3ms/step - loss: 0.9233\n",
  176. "Epoch 25/60\n",
  177. "1/1 [==============================] - 0s 3ms/step - loss: 0.8606\n",
  178. "Epoch 26/60\n",
  179. "1/1 [==============================] - 0s 2ms/step - loss: 0.7777\n",
  180. "Epoch 27/60\n",
  181. "1/1 [==============================] - 0s 3ms/step - loss: 0.6828\n",
  182. "Epoch 28/60\n",
  183. "1/1 [==============================] - 0s 3ms/step - loss: 0.5839\n",
  184. "Epoch 29/60\n",
  185. "1/1 [==============================] - 0s 4ms/step - loss: 0.4879\n",
  186. "Epoch 30/60\n",
  187. "1/1 [==============================] - 0s 2ms/step - loss: 0.4003\n",
  188. "Epoch 31/60\n",
  189. "1/1 [==============================] - 0s 2ms/step - loss: 0.3253\n",
  190. "Epoch 32/60\n",
  191. "1/1 [==============================] - 0s 3ms/step - loss: 0.2652\n",
  192. "Epoch 33/60\n",
  193. "1/1 [==============================] - 0s 3ms/step - loss: 0.2208\n",
  194. "Epoch 34/60\n",
  195. "1/1 [==============================] - 0s 3ms/step - loss: 0.1917\n",
  196. "Epoch 35/60\n",
  197. "1/1 [==============================] - 0s 4ms/step - loss: 0.1766\n",
  198. "Epoch 36/60\n",
  199. "1/1 [==============================] - 0s 4ms/step - loss: 0.1731\n",
  200. "Epoch 37/60\n",
  201. "1/1 [==============================] - 0s 3ms/step - loss: 0.1785\n",
  202. "Epoch 38/60\n",
  203. "1/1 [==============================] - 0s 3ms/step - loss: 0.1899\n",
  204. "Epoch 39/60\n",
  205. "1/1 [==============================] - 0s 3ms/step - loss: 0.2046\n",
  206. "Epoch 40/60\n",
  207. "1/1 [==============================] - 0s 4ms/step - loss: 0.2202\n",
  208. "Epoch 41/60\n",
  209. "1/1 [==============================] - 0s 3ms/step - loss: 0.2345\n",
  210. "Epoch 42/60\n",
  211. "1/1 [==============================] - 0s 3ms/step - loss: 0.2460\n",
  212. "Epoch 43/60\n",
  213. "1/1 [==============================] - 0s 3ms/step - loss: 0.2537\n",
  214. "Epoch 44/60\n",
  215. "1/1 [==============================] - 0s 5ms/step - loss: 0.2572\n",
  216. "Epoch 45/60\n",
  217. "1/1 [==============================] - 0s 3ms/step - loss: 0.2565\n",
  218. "Epoch 46/60\n",
  219. "1/1 [==============================] - 0s 3ms/step - loss: 0.2520\n",
  220. "Epoch 47/60\n",
  221. "1/1 [==============================] - 0s 3ms/step - loss: 0.2444\n",
  222. "Epoch 48/60\n",
  223. "1/1 [==============================] - 0s 4ms/step - loss: 0.2346\n",
  224. "Epoch 49/60\n",
  225. "1/1 [==============================] - 0s 3ms/step - loss: 0.2234\n",
  226. "Epoch 50/60\n",
  227. "1/1 [==============================] - 0s 3ms/step - loss: 0.2119\n",
  228. "Epoch 51/60\n",
  229. "1/1 [==============================] - 0s 4ms/step - loss: 0.2010\n",
  230. "Epoch 52/60\n",
  231. "1/1 [==============================] - 0s 11ms/step - loss: 0.1912\n",
  232. "Epoch 53/60\n",
  233. "1/1 [==============================] - 0s 3ms/step - loss: 0.1831\n",
  234. "Epoch 54/60\n",
  235. "1/1 [==============================] - 0s 4ms/step - loss: 0.1771\n",
  236. "Epoch 55/60\n",
  237. "1/1 [==============================] - 0s 3ms/step - loss: 0.1730\n",
  238. "Epoch 56/60\n",
  239. "1/1 [==============================] - 0s 3ms/step - loss: 0.1709\n",
  240. "Epoch 57/60\n",
  241. "1/1 [==============================] - 0s 3ms/step - loss: 0.1705\n",
  242. "Epoch 58/60\n",
  243. "1/1 [==============================] - 0s 3ms/step - loss: 0.1713\n",
  244. "Epoch 59/60\n",
  245. "1/1 [==============================] - 0s 3ms/step - loss: 0.1730\n",
  246. "Epoch 60/60\n",
  247. "1/1 [==============================] - 0s 3ms/step - loss: 0.1751\n",
  248. "{'verbose': 1, 'epochs': 60, 'steps': 1}\n"
  249. ]
  250. }
  251. ],
  252. "source": [
  253. "# Train the model and access training parameters\n",
  254. "history = model.fit(train_X, train_Y, epochs=60)\n",
  255. "print(history.params)"
  256. ]
  257. },
  258. {
  259. "cell_type": "code",
  260. "execution_count": 7,
  261. "id": "46615960",
  262. "metadata": {},
  263. "outputs": [
  264. {
  265. "name": "stdout",
  266. "output_type": "stream",
  267. "text": [
  268. "Weight matrix shape: (1, 17)\n",
  269. "Bias vector shape: (17,)\n",
  270. "[[-0.2720451 0.25746062 0.00145817 -0.14928943 -0.18011677 -0.38295865\n",
  271. " -0.10688066 -0.08276322 -0.06117761 -0.415354 0.21152566 -0.015845\n",
  272. " -0.22856292 0.25203183 0.29251993 -0.08234903 0.6508041 ]]\n"
  273. ]
  274. }
  275. ],
  276. "source": [
  277. "# Get the weights of the Dense layer\n",
  278. "weights = model.layers[0].get_weights()\n",
  279. "# Print the weight matrix and bias vector\n",
  280. "print('Weight matrix shape:', weights[0].shape)\n",
  281. "print('Bias vector shape:', weights[1].shape)\n",
  282. "print (weights[0])"
  283. ]
  284. },
  285. {
  286. "cell_type": "code",
  287. "execution_count": 8,
  288. "id": "da12fc5b",
  289. "metadata": {},
  290. "outputs": [
  291. {
  292. "data": {
  293. "image/png": "iVBORw0KGgoAAAANSUhEUgAAAjMAAAHFCAYAAAAHcXhbAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjYuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/P9b71AAAACXBIWXMAAA9hAAAPYQGoP6dpAABDe0lEQVR4nO3deXxU9b3/8feZmWSykAxZyAZJWGQPIAIiggouKCpu1apVhLb316K4cG2tUtuCXm0q/ZXalh/0alW0VrS9V7jcWhesLCpadkFE1ggBEkKA7MkkmTm/P8IMjAmYZZIzk7yej8d5JHPOTPLJl8C8+W7HME3TFAAAQJiyWV0AAABAWxBmAABAWCPMAACAsEaYAQAAYY0wAwAAwhphBgAAhDXCDAAACGuEGQAAENYIMwAAIKwRZoAuYMmSJTIMQxs3brS6lHOaN2+eDMNo8li4cKGltS1atEhLlixpdP6rr76SYRhNXgPQMRxWFwAAX/fOO+/I5XIFnOvTp49F1TRYtGiRkpOTNWPGjIDz6enp+uSTT9SvXz9rCgNAmAEQekaNGqXk5GSry2gWp9Opiy66yOoygC6NYSYAfh999JGuuOIKxcXFKSYmRhdffLHeeuutgOdUVVXpxz/+sfr06aOoqCglJiZq9OjRWrp0qf85+/fv1x133KGMjAw5nU6lpqbqiiuu0NatW9tU37mGdAzD0Lx58/yPfUNWO3bs0J133imXy6XU1FR973vfU2lpacBrvV6v/vCHP+j8889XdHS0unfvrosuukgrVqyQJPXu3Vs7duzQmjVr/MNevXv3PmdNzWlL3/DfqlWrdO+99yo5OVlJSUm65ZZbdOTIkTa1FdCV0DMDQJK0Zs0aXXXVVRo+fLheeOEFOZ1OLVq0SFOnTtXSpUt1++23S5Iefvhh/fnPf9ZTTz2lkSNHqrKyUp9//rmOHz/u/1rXXnutPB6P5s+fr6ysLBUXF2vdunUqKSlpVi0ej0f19fX+x4ZhyG63t+rn+ta3vqXbb79d3//+97V9+3bNmTNHkvTiiy/6nzNjxgy9+uqr+v73v68nn3xSkZGR2rx5s7766itJ0rJly3TrrbfK5XJp0aJFkhp6ZM6muW3p82//9m+67rrr9Nprryk/P1+PPPKI7r77bn3wwQet+pmBLscE0Om99NJLpiRzw4YNZ33ORRddZKakpJjl5eX+c/X19WZOTo7Zq1cv0+v1mqZpmjk5OeZNN9101q9TXFxsSjKfffbZFtc5d+5cU1Kjo2fPnqZpmmZeXp4pyXzppZcavVaSOXfu3EZfa/78+QHPu++++8yoqCj/z7N27VpTkvn444+fs7ahQ4eal112WaPzTdXU3Lb0/bncd999AV9z/vz5piSzoKDgnDUBaMAwEwBVVlbqX//6l2699VZ169bNf95ut2vatGk6dOiQdu3aJUm68MIL9fbbb+uxxx7T6tWrVV1dHfC1EhMT1a9fP/3617/WggULtGXLFnm93hbV8/7772vDhg3+4x//+Eerf7Ybbrgh4PHw4cNVU1OjoqIiSdLbb78tSZo1a1arv8eZWtKW56pRkg4cOBCUmoDOjjADQCdPnpRpmkpPT290LSMjQ5L8w0i///3v9eijj2r58uWaNGmSEhMTddNNN2nPnj2SGoaE/vnPf+rqq6/W/PnzdcEFF6hHjx568MEHVV5e3qx6RowYodGjR/sP35t7ayQlJQU89g0P+ULYsWPHZLfblZaW1urvcaaWtGVzawRwboQZAEpISJDNZlNBQUGja76JqL7VRbGxsXriiSf05ZdfqrCwUIsXL9ann36qqVOn+l+TnZ2tF154QYWFhdq1a5f+/d//XYsWLdIjjzzSpjqjoqIkSW63O+D818NBS/To0UMej0eFhYVtqs2nJW0JIDgIMwAUGxursWPH6s033wzoDfB6vXr11VfVq1cvDRgwoNHrUlNTNWPGDN15553atWuXqqq
qGj1nwIAB+tnPfqZhw4Zp8+bNbaozNTVVUVFR2rZtW8D5//mf/2n115wyZYokafHixed8ntPpbFZPSWvbEkDrsZoJ6EI++OAD/wqdM1177bXKzc3VVVddpUmTJunHP/6xIiMjtWjRIn3++edaunSpDMOQJI0dO1bXX3+9hg8froSEBO3cuVN//vOfNW7cOMXExGjbtm26//77ddttt6l///6KjIzUBx98oG3btumxxx5rU/2GYejuu+/Wiy++qH79+mnEiBFav369XnvttVZ/zUsuuUTTpk3TU089paNHj+r666+X0+nUli1bFBMTowceeECSNGzYML3++ut644031LdvX0VFRWnYsGFNfs3mtiWA4CDMAF3Io48+2uT5vLw8XXbZZfrggw80d+5czZgxQ16vVyNGjNCKFSt0/fXX+597+eWXa8WKFfrtb3+rqqoq9ezZU/fcc48ef/xxSVJaWpr69eunRYsWKT8/X4ZhqG/fvvrNb37jDwZt8Zvf/EaSNH/+fFVUVOjyyy/X3//+d/++L62xZMkSXXDBBXrhhRe0ZMkSRUdHa8iQIfrpT3/qf84TTzyhgoIC/Z//839UXl6u7OzsJoOhpGa3JYDgMEzTNK0uAgAAoLWYMwMAAMIaYQYAAIQ1wgwAAAhrloaZtWvXaurUqcrIyJBhGFq+fHmj5+zcuVM33HCDXC6X4uLidNFFF+ngwYMdXywAAAhJloaZyspKjRgxQgsXLmzy+r59+zRhwgQNGjRIq1ev1meffaaf//zn/o2zAAAAQmY1k2EYWrZsmW666Sb/uTvuuEMRERH685//bF1hAAAgpIXsPjNer1dvvfWWfvKTn+jqq6/Wli1b1KdPH82ZMycg8Hyd2+0O2Orc6/XqxIkTSkpKYqMqAADChGmaKi8vV0ZGhmy2bxhIsvCO3QEkmcuWLfM/LigoMCWZMTEx5oIFC8wtW7aYubm5pmEY5urVq8/6debOnWtK4uDg4ODg4OgER35+/jdmiJAdZjpy5Ih69uypO++8M2Cr8htuuEGxsbFaunRpk1/n6z0zpaWlysrKUn5+vuLj49v1ZwAAAMFRVlamzMxMlZSUyOVynfO5ITvMlJycLIfDoSFDhgScHzx4sD766KOzvs7pdMrpdDY6Hx8fT5gBACDMNGeKSMjuMxMZGakxY8Zo165dAed3796t7Oxsi6oCAAChxtKemYqKCu3du9f/OC8vT1u3blViYqKysrL0yCOP6Pbbb9ell16qSZMm6Z133tH//u//avXq1dYVDQAAQoqlc2ZWr16tSZMmNTo/ffp0LVmyRJL04osvKjc3V4cOHdLAgQP1xBNP6MYbb2z29ygrK5PL5VJpaSnDTAAAhImWvH+HzATg9kKYAQAg/LTk/Ttk58wAAAA0B2EGAACENcIMAAAIa4QZAAAQ1ggzAAAgrBFmAABAWCPMAACAsEaYAQAAYY0w00oer6lDJ6tUUFptdSkAAHRphJlWmv/Ol5rwzCr955r9VpcCAECXRphppczEGEnSwRNVFlcCAEDXRphppeykhjBz4HilxZUAANC1EWZaKTsxVpKUf7JaXm+nvlcnAAAhjTDTShndo+SwGaqt96qwrMbqcgAA6LIIM63ksNvUMyFaknTgOPNmAACwCmGmDbL8k4CZNwMAgFUIM21wehIwPTMAAFiFMNMGvknALM8GAMA6hJk2yEpirxkAAKxGmGkDhpkAALAeYaYNfBOAS6vrVFpVZ3E1AAB0TYSZNoiJdCi5m1OSdIAVTQAAWIIw00YMNQEAYC3CTBtlc8NJAAAsRZhpoyxuOAkAgKUIM23EMBMAANYizLRRFhvnAQBgKcJMG/l6ZgrLalRT57G4GgAAuh7CTBslxUYqNtIu05QOnay2uhwAALocwkwbGYahrCTfUBOTgAEA6GiEmSDwLc9mEjAAAB2PMBMErGgCAMA6hJkg4O7ZAABYhzATBFmJbJwHAIBVLA0za9eu1dSpU5WRkSH
DMLR8+fKzPveHP/yhDMPQs88+22H1NVf2qb1m8k9Wy+s1La4GAICuxdIwU1lZqREjRmjhwoXnfN7y5cv1r3/9SxkZGR1UWct
  294. "text/plain": [
  295. "<Figure size 640x480 with 1 Axes>"
  296. ]
  297. },
  298. "metadata": {},
  299. "output_type": "display_data"
  300. },
  301. {
  302. "name": "stdout",
  303. "output_type": "stream",
  304. "text": [
  305. "1/1 [==============================] - 0s 61ms/step\n"
  306. ]
  307. },
  308. {
  309. "data": {
  310. "image/png": "iVBORw0KGgoAAAANSUhEUgAAAiMAAAGdCAYAAADAAnMpAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjYuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/P9b71AAAACXBIWXMAAA9hAAAPYQGoP6dpAABEP0lEQVR4nO3deXxM5/4H8M8YkYUsTSqbRCd2opYKtykhsZWoyw23evkVt7ropUXqlthKi1CthlJbLS3VTVJV1C4RLbU0QYk9IdLJja0JIdvk/P6YZuqYGckkM3Nm5nzer5eXznfOzPlGUvPxPM95jkIQBAFEREREEqkldQNEREQkbwwjREREJCmGESIiIpIUwwgRERFJimGEiIiIJMUwQkRERJJiGCEiIiJJMYwQERGRpGpL3UBVlJeX4/fff4e7uzsUCoXU7RAREVEVCIKAO3fuIDAwELVqGR//sIsw8vvvvyM4OFjqNoiIiKgasrOzERQUZPR5uwgj7u7uALRfjIeHh8TdEBERUVUUFBQgODhY9zlujF2EkYqpGQ8PD4YRIiIiO1PZEgsuYCUiIiJJMYwQERGRpBhGiIiISFJ2sWakKgRBQFlZGTQajdStkI1TKpWoXbs2LxMnIrIRDhFGSkpKoFarce/ePalbITvh5uaGgIAA1KlTR+pWiIhkz+7DSHl5OTIzM6FUKhEYGIg6derwX7xklCAIKCkpwfXr15GZmYmmTZs+ciMeIiKyPLsPIyUlJSgvL0dwcDDc3NykbofsgKurK5ycnHDlyhWUlJTAxcVF6paIiGTNYf5JyH/dkin480JEZDv4NzIRERFJimHETmVlZUGhUCA9Pb3Kr1m3bh28vLwk74OIiOhBDCMVNBogORn48kvt71a4RDg7OxujRo3SLbx94oknMG7cONy8ebPS1wYHB0OtVqN169ZVPt+QIUNw/vz5mrRcLZGRkVAoFFAoFHB2dkaDBg3Qv39/JCUlmfxeM2fORLt27czfJBERSYZhBACSkgCVCoiKAoYO1f6uUmnrFnL58mWEhYXh/Pnz+PLLL3Hx4kUsX74ce/fuRXh4OG7dumX0tSUlJVAqlfD390ft2lVfg+zq6gpfX19ztG+yV155BWq1GhcvXkRiYiJatWqFF154Aa+++qok/RARke1gGElKAgYPBq5dE9dzcrR1CwWSMWPGoE6dOti1axe6deuGhg0bom/fvtizZw9ycnIwdepU3bEqlQqzZ8/GyJEj4enpiVdeecXg9MiWLVvQtGlTuLq6IioqCp999hkUCgX++OMPAPrTNBWjDOvXr4dKpYKnpydeeOEF3LlzR3fMjh070KVLF3h5ecHHxwfPPfccLl26ZPLX6+bmBn9/fwQHB+Ppp5/G/PnzsWLFCqxatQp79uzRHTdp0iQ0a9YMbm5uaNSoEaZPn47S0lJd/7NmzcKJEyd0Iy3r1q0DACxcuBBPPvkk6tati+DgYPznP//B3bt3Te6TiEhuvk/Pwaepl1FUKt2mofIOIxoNMG4cIAj6z1XUxo83+5TNrVu3sHPnTvznP/+Bq6ur6Dl/f38MGzYMX3/9NYQH+lqwYAFat26N48ePY/r06XrvmZWVhcGDB2PgwIFIT0/Ha6+9Jgo0xly6dAmbN2/G1q1bsXXrVqSkpGDevHm65wsLCxEbG4ujR49i7969qFWrFv7xj3+gvLy8Bn8CWiNGjMBjjz0mmq5xd3fHunXrcObMGSxatAirVq3CRx99BEA7zfTWW28hNDQUarUaarUaQ4YMAaC9Ombx4sX47bff8Nlnn2Hfvn14++23a9wjEZGjKtWUo9m0HzHuq3TM3paBE9l/SNaL3e8zUiOpqfojIg8SBCA7W3tcZKTZTnvhwgUIgoCWLVsafL5ly5a4ffs2rl+/rptW6d69OyZOnKg7JisrS/Sa5cuXo3nz5liwYAEAoHnz5vjtt98wZ86cR/ZSXl6OdevWwd3dHQDw4os
vYu/evbrXDRo0SHT86tWr4evrizNnzpi0XsWQWrVqoVmzZqKvZdq0abr/VqlUeOutt/D111/j7bffhqurK+rVq4fatWvD399f9F7jx4/X/XdISAjee+89vP766/jkk09q1CMRkSPKUBeg76JUUS1M5S1RN3IPI2q1eY8zk4oRkQd3kg0LC3vka86dO4eOHTuKap06dar0XCqVShdEACAgIAB5eXm6x5cuXcL06dNx+PBh3LhxQzcicvXq1RqHEUD7tT74dW7atAkJCQm4ePEi7t69i7KyMnh4eFT6Pvv378fcuXNx5swZFBQUoKysDEVFRSgsLETdunVr3CcRkaNYsPMslu7/a7r96Ube+OrVcAk7kvs0TUCAeY+roiZNmkChUODMmTMGnz979iwee+wxPP7447paZR+oD3+oV9Qq4+TkJHqsUChEUzD9+/fHzZs3sWrVKvzyyy/45ZdfAGgX0daURqPBhQsXEBISAgA4fPgwXnjhBfTt2xdbt25FWloapk6dWum5rly5gujoaLRu3RqJiYk4fvw4li5dCgC69SZERHJXUlYO1eRtoiCybNhTkgcRQO4jIxERQFCQdrGqoQ9uhUL7fESEWU/r4+ODXr164ZNPPsGECRNE60Zyc3PxxRdfYPjw4SbdY6dFixbYvn27qHbs2LEa9Xnz5k1kZGRgxYoViPjzz+DgwYM1es8HffbZZ7h9+7ZuKuinn37CE088IVrrcuXKFdFr6tSpo3dn5mPHjqGsrAwffvihbmfVb775xmx9EhHZu5PX/sDfl/wkqqVN74XH6trGzULlPTKiVAKLFmn/++EP/orHCQna48xsyZIlKC4uxrPPPosDBw4gOzsbO3bsQK9evdCgQYNK13o87LXXXsPZs2cxadIknD9/Ht98843uSpPq3jjwscceg4+PD1auXImLFy9i3759iI2NrdZ73bt3D7m5ubh27Rp++eUXTJo0CaNHj8brr7+OqKgoANoRo6tXr+Krr77CpUuXsHjxYnz33Xei91GpVMjMzER6ejpu3LiB4uJiNG7cGGVlZfj4449x+fJlrF+/HsuXL69Wn0REjmb21jOiIBLZvD6y5vWzmSACyD2MAEBMDLBpE9CggbgeFKStx8RY5LRNmzbFsWPH0LhxYwwZMgSNGzfGq6++iqioKBw6dAje3qYtJAoJCcGmTZuQlJSENm3aYNmyZboRBmdn52r1WKtWLXz11Vc4fvw4WrdujQkTJugWyJpq1apVCAgIQOPGjfGPf/wDZ86cwddffy1aYDpgwABMmDABY8eORbt27fDzzz/rXTk0aNAg9OnTB1FRUahfvz6+/PJLtGvXDgsXLsT8+fPRunVrfPHFF4iPj69Wn0REjqKoVAPV5G349GCmrrZ6RBjW/bvy9YTWphCqsrBAYgUFBfD09ER+fr7eYsaioiJkZmYiJCSkZndf1Wi0V82o1do1IhERFhkRsaY5c+Zg+fLlyM7OlroVm2O2nxsiIhv069XbiPnkZ1HtxIze8HRzMvIKy3jU5/eD5L1m5EFKpVkv35XCJ598go4dO8LHxwc//fQTFixYgLFjx0rdFhERWdG0zaew4fBV3ePoJ/3xybAOEnZUOYYRB3LhwgXMnj0bt27dQsOGDfHWW28hLi5O6raIiMgK7pdo0HLGDlHt85c6oWuz+hJ1VHUMIw7ko48+0u1WSkRE8vHL5ZsYsvKwqHZqZm+4u1h3Wqa6GEaIiIjs2MRvT2DT8b92E/9H+wb4aEg76RqqBoYRIiIiO1RYXIbQd3aKahtf+Rueafy4kVfYLoYRIiIiO3Pwwg383+pfRLUz7z4Ltzr2+bFun10TERHJ1NiNv2Lryb/umfavTsGIj2kjYUc1xzBCRERkBwqKStFm5i5RbdPocEnvtmsuDCNEREQ2bv+5PPx77VFR7ex7feDiZN+bc1bgdvAyMHPmTLRr1073eOTIkRg4cKDV+8jKyoJCoUB6errVz01EZK9e/uyYKIi
MfEaFrHn9HCaIAAwjkho5ciQUCgUUCgWcnJzQqFEjTJw4EYWFhRY976JFi3Q30asMAwQRkTT+uFcC1eRt2JPxP11t85jOmPn
  311. "text/plain": [
  312. "<Figure size 640x480 with 1 Axes>"
  313. ]
  314. },
  315. "metadata": {},
  316. "output_type": "display_data"
  317. }
  318. ],
  319. "source": [
  320. "# Plot the loss function\n",
  321. "plt.plot(history.history['loss'])\n",
  322. "plt.title(\"Loss Function\")\n",
  323. "plt.xlabel(\"Epoch\")\n",
  324. "plt.ylabel(\"Loss\")\n",
  325. "plt.show()\n",
  326. "\n",
  327. "# Plot the input data and the predicted values\n",
  328. "plt.plot(train_X, train_Y, 'ro', label=\"Original Data\")\n",
  329. "plt.plot(train_X, model.predict(train_X), label=\"Predicted\")\n",
  330. "plt.legend()\n",
  331. "plt.show()"
  332. ]
  333. },
  334. {
  335. "cell_type": "code",
  336. "execution_count": null,
  337. "id": "60417d5f",
  338. "metadata": {},
  339. "outputs": [],
  340. "source": []
  341. }
  342. ],
  343. "metadata": {
  344. "kernelspec": {
  345. "display_name": "Python 3 (ipykernel)",
  346. "language": "python",
  347. "name": "python3"
  348. },
  349. "language_info": {
  350. "codemirror_mode": {
  351. "name": "ipython",
  352. "version": 3
  353. },
  354. "file_extension": ".py",
  355. "mimetype": "text/x-python",
  356. "name": "python",
  357. "nbconvert_exporter": "python",
  358. "pygments_lexer": "ipython3",
  359. "version": "3.8.16"
  360. }
  361. },
  362. "nbformat": 4,
  363. "nbformat_minor": 5
  364. }