Machine Learning course as part of the Studierendentage in SS 2023
{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "8ab45695",
   "metadata": {},
   "outputs": [],
   "source": [
    "#\n",
    "# An example of minimizer usage in TensorFlow:\n",
    "# the loss function is plotted and the fitted model is shown as a line over the data.\n",
    "#"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "270932f3",
   "metadata": {},
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "import matplotlib.pyplot as plt\n",
    "import tensorflow as tf"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "77cd99a8",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Define the training data\n",
    "train_X = np.asarray([3.3, 4.4, 5.5, 6.71, 6.93, 4.168, 9.779, 6.182, 7.59, 2.167,\n",
    "                      7.042, 10.791, 5.313, 7.997, 5.654, 9.27, 3.1])\n",
    "train_Y = np.asarray([1.7, 2.76, 2.09, 3.19, 1.694, 1.573, 3.366, 2.596, 2.53, 1.221,\n",
    "                      2.827, 3.465, 1.65, 2.904, 2.42, 2.94, 1.3])\n"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "f39cbcd9",
   "metadata": {},
   "source": [
    "The input to the model is given by train_X.\n",
    "train_Y holds the target (truth) values for the training data.\n",
    "The model receives train_X and makes predictions using its current weights.\n",
    "The difference between these predictions and the target values train_Y\n",
    "is used to update the weights and minimize the loss function."
   ]
  },
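  {
   "cell_type": "markdown",
   "id": "a1f0c2d7",
   "metadata": {},
   "source": [
    "To make the loss concrete, the cell below is a small illustration (not part of the original\n",
    "notebook flow): it computes the mean squared error by hand for a hand-picked linear guess.\n",
    "The slope 0.3 and intercept 0.5 are arbitrary illustrative values, not fitted results.\n",
    "This is the same quantity Keras minimizes when compiling with loss='mean_squared_error'."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "b3d9e1a4",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Illustration only: mean squared error of a hand-picked line y = 0.3*x + 0.5\n",
    "# (slope and intercept are arbitrary assumptions, not taken from the trained model)\n",
    "slope, intercept = 0.3, 0.5\n",
    "predictions = slope * train_X + intercept\n",
    "mse = np.mean((predictions - train_Y) ** 2)\n",
    "print('MSE of the hand-picked line:', mse)"
   ]
  },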
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "ed8449c3",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Alternative: define the model as a simple linear regression with a single\n",
    "# dense layer and no activation function; each train_X point is a single input.\n",
    "\n",
    "# model = tf.keras.models.Sequential([\n",
    "#     tf.keras.layers.Dense(1, input_shape=[1])\n",
    "# ])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "71e072b4",
   "metadata": {},
   "outputs": [],
   "source": [
    "# This model has 2 dense layers: the first with relu activation,\n",
    "# the 2nd with 1 output unit and the default linear activation function.\n",
    "\n",
    "model = tf.keras.models.Sequential([\n",
    "    tf.keras.layers.Dense(17, activation='relu', input_shape=[1]),\n",
    "    tf.keras.layers.Dense(1)\n",
    "])"
   ]
  },
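  {
   "cell_type": "markdown",
   "id": "c7e5a9f2",
   "metadata": {},
   "source": [
    "Optional check (not part of the original flow): model.summary() lists the layers and their\n",
    "parameter counts. With input_shape=[1], the first Dense layer has 17 weights plus 17 biases\n",
    "and the second has 17 weights plus 1 bias, i.e. 52 trainable parameters in total."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "d2f6b8c1",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Optional: inspect the architecture and parameter counts of the model defined above\n",
    "model.summary()"
   ]
  },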
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "5fabf184",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Different optimizer methods can be enabled\n",
    "\n",
    "model.compile(optimizer=tf.keras.optimizers.Adam(0.01), loss='mean_squared_error')\n",
    "#model.compile(optimizer=tf.keras.optimizers.SGD(0.01), loss='mean_squared_error')\n",
    "#model.compile(optimizer=tf.keras.optimizers.Adagrad(learning_rate=0.01), loss='mean_squared_error')\n",
    "#model.compile(optimizer=tf.keras.optimizers.RMSprop(learning_rate=0.01), loss='mean_squared_error')\n",
    "#model.compile(optimizer=tf.keras.optimizers.Ftrl(learning_rate=0.015), loss='mean_squared_error')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "22c4124f",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Train the model and access the training parameters\n",
    "history = model.fit(train_X, train_Y, epochs=60)\n",
    "print(history.params)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "46615960",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Get the weights of the first Dense layer\n",
    "weights = model.layers[0].get_weights()\n",
    "# Print the weight matrix and bias vector shapes\n",
    "print('Weight matrix shape:', weights[0].shape)\n",
    "print('Bias vector shape:', weights[1].shape)\n",
    "print(weights[0])"
   ]
  },
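  {
   "cell_type": "markdown",
   "id": "e4a7c3b9",
   "metadata": {},
   "source": [
    "Sanity check (not part of the original notebook): the cell below is a minimal sketch that\n",
    "reproduces one model prediction by hand from the extracted weights, hidden = relu(x W1 + b1)\n",
    "followed by output = hidden W2 + b2. It assumes the two-layer relu model defined above and\n",
    "uses an arbitrary test input x = 5.0."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "f8b2d4e6",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Sketch: reproduce one prediction by hand (assumes the two-layer relu model above)\n",
    "W1, b1 = model.layers[0].get_weights()  # shapes (1, 17) and (17,)\n",
    "W2, b2 = model.layers[1].get_weights()  # shapes (17, 1) and (1,)\n",
    "x = np.array([[5.0]])                   # arbitrary test input\n",
    "hidden = np.maximum(x @ W1 + b1, 0.0)   # relu activation of the first layer\n",
    "manual = hidden @ W2 + b2               # linear output layer\n",
    "print('manual forward pass:', manual.ravel())\n",
    "print('model.predict      :', model.predict(x).ravel())"
   ]
  },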
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "da12fc5b",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Plot the loss function\n",
    "plt.plot(history.history['loss'])\n",
    "plt.title(\"Loss Function\")\n",
    "plt.xlabel(\"Epoch\")\n",
    "plt.ylabel(\"Loss\")\n",
    "plt.show()\n",
    "\n",
    "# Plot the input data and the predicted values\n",
    "# (sort by X so the predicted curve is drawn left to right)\n",
    "order = np.argsort(train_X)\n",
    "plt.plot(train_X, train_Y, 'ro', label=\"Original Data\")\n",
    "plt.plot(train_X[order], model.predict(train_X[order]), label=\"Predicted\")\n",
    "plt.legend()\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "60417d5f",
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.8.16"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}