{ "cells": [ { "cell_type": "markdown", "metadata": {}, "source": [ "# Wieloklasowa klasyfikacja tekstu\n", "

The goal of this project was to build a model that classifies questions submitted by students in India preparing for the JEE Advanced, JEE Mains and NEET exams into one of several possible classes describing the subject a given question relates to.
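The labels that appear in the data are Biology, Chemistry, Maths and Physics, and the overall plan follows the imports in the next cell: clean and tokenize the question text, pad the sequences to a fixed length, and train a Keras `Sequential` network ending in a 4-unit softmax. The snippet below is only a minimal sketch of that kind of model; `vocab_size` and the layer widths are illustrative placeholders, not values used later in this notebook.

```python
# Minimal sketch of the intended model family (Embedding + recurrent layer + softmax).
# vocab_size and the layer widths are placeholder assumptions, not tuned values.
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Embedding, LSTM, Dense

vocab_size = 20000   # assumed vocabulary size
num_classes = 4      # Biology, Chemistry, Maths, Physics

model = Sequential([
    Embedding(input_dim=vocab_size, output_dim=64),
    LSTM(64),
    Dense(num_classes, activation='softmax'),  # one unit per subject
])
model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
```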

" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "

Importing libraries

" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "import pandas as pd\n", "import pandas as pd\n", "import matplotlib.pyplot as plt\n", "import tensorflow as tf\n", "from sklearn.model_selection import train_test_split\n", "import re\n", "import nltk\n", "nltk.download('punkt')\n", "nltk.download('stopwords')\n", "nltk.download('wordnet')\n", "nltk.download('omw-1.4')\n", "from nltk.tokenize import word_tokenize\n", "from nltk.stem import WordNetLemmatizer \n", "from nltk.corpus import stopwords\n", "stopwords.words('english')\n", "import string\n", "string.punctuation\n", "from nltk.stem.porter import PorterStemmer\n", "import optuna\n", "from keras.optimizers import Adam\n", "from tensorflow.keras.preprocessing.text import Tokenizer\n", "from tensorflow.keras.preprocessing.sequence import pad_sequences\n", "from keras.callbacks import ModelCheckpoint, EarlyStopping, ReduceLROnPlateau, TensorBoard, CSVLogger\n", "\n", "from keras.models import Sequential\n", "from keras.layers import Embedding, LSTM, Dense, Conv1D, GlobalMaxPooling1D, GRU, GlobalAveragePooling1D" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "

Dataset preview

" ] }, { "cell_type": "code", "execution_count": 2, "metadata": {}, "outputs": [ { "data": { "text/html": [ "
\n", "\n", "\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", "
engSubject
0An anti-forest measure is\\nA. Afforestation\\nB...Biology
1Among the following organic acids, the acid pr...Chemistry
2If the area of two similar triangles are equal...Maths
3In recent year, there has been a growing\\nconc...Biology
4Which of the following statement\\nregarding tr...Physics
\n", "
" ], "text/plain": [ " eng Subject\n", "0 An anti-forest measure is\\nA. Afforestation\\nB... Biology\n", "1 Among the following organic acids, the acid pr... Chemistry\n", "2 If the area of two similar triangles are equal... Maths\n", "3 In recent year, there has been a growing\\nconc... Biology\n", "4 Which of the following statement\\nregarding tr... Physics" ] }, "execution_count": 2, "metadata": {}, "output_type": "execute_result" } ], "source": [ "df = pd.read_csv(\"subjects-questions.csv\")\n", "df.head()" ] }, { "cell_type": "code", "execution_count": 3, "metadata": {}, "outputs": [ { "data": { "image/png": "iVBORw0KGgoAAAANSUhEUgAAA2UAAAIiCAYAAABIXGAzAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8fJSN1AAAACXBIWXMAAA9hAAAPYQGoP6dpAACYOElEQVR4nOzdeXwV1f3/8VfIcrOQXLJvZGEJgRDCTgiLLCFsAdwqVdpUkEJbEYpCrV/b/sS2gnWt1VbcWkFFbGttS4EQFkG2sAQjhB1MSMhCQnaW7PP7I83gFVBSg1fh/Xw87kPzmTMzZ86dOdzPPWfmOhiGYSAiIiIiIiJ20c7eFRAREREREbmZKSkTERERERGxIyVlIiIiIiIidqSkTERERERExI6UlImIiIiIiNiRkjIRERERERE7UlImIiIiIiJiR0rKRERERERE7EhJmYiIiIiIiB0pKRMRka/FPffcQ1xcHFVVVfauyk3jD3/4A15eXmRmZtq7KvItpXNI5OuhpExE2sSbb76Jg4MDe/fuvWqZnJwcHBwcePPNN9t8/4sWLcLBwYGzZ8+2+ba/SS5cuMCiRYvYvHnz17K/lnb9ql599VU++ugj1qxZg5eXVxvUTL7M7t27efTRR3n//ffp06ePzbK2el+/qmvpN24UDg4OLFq0qM2291X600OHDrFo0SJycnK+sNwXnUMi0rac7F0BEbl5BAcHs3PnTrp06WLvqnxrXbhwgccffxyAkSNHXvf9/fCHP2T8+PFfaRsHDhzgl7/8JWlpaXTs2LGNaiZfpKysjO9+97v86U9/Iikpyd7Vkevgq/Snhw4d4vHHH2fkyJFERkZesYzOIZGvl5IyEfnaWCwWBg8ebO9qSCt07NjxKydSvXr1ori4uI1qJNfCx8eH7Oxse1fjG80wDGpqanBzc7N3Vf4n17s/1Tkk8vXS9EUR+dpcbbrNkSNHuOeeewgMDMRisRAeHs4PfvADamtrAYiMjMTBweGKr89P48vLy+OOO+7Ay8sLq9XK97//fUpKSmzKvPfee4wdO5bg4GDc3Nzo0aMHjzzyCOfPn//C+ldVVeHk5MTTTz9txs6ePUu7du2wWq00NDSY8Xnz5uHv749hGPzmN7/BycmJvLy8y7Z533334evrS01NDQCbNm1i5MiR+Pr64ubmRnh4OHfeeScXLlwgJycHf39/AB5//HGzDaZPnw7AiRMnmDFjBlFRUbi7uxMaGsrkyZM5cOCAzT43b96Mg4MDb7/9Ng899BBBQUG4ubkxYsQIPv74Y5uyV5vm9t5775GQkICHhwft27dn3Lhxl607ffp02rdvz4kTJ5g4cSLt27cnLCyMBQsWmO/tF4mMjGTSpEl88MEHxMXF4erqSufOnfnDH/5wWdnc3Fy+//3vExAQgMVioUePHjz77LM0NTXZlHv55Zfp3bs37du3x9PTk+7du/Poo4/a7PNazrW2OmePHz/OtGnTbOr9xz/+8bLjO3jwIGPHjsXd3R1/f3/mzJnD6tWrr3gN/PnPf6Z37964urri4+PD7bffzuHDh6+5vVNTU+nXrx9ubm50796dP//5z2aZnJwcnJycWLJkyWXrf/TRRzg4OPC3v/3tmtupRXV1NT/5yU/w8/PD19eXO+64g4KCgi+tc8s5dvDgQRITE/Hw8MDf358HHniACxcu2JR1cHDggQceYOnSpfTo0QOLxcKyZcuYPn36Vd+rz043rKqqYuHChXTq1AkXFxdCQ0OZP3/+Zf1GVVUVs2bNwtfXl/bt2zN+/HiOHTt2Wd1brq39+/dz1113YbVa8fHx4aGHHqKhoYGjR48yfvx4PD09iYyM5KmnnrJZ/2r96bZt20hMTMTT0xN3d3eGDBnC6tWrzeVvvvkmd911FwCjRo0yj/Wz2/myc6jl3NuzZ48Ze//993FwcCA5OdmmPnFxcdx5551XePdExIYhItIG/vKXvxiAsWfPnquWyc7ONgDjL3/5ixnLzMw02rdvb0RGRhpLly41Nm7caLz99tvG1KlTjaqqKsMwDGPfvn3Gzp07zdf27duNXr16GR4eHsbJkycNwzCMxx57zACMiIgI42c/+5mxbt0647nnnjM8PDyMvn37GnV1deY+f/Ob3xjPP/+8sXr1amPz5s3G0qVLjU6dOhmjRo360uMcPHiwMXbsWPPvlStXGq6uroaDg4Oxfft2M96jRw9j6tSphmEYxpkzZwyLxWL84he/sNlWaWmp4ebmZvzsZz8z28fV1dVISkoy/vnPfxqbN2823nnnHSMlJcUoLy83ampqjNTUVAMwZs6cabbHiRMnDMMwjC1bthgLFiww/v73vxtbtmwxPvjgA+O2224z3NzcjCNHjpj7/fDDDw3ACAsLM2699VZj1apVxttvv2107drV8PLyMtv0s+36WU888YTh4OBg3HfffcZ//vMf4x//+IeRkJBgeHh4GAcPHjTL3XvvvYaLi4vRo0cP45lnnjE2bNhg/L//9/8MBwcH4/HHH//Sto6IiDBCQ0ON8PBw489//rOxZs0a43vf+54BGE8//bRZrri42AgNDTX8/f2NpUuXGqmpqcYDDzxgAMZPfvITs9y7775rAMbcuXONtLQ0Y8OGDcbSpUuNefPmmWWu5Vxrq3P24MGDhtVqNXr16mUsX77cSEtLMxYsWGC0a9fOWLRokVmngoICw9fX1wgPDzfefPNNY82aNUZKSooRGRlpAMaHH35oll28eLEBGPfcc4+xevVqY/ny5Ubnzp0Nq9VqHDt27Avf14iICKNjx45GTEyMsXz5cmPdunXGXXfdZQDGli1bzHK33367ER4ebjQ0NNisf9dddxkhISFGfX39NbdTS7/RuXNnY+7cuca6deuM119/3fD29r6m67HlHAsPDzeeeOIJIy0tzVi0aJHh5ORkTJo0yaYsYIS
GhhpxcXHGihUrjE2bNhlZWVnGiRMnbN6rnTt3Gt///vcNwHjvvfcMwzCM8+fPG3369DH8/PyM5557ztiwYYPxwgsvGFar1Rg9erTR1NRkGIZhNDU1GaNGjTIsFotZn8cee8zo3LmzARiPPfbYZe9BdHS08Zvf/MZYv3698fDDDxuA8cADDxjdu3c3/vCHPxjr1683ZsyYYQDG+++/b65/pf508+bNhrOzs9G/f3/jvffeM/75z38aY8eONRwcHIyVK1cahtF8vbScJ3/84x/NYy4uLr7mc6i6utpwdnY2Fi9ebO77xz/+seHm5mZ4eHiY/e2ZM2cMBwcH409/+tOXvpciNzslZSLSJv7XpGz06NFGhw4dzA8E1+KBBx4wnJycjDVr1pixlg84Dz74oE3Zd955xwCMt99++4rbampqMurr640tW7YYgPHJJ5984b5/+ctfGm5ubkZNTY1hGIbxwx/+0Bg/frwRFxdnJhr5+fkGYLz66qvmevfee68REBBg1NbWmrHf/e53Rrt27Yzs7GzDMAzj73//uwEYmZmZV91/SUnJZR/urqahocGoq6szoqKibNqlJSnr16+f+WHSMAwjJyfHcHZ2Nn74wx+asc9/eM/NzTWcnJyMuXPn2uyrurraCAoKMhPRlmMGjL/+9a82ZSdOnGhER0d/af0jIiIMBweHy9ojKSnJ8PLyMs6fP28YhmE88sgjBmDs2rXLptxPfvITw8HBwTh69KhhGM3nTYcOHb50v591pXOtrc7ZcePGGR07djQqKysvK+vq6mqUlZUZhmEYP/vZzwwHBwebhLdl/c8mZeXl5Yabm5sxceJEm3K5ubmGxWIxpk2bZsaulpS5uroap06dMmMXL140fHx8jB/96EdmrOX8+eCDD8xYfn6+4eTkZJNsX0s7tfQb999/v038qaeeMgCjsLDwqusaxqVz7IUXXrCJP/HEEwZgbNu2zYwBhtVqNdv1av76178aDg4OxqOPPmrGlixZYrRr1+6y/q3lmm15X9euXfuF9blSUvbss8/alO3Tp48BGP/4xz/MWH19veHv72/ccccdZuxK/engwYONgIAAo7q62ow1NDQYsbGxRseOHc3r/W9/+9tlCb1htO4cGjZsmDF69Gjz765duxo/+9nPjHbt2plJfEv/+9kvBETkyjR9UUTs5sKFC2zZsoWpU6ea0/K+zJNPPslLL73E0qVLmTBhwmXLv/e979n8PXXqVJycnPjwww/N2Keffsq0adMICgrC0dERZ2dnRowYAfCl07wSExO5ePEiO3bsAGDDhg0kJSUxZswY1q9fb8YAxowZY67305/+lOLiYnNqV1NTEy+//DLJycnmjfZ9+vTBxcWF2bNns2zZMj799NNrapMWDQ0NLF68mJiYGFxcXHBycsLFxYXjx49f8bimTZtmMzUxIiKCIUOG2LTV561bt46GhgZ+8IMf0NDQYL5cXV0ZMWLEZVPpHBwcmDx5sk0sLi6OU6dOXdMx9ezZk969e19W76qqKvbt2wc0T/mMiYlh0KBBNuWmT5+OYRhs2rQJgEGDBlFRUcE999zDv/71ry99UueVzrW2OmdramrYuHEjt99+O+7u7jZtOXHiRGpqakhPTwdgy5YtxMbGEhMTY7Pde+65x+bvnTt3cvHiRXM6a4uwsDBGjx7Nxo0bv7Suffr0ITw83Pzb1dWVbt262bxfI0eOpHfv3jbTLJcuXYqDgwOzZ88GWt9OU6ZMsfk7Li4O4JrPk89f99OmTQO47FwePXo03t7eV93Oli1bSElJ4fvf/z5PPPGEGf/Pf/5DbGwsffr0sXmvxo0bZzOFtGV/V6vPlUyaNMnm7x49euDg4GDTvzk5OdG1a9cvbI/z58+za9cuvvOd79C+fXsz7ujoSEpKCqdPn+bo0aNXXR9adw4lJiayfft2Ll68yKlTpzhx4gR33303ffr0sekLw8PDiYqK+sL9iojuKRMROyovL6exsfGaHyTx9ttv8+ijj/L//t//Y+bMmVcsExQUZPO3k5MTvr6+lJaWAnDu3DmGDx/Orl27+O1vf8vmzZvZs2cP//jHPwC4ePHiF9ZhyJAhuLu7s2HDBk6cOEFOTo6ZlO3atYtz586xYcMGOnfuTKdOncz1+vbty/Dhw80Psv/5z3/IycnhgQceMMt06dKFDRs2EBAQwJw5c+jSpQtdunThhRdeuKb2eeihh/jVr37FbbfdxqpVq9i1axd79uyhd+/eVzyuz7dVS6ylra7kzJkzAAwcOBBnZ2eb13vvvXdZouPu7o6rq6tNzGKxmPfQfZmr1REw61laWkpwcPBl5UJCQmzKpaSk8Oc//5lTp05x5513EhAQQHx8vPkB8rOudq611TlbWlpKQ0MDL7744mXtOHHiRACzLUtLSwkMDLxs25+PtRzn1drii97XFr6+vpfFLBbLZefPvHnz2LhxI0ePHqW+vp7XXnuN73znO+Z709p2+vx+LRYL8OXXI1y6xj/r8+dIiyu1TYuDBw9y2223MXz4cN544w2bZWfOnGH//v2XvVeenp4YhmHzXn1Rfa7Ex8fH5m8XF5crXjcuLi5feN2Ul5djGMY1XQtX05pzaMyYMdTW1rJt2zbWr1+Pn58fffv2ZcyYMeYXUxs3brT5ckpErk5PXxQRu/Hx8cHR0ZHTp09/adn169dz3333MX36dPOR8FdSVFREaGio+XdDQwOlpaXmh6RNmzZRUFDA5s2bzdExgIqKimuqs4uLC8OGDWPDhg107NiRoKAgevXqRefOnYHmh2hs3Ljxsm+/ofmD7F133cW+fft46aWX6Nat22WPmh4+fDjDhw+nsbGRvXv38uKLLzJ//nwCAwO5++67v7Bub7/9Nj/4wQ9YvHixTfzs2bN06NDhsvJFRUVXjF3pg3kLPz8/AP7+978TERHxhfVpC1erI1z6IO/r60thYeFl5VoeFNFSZ4AZM2YwY8YMzp8/z0cffcRjjz3GpEmTOHbsmHk8X3SutdU56+3tbY5gzJkz54rrtyT1vr6+ZjJ8pXZo0dIeV2uLz7bDVzVt2jR+/vOf88c//pHBgwdTVFRkcxytaaev6vPXOFx+jrS42m+znT59mvHjxxMeHs7777+Ps7OzzXI/Pz/c3NxsHnry+eUt+/ui+lxP3t7etGvX7pqvhStpzTkUHx9P+/bt2bBhAzk5OSQmJuLg4EBiYiLPPvsse/bsITc3V0mZyDXSSJmI2E3LE//+9re/feFUsszMTO68805Gjx7Nq6+++oXbfOedd2z+/utf/0pDQ4P5m14tH8pavolv8corr1xzvceMGUNGRgbvv/+++YHDw8ODwYMH8+KLL1JQUHDFDyK333474eHhLFiwgA0bNnD//fdf9UOio6Mj8fHx5shay1S9LxpBcHBwuOy4Vq9eTX5+/hX38e6772IYhvn3qVOn2LFjxxf+/tm4ceNwcnLi5MmTDBgw4IqvtnTw4EE++eQTm9iKFSvw9PSkX79+QPM0qkOHDplt1GL58uU4ODgwatSoy7br4eHBhAkT+MUvfkFdXR0HDx
4Evvxca6tz1t3dnVGjRvHxxx8TFxd3xXZs+YA8YsQIsrKyOHTokM02Vq5cafN3QkICbm5uvP322zbx06dPs2nTJhITE69a39ZydXU1p9k+99xz9OnTh6FDh5rLr7Wd2srnr/sVK1YA1/ZbfpWVlUyYMAEHB4er/rj5pEmTOHnyJL6+vld8r1qmILeca1erz/Xk4eFBfHw8//jHP2z6h6amJt5++206duxIt27dgKv3I605h5ydnbnllltYv349mzZtMr9gGj58OE5OTvzyl780kzQR+XIaKRORNrVp0yZycnIui7dMyfq85557jmHDhhEfH88jjzxC165dOXPmDP/+97955ZVXMAyDiRMn4ubmxsKFC9m7d6/N+jExMTYfov7xj3/g5OREUlISBw8e5Fe/+hW9e/dm6tSpQPP0Q29vb3784x/z2GOP4ezszDvvvHPZB/8vkpiYSGNjIxs3bmTZsmVmfMyYMTz22GM4ODgwevToy9ZzdHRkzpw5/PznP8fDw+Oy+zaWLl3Kpk2bSE5OJjw8nJqaGvOb+ZYkz9PTk4iICP71r3+RmJiIj48Pfn5+5uPM33zzTbp3705cXBwZGRk8/fTTV51CVlxczO23386sWbOorKzksccew9XVlf/7v/+76rFHRkby61//ml/84hd8+umnjB8/Hm9vb86cOcPu3bvx8PD4wpHM1goJCWHKlCksWrSI4OBg3n77bdavX8/vfvc73N3dAXjwwQdZvnw5ycnJ/PrXvyYiIoLVq1fzpz/9iZ/85CfmB9FZs2bh5ubG0KFDCQ4OpqioiCVLlmC1Whk4cCBVVVXXdK611Tn7wgsvMGzYMIYPH85PfvITIiMjqa6u5sSJE6xatcq8F27+/Pn8+c9/ZsKECfz6178mMDCQFStWcOTIEQDatWv+frVDhw786le/4tFHH+UHP/gB99xzD6WlpTz++OO4urry2GOPtdn7AnD//ffz1FNPkZGRweuvv37Z8i9rJ09Pzzaph4uLC88++yznzp1j4MCB7Nixg9/+9rdMmDCBYcOGfen606ZN49ChQ7z66qvk5eXZ/HRFy+/0zZ8/n/fff59bbrmFBx98kLi4OJqamsjNzSUtLY0FCxYQHx/P2LFjueWWW3j44Yc5f/48AwYMYPv27bz11lttcqxfZsmSJSQlJTFq1CgWLlyIi4sLf/rTn8jKyuLdd981vwSKjY0F4NVXX8XT0xNXV1c6deqEr69vq86hxMREFixYAFzqo9zc3BgyZAhpaWnExcUREBDwtRy7yLeePZ8yIiI3jpanqF3tlZ2dfcWnhRmGYRw6dMi46667DF9fX/Px1tOnTzdqamrMda72anl6WMuTzDIyMozJkycb7du3Nzw9PY177rnHOHPmjM3+duzYYSQkJBju7u6Gv7+/8cMf/tDYt2/fFet2JU1NTYafn58BGPn5+WZ8+/bt5lMNryYnJ8cAjB//+MeXLdu5c6dx++23GxEREYbFYjF8fX2NESNGGP/+979tym3YsMHo27evYbFYDMC49957DcNofnLazJkzjYCAAMPd3d0YNmyYsXXrVmPEiBHGiBEjzPVbnp731ltvGfPmzTP8/f0Ni8ViDB8+3Ni7d6/Nvq70lD7DMIx//vOfxqhRowwvLy/DYrEYERERxne+8x1jw4YNZpl7773X8PDwuGzdq23z8yIiIozk5GTj73//u9GzZ0/DxcXFiIyMNJ577rnLyp46dcqYNm2a4evrazg7OxvR0dHG008/bTQ2Npplli1bZowaNcoIDAw0XFxcjJCQEGPq1KnG/v37DcMwrvlcM4y2OWdb9nnfffcZoaGhhrOzs+Hv728MGTLE+O1vf2tzfFlZWcaYMWMMV1dXw8fHx5g5c6axbNmyKz4x9PXXXzfi4uIMFxcXw2q1GrfeeutlT2682tMXk5OTL2vbz58/nzVy5EjDx8fHuHDhwhWXf1E7GcbVn9raco5+/umAn9dyju3fv98YOXKk4ebmZvj4+Bg/+clPjHPnztmUBYw5c+Zcto2IiIirvleffVriuXPnjF/+8pdGdHS02ba9evUyHnzwQaOoqMgsV1FRYdx3331Ghw4dDHd3dyMpKck4cuTIVZ++WFJScsVj+rwRI0YYPXv2NP++Wn+6detWY/To0YaHh4fh5uZmDB482Fi1atVl2/v9739vdOrUyXB0dLxsO9dyDhmGYXzyyScGYERFRdnEW542+dBDD122johcmYNhfGbuioiIXFcvvvgi8+bNIysri549e173/R0+fJjvfe97bN++HTc3N6D5vrdRo0bxt7/9je985zvXvQ7/q8jISGJjY/nPf/5j76p8I82ePZt3332X0tJSXFxcvvb9FxcXExERwdy5cy/7YeOvy/Tp0/n73//OuXPn7LJ/EZG2oumLIiJfg48//pjs7Gx+/etfc+utt34tCRnAa6+9RmxsLC4uLpw7d87mUdny7fHrX/+akJAQOnfuzLlz5/jPf/7D66+/zi9/+cuvPSE7ffo0n376KU8//TTt2rXjpz/96de6fxGRG5GSMhGRr8Htt99OUVERw4cPZ+nSpV/bfseNG8fMmTPx8PDglVde4d577/3a9i1tx9nZmaeffprTp0/T0NBAVFQUzz33nF0Sotdff51f//rXREZG8s4779g87VRERP43mr4oIiIiIiJiR3okvoiIiIiIiB0pKRMREREREbEjJWUiIiIiIiJ2pAd9tKGmpiYKCgrw9PQ0f6BRRERERERuPoZhUF1dTUhICO3afclYmF1/Je1zFi9ebADGT3/6UzPW1NRkPPbYY0ZwcLDh6upqjBgxwsjKyrJZr6amxnjggQcMX19fw93d3Zg8ebKRl5dnU6asrMz4/ve/b3h5eRleXl7G97//faO8vNymzKlTp4xJkyYZ7u7uhq+vrzF37lyjtrb2muufl5f3hT8Yqpdeeumll1566aWXXnrdXK/P5yVX8o0ZKduzZw+vvvoqcXFxNvGnnnqK5557jjfffJNu3brx29/+lqSkJI4ePYqnpycA8+fPZ9WqVaxcuRJfX18WLFjApEmTyMjIwNHREYBp06Zx+vRpUlNTgeYf3UxJSWHVqlUANDY2kpycjL+/P9u2baO0tJR7770XwzB48cUXr+kYWuqTl5eHl5dXm7SLiIiIiIh8+1RVVREWFmbmCF/kG/FI/HPnztGvXz/+9Kc/8dvf/pY+ffrw+9//HsMwCAkJYf78+fz85z8HoLa2lsDAQH73u9/xox/9iMrKSvz9/Xnrrbf47ne/C0BBQQFhYWGsWbOGcePGcfjwYWJiYkhPTyc+Ph6A9PR0EhISOHLkCNHR0axdu5ZJkyaRl5dHSEgIACtXrmT69OkUFxdfU5JVVVWF1WqlsrJSSZmIiIiIyE2sNbnBN+JBH3PmzCE5OZkxY8bYxLOzsykqKmLs2LFmzGKxMGLECHbs2AFARkYG9fX1NmVCQkKIjY01y+zcuROr1WomZACDBw/GarXalImNjTUTMmj+0dXa2loyMjKuWO/a2lqqqqpsXiIiIiIiIq1h9+mLK
1euZN++fezZs+eyZUVFRQAEBgbaxAMDAzl16pRZxsXFBW9v78vKtKxfVFREQEDAZdsPCAiwKfP5/Xh7e+Pi4mKW+bwlS5bw+OOPXxZfu3Yt7u7uACQlJVFaWsq+ffvM5QkJCTg5ObF161YzFhcXR2hoKGvXrjVjnTt3pmfPnqxfv56amhqzzvHx8ezcuZOzZ88C4O7uTmJiIgcOHCAnJ8dcPzk5mZycHA4ePGjGRowYQU1NDbt27TJjAwYMwGq1snHjRjPWvXt3oqKiWL16NU1NTQCEhYXRp08fNm/eTHV1tdlGw4YNY+/evRQWFgLg7OzM+PHjOXLkCMePHze3OXbsWEpKSvj444/N2JAhQ2jXrh3btm2zaYuQkBBzqilAly5diImJIS0tjdraWqD5PR40aBA7duygtLQUAA8PD0aPHs0nn3xCbm6uuf7kyZM5efIkhw4dMmOjRo3i/Pnz7N6924wNHDgQT09PNm3aZMZ69OhB165d+c9//kPLwHJ4eDi9e/e2aQsfHx+GDh3Knj17zHPGxcXFHK09ceKEuc1x48Zx5swZMjMzzdjQoUMB2L59uxnr06cPgYGBrFu3zox17dqVHj16sG7dOurq6gAICgpi4MCBbN++nbKyMqB5Ou3IkSNt2sLBwYFJkyZx4sQJDh8+bG5z9OjRVFdX21yHgwYNwsPDgw8//NCMxcTE0KVLF3Pa72fbYtOmTZw/fx4AX19fhgwZwu7duzlz5gzQ/IXK2LFjOXToECdPnjTXHz9+PAUFBezfv9+MDRs2jKamJvNLE4C+ffvi7+9PWlqaGYuKiqJ79+6kpqZSX18PQHBwMAMGDGDbtm2Ul5fbtEVmZiZ5eXkAtGvXjuTkZI4fP86RI0fMbSYmJlJZWcnevXvNWHx8PK6urmzZssWM9ezZk8jISFavXm3GIiMj6dWrFxs3buTChQsA+Pn5kZCQwK5duyguLgbA1dWVpKQkDh48yKeffmquP2HCBPLz823aYvjw4TQ0NLBz504z1q9fP3x9fVm/fr0Z69atmznq39DQADR/QdW/f3+2bt1KRUUFAFarlVtuuYV9+/aRn58PgKOjIxMnTuTYsWMcPXrU3OaYMWMoLy+3+WJq8ODBWCwWm7aIjY0lPDycNWvWmLFOnToRGxvLhg0buHjxIgD+/v4MHjyY9PR0SkpKAHBzc2PMmDFkZWWRnZ1trj9x4kRyc3PJysoyYyNGjKC2tpb09HQz1r9/f7y9vdmwYYMZi46Oplu3bqxZs4bGxkYAQkND6devHx999BGVlZUAdOjQgeHDh5ORkUFBQQEATk5OTJgwgaNHj3Ls2DFzm+rL1ZerL1dfrr5cfXlr+/LP9lVfxq7TF/Py8hgwYABpaWn07t0bgJEjR5rTF3fs2MHQoUMpKCggODjYXG/WrFnk5eWRmprKihUrmDFjhtm5t0hKSqJLly4sXbqUxYsXs2zZMpsTFJo7gZkzZ/LII48we/ZsTp06ZdNhQnNHvHz5cu6+++7L6l9bW2uz35Z5o5q+KCIiIiJyc/vWTF/MyMiguLiY/v374+TkhJOTE1u2bOEPf/gDTk5O5sjV50eqiouLzWVBQUHU1dWZ36JcrUzLNzyfVVJSYlPm8/spLy+nvr7+shG0FhaLBS8vL5uXiIiIiIhIa9g1KWsZ2svMzDRfAwYM4Hvf+x6ZmZl07tyZoKAgm2Hduro6tmzZwpAhQ4Dm4U5nZ2ebMoWFhWRlZZllEhISqKystJnasGvXLiorK23KZGVlmdM2ANLS0rBYLPTv3/+6toOIiIiIiNy87HpPmaenJ7GxsTYxDw8PfH19zfj8+fNZvHgxUVFRREVFsXjxYtzd3Zk2bRrQPKd25syZLFiwAF9fX3x8fFi4cCG9evUyHxzSo0cPxo8fz6xZs3jllVeA5kfiT5o0iejoaKB5jnxMTAwpKSk8/fTTlJWVsXDhQmbNmqURMBERERERuW7s/qCPL/Pwww9z8eJF7r//fsrLy4mPjyctLc3mef/PP/88Tk5OTJ06lYsXL5KYmMibb75p/kYZwDvvvMO8efPMpzROmTKFl156yVzu6OjI6tWruf/++xk6dChubm5MmzaNZ5555us7WBERERERuel8I36n7Eah3ykTERERERH4Fj3oQ0RERERE5GanpExERERERMSOlJSJiIiIiIjYkZIyERERERERO1JSJiIiIiIiYkdKykREREREROxISZmIiIiIiIgdKSkTERERERGxIyVlIiIiIiIidqSkTERERERExI6UlImIiIiIiNiRk70rIN88T3581t5VuKk90tfP3lUQERERka+RRspERERERETsSEmZiIiIiIiIHSkpExERERERsSMlZSIiIiIiInakpExERERERMSOlJSJiIiIiIjYkR6JLyLyOfpZCPvTT0OIiMjNRCNlIiIiIiIidqSkTERERERExI6UlImIiIiIiNiRkjIRERERERE7UlImIiIiIiJiR0rKRERERERE7EhJmYiIiIiIiB0pKRMREREREbEjJWUiIiIiIiJ2pKRMRERERETEjpSUiYiIiIiI2JGSMhERERERETtSUiYiIiIiImJHSspERERERETsSEmZiIiIiIiIHSkpExERERERsSMlZSIiIiIiInakpExERERERMSOlJSJiIiIiIjYkZIyERERERERO1JSJiIiIiIiYkdKykREREREROxISZmIiIiIiIgdKSkTERERERGxIyVlIiIiIiIidqSkTERERERExI6UlImIiIiIiNiRXZOyl19+mbi4OLy8vPDy8iIhIYG1a9eay6dPn46Dg4PNa/DgwTbbqK2tZe7cufj5+eHh4cGUKVM4ffq0TZny8nJSUlKwWq1YrVZSUlKoqKiwKZObm8vkyZPx8PDAz8+PefPmUVdXd92OXUREREREBOyclHXs2JEnn3ySvXv3snfvXkaPHs2tt97KwYMHzTLjx4+nsLDQfK1Zs8ZmG/Pnz+eDDz5g5cqVbNu2jXPnzjFp0iQaGxvNMtOmTSMzM5PU1FRSU1PJzMwkJSXFXN7Y2EhycjLnz59n27ZtrFy5kvfff58FCxZc/0YQEREREZGbmpM9dz558mSbv5944glefvll0tPT6dmzJwAWi4WgoKArrl9ZWckbb7zBW2+9xZgxYwB4++23CQsLY8OGDYwbN47Dhw+TmppKeno68fHxALz22mskJCRw9OhRoqOjSUtL49ChQ+Tl5RESEgLAs88+y/Tp03niiSfw8vK6Xk0gIiIiIiI3uW/MPWWNjY2sXLmS8+fPk5CQYMY3b95MQEAA3bp1Y9asWRQXF5vLMjIyqK+vZ+zYsWYsJCSE2NhYduzYAcDOnTuxWq1mQgYwePBgrFarTZnY2FgzIQMYN24ctbW1ZGRkXLXOtbW1VFVV2bxERERERERaw64jZQAHDhwgISGBmpoa2rdvzwcffEBMTAwAEyZM4K677iIiIoLs7Gx+9atfMXr0aDIyMrBYLBQVFeHi4oK3t7fNNgMDAykqKgKgqKiIgICAy/YbEBBgUyYwMNBmube3Ny4u
LmaZK1myZAmPP/74ZfG1a9fi7u4OQFJSEqWlpezbt89cnpCQgJOTE1u3bjVjcXFxhIaG2txT17lzZ3r27Mn69eupqakx6x0fH8/OnTs5e/YsAO7u7iQmJnLgwAFycnLM9ZOTk8nJybGZDjpixAhqamrYtWuXGRswYABWq5WNGzcCEN1kUNIhnDKvjnTLS8fBaAKg0iOAIt+uRBZlYqm7AMBFiye5gb0IOXsUzwulADS1c+J4x0H4VeTiW3Xp/r4ToQPxqKkguPS4GcsNjMXAgYgzB8xYkU8Xqt19iTq924yVeYVQ0iGSLvl7cWpsvtfvnJsP+f7dCSs+iHtNJQB1zm5kB/clsOwkHc6dMdc/Gj4E76oCAioutU92cF+cG2roWHLYjOX7d6fWyY3OhR+bsZIOEZR5hRKdtxMMo7kt2gdQ5NOVToWZuNS3tIUXuYGxhJ49QvsLZQA0OjpzInQg/hWn8KnKN7d5PHQg7WvKCS49YcZOBfYCYNWqnWasT58+BAYGsm7dOjPWtWtXevTowbp168z7HoOCghg4cCDbt2+nrKx5356enowcOZJPPvmE3NxcABwcHJg0aRInTpzg8OFLxz169Giqq6vZs2ePGRs0aBAeHh58+OGHZiwmJoYuXbqwatUqMxYeHk7v3r3ZtGkT58+fB8DX15chQ4awe/duzpxpfh8sFgtjx47l0KFDnDx50lx//PjxFBQUsH//fjM2bNgwmpqazC9OAPr27Yu/vz9paWlmLCoqiu7du5Oamkp9fT0AwcHBDBgwgG3btlFeXm7TFpmZmeTl5QHQrl07kpOTOX78OEeOHDG3mZiYSPsLpYSePWrGTvvHUO/kQqfCTDNW7N2J8vZBzefFf1W0D+KMT2c6F+zDuaH5mr3gaiUvoCcdSw7jcbG5Pg1OFk6G9CegPAfv6gJz/WMd47FeKCGw7FMzlhMUR7umRsKLL13HBb5RXHS10iV/rxk7aw2j1BpG1OldtGtqnr5d5e5HoV83Ior241p3DoBaFw9ygnoTXHocr/MlABjtHDnWMR7fytP4Veaa2zwZ0h+3umpCzh4zY3kBMTS2cyay6BMzdsa7E5XtA+mWl27Gyj2DKfbuROeCDJwbagE479qB0wExdCw+hEdNBQD1ThY+DelPQHk23tWF5vqNccnk5uaSlZVlxkaMGEFtbS3p6Zf2079/f7y9vdmwYYMZi46Oplu3bqxZs8acyh4aGkq/fv346KOPqKxs7i86dOjA8OHDycjIoKCg+X1wcnJiwoQJHD16lGPHLh33t7EvB+jevTtRUVGsXr2apqbmvjwsLIw+ffqwefNmqqurgeZ/84YNG8bevXspLGx+H5ydnRk/fjxHjhzh+PFL/fbYsWMpKSnh448v9ZNDhgyhXbt2bNu2zaYtQkJCSE1NNWNdunQhJiaGtLQ0amubz4vAwEAGDRrEjh07KC1t/nfEw8OD0aNH2/Rf0DzD5uTJkxw6dMiMjRo1ivPnz7N796V/MwYOHIinpyebNm0yYz169KBr16785z//wfhvX97Sf322LXx8fBg6dCh79uwxPwO4uLiYs29OnLjUb48bN44zZ86QmZlpxoYOHQrA9u3bzZj6cvv05ZWVlezde6mfjI+Px9XVlS1btpixnj17EhkZyerVq81YZGQkvXr1YuPGjVy40PxvvJ+fHwkJCezatcscIHB1dSUpKYmDBw/y6aeX+u0JEyaQn59v0xbDhw+noaGBnTsv/ZvRr18/fH19Wb9+vRnr1q0b0dHRrF27loaGBqB5wKF///5s3brVfCaC1WrllltuYd++feTnN3++cHR0ZOLEiRw7doyjRy/9GzZmzBjKy8ttBhoGDx6MxWKxaYvY2FjCw8NtbhXq1KkTsbGxbNiwgYsXLwLg7+/P4MGDSU9Pp6Sk+d8RNzc3xowZQ1ZWFtnZ2eb6EydOvKn78s/2VV/GwWjpmeykrq6O3NxcKioqeP/993n99dfZsmWLmZh9VmFhIREREaxcuZI77riDFStWMGPGDLNjb5GUlESXLl1YunQpixcvZtmyZTYnJzR3ADNnzuSRRx5h9uzZnDp1yqazhOZOePny5dx9991XrHttba3NvquqqggLC6OysvJbPeXxyY/P2rsKN7VH+vrZuwo3PV0D9qfrQEREvu2qqqqwWq3XlBvYffqii4sLXbt2ZcCAASxZsoTevXvzwgsvXLFscHAwERER5jd2QUFB1NXVmd+gtCguLjZHvoKCgsxvdz6rpKTEpsznR8TKy8upr6+/bATtsywWi/nkyJaXiIiIiIhIa9g9Kfs8wzAuG/lqUVpaSl5eHsHBwUDzUKezs7PNsG9hYSFZWVkMGTIEaB6SrKystJnWsGvXLiorK23KZGVlmVM2ANLS0rBYLPTv37/Nj1FERERERKSFXe8pe/TRR5kwYQJhYWFUV1ezcuVKNm/eTGpqKufOnWPRokXceeedBAcHk5OTw6OPPoqfnx+333470DyfdubMmSxYsABfX198fHxYuHAhvXr1Mp/G2KNHD8aPH8+sWbN45ZVXAJg9ezaTJk0iOjoaaJ4fHxMTQ0pKCk8//TRlZWUsXLiQWbNmafRLRERERESuK7smZWfOnCElJYXCwkKsVitxcXGkpqaSlJTExYsXOXDgAMuXL6eiooLg4GBGjRrFe++9h6enp7mN559/HicnJ6ZOncrFixdJTEzkzTffxNHR0SzzzjvvMG/ePPMpjVOmTOGll14ylzs6OrJ69Wruv/9+hg4dipubG9OmTeOZZ575+hpDRERERERuSnZ/0MeNpDU3832T6SEH9qUHHNifrgH703UgIiLfdt+qB32IiIiIiIjczJSUiYiIiIiI2JGSMhERERERETtSUiYiIiIiImJHSspERERERETsSEmZiIiIiIiIHSkpExERERERsSMlZSIiIiIiInakpExERERERMSOlJSJiIiIiIjYkZIyERERERERO1JSJiIiIiIiYkdKykREREREROxISZmIiIiIiIgdOdm7AiIiIvLN8+THZ+1dhZveI3397F0FEfmaaKRMRERERETEjpSUiYiIiIiI2JGSMhERERERETtSUiYiIiIiImJHSspERERERETsSEmZiIiIiIiIHSkpExERERERsSMlZSIiIiIiInakpExERERERMSOlJSJiIiIiIjYkZIyERERERERO1JSJiIiIiIiYkdKykREREREROxISZmIiIiIiIgdKSkTERERERGxIyVlIiIiIiIidqSkTERERERExI6UlImIiIiIiNiRkjIRERERERE7UlImIiIiIiJiR0rKRERERERE7EhJmYiIiIiIiB0pKRMREREREbEjJWUiIiIiIiJ2pKRMRERERETEjpSUiYiIiIiI2JGSMhERERERETtSUiYiIiIiImJHSspERERERETsSEmZiIiIiIiIHSkpExERERERsSO7JmUvv/wycXFxeHl54eXlRUJCAmvXrjWXG4bBokWLCAkJwc3NjZEjR3Lw4EGbbdTW1jJ37lz8/Pzw8PBgypQpnD592qZMeXk5KSkpWK1
WrFYrKSkpVFRU2JTJzc1l8uTJeHh44Ofnx7x586irq7tuxy4iIiIiIgJ2Tso6duzIk08+yd69e9m7dy+jR4/m1ltvNROvp556iueee46XXnqJPXv2EBQURFJSEtXV1eY25s+fzwcffMDKlSvZtm0b586dY9KkSTQ2Npplpk2bRmZmJqmpqaSmppKZmUlKSoq5vLGxkeTkZM6fP8+2bdtYuXIl77//PgsWLPj6GkNERERERG5KDoZhGPauxGf5+Pjw9NNPc9999xESEsL8+fP5+c9/DjSPigUGBvK73/2OH/3oR1RWVuLv789bb73Fd7/7XQAKCgoICwtjzZo1jBs3jsOHDxMTE0N6ejrx8fEApKenk5CQwJEjR4iOjmbt2rVMmjSJvLw8QkJCAFi5ciXTp0+nuLgYLy+va6p7VVUVVquVysrKa17nm+jJj8/auwo3tUf6+tm7Cjc9XQP2p+vA/nQd2J+uA5Fvt9bkBt+Ye8oaGxtZuXIl58+fJyEhgezsbIqKihg7dqxZxmKxMGLECHbs2AFARkYG9fX1NmVCQkKIjY01y+zcuROr1WomZACDBw/GarXalImNjTUTMoBx48ZRW1tLRkbGVetcW1tLVVWVzUtERERERKQ1nOxdgQMHDpCQkEBNTQ3t27fngw8+ICYmxkyYAgMDbcoHBgZy6tQpAIqKinBxccHb2/uyMkVFRWaZgICAy/YbEBBgU+bz+/H29sbFxcUscyVLlizh8ccfvyy+du1a3N3dAUhKSqK0tJR9+/aZyxMSEnBycmLr1q1mLC4ujtDQUJt76jp37kzPnj1Zv349NTU1Zr3j4+PZuXMnZ882f4vp7u5OYmIiBw4cICcnx1w/OTmZnJwcm/vwRowYQU1NDbt27TJjAwYMwGq1snHjRgCimwxKOoRT5tWRbnnpOBhNAFR6BFDk25XIokwsdRcAuGjxJDewFyFnj+J5oRSApnZOHO84CL+KXHyrLt3fdyJ0IB41FQSXHjdjuYGxGDgQceaAGSvy6UK1uy9Rp3ebsTKvEEo6RNIlfy9Ojc33+p1z8yHfvzthxQdxr6kEoM7ZjezgvgSWnaTDuTPm+kfDh+BdVUBAxaX2yQ7ui3NDDR1LDpuxfP/u1Dq50bnwYzNW0iGCMq9QovN2wn8HlivbB1Dk05VOhZm41Le0hRe5gbGEnj1C+wtlADQ6OnMidCD+Fafwqco3t3k8dCDta8oJLj1hxk4F9gJg1aqdZqxPnz4EBgaybt06M9a1a1d69OjBunXrzPseg4KCGDhwINu3b6esrHnfnp6ejBw5kk8++YTc3FwAHBwcmDRpEidOnODw4UvHPXr0aKqrq9mzZ48ZGzRoEB4eHnz44YdmLCYmhi5durBq1SozFh4eTu/evdm0aRPnz58HwNfXlyFDhrB7927OnGl+HywWC2PHjuXQoUOcPHnSXH/8+PEUFBSwf/9+MzZs2DCamprMfgCgb9+++Pv7k5aWZsaioqLo3r07qamp1NfXAxAcHMyAAQPYtm0b5eXlNm2RmZlJXl4eAO3atSM5OZnjx49z5MgRc5uJiYm0v1BK6NmjZuy0fwz1Ti50Ksw0Y8XenShvH9R8XvxXRfsgzvh0pnPBPpwbmq/ZC65W8gJ60rHkMB4Xm+vT4GThZEh/Aspz8K4uMNc/1jEe64USAss+NWM5QXG0a2okvPjSdVzgG8VFVytd8veasbPWMEqtYUSd3kW7pubp21XufhT6dSOiaD+udecAqHXxICeoN8Glx/E6XwKA0c6RYx3j8a08jV9lrrnNkyH9caurJuTsMTOWFxBDYztnIos+MWNnvDtR2T6QbnnpZqzcM5hi7050LsjAuaEWgPOuHTgdEEPH4kN41FQAUO9k4dOQ/gSUZ+NdXWiu3xiXTG5uLllZWWZsxIgR1NbWkp5+aT/9+/fH29ubDRs2mLHo6Gi6devGmjVrzKnsoaGh9OvXj48++ojKyub+okOHDgwfPpyMjAwKCprfBycnJyZMmMDRo0c5duzScX8b+3KA7t27ExUVxerVq2lqau7Lw8LC6NOnD5s3bzZvB/D29mbYsGHs3buXwsLm9yEKR/XlX6Ev/2xbFPp25ZyrN1H5l/rYMq9QSjpE0DV/D46Nzf3XOXcf8v26E34mC7faKladdlBf/hX68srKSvbuvdRPxsfH4+rqypYtW8xYz549iYyMZPXq1WYsMjKSXr16sXHjRi5caD4v/Pz8SEhIYNeuXRQXFwPg6upKUlISBw8e5NNPL/XbEyZMID8/36Ythg8fTkNDAzt3Xvo3o1+/fvj6+rJ+/Xoz1q1bN3MWV0NDA9A84NC/f3+2bt1qPhPBarVyyy23sG/fPvLzm89JR0dHJk6cyLFjxzh69NK/YWPGjKG8vNxmoGHw4MFYLBabtoiNjSU8PJw1a9aYsU6dOhEbG8uGDRu4ePEiAP7+/gwePJj09HRKSpr/HXFzc2PMmDFkZWWRnZ1trj9x4sSbui8/dOgQ18ru0xfr6urIzc2loqKC999/n9dff50tW7ZQUVHB0KFDKSgoIDg42Cw/a9Ys8vLySE1NZcWKFcyYMYPa2lqbbSYlJdGlSxeWLl3K4sWLWbZsmc3JCc0dwMyZM3nkkUeYPXs2p06dsvngC+Di4sLy5cu5++67r1j32tpam31XVVURFham6YvylWi6iv3pGrA/XQf2p+vA/nQdiHy7faumL7q4uNC1a1cGDBjAkiVL6N27Ny+88AJBQUEAl41UFRcXm6NaQUFB1NXVmd+gXK1My7c7n1VSUmJT5vP7KS8vp76+/rIRtM+yWCzmkyNbXiIiIiIiIq1h96Ts8wzDoLa2lk6dOhEUFGQzpFtXV8eWLVsYMmQI0DzU6ezsbFOmsLCQrKwss0xCQgKVlZXs3n1p+sSuXbuorKy0KZOVlWVO2QBIS0vDYrHQv3//63q8IiIiIiJyc7PrPWWPPvooEyZMICwsjOrqalauXMnmzZtJTU3FwcGB+fPns3jxYqKiooiKimLx4sW4u7szbdo0oHk+7cyZM1mwYAG+vr74+PiwcOFCevXqxZgxYwDo0aMH48ePZ9asWbzyyisAzJ49m0mTJhEdHQ3A2LFjiYmJISUlhaeffpqysjIWLlzIrFmzNPolIiIiIiLXlV2TsjNnzpCSkkJhYSFWq5W4uDhSU1NJSkoC4OGHH+bixYvcf//9lJeXEx8fT1paGp6enuY2nn/+eZycnJg6dSoXL14kMTGRN998E0dHR7PMO++8w7x588ynNE6ZMoWXXnrJXO7o6Mjq1au5//77GTp0KG5ubkybNo1nnnnma2oJERERERG5Wdn9QR83Ev1OmbQF3dhtf7oG7E/Xgf3pOrA/XQci327fqgd9iIiIiIiI3MyUlImIiIiIiNiRkjIRERERERE7UlImIiIiIiJiR0rKRERERERE7EhJmYiIiIiIiB0pKRMREREREbEjJWUiIiIiIiJ2pKRMRERERETEjpSUiYiIiIiI2JGSMhERERERETtSUiYiIiIiImJHSspERERERETsSEmZiIiIiIiIHS
kpExERERERsSMlZSIiIiIiInakpExERERERMSOlJSJiIiIiIjYkZIyERERERERO1JSJiIiIiIiYkdKykREREREROxISZmIiIiIiIgdKSkTERERERGxIyVlIiIiIiIidqSkTERERERExI6UlImIiIiIiNiRkjIRERERERE7UlImIiIiIiJiR0rKRERERERE7EhJmYiIiIiIiB0pKRMREREREbEjJWUiIiIiIiJ2pKRMRERERETEjpSUiYiIiIiI2JGSMhERERERETtSUiYiIiIiImJHSspERERERETsSEmZiIiIiIiIHSkpExERERERsSMlZSIiIiIiInakpExERERERMSOlJSJiIiIiIjYkZIyERERERERO1JSJiIiIiIiYkdKykREREREROxISZmIiIiIiIgdKSkTERERERGxI7smZUuWLGHgwIF4enoSEBDAbbfdxtGjR23KTJ8+HQcHB5vX4MGDbcrU1tYyd+5c/Pz88PDwYMqUKZw+fdqmTHl5OSkpKVitVqxWKykpKVRUVNiUyc3NZfLkyXh4eODn58e8efOoq6u7LscuIiIiIiIC4PS/rNTU1MSJEycoLi6mqanJZtktt9xyzdvZsmULc+bMYeDAgTQ0NPCLX/yCsWPHcujQITw8PMxy48eP5y9/+Yv5t4uLi8125s+fz6pVq1i5ciW+vr4sWLCASZMmkZGRgaOjIwDTpk3j9OnTpKamAjB79mxSUlJYtWoVAI2NjSQnJ+Pv78+2bdsoLS3l3nvvxTAMXnzxxdY1kIiIiIiIyDVqdVKWnp7OtGnTOHXqFIZh2CxzcHCgsbHxmrfVkiC1+Mtf/kJAQAAZGRk2yZ3FYiEoKOiK26isrOSNN97grbfeYsyYMQC8/fbbhIWFsWHDBsaNG8fhw4dJTU0lPT2d+Ph4AF577TUSEhI4evQo0dHRpKWlcejQIfLy8ggJCQHg2WefZfr06TzxxBN4eXld83GJiIiIiIhcqy+dvpiWlmYzze/HP/4xAwYMICsri7KyMsrLy81XWVnZV6pMZWUlAD4+PjbxzZs3ExAQQLdu3Zg1axbFxcXmsoyMDOrr6xk7dqwZCwkJITY2lh07dgCwc+dOrFarmZABDB48GKvValMmNjbWTMgAxo0bR21tLRkZGVesb21tLVVVVTYvERERERGR1vjSkbKzZ88ydOhQVq9eTWRkJMePH+fvf/87Xbt2bdOKGIbBQw89xLBhw4iNjTXjEyZM4K677iIiIoLs7Gx+9atfMXr0aDIyMrBYLBQVFeHi4oK3t7fN9gIDAykqKgKgqKiIgICAy/YZEBBgUyYwMNBmube3Ny4uLmaZz1uyZAmPP/74ZfG1a9fi7u4OQFJSEqWlpezbt89cnpCQgJOTE1u3bjVjcXFxhIaGsnbtWjPWuXNnevbsyfr166mpqTHrHB8fz86dOzl79iwA7u7uJCYmcuDAAXJycsz1k5OTycnJ4eDBg2ZsxIgR1NTUsGvXLjM2YMAArFYrGzduBCC6yaCkQzhlXh3plpeOg9E8RbXSI4Ai365EFmViqbsAwEWLJ7mBvQg5exTPC6UANLVz4njHQfhV5OJbdenevhOhA/GoqSC49LgZyw2MxcCBiDMHzFiRTxeq3X2JOr3bjJV5hVDSIZIu+Xtxamy+z++cmw/5/t0JKz6Ie01zQl/n7EZ2cF8Cy07S4dwZc/2j4UPwriogoOJS+2QH98W5oYaOJYfNWL5/d2qd3Ohc+LEZK+kQQZlXKNF5O+G/o8OV7QMo8ulKp8JMXOpb2sKL3MBYQs8eof2F5i8oGh2dORE6EP+KU/hU5ZvbPB46kPY15QSXnjBjpwJ7AbBq1U4z1qdPHwIDA1m3bp0Z69q1Kz169GDdunXmPY9BQUEMHDiQ7du3m1+OeHp6MnLkSD755BNyc3OB5tHsSZMmceLECQ4fvnTco0ePprq6mj179pixQYMG4eHhwYcffmjGYmJi6NKlizntFyA8PJzevXuzadMmzp8/D4Cvry9Dhgxh9+7dnDnT/D5YLBZzevLJkyfN9cePH09BQQH79+83Y8OGDaOpqcn80gSgb9+++Pv7k5aWZsaioqLo3r07qamp1NfXAxAcHMyAAQPYtm0b5eXlNm2RmZlJXl4eAO3atSM5OZnjx49z5MgRc5uJiYm0v1BK6NlL97ee9o+h3smFToWZZqzYuxPl7YOaz4v/qmgfxBmfznQu2IdzQ/M1e8HVSl5ATzqWHMbjYnN9GpwsnAzpT0B5Dt7VBeb6xzrGY71QQmDZp2YsJyiOdk2NhBdfuo4LfKO46GqlS/5eM3bWGkapNYyo07to19Q8Y6HK3Y9Cv25EFO3Hte4cALUuHuQE9Sa49Dhe50sAMNo5cqxjPL6Vp/GrzDW3eTKkP2511YScPWbG8gJiaGznTGTRJ2bsjHcnKtsH0i0v3YyVewZT7N2JzgUZODfUAnDetQOnA2LoWHwIj5oKAOqdLHwa0p+A8my8qwvN9RvjksnNzSUrK8uMjRgxgtraWtLTL+2nf//+eHt7s2HDBjMWHR1Nt27dWLNmjTl7IzQ0lH79+vHRRx+ZXwB26NCB4cOHk5GRQUFB8/vg5OTEhAkTOHr0KMeOXTrub2NfDtC9e3eioqJYvXq1ebtBWFgYffr0YfPmzVRXVwPN/94NGzaMvXv3UljY/D5E4ai+/Cv05Z9ti0Lfrpxz9SYq/1IfW+YVSkmHCLrm78Gxsbn/OufuQ75fd8LPZOFWW8Wq0w7qy79CX15ZWcnevZf6yfj4eFxdXdmyZYsZ69mzJ5GRkaxevdqMRUZG0qtXLzZu3MiFC83nhZ+fHwkJCezatcscHHB1dSUpKYmDBw/y6aeX+u0JEyaQn59v0xbDhw+noaGBnTsv/ZvRr18/fH19Wb9+vRnr1q0b0dHRrF27loaGBqB5sKF///5s3brVHCixWq3ccsst7Nu3j/z85nPS0dGRiRMncuzYMZtnNIwZM4by8nKbQYbBgwdjsVhs2iI2Npbw8HDWrFljxjp16kRsbCwbNmzg4sWLAPj7+zN48GDS09MpKWn+d8TNzY0xY8aQlZVFdna2uf7EiRNv6r780KFDXCsH4/NzEK8gLS2NhQsXsn//fkaPHs3DDz/M+PHjr3kn12LOnDmsXr2abdu20bFjx6uWKywsJCIigpUrV3LHHXewYsUKZsyYQW1trU25pKQkunTpwtKlS1m8eDHLli277CEiUVFRzJw5k0ceeYTZs2dz6tQpmw+/0Hz/2vLly7n77rsvq0ttba3NfquqqggLC6OysvJbPd3xyY/P2rsKN7VH+vrZuwo3PV0D9qfrwP50HdifrgORb7eqqiqsVus15QbXdE/Z2LFjzSx+7ty5LFiwgKKiInr16oWzs7NN2bi4uFZXeO7cufz73//mo48++sKEDJq/OYmIiOD48eZv6IKCgqirq6O8vNxmtKy4uJghQ4aYZVq+4fmskpISc3QsKCjI5htHaH5iY319/WUjaC0sFgsWi+XaD1RERERERORzrvmR+C2JyZ133snhw4e57777GDhwIH369
KFv377mf1vDMAweeOAB/vGPf7Bp0yY6der0peuUlpaSl5dHcHAw0Dzc6ezsbDP0W1hYSFZWlpmUJSQkUFlZye7dl6ZQ7Nq1i8rKSpsyWVlZ5rQNaB4htFgs9O/fv1XHJSIiIiIicq1a/fTFz84T/armzJnDihUr+Ne//oWnp6d575bVasXNzY1z586xaNEi7rzzToKDg8nJyeHRRx/Fz8+P22+/3Sw7c+ZMFixYgK+vLz4+PixcuJBevXqZT2Ps0aMH48ePZ9asWbzyyitA8yPxJ02aRHR0NNA8GhgTE0NKSgpPP/00ZWVlLFy4kFmzZn2rpyKKiIiIiMg3W6uTsoiIiDbb+csvvwzAyJEjbeJ/+ctfmD59Oo6Ojhw4cIDly5dTUVFBcHAwo0aN4r333sPT09Ms//zzz+Pk5MTUqVO5ePEiiYmJvPnmm+ZvlAG88847zJs3z3xK45QpU3jppZfM5Y6OjqxevZr777+foUOH4ubmxrRp03jmmWfa7HhFREREREQ+75oe9PF5b731FkuXLiU7O5udO3cSERHB73//ezp16sStt956Per5rdCam/m+yXRzt33pxm770zVgf7oO7E/Xgf3pOhD5dmtNbnDN95S1ePnll3nooYeYOHEiFRUV5iMqO3TowO9///v/qcIiIiIiIiI3q1YnZS+++CKvvfYav/jFL2ymBw4YMIADBw58wZoiIiIiIiLyea1OyrKzs6/4lEWLxWL+2KCIiIiIiIhcm1YnZZ06dSIzM/Oy+Nq1a4mJiWmLOomIiIiIiNw0Wv30xZ/97GfMmTOHmpoaDMNg9+7dvPvuuyxZsoTXX3/9etRRRERERETkhtXqpGzGjBk0NDTw8MMPc+HCBaZNm0ZoaCgvvPACd9999/Woo4iIiIiIyA2r1UkZwKxZs5g1axZnz56lqamJgICAtq6XiIiIiIjITeF/Sspa+Pnp9zNERERERES+imtKyvr168fGjRvx9vamb9++ODg4XLXsvn372qxyIiIiIiIiN7prSspuvfVWLBYLALfddtv1rI+IiIiIiMhN5ZqSsscee+yK/y8iIiIiIiJfzf98T9nevXs5fPgwDg4O9OjRg/79+7dlvURERERERG4KrU7KTp8+zT333MP27dvp0KEDABUVFQwZMoR3332XsLCwtq6jiIiIiIjIDatda1e47777qK+v5/Dhw5SVlVFWVsbhw4cxDIOZM2dejzqKiIiIiIjcsFo9UrZ161Z27NhBdHS0GYuOjubFF19k6NChbVo5ERERERGRG12rR8rCw8Opr6+/LN7Q0EBoaGibVEpERERERORm0eqk7KmnnmLu3Lns3bsXwzCA5od+/PSnP+WZZ55p8wqKiIiIiIjcyFo9fXH69OlcuHCB+Ph4nJyaV29oaMDJyYn77ruP++67zyxbVlbWdjUVERERERG5AbU6Kfv9739/HaohIiIiIiJyc2p1Unbvvfdej3qIiIiIiIjclFp9T9lnXbx4kaqqKpsXgGEYPPzww21SQRERERERkRtZq5Oy8+fP88ADDxAQEED79u3x9va2ef3tb3+jvLyct99++3rUV0RERERE5IbS6qTs4YcfZtOmTfzpT3/CYrHw+uuv8/jjjxMSEsKyZctIT0+nS5cujB8//nrUV0RERERE5IbS6nvKVq1axfLlyxk5ciT33Xcfw4cPp2vXrkRERLBixQrWrFnDs88+ez3qKiIiIiIicsNp9UhZWVkZnTp1AsDLy8t87P2wYcP46KOP2rZ2IiIiIiIiN7hWJ2WdO3cmJycHgJiYGP76178CzSNoHTp0aMu6iYiIiIiI3PBanZTNmDGDTz75BID/+7//M+8te/DBB/nZz37W5hUUERERERG5kbX6nrIHH3zQ/P9Ro0Zx5MgR9u7dS5cuXejdu3ebVk5ERERERORG1+qRsuXLl1NbW2v+HR4ezh133EGPHj1Yvnx5m1ZORERERETkRvc/TV+srKy8LF5dXc2MGTPapFIiIiIiIiI3i1YnZYZh4ODgcFn89OnTWK3WNqmUiIiIiIjIzeKa7ynr27cvDg4OODg4kJiYiJPTpVUbGxvJzs7WD0aLiIiIiIi00jUnZbfddhsAmZmZjBs3jvbt25vLXFxciIyM5M4772zzCoqIiIiIiNzIrjkpe+yxxwCIjIzk7rvvxmKxXLdKiYiIiIiI3CxafU/Z6NGjKSkpMf/evXs38+fP59VXX23TiomIiIiIiNwMWp2UTZs2jQ8//BCAoqIixowZw+7du3n00Uf59a9/3eYVFBERERERuZG1OinLyspi0KBBAPz1r3+lV69e7NixgxUrVvDmm2+2df1ERERERERuaK1Oyurr6837yTZs2MCUKVMA6N69O4WFhW1bOxERERERkRtcq5Oynj17snTpUrZu3cr69evNx+AXFBTg6+vb5hUUERERERG5kbU6Kfvd737HK6+8wsiRI7nnnnvo3bs3AP/+97/NaY0iIiIiIiJyba75kfgtRo4cydmzZ6mqqsLb29uMz549G3d39zatnIiIiIiIyI2u1SNlixYt4vTp0zYJGTT/fllAQECbVUxERERERORm0OqkbNWqVXTp0oXExERWrFhBTU3N9aiXiIiIiIjITaHVSVlGRgb79u0jLi6OBx98kODgYH7yk5+wZ8+e61E/ERERERGRG1qrkzKAuLg4nn/+efLz8/nzn/9Mfn4+Q4cOpVevXrzwwgtUVla2dT1FRERERERuSP9TUtaiqamJuro6amtrMQwDHx8fXn75ZcLCwnjvvffaqo4iIiIiIiI3rP8pKcvIyOCBBx4gODiYBx98kL59+3L48GG2bNnCkSNHeOyxx5g3b96XbmfJkiUMHDgQT09PAgICuO222zh69KhNGcMwWLRoESEhIbi5uTFy5EgOHjxoU6a2tpa5c+fi5+eHh4cHU6ZM4fTp0zZlysvLSUlJwWq1YrVaSUlJoaKiwqZMbm4ukydPxsPDAz8/P+bNm0ddXd3/0kQiIiIiIiLXpNVJWVxcHIMHDyY7O5s33niDvLw8nnzySbp27WqW+cEPfkBJScmXbmvLli3MmTOH9PR01q9fT0NDA2PHjuX8+fNmmaeeeornnnuOl156iT179hAUFERSUhLV1dVmmfnz5/PBBx+wcuVKtm3bxrlz55g0aRKNjY1mmWnTppGZmUlqaiqpqalkZmaSkpJiLm9sbCQ5OZnz58+zbds2Vq5cyfvvv8+CBQta20QiIiIiIiLXzMEwDKM1K/zmN7/hvvvuIzQ0tM0rU1JSQkBAAFu2bOGWW27BMAxCQkKYP38+P//5z4HmUbHAwEB+97vf8aMf/YjKykr8/f156623+O53vwtAQUEBYWFhrFmzhnHjxnH48GFiYmJIT08nPj4egPT0dBISEjhy5AjR0dGsXbuWSZMmkZeXR0hICAArV65k+vTpFBcX4+Xl9aX1r6qqwmq1UllZeU3lv6me/PisvatwU3ukr5+9q3DT0zVgf7oO7E/Xgf3pOhD5dmtNbtDqkbK77777uiRkgPmAEB8fHwCys7MpKipi
7NixZhmLxcKIESPYsWMH0DyVsr6+3qZMSEgIsbGxZpmdO3ditVrNhAxg8ODBWK1WmzKxsbFmQgYwbtw4amtrycjIuGJ9a2trqaqqsnmJiIiIiIi0hlNrV4iOjiY4OJgRI0YwYsQIRo4cSXR09FeuiGEYPPTQQwwbNozY2FgAioqKAAgMDLQpGxgYyKlTp8wyLi4ul/2YdWBgoLl+UVHRFX/YOiAgwKbM5/fj7e2Ni4uLWebzlixZwuOPP35ZfO3atbi7uwOQlJREaWkp+/btM5cnJCTg5OTE1q1bzVhcXByhoaGsXbvWjHXu3JmePXuyfv168/fgAgICiI+PZ+fOnZw92/wtpru7O4mJiRw4cICcnBxz/eTkZHJycmzuwRsxYgQ1NTXs2rXLjA0YMACr1crGjRsBiG4yKOkQTplXR7rlpeNgNAFQ6RFAkW9XIosysdRdAOCixZPcwF6EnD2K54VSAJraOXG84yD8KnLxrbp0b9+J0IF41FQQXHrcjOUGxmLgQMSZA2asyKcL1e6+RJ3ebcbKvEIo6RBJl/y9ODU23+d3zs2HfP/uhBUfxL2mOaGvc3YjO7gvgWUn6XDujLn+0fAheFcVEFBxqX2yg/vi3FBDx5LDZizfvzu1Tm50LvzYjJV0iKDMK5TovJ3w34HlyvYBFPl0pVNhJi71LW3hRW5gLKFnj9D+QhkAjY7OnAgdiH/FKXyq8s1tHg8dSPuacoJLT5ixU4G9AFi1aqcZ69OnD4GBgaxbt86Mde3alR49erBu3TrznsegoCAGDhzI9u3bKStr3renpycjR47kk08+ITc3FwAHBwcmTZrEiRMnOHz40nGPHj2a6upqm5+2GDRoEB4eHnz44YdmLCYmhi5durBq1SozFh4eTu/evdm0aZM59djX15chQ4awe/duzpxpfh8sFgtjx47l0KFDnDx50lx//PjxFBQUsH//fjM2bNgwmpqazC9NAPr27Yu/vz9paWlmLCoqiu7du5Oamkp9fT0AwcHBDBgwgG3btlFeXm7TFpmZmeTl5QHQrl07kpOTOX78OEeOHDG3mZiYSPsLpYSevXR/62n/GOqdXOhUmGnGir07Ud4+qPm8+K+K9kGc8elM54J9ODc0X7MXXK3kBfSkY8lhPC4216fBycLJkP4ElOfgXV1grn+sYzzWCyUEln1qxnKC4mjX1Eh48aXruMA3iouuVrrk7zVjZ61hlFrDiDq9i3ZNzVO3q9z9KPTrRkTRflzrzgFQ6+JBTlBvgkuP43W+eZq50c6RYx3j8a08jV9lrrnNkyH9caurJuTsMTOWFxBDYztnIos+MWNnvDtR2T6QbnnpZqzcM5hi7050LsjAuaEWgPOuHTgdEEPH4kN41FQAUO9k4dOQ/gSUZ+NdXWiu3xiXTG5uLllZWWZsxIgR1NbWkp5+aT/9+/fH29ubDRs2mLHo6Gi6devGmjVrzGnsoaGh9OvXj48++sj8ArBDhw4MHz6cjIwMCgqa3wcnJycmTJjA0aNHOXbs0nF/G/tygO7duxMVFcXq1atpamruy8PCwujTpw+bN282bwXw9vZm2LBh7N27l8LC5vchCkf15V+hL/9sWxT6duWcqzdR+Zf62DKvUEo6RNA1fw+Ojc391zl3H/L9uhN+Jgu32ipWnXZQX/4V+vLKykr27r3UT8bHx+Pq6sqWLVvMWM+ePYmMjGT16tVmLDIykl69erFx40YuXGg+L/z8/EhISGDXrl0UFxcD4OrqSlJSEgcPHuTTTy/12xMmTCA/P9+mLYYPH05DQwM7d176N6Nfv374+vqyfv16M9atWzdzBldDQwPQPNjQv39/tm7daj4PwWq1csstt7Bv3z7y85vPSUdHRyZOnMixY8dsntEwZswYysvLbQYZBg8ejMVisWmL2NhYwsPDWbNmjRnr1KkTsbGxbNiwgYsXLwLg7+/P4MGDSU9PN29XcnNzY8yYMWRlZZGdnW2uP3HixJu6Lz906BDXqtXTF8+cOcOmTZvYsmULmzdv5tixYwQGBpoJ2o9//OPWbM40Z84cVq9ezbZt2+jYsSMAO3bsYOjQoRQUFBAcHGyWnTVrFnl5eaSmprJixQpmzJhBbW2tzfaSkpLo0qULS5cuZfHixSxbtuyyh4hERUUxc+ZMHnnkEWbPns2pU6dsPvwCuLi4sHz5cu6+++7L6lxbW2uz36qqKsLCwjR9Ub4STVexP10D9qfrwP50HdifrgORb7frOn0xMDCQe+65h6VLl3LkyBGOHTvGuHHjeP/995kzZ87/VOG5c+fy73//mw8//NBMyKD523/gspGq4uJic1QrKCiIuro681uUq5Vp+Ybns0pKSmzKfH4/5eXl1NfXXzaC1sJiseDl5WXzEhERERERaY1WJ2Xnzp0jNTWVRx55hISEBHr16sX+/fuZO3cu//jHP1q1LcMweOCBB/jHP/7Bpk2b6NSpk83yTp06ERQUZDOsW1dXx5YtWxgyZAjQPNzp7OxsU6awsJCsrCyzTEJCApWVlezefWkKxa5du6isrLQpk5WVZU7bAEhLS8NisdC/f/9WHZeIiIiIiMi1avU9Zd7e3vj4+JCSksIvf/lLhg0bhtVq/Z92PmfOHFasWMG//vUvPD09zZEqq9WKm5sbDg4OzJ8/n8WLFxMVFUVUVBSLFy/G3d2dadOmmWVnzpzJggUL8PX1xcfHh4ULF9KrVy/GjBkDQI8ePRg/fjyzZs3ilVdeAWD27NlMmjTJvB9u7NixxMTEkJKSwtNPP01ZWRkLFy5k1qxZGgETEREREZHrptVJWXJyMtu2beOtt94iLy+P3NxcRo4cSY8ePVq985dffhmAkSNH2sT/8pe/MH36dAAefvhhLl68yP333095eTnx8fGkpaXh6elpln/++edxcnJi6tSpXLx4kcTERN58800cHR3NMu+88w7z5s0zn9I4ZcoUXnrpJXO5o6Mjq1ev5v7772fo0KG4ubkxbdo0nnnmmVYfl4iIiIiIyLVq9YM+Wuzfv58tW7awZcsWtm7dioODAyNHjmTlypVtXcdvDf1OmbQF3dhtf7oG7E/Xgf3pOrA/XQci326tyQ1aPVLWIi4ujsbGRurr66mtrSU1NbXV95SJiIiIiIjc7Fr9oI/nn3+eW2+9FR8fHwYNGsS7775LdHQ0H3zwgfl8fhEREREREbk2rR4pe+eddxg5ciSzZs3illtu+VZP0xMREREREbG3Vidln/1ldBEREREREflqWj19UURERERERNqOkjIRERERERE7UlImIiIiIiJiR0rKRERERERE7EhJmYiIiIiIiB39Tz8e/fe//52//vWv5ObmUldXZ7Ns3759bVIxERERERGRm0GrR8r+8Ic/MGPGDAICAvj4448ZNGgQvr6+fPrpp0yYMOF61FFEREREROSG1eqk7E9/+hOvvvoqL730Ei4uLjz88MOsX7+eefPmUVlZeT3qKCIiIiIicsNqdVKWm5vLkCFDAHBzc6O6uhqAlJQU3n333batnYiIiIiIyA2u1UlZUFAQpaWlAERERJCeng5AdnY
2hmG0be1ERERERERucK1OykaPHs2qVasAmDlzJg8++CBJSUl897vf5fbbb2/zCoqIiIiIiNzIWv30xVdffZWmpiYAfvzjH+Pj48O2bduYPHkyP/7xj9u8giIiIiIiIjeyVidl7dq1o127SwNsU6dOZerUqW1aKRERERERkZvF//Q7ZeXl5bzxxhscPnwYBwcHevTowYwZM/Dx8Wnr+omIiIiIiNzQWn1P2ZYtW+jUqRN/+MMfKC8vp6ysjD/84Q906tSJLVu2XI86ioiIiIiI3LBaPVI2Z84cpk6dyssvv4yjoyMAjY2N3H///cyZM4esrKw2r6SIiIiIiMiNqtUjZSdPnmTBggVmQgbg6OjIQw89xMmTJ9u0ciIiIiIiIje6Vidl/fr14/Dhw5fFDx8+TJ8+fdqiTiIiIiIiIjeNa5q+uH//fvP/582bx09/+lNOnDjB4MGDAUhPT+ePf/wjTz755PWppYiIiIiIyA3qmpKyPn364ODggGEYZuzhhx++rNy0adP47ne/23a1ExERERERucFdU1KWnZ19veshIiIiIiJyU7qmpCwiIuJ610NEREREROSm1OoHfTg6OjJq1CjKysps4mfOnLF5IqOIiIiIiIh8uVYnZYZhUFtby4ABAy77TbLP3nMmIiIiIiIiX67VSZmDgwPvv/8+kydPZsiQIfzrX/+yWSYiIiIiIiLX7n8aKXN0dOSFF17gmWee4bvf/S6//e1vNUomIiIiIiLyP7imB31czezZs+nWrRvf+c532LJlS1vVSURERERE5KbR6pGyiIgImwd6jBw5kvT0dE6fPt2mFRMREREREbkZtHqk7Eq/Wda1a1c+/vhjzpw50yaVEhERERERuVm0eqRsz5497Nq167L4J598QklJSZtUSkRERERE5GbR6qRszpw55OXlXRbPz89nzpw5bVIpERERERGRm0Wrk7JDhw7Rr1+/y+J9+/bl0KFDbVIpERERERGRm0WrkzKLxXLFe8cKCwtxcvpKD3MUERERERG56bQ6KUtKSuL//u//qKysNGMVFRU8+uijJCUltWnlREREREREbnStHtp69tlnueWWW4iIiKBv374AZGZmEhgYyFtvvdXmFRQREREREbmRtTopCw0NZf/+/bzzzjt88sknuLm5MWPGDO655x6cnZ2vRx1FRERERERuWP/TTWAeHh7Mnj27resiIiIiIiJy07mmpOzf//43EyZMwNnZmX//+99fWHbKlCltUjEREREREZGbwTUlZbfddhtFRUUEBARw2223XbWcg4MDjY2NbVU3ERERERGRG941JWVNTU1X/H8RERERERH5alr9SPyrycvL47777murzYmIiIiIiNwU2iwpKysrY9myZW21ORERERERkZtCmyVl/6uPPvqIyZMnExISgoODA//85z9tlk+fPh0HBweb1+DBg23K1NbWMnfuXPz8/PDw8GDKlCmcPn3apkx5eTkpKSlYrVasVispKSlUVFTYlMnNzWXy5Ml4eHjg5+fHvHnzqKurux6HLSIiIiIiAnwDkrLz58/Tu3dvXnrppauWGT9+PIWFheZrzZo1Nsvnz5/PBx98wMqVK9m2bRvnzp1j0qRJNg8dmTZtGpmZmaSmppKamkpmZiYpKSnm8sbGRpKTkzl//jzbtm1j5cqVvP/++yxYsKDtD1pEREREROS//qffKWtLEyZMYMKECV9YxmKxEBQUdMVllZWVvPHGG7z11luMGTMGgLfffpuwsDA2bNjAuHHjOHz4MKmpqaSnpxMfHw/Aa6+9RkJCAkePHiU6Opq0tDQOHTpEXl4eISEhADz77LNMnz6dJ554Ai8vrzY8ahERERERkWbXnJTdcccdX7j881MB29LmzZsJCAigQ4cOjBgxgieeeIKAgAAAMjIyqK+vZ+zYsWb5kJAQYmNj2bFjB+PGjWPnzp1YrVYzIQMYPHgwVquVHTt2EB0dzc6dO4mNjTUTMoBx48ZRW1tLRkYGo0aNuqxetbW11NbWmn9XVVVdj8MXEREREZEb2DUnZVar9UuX/+AHP/jKFfq8CRMmcNdddxEREUF2dja/+tWvGD16NBkZGVgsFoqKinBxccHb29tmvcDAQIqKigDM31j7vICAAJsygYGBNsu9vb1xcXExy3zekiVLePzxxy+Lr127Fnd3dwCSkpIoLS1l37595vKEhAScnJzYunWrGYuLiyM0NJS1a9easc6dO9OzZ0/Wr19PTU2NWef4+Hh27tzJ2bNnAXB3dycxMZEDBw6Qk5Njrp+cnExOTg4HDx40YyNGjKCmpoZdu3aZsQEDBmC1Wtm4cSMA0U0GJR3CKfPqSLe8dByM5p9BqPQIoMi3K5FFmVjqLgBw0eJJbmAvQs4exfNCKQBN7Zw43nEQfhW5+FZdurfvROhAPGoqCC49bsZyA2MxcCDizAEzVuTThWp3X6JO7zZjZV4hlHSIpEv+Xpwam+/zO+fmQ75/d8KKD+JeUwlAnbMb2cF9CSw7SYdzZ8z1j4YPwbuqgICKS+2THdwX54YaOpYcNmP5/t2pdXKjc+HHZqykQwRlXqFE5+0Ew2hui/YBFPl0pVNhJi71LW3hRW5gLKFnj9D+QhkAjY7OnAgdiH/FKXyq8s1tHg8dSPuacoJLT5ixU4G9AFi1aqcZ69OnD4GBgaxbt86Mde3alR49erBu3TrznsegoCAGDhzI9u3bKStr3renpycjR47kk08+ITc3F2j+LcFJkyZx4sQJDh++dNyjR4+murqaPXv2mLFBgwbh4eHBhx9+aMZiYmLo0qULq1atMmPh4eH07t2bTZs2cf78eQB8fX0ZMmQIu3fv5syZ5vfBYrEwduxYDh06xMmTJ831x48fT0FBAfv37zdjw4YNo6mpiR07dpixvn374u/vT1pamhmLioqie/fupKamUl9fD0BwcDADBgxg27ZtlJeX27RFZmYmeXl5ALRr147k5GSOHz/OkSNHzG0mJibS/kIpoWePmrHT/jHUO7nQqTDTjBV7d6K8fVDzefFfFe2DOOPTmc4F+3BuaL5mL7hayQvoSceSw3hcbK5Pg5OFkyH9CSjPwbu6wFz/WMd4rBdKCCz71IzlBMXRrqmR8OJL13GBbxQXXa10yd9rxs5awyi1hhF1ehftmpqnble5+1Ho142Iov241p0DoNbFg5yg3gSXHsfrfAkARjtHjnWMx7fyNH6VueY2T4b0x62umpCzx8xYXkAMje2ciSz6xIyd8e5EZftAuuWlm7Fyz2CKvTvRuSAD54bmL6/Ou3bgdEAMHYsP4VFTAUC9k4VPQ/oTUJ6Nd3WhuX5jXDK5ublkZWWZsREjRlBbW0t6+qX99O/fH29vbzZs2GDGoqOj6datG2vWrDGnsYeGhtKvXz8++ugjKiub+4sOHTowfPhwMjIyKChofh+cnJyYMGECR48e5dixS8f9bezLAbp3705UVBSrV682f9ImLCyMPn36sHnzZqqrq4Hmf++GDRvG3r17KSxsfh+icFRf/hX68s+2RaFvV865ehOVf6mPLfMKpaRDBF3z9+DY2N
x/nXP3Id+vO+FnsnCrrWLVaQf15V+hL6+srGTv3kv9ZHx8PK6urmzZssWM9ezZk8jISFavXm3GIiMj6dWrFxs3buTChebzws/Pj4SEBHbt2kVxcTEArq6uJCUlcfDgQT799FK/PWHCBPLz823aYvjw4TQ0NLBz56V/M/r164evry/r1683Y926dSM6Opq1a9fS0NAANA829O/fn61bt5qDIFarlVtuuYV9+/aRn998Tjo6OjJx4kSOHTvG0aOX/g0bM2YM5eXlZGRkmLHBgwdjsVhs2iI2Npbw8HCb24Q6depEbGwsGzZs4OLFiwD4+/szePBg0tPTKSlp/nfEzc2NMWPGkJWVRXZ2trn+xIkTb+q+/NChQ1wrB8P4b8/0DeDg4MAHH3zwhT9QXVhYSEREBCtXruSOO+5gxYoVzJgxw2bECpobvUuXLixdupTFixezbNkymxMUmjuBmTNn8sgjjzB79mxOnTpl8+EXwMXFheXLl3P33XdfVpcrjZSFhYVRWVn5rZ7u+OTHZ+1dhZvaI3397F2Fm56uAfvTdWB/ug7sT9eByLdbVVUVVqv1mnIDuz/oo7WCg4OJiIjg+PHmb+iCgoKoq6szv0VpUVxcbI58BQUFmd/wfFZJSYlNmc+PiJWXl1NfX3/ZCFoLi8WCl5eXzUtERERERKQ1vnVJWWlpKXl5eQQHBwPNw53Ozs42Q7+FhYVkZWUxZMgQoHlYsrKykt27L02h2LVrF5WVlTZlsrKyzGkbAGlpaVgsFvr37/91HJqIiIiIiNyE7P70xXPnznHixKW52NnZ2WRmZuLj44OPjw+LFi3izjvvJDg4mJycHB599FH8/Py4/fbbgeY5tTNnzmTBggX4+vri4+PDwoUL6dWrl/k0xh49ejB+/HhmzZrFK6+8AsDs2bOZNGkS0dHRAIwdO5aYmBhSUlJ4+umnKSsrY+HChcyaNUsjYCIiIiIict3YPSnbu3evzZMNH3roIQDuvfdeXn75ZQ4cOMDy5cupqKggODiYUaNG8d577+Hp6Wmu8/zzz+Pk5MTUqVO5ePEiiYmJvPnmmzg6Oppl3nnnHebNm2c+pXHKlCk2v43m6OjI6tWruf/++xk6dChubm5MmzaNZ5555no3gYiIiIiI3MS+UQ/6+LZrzc1832S6udu+dGO3/ekasD9dB/an68D+dB2IfLvd0A/6EBERERERuZEoKRMREREREbEjJWUiIiIiIiJ2pKRMRERERETEjpSUiYiIiIiI2JGSMhERERERETtSUiYiIiIiImJHSspERERERETsSEmZiIiIiIiIHSkpExERERERsSMlZSIiIiIiInakpExERERERMSOlJSJiIiIiIjYkZIyERERERERO3KydwVERERERL6Jnvz4rL2rcNN7pK+fvavwtdBImYiIiIiIiB0pKRMREREREbEjJWUiIiIiIiJ2pKRMRERERETEjpSUiYiIiIiI2JGSMhERERERETtSUiYiIiIiImJHSspERERERETsSEmZiIiIiIiIHSkpExERERERsSMlZSIiIiIiInakpExERERERMSOlJSJiIiIiIjYkZIyERERERERO1JSJiIiIiIiYkdKykREREREROxISZmIiIiIiIgdKSkTERERERGxIyVlIiIiIiIidqSkTERERERExI6UlImIiIiIiNiRkjIRERERERE7UlImIiIiIiJiR0rKRERERERE7EhJmYiIiIiIiB0pKRMREREREbEjJWUiIiIiIiJ2pKRMRERERETEjpSUiYiIiIiI2JGSMhERERERETtSUiYiIiIiImJHdk/KPvroIyZPnkxISAgODg7885//tFluGAaLFi0iJCQENzc3Ro4cycGDB23K1NbWMnfuXPz8/PDw8GDKlCmcPn3apkx5eTkpKSlYrVasVispKSlUVFTYlMnNzWXy5Ml4eHjg5+fHvHnzqKurux6HLSIiIiIiAnwDkrLz58/Tu3dvXnrppSsuf+qpp3juued46aWX2LNnD0FBQSQlJVFdXW2WmT9/Ph988AErV65k27ZtnDt3jkmTJtHY2GiWmTZtGpmZmaSmppKamkpmZiYpKSnm8sbGRpKTkzl//jzbtm1j5cqVvP/++yxYsOD6HbyIiIiIiNz0nOxdgQkTJjBhwoQrLjMMg9///vf84he/4I477gBg2bJlBAYGsmLFCn70ox9RWVnJG2+8wVtvvcWYMWMAePvttwkLC2PDhg2MGzeOw4cPk5qaSnp6OvHx8QC89tprJCQkcPToUaKjo0lLS+PQoUPk5eUREhICwLPPPsv06dN54okn8PLy+hpaQ0REREREbjZ2Hyn7ItnZ2RQVFTF27FgzZrFYGDFiBDt27AAgIyOD+vp6mzIhISHExsaaZXbu3InVajUTMoDBgwdjtVptysTGxpoJGcC4ceOora0lIyPjivWrra2lqqrK5iUiIiIiItIadh8p+yJFRUUABAYG2sQDAwM5deqUWcbFxQVvb+/LyrSsX1RUREBAwGXbDwgIsCnz+f14e3vj4uJilvm8JUuW8Pjjj18WX7t2Le7u7gAkJSVRWlrKvn37zOUJCQk4OTmxdetWMxYXF0doaChr1641Y507d6Znz56sX7+empoas87x8fHs3LmTs2fPAuDu7k5iYiIHDhwgJyfHXD85OZmcnBybe/BGjBhBTU0Nu3btMmMDBgzAarWyceNGAKKbDEo6hFPm1ZFueek4GE0AVHoEUOTblciiTCx1FwC4aPEkN7AXIWeP4nmhFICmdk4c7zgIv4pcfKsu3dt3InQgHjUVBJceN2O5gbEYOBBx5oAZK/LpQrW7L1Gnd5uxMq8QSjpE0iV/L06Nzff5nXPzId+/O2HFB3GvqQSgztmN7OC+BJadpMO5M+b6R8OH4F1VQEDFpfbJDu6Lc0MNHUsOm7F8/+7UOrnRufBjM1bSIYIyr1Ci83aCYTS3RfsAiny60qkwE5f6lrbwIjcwltCzR2h/oQyARkdnToQOxL/iFD5V+eY2j4cOpH1NOcGlJ8zYqcBeAKxatdOM9enTh8DAQNatW2fGunbtSo8ePVi3bp15z2NQUBADBw5k+/btlJU179vT05ORI0fyySefkJubC4CDgwOTJk3ixIkTHD586bhHjx5NdXU1e/bsMWODBg3Cw8ODDz/80IzFxMTQpUsXVq1aZcbCw8Pp3bs3mzZt4vz58wD4+voyZMgQdu/ezZkzze+DxWJh7NixHDp0iJMnT5rrjx8/noKCAvbv32/Ghg0bRlNTk/mlCUDfvn3x9/cnLS3NjEVFRdG9e3dSU1Opr68HIDg4mAEDBrBt2zbKy8tt2iIzM5O8vDwA2rVrR3JyMsePH+fIkSPmNhMTE2l/oZTQs0fN2Gn/GOqdXOhUmGnGir07Ud4+qPm8+K+K9kGc8elM54J9ODc0X7MXXK3kBfSkY8lhPC4216fBycLJkP4ElOfgXV1grn+sYzzWCyUEln1qxnKC4mjX1Eh48aXruMA3iouuVrrk7zVjZ61hlFrDiDq9i3ZNzVO3q9z9KPTrRkTRflzrzgFQ6
+JBTlBvgkuP43W+BACjnSPHOsbjW3kav8pcc5snQ/rjVldNyNljZiwvIIbGds5EFn1ixs54d6KyfSDd8tLNWLlnMMXenehckIFzQy0A5107cDogho7Fh/CoqQCg3snCpyH9CSjPxru60Fy/MS6Z3NxcsrKyzNiIESOora0lPf3Sfvr374+3tzcbNmwwY9HR0XTr1o01a9aY09hDQ0Pp168fH330EZWVzf1Fhw4dGD58OBkZGRQUNL8PTk5OTJgwgaNHj3Ls2KXj/jb25QDdu3cnKiqK1atX09TU3JeHhYXRp08fNm/ebN4K4O3tzbBhw9i7dy+Fhc3vQxSO6su/Ql/+2bYo9O3KOVdvovIv9bFlXqGUdIiga/4eHBub+69z7j7k+3Un/EwWbrVVrDrtoL78K/TllZWV7N17qZ+Mj4/H1dWVLVu2mLGePXsSGRnJ6tWrzVhkZCS9evVi48aNRJ9rbgv15f9bX34sbDDWc2cILM/+TFv0xrGpnrDiQ5fawq8bF1086VJwaSDkrDWcUmvHb3VffujQpWP8Mg6G8d+e6RvAwcGBDz74gNtuuw2AHTt2MHToUAoKCggODjbLzZo1i7y8PFJTU1mxYgUzZsygtrbWZltJSUl06dKFpUuXsnjxYpYtW8bRo0dtykRFRTFz5kweeeQRZs+ezalTp2w+/AK4uLiwfPly7r777svqW1tba7PfqqoqwsLCqKys/FZPd3zy47P2rsJN7ZG+fvauwk1P14D96TqwP10H9qfrwP50Hdjft/k6qKqqwmq1XlNu8I2evhgUFARw2UhVcXGxOaoVFBREXV2d+S3K1cq0fMPzWSUlJTZlPr+f8vJy6uvrLxtBa2GxWPDy8rJ5iYiIiIiItMY3Oinr1KkTQUFBrF+/3ozV1dWxZcsWhgwZAjRPXXF2drYpU1hYSFZWllkmISGByspKdu++NIVi165dVFZW2pTJysoyp20ApKWlYbFY6N+//3U9ThERERERuXnZ/Z6yc+fOceLEpbnY2dnZZGZm4uPjQ3h4OPPnz2fx4sVERUURFRXF4sWLcXd3Z9q0aQBYrVZmzpzJggUL8PX1xcfHh4ULF9KrVy/zaYw9evRg/PjxzJo1i1deeQWA2bNnM2nSJKKjowEYO3YsMTExpKSk8PTTT1NWVsbChQuZNWuWRsBEREREROS6sXtStnfvXkaNGmX+/dBDDwFw77338uabb/Lwww9z8eJF7r//fsrLy4mPjyctLQ1PT09zneeffx4nJyemTp3KxYsXSUxM5M0338TR0dEs88477zBv3jzzKY1Tpkyx+W00R0dHVq9ezf3338/QoUNxc3Nj2rRpPPPMM9e7CURERERE5Cb2jXrQx7dda27m+ybTTa329W2+ofVGoWvA/nQd2J+uA/vTdWB/ug7s79t8HdwwD/oQERERERG50SkpExERERERsSMlZSIiIiIiInakpExERERERMSOlJSJiIiIiIjYkZIyERERERERO1JSJiIiIiIiYkdKykREREREROxISZmIiIiIiIgdKSkTERERERGxIyVlIiIiIiIidqSkTERERERExI6UlImIiIiIiNiRkjIRERERERE7UlImIiIiIiJiR0rKRERERERE7EhJmYiIiIiIiB0pKRMREREREbEjJWUiIiIiIiJ2pKRMRERERETEjpSUiYiIiIiI2JGSMhERERERETtSUiYiIiIiImJHSspERERERETsSEmZiIiIiIiIHSkpExERERERsSMlZSIiIiIiInakpExERERERMSOlJSJiIiIiIjYkZIyERERERERO1JSJiIiIiIiYkdKykREREREROxISZmIiIiIiIgdKSkTERERERGxIyVlIiIiIiIidqSkTERERERExI6UlImIiIiIiNiRkjIRERERERE7UlImIiIiIiJiR0rKRERERERE7EhJmYiIiIiIiB0pKRMREREREbEjJWUiIiIiIiJ2pKRMRERERETEjpSUiYiIiIiI2JGSMhERERERETtSUiYiIiIiImJH3/ikbNGiRTg4ONi8goKCzOWGYbBo0SJCQkJwc3Nj5MiRHDx40GYbtbW1zJ07Fz8/Pzw8PJgyZQqnT5+2KVNeXk5KSgpWqxWr1UpKSgoVFRVfxyGKiIiIiMhN7BuflAH07NmTwsJC83XgwAFz2VNPPcVzzz3HSy+9xJ49ewgKCiIpKYnq6mqzzPz58/nggw9YuXIl27Zt49y5c0yaNInGxkazzLRp08jMzCQ1NZXU1FQyMzNJSUn5Wo9TRERERERuPk72rsC1cHJyshkda2EYBr///e/5xS9+wR133AHAsmXLCAwMZMWKFfzoRz+isrKSN954g7feeosxY8YA8PbbbxMWFsaGDRsYN24chw8fJjU1lfT0dOLj4wF47bXXSEhI4OjRo0RHR1+xXrW1tdTW1pp/V1VVtfWhi4iIiIjIDe5bkZQdP36ckJAQLBYL8fHxLF68mM6dO5OdnU1RURFjx441y1osFkaMGMGOHTv40Y9+REZGBvX19TZlQkJCiI2NZceOHYwbN46dO3ditVrNhAxg8ODBWK1WduzYcdWkbMmSJTz++OOXxdeuXYu7uzsASUlJlJaWsm/fPnN5QkICTk5ObN261YzFxcURGhrK2rVrzVjnzp3p2bMn69evp6amBoCAgADi4+PZuXMnZ8+eBcDd3Z3ExEQOHDhATk6OuX5ycjI5OTk20zlHjBhBTU0Nu3btMmMDBgzAarWyceNGAKKbDEo6hFPm1ZFueek4GE0AVHoEUOTblciiTCx1FwC4aPEkN7AXIWeP4nmhFICmdk4c7zgIv4pcfKsuTRM9EToQj5oKgkuPm7HcwFgMHIg4c2n0s8inC9XuvkSd3m3GyrxCKOkQSZf8vTg11gFwzs2HfP/uhBUfxL2mEoA6Zzeyg/sSWHaSDufOmOsfDR+Cd1UBARWX2ic7uC/ODTV0LDlsxvL9u1Pr5Ebnwo/NWEmHCMq8QonO2wmG0dwW7QMo8ulKp8JMXOpb2sKL3MBYQs8eof2FMgAaHZ05EToQ/4pT+FTlm9s8HjqQ9jXlBJeeMGOnAnsBsGrVTjPWp08fAgMDWbdunRnr2rUrPXr0YN26ddTVNbdFUFAQAwcOZPv27ZSVNe/b09OTkSNH8sknn5CbmwuAg4MDkyZN4sSJExw+fOm4R48eTXV1NXv27DFjgwYNwsPDgw8//NCMxcTE0KVLF1atWmXGwsPD6d27N5s2beL8+fMA+Pr6MmTIEHbv3s2ZM83vg8ViYezYsRw6dIiTJ0+a648fP56CggL2799vxoYNG0ZTUxM7duwwY3379sXf35+0tDQzFhUVRffu3UlNTaW+vh6A4OBgBgwYwLZt2ygvL7dpi8zMTPLy8gBo164dycnJHD9+nCNHjpjbTExMpP2FUkLPHjVjp/1jqHdyoVNhphkr9u5Eefug5vPivyraB3HGpzOdC/bh3NB8zV5wtZIX0JOOJYfxuNhcnwYnCydD+hNQnoN3dYG5/rGO8VgvlBBY9qkZywmKo11TI+HFl67jAt8oLrpa
6ZK/14ydtYZRag0j6vQu2jU1zwKocvej0K8bEUX7ca07B0Ctiwc5Qb0JLj2O1/kSAIx2jhzrGI9v5Wn8KnPNbZ4M6Y9bXTUhZ4+Zsbz/3969h0VV7f8Df48MDDBcHRUGHbkoICmK4lGRFD2m4IUkT1npQUHRLLwQelJOmWXU+WXqMT1B6JPIKW+dI6f6KVAqZqYogeIVERXUdAhvgaByXd8/iB0DqJTQFnm/nofncdZee+21x1lr1mevtfd0egJV7YzhVHBUSvvJ1hlFFnZwu3RQSrtpqUWhrTNcrmTCuLLm4lWpqQ1+7PQEuhSegvruzwCACqUK5x280elmHmxv6aX9q3qPxcWLF3HixAkpzc/PD2VlZTh48NfjeHt7w9bWFrt27ZLS3N3d4ebmhqSkJGlFROfOndGvXz989913KCqq6S9sbGwwZMgQZGZm4sqVmv8HpVKJ0aNHIycnB2fO/HrerbEvB4AePXrA1dUVO3bsQHV1TV+u0+ng5eWFb7/9VlpVYmtriyeffBIZGRnQ62v+H1xhxL78Ifryuu+FXtMdJaa2cL38ax97w6ozrto4ovvlH2BUVdN/lZi3x+UOPdD1pxMwKyvG//9Rwb78IfryoqIiZGT82k8OHDgQpqam2Lt3r5TWs2dPODk5YceOHVKak5MTPD09sXv3briX1LwX7Mt/X19+RjcI1iU/we5mXp33og+MqiugKzz163vRwQ13TCzR7UpmnfeiK65bd2nVffmpU7+e44MohPilZ3pEJScn4/bt23Bzc8NPP/2E6OhonD59GidPnkROTg58fX1x+fJlODg4SPvMnDkTFy5cwNdff41NmzYhNDTUYEYLAEaNGgVnZ2fExcXhvffew4YNGwz+0wDAzc0NoaGhiIqKarRujc2U6XQ6FBUVwcrKqhnfhT/W/ztyTe4qtGmL+naQuwptHtuA/NgO5Md2ID+2A/mxHcivNbeD4uJiWFtbNyk2eORnykaPHi3929PTEz4+PujWrRsSEhIwaNAgADVXjOoSQjRIq69+nsbyP6gclUoFlUrVpPMgIiIiIiJqTKt40EddarUanp6eyM3Nle4zKygoMMhTWFgIOzs7ADXLusrLy6Wp73vlqZ2Wr+vq1atSHiIiIiIiopbQ6oKysrIyZGdnQ6vVwtnZGfb29ti5c6e0vby8HHv37sXgwYMB1NxvYGxsbJBHr9fjxIkTUh4fHx8UFRUhPf3Xde+HDh1CUVGRlIeIiIiIiKglPPLLFxcsWIDAwEB07doVhYWFiI6ORnFxMaZOnQqFQoGIiAi89957cHV1haurK9577z2Ym5tj0qRJAABra2tMnz4d8+fPh0ajQfv27bFgwQJ4enpKT2P08PBAQEAAZsyYgbi4OAA196WNGzfung/5ICIiIiIiag6PfFD2448/4sUXX8S1a9fQsWNHDBo0CAcPHoSjoyMA4LXXXsOdO3fwyiuv4ObNmxg4cCC++eYbWFpaSmX885//hFKpxMSJE3Hnzh2MGDECGzZsgJGRkZRn48aNmDt3rvSUxqeffhr/+te//tiTJSIiIiKiNueRf/pia/JbnrDyKOOThuTVmp8y9LhgG5Af24H82A7kx3YgP7YD+bXmdvBbYoNWd08ZERERERHR44RBGRERERERkYwYlBEREREREcmIQRkREREREZGMGJQRERERERHJiEEZERERERGRjBiUERERERERyYhBGRERERERkYwYlBEREREREcmIQRkREREREZGMGJQRERERERHJiEEZERERERGRjBiUERERERERyYhBGRERERERkYwYlBEREREREcmIQRkREREREZGMGJQRERERERHJiEEZERERERGRjBiUERERERERyYhBGRERERERkYwYlBEREREREcmIQRkREREREZGMGJQRERERERHJiEEZERERERGRjBiUERERERERyYhBGRERERERkYwYlBEREREREcmIQRkREREREZGMGJQRERERERHJiEEZERERERGRjBiUERERERERyYhBGRERERERkYwYlBEREREREcmIQRkREREREZGMGJQRERERERHJiEEZERERERGRjBiUERERERERyYhBGRERERERkYwYlBEREREREcmIQRkREREREZGMGJQRERERERHJiEEZERERERGRjBiUERERERERyYhBGRERERERkYwYlBEREREREcmIQRkREREREZGMGJTVExMTA2dnZ5iamsLb2xv79u2Tu0pERERERPQYY1BWx9atWxEREYHXX38dR44cwZAhQzB69GhcvHhR7qoREREREdFjikFZHStXrsT06dMRFhYGDw8PrFq1CjqdDrGxsXJXjYiIiIiIHlNKuSvwqCgvL0dmZiYWLVpkkD5q1CgcOHCg0X3KyspQVlYmvS4qKgIAFBcXt1xF/wB3S27JXYU2rbjYRO4qtHlsA/JjO5Af24H82A7kx3Ygv9bcDmpjAiHEA/MyKPvFtWvXUFVVBTs7O4N0Ozs7FBQUNLrPP/7xD7z99tsN0nU6XYvUkdqGhp8ooraH7YCI7YAIeDzawa1bt2BtbX3fPAzK6lEoFAavhRAN0mpFRUUhMjJSel1dXY0bN25Ao9Hccx9qWcXFxdDpdLh06RKsrKzkrg6RLNgOiNgOiNgG5CeEwK1bt+Dg4PDAvAzKftGhQwcYGRk1mBUrLCxsMHtWS6VSQaVSGaTZ2Ni0VBXpN7CysmIHRG0e2wER2wER24C8HjRDVosP+viFiYkJvL29sXPnToP0nTt3YvDgwTLVioiIiIiIHnecKasjMjISwcHB6N+/P3x8fLB27VpcvHgRs2bNkrtqRERERET0mGJQVsfzzz+P69evY+nSpdDr9ejVqxeSkpLg6Ogod9WoiVQqFZYsWdJgWSlRW8J2QMR2QMQ20LooRFOe0UhEREREREQtgveUERERERERyYhBGRERERERkYwYlBEREREREcmIQRk9Ur799lsoFAr8/PPPLVJ+fn4+FAoFsrKyWqR8oqZQKBT44osv/vDjtnT7ImoNQkJCEBQUJHc1iJrk94xbhg0bhoiIiBarE7UMBmX0hwsJCYFCoYBCoYCxsTFcXFywYMEClJaWtvixdTqd9GRNopZSUFCAOXPmwMXFBSqVCjqdDoGBgdi9e7es9Ro8eDD0en2TfsiSARzJofb7obGfonnllVegUCgQEhLSpLJ4EY5ag7pjIoVCAY1Gg4CAABw7dgwAxy1tCYMykkVAQAD0ej3Onz+P6OhoxMTEYMGCBS1+XCMjI9jb20Op5K9BUMvIz8+Ht7c3UlNTsWzZMhw/fhwpKSkYPnw4wsPDZa2biYkJ7O3toVAomq3M8vLyZiuLCKgZhG7ZsgV37tyR0u7evYvNmzeja9euMtaMqGXUjon0ej12794NpVKJcePGAeC4pS1hUEayUKlUsLe3h06nw6RJkzB58mSD5VyZmZno378/zM3NMXjwYOTk5ACoGfC2a9cOGRkZBuWtWbMGjo6OEELg5s2bmDx5Mjp27AgzMzO4uroiPj5e2r/+ldOTJ09i7NixsLKygqW
lJYYMGYJz584BqJktGDBgANRqNWxsbODr64sLFy607JtDrVrt1fz09HQ8++yzcHNzQ8+ePREZGYmDBw9K+a5du4ZnnnkG5ubmcHV1xVdffWVQzqlTpzBmzBhYWFjAzs4OwcHBuHbtmrR92LBhmDNnDiIiImBraws7OzusXbsWpaWlCA0NhaWlJbp164bk5GRpn/qzXxcuXEBgYCBsbW2hVqvRs2dPJCUlIT8/H8OHDwcA2NraGsxODBs2DLNnz0ZkZCQ6dOiAkSNHYtq0adIAolZlZSXs7e2xfv365nx7qQ3o168funbtisTERCktMTEROp0Offv2ldJSUlLw5JNPwsbGBhqNBuPGjZP6bgBwdnYGAPTt2xcKhQLDhg0zOM7y5cuh1Wqh0WgQHh6OiooKaVtMTAxcXV1hamoKOzs7PPvssy10tkS/jons7e3h5eWFhQsX4tKlS7h69Wqj45a9e/diwIABUKlU0Gq1WLRoESorK+9Z/s2bNzFlyhTY2trC3Nwco0ePRm5urkGedevWQafTwdzcHM888wxWrlwJGxsbAE0be9HDY1BGjwQzMzODL8TXX38dK1asQEZGBpRKJaZNmwYAcHJywlNPPSUFWbXi4+OlJQCLFy/GqVOnkJycjOzsbMTGxqJDhw6NHvfy5csYOnQoTE1NkZqaiszMTEybNg2VlZWorKxEUFAQ/Pz8cOzYMaSlpWHmzJnNOstAj5cbN24gJSUF4eHhUKvVDbbXfsEBwNtvv42JEyfi2LFjGDNmDCZPnowbN24AAPR6Pfz8/ODl5YWMjAykpKTgp59+wsSJEw3KS0hIQIcOHZCeno45c+bg5ZdfxnPPPYfBgwfj8OHD8Pf3R3BwMG7fvt1ofcPDw1FWVobvvvsOx48fx/vvvw8LCwvodDps27YNAJCTkwO9Xo8PP/zQ4LhKpRL79+9HXFwcwsLCkJKSAr1eL+VJSkpCSUlJgzoTNUVoaKhBP79+/Xrpe6BWaWkpIiMj8cMPP2D37t1o164dnnnmGVRXVwMA0tPTAQC7du2CXq83CPL27NmDc+fOYc+ePUhISMCGDRuwYcMGAEBGRgbmzp2LpUuXIicnBykpKRg6dGgLnzFRjZKSEmzcuBHdu3eHRqNpsP3y5csYM2YM/vSnP+Ho0aOIjY3FJ598gujo6HuWGRISgoyMDHz11VdIS0uDEAJjxoyRxl379+/HrFmzMG/ePGRlZWHkyJF49913pf2bMvaiZiCI/mBTp04V48ePl14fOnRIaDQaMXHiRLFnzx4BQOzatUvavmPHDgFA3LlzRwghxNatW4Wtra24e/euEEKIrKwsoVAoRF5enhBCiMDAQBEaGtrosfPy8gQAceTIESGEEFFRUcLZ2VmUl5c3yHv9+nUBQHz77bfNcNbUFhw6dEgAEImJiffNB0C88cYb0uuSkhKhUChEcnKyEEKIxYsXi1GjRhnsc+nSJQFA5OTkCCGE8PPzE08++aS0vbKyUqjVahEcHCyl6fV6AUCkpaUJIYTUvm7evCmEEMLT01O89dZbjdaxft5afn5+wsvLq0H+J554Qrz//vvS66CgIBESEnLf94Govtrvh6tXrwqVSiXy8vJEfn6+MDU1FVevXhXjx48XU6dObXTfwsJCAUAcP35cCNGwv697DEdHR1FZWSmlPffcc+L5558XQgixbds2YWVlJYqLi1vkHInqmjp1qjAyMhJqtVqo1WoBQGi1WpGZmSmEaPg5/vvf/y7c3d1FdXW1VMZHH30kLCwsRFVVlRCipp+eN2+eEEKIM2fOCABi//79Uv5r164JMzMz8fnnnwshhHj++efF2LFjDeo1efJkYW1tLb1+0NiLHh5nykgW27dvh4WFBUxNTeHj44OhQ4dizZo10vbevXtL/9ZqtQCAwsJCAEBQUBCUSiX+97//Aai5gjp8+HA4OTkBAF5++WVs2bIFXl5eeO2113DgwIF71iMrKwtDhgyBsbFxg23t27dHSEgI/P39ERgYiA8//NBgJoCoPvHLEo6mXDWs+xlXq9WwtLSUPuOZmZnYs2cPLCwspL8ePXoAgMHyrLplGBkZQaPRwNPTU0qzs7MD8GvbqW/u3LmIjo6Gr68vlixZIt1Y/iD9+/dvkBYWFiZdRS0sLMSOHTsazGwQNVWHDh0wduxYJCQkID4+HmPHjm2w4uHcuXOYNGkSXFxcYGVlJS1XvHjx4gPL79mzJ4yMjKTXWq1WaicjR46Eo6MjXFxcEBwcjI0bN95ztpmoOQwfPhxZWVnIysrCoUOHMGrUKIwePbrR2yWys7Ph4+Nj8D3j6+uLkpIS/Pjjj43mVyqVGDhwoJSm0Wjg7u6O7OxsADUrIgYMGGCwX/3XDxp70cNjUEayqO2AcnJycPfuXSQmJqJTp07S9rpBUm3HU7skxcTEBMHBwYiPj0d5eTk2bdpkMPir7cgiIiJw5coVjBgx4p4PETEzM7tvPePj45GWlobBgwdj69atcHNzM7gviKguV1dXKBQK6YvufupfCFAoFNJnvLq6GoGBgdKXdO1fbm6uwTKqxsq4X9upLywsDOfPn0dwcDCOHz+O/v37G1wcuZfGlmZOmTIF58+fR1paGj777DM4OTlhyJAhDyyL6F6mTZuGDRs2ICEhodEAPzAwENevX8e6detw6NAhHDp0CEDTHj5zv/ZnaWmJw4cPY/PmzdBqtXjzzTfRp08fPomUWoxarUb37t3RvXt3DBgwAJ988glKS0uxbt26BnmFEA0u/N3vgqC4x/1edcu5X5m1HjT2oofHoIxkUdsBOTo6NjpL9SBhYWHYtWsXYmJiUFFRgQkTJhhs79ixI0JCQvDZZ59h1apVWLt2baPl9O7dG/v27TO4n62+vn37IioqCgcOHECvXr2wadOm31xfahvat28Pf39/fPTRR43+xENTB3X9+vXDyZMn4eTkJH1R1/41FhA9DJ1Oh1mzZiExMRHz58+XBgEmJiYAgKqqqiaVo9FoEBQUhPj4eMTHxyM0NLRZ60ltT0BAAMrLy1FeXg5/f3+DbdevX0d2djbeeOMNjBgxAh4eHrh586ZBnt/6Ga5LqVTiqaeewrJly3Ds2DHk5+cjNTX1958M0W+gUCjQrl07gyeQ1nriiSdw4MABg6DpwIEDsLS0ROfOnRvNX1lZKV20AGraz5kzZ+Dh4QEA6NGjh3QPZq36D/UAHjz2oofDoIxaJQ8PDwwaNAgLFy7Eiy++aDDj9eabb+LLL7/E2bNncfLkSWzfvl3qeOqbPXs2iouL8cILLyAjIwO5ubn49NNPkZOTg7y8PERFRSEtLQ0XLlzAN998Y9CJETUmJiYGVVVVGDBgALZt24bc3FxkZ2dj9erV8PHxaVIZ4eHhuHHjBl588UWkp6fj/Pnz+OabbzBt2rTfNcC8l4iICHz99dfIy8vD4cOHkZqaKn2+HR0doVAosH37dly9ehUlJSUPLC8sLAwJCQnIzs7G1KlTm62e1DYZGRkhOzsb2dnZBksNgZqngmo0GqxduxZnz55FamoqIiMjDfJ06tQJZmZm0oNyio
qKmnTc7du3Y/Xq1cjKysKFCxfw73//G9XV1XB3d2+2cyOqq6ysDAUFBSgoKEB2djbmzJmDkpISBAYGNsj7yiuv4NKlS5gzZw5Onz6NL7/8EkuWLEFkZCTatWs4rHd1dcX48eMxY8YMfP/99zh69Cj++te/onPnzhg/fjwAYM6cOUhKSsLKlSuRm5uLuLg4JCcnN5g9u9/Yix4egzJqtaZPn47y8vIG0+cmJiaIiopC7969MXToUBgZGWHLli2NlqHRaJCamoqSkhL4+fnB29sb69atg7GxMczNzXH69Gn85S9/gZubG2bOnInZs2fjpZde+iNOj1opZ2dnHD58GMOHD8f8+fPRq1cvjBw5Ert370ZsbGyTynBwcMD+/ftRVVUFf39/9OrVC/PmzYO1tXWjX7q/V1VVFcLDw+Hh4YGAgAC4u7sjJiYGANC5c2e8/fbbWLRoEezs7DB79uwHlvfUU09Bq9XC398fDg4OzVZParusrKxgZWXVIL1du3bYsmULMjMz0atXL7z66qv44IMPDPIolUqsXr0acXFxcHBwkAagD2JjY4PExET8+c9/hoeHBz7++GNs3rwZPXv2bJZzIqovJSUFWq0WWq0WAwcOxA8//ID//Oc/DX7GAajpm5OSkpCeno4+ffpg1qxZmD59Ot544417lh8fHw9vb2+MGzcOPj4+EEIgKSlJWqnk6+uLjz/+GCtXrkSfPn2QkpKCV199Faampg3KutfYix6eQtxrsSnRI+7dd9/Fli1bcPz4cbmrQkQAbt++DQcHB6xfv57LWoiIWrEZM2bg9OnT2Ldvn0E6x14thz8PTq1OSUkJsrOzsWbNGrzzzjtyV4eozauurkZBQQFWrFgBa2trPP3003JXiYiIfoPly5dj5MiRUKvVSE5ORkJCgrRyAuDY64/AoIxandmzZ2Pz5s0ICgri9DnRI+DixYtwdnZGly5dsGHDBiiV/GohImpN0tPTsWzZMty6dQsuLi5YvXo1wsLCpO0ce7U8Ll8kIiIiIiKSER/0QUREREREJCMGZURERERERDJiUEZERERERCQjBmVEREREREQyYlBGREREREQkIwZlRERELcDJyQmrVq16qDLeeusteHl5NUt9iIjo0cWgjIiIHishISFQKBRQKBQwNjaGi4sLFixYgNLSUrmr9pstWLAAu3fvbnL+/Px8KBQKZGVltVyliIio2fEXPomI6LETEBCA+Ph4VFRUYN++fQgLC0NpaSliY2MN8lVUVMDY2FimWj6YhYUFLCws5K4GERG1MM6UERHRY0elUsHe3h46nQ6TJk3C5MmT8cUXX0jLAdevXw8XFxeoVCrk5eVJM2t1/4YNGyaVd+DAAQwdOhRmZmbQ6XSYO3euwcxbYWEhAgMDYWZmBmdnZ2zcuLFBnRQKBeLi4jBu3DiYm5vDw8MDaWlpOHv2LIYNGwa1Wg0fHx+cO3dO2qf+8sXq6mosXboUXbp0gUqlgpeXF1JSUqTtzs7OAIC+ffs2OAciInp0MSgjIqLHnpmZGSoqKgAAZ8+exeeff45t27YhKysLXbt2hV6vl/6OHDkCjUaDoUOHAgCOHz8Of39/TJgwAceOHcPWrVvx/fffY/bs2VL5ISEhyM/PR2pqKv773/8iJiYGhYWFDerxzjvvYMqUKcjKykKPHj0wadIkvPTSS4iKikJGRgYAGJRb34cffogVK1Zg+fLlOHbsGPz9/fH0008jNzcXAJCeng4A2LVrF/R6PRITE5vnDSQiohbF5YtERPRYS09Px6ZNmzBixAgAQHl5OT799FN07NhRymNvbw8AuHv3LoKCguDj44O33noLAPDBBx9g0qRJiIiIAAC4urpi9erV8PPzQ2xsLC5evIjk5GQcPHgQAwcOBAB88skn8PDwaFCX0NBQTJw4EQCwcOFC+Pj4YPHixfD39wcAzJs3D6Ghofc8l+XLl2PhwoV44YUXAADvv/8+9uzZg1WrVuGjjz6Szkmj0UjnREREjz4GZURE9NjZvn07LCwsUFlZiYqKCowfPx5r1qxBTEwMHB0dDQKyuqZPn45bt25h586daNeuZjFJZmYmzp49a7AkUQiB6upq5OXl4cyZM1Aqlejfv7+0vUePHrCxsWlQfu/evaV/29nZAQA8PT0N0u7evYvi4mJYWVkZ7FtcXIwrV67A19fXIN3X1xdHjx5t4jtDRESPIgZlRET02Bk+fDhiY2NhbGwMBwcHg4d5qNXqRveJjo5GSkoK0tPTYWlpKaVXV1fjpZdewty5cxvs07VrV+Tk5ACouWfsQerWozZ/Y2nV1dX3LKP+cYQQTTo2ERE9uhiUERHRY0etVqN79+5Nzr9t2zYsXboUycnJ6Natm8G2fv364eTJk/csz8PDA5WVlcjIyMCAAQMAADk5Ofj5559/d/0bY2VlBQcHB3z//ffS/W5AzUNIao9rYmICAKiqqmrWYxMRUctiUEZERG3aiRMnMGXKFCxcuBA9e/ZEQUEBgJoAp3379li4cCEGDRqE8PBwzJgxA2q1GtnZ2di5cyfWrFkDd3d3BAQEYMaMGVi7di2USiUiIiJgZmbW7HX929/+hiVLlqBbt27w8vJCfHw8srKypKWVnTp1gpmZGVJSUtClSxeYmprC2tq62etBRETNi09fJCKiNi0jIwO3b99GdHQ0tFqt9DdhwgQANfeB7d27F7m5uRgyZAj69u2LxYsXQ6vVSmXEx8dDp9PBz88PEyZMwMyZM9GpU6dmr+vcuXMxf/58zJ8/H56enkhJScFXX30FV1dXAIBSqcTq1asRFxcHBwcHjB8/vtnrQEREzU8hhBByV4KIiIiIiKit4kwZERERERGRjBiUERERERERyYhBGRERERERkYwYlBEREREREcmIQRkREREREZGMGJQRERERERHJiEEZERERERGRjBiUERERERERyYhBGRERERERkYwYlBEREREREcmIQRkREREREZGM/g+/bkR16qNHTQAAAABJRU5ErkJggg==", "text/plain": [ "
" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "subject_counts = df['Subject'].value_counts()\n", "\n", "plt.figure(figsize=(10, 6))\n", "subject_counts.plot(kind='bar', color='skyblue')\n", "plt.title('Liczba wystąpień poszczególnych przedmiotów')\n", "plt.xlabel('Przedmiot')\n", "plt.ylabel('Liczba wystąpień')\n", "plt.xticks(rotation=0)\n", "plt.grid(axis='y', linestyle='--', linewidth=0.7)\n", "plt.show()" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "

Data preprocessing
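The helper functions defined below clean the raw question text (drop newlines, digits and punctuation, lowercase), remove English stopwords, lemmatize and stem it. As a rough illustration of the intended effect on a single made-up question (assuming the NLTK `punkt`, `stopwords` and `wordnet` resources downloaded above):

```python
# Rough, self-contained illustration of the preprocessing steps on one example string;
# the actual helper functions applied to the DataFrame are defined in the cells below.
import re
from nltk.corpus import stopwords
from nltk.stem.porter import PorterStemmer
from nltk.tokenize import word_tokenize

text = "If the area of two similar triangles\nare equal, prove that they are congruent."
text = re.sub(r'[^a-zA-Z\s]', '', text.replace('\n', ' ')).lower()   # clean
stop_words = set(stopwords.words('english'))
words = [w for w in word_tokenize(text) if w not in stop_words]      # drop stopwords
stemmer = PorterStemmer()
stemmed = [stemmer.stem(w) for w in words]                           # stem
print(' '.join(stemmed))   # e.g. "area two similar triangl equal prove congruent"
```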

" ] }, { "cell_type": "code", "execution_count": 4, "metadata": {}, "outputs": [ { "data": { "text/html": [ "
\n", "\n", "\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", "
engSubject_BiologySubject_ChemistrySubject_MathsSubject_Physics
0An anti-forest measure is\\nA. Afforestation\\nB...TrueFalseFalseFalse
1Among the following organic acids, the acid pr...FalseTrueFalseFalse
2If the area of two similar triangles are equal...FalseFalseTrueFalse
3In recent year, there has been a growing\\nconc...TrueFalseFalseFalse
4Which of the following statement\\nregarding tr...FalseFalseFalseTrue
\n", "
" ], "text/plain": [ " eng Subject_Biology \\\n", "0 An anti-forest measure is\\nA. Afforestation\\nB... True \n", "1 Among the following organic acids, the acid pr... False \n", "2 If the area of two similar triangles are equal... False \n", "3 In recent year, there has been a growing\\nconc... True \n", "4 Which of the following statement\\nregarding tr... False \n", "\n", " Subject_Chemistry Subject_Maths Subject_Physics \n", "0 False False False \n", "1 True False False \n", "2 False True False \n", "3 False False False \n", "4 False False True " ] }, "execution_count": 4, "metadata": {}, "output_type": "execute_result" } ], "source": [ "df = pd.get_dummies(df, columns=['Subject'])\n", "df.head()" ] }, { "cell_type": "code", "execution_count": 6, "metadata": {}, "outputs": [], "source": [ "def remove_stopwords(text):\n", " stop_words = set(stopwords.words('english'))\n", " words = word_tokenize(text)\n", " filtered_words = [word for word in words if word.lower() not in stop_words]\n", " return ' '.join(filtered_words)\n", "\n", "def lemmatize_text(text):\n", " lemmatizer = WordNetLemmatizer()\n", " words = word_tokenize(text)\n", " lemmatized_words = [lemmatizer.lemmatize(word) for word in words]\n", " return ' '.join(lemmatized_words)\n", "\n", "def clean_text(text):\n", " text = re.sub(r'\\n', ' ', text) \n", " text = re.sub(r'[^a-zA-Z\\s]', '', text) \n", " text = text.lower() \n", " text = ' '.join(text.split())\n", " return text\n", "\n", "def stem_text(text):\n", " stemmer = PorterStemmer()\n", " token_words = word_tokenize(text)\n", " stem_sentence = [stemmer.stem(word) for word in token_words]\n", " return \" \".join(stem_sentence)" ] }, { "cell_type": "code", "execution_count": 7, "metadata": {}, "outputs": [ { "data": { "text/html": [ "
\n", "\n", "\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", "
engSubject_BiologySubject_ChemistrySubject_MathsSubject_Physicsprepared_text
0An anti-forest measure is\\nA. Afforestation\\nB...TrueFalseFalseFalsean anti-forest measur is a. afforest b . selec...
1Among the following organic acids, the acid pr...FalseTrueFalseFalseamong the follow organ acid , the acid present...
2If the area of two similar triangles are equal...FalseFalseTrueFalseif the area of two similar triangl are equal ,...
3In recent year, there has been a growing\\nconc...TrueFalseFalseFalsein recent year , there ha been a grow concern ...
4Which of the following statement\\nregarding tr...FalseFalseFalseTruewhich of the follow statement regard transform...
\n", "
" ], "text/plain": [ " eng Subject_Biology \\\n", "0 An anti-forest measure is\\nA. Afforestation\\nB... True \n", "1 Among the following organic acids, the acid pr... False \n", "2 If the area of two similar triangles are equal... False \n", "3 In recent year, there has been a growing\\nconc... True \n", "4 Which of the following statement\\nregarding tr... False \n", "\n", " Subject_Chemistry Subject_Maths Subject_Physics \\\n", "0 False False False \n", "1 True False False \n", "2 False True False \n", "3 False False False \n", "4 False False True \n", "\n", " prepared_text \n", "0 an anti-forest measur is a. afforest b . selec... \n", "1 among the follow organ acid , the acid present... \n", "2 if the area of two similar triangl are equal ,... \n", "3 in recent year , there ha been a grow concern ... \n", "4 which of the follow statement regard transform... " ] }, "execution_count": 7, "metadata": {}, "output_type": "execute_result" } ], "source": [ "df['prepared_text'] = df['eng'].apply(remove_stopwords)\n", "df['prepared_text'] = df['eng'].apply(lemmatize_text)\n", "df['prepared_text'] = df['eng'].apply(clean_text)\n", "df['prepared_text'] = df['eng'].apply(stem_text)\n", "df.head()" ] }, { "cell_type": "code", "execution_count": 8, "metadata": {}, "outputs": [], "source": [ "df.to_csv(\"subjects-questions-prepared.csv\")" ] }, { "cell_type": "code", "execution_count": 58, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "122519\n", "98015\n", "24504\n" ] } ], "source": [ "X = df['prepared_text'] \n", "y = df[['Subject_Biology', 'Subject_Chemistry', 'Subject_Maths', 'Subject_Physics']]\n", "X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)\n", "print(len(X))\n", "print(len(X_train))\n", "print(len(X_test))" ] }, { "cell_type": "code", "execution_count": 11, "metadata": {}, "outputs": [], "source": [ "max_words = df['prepared_text'].str.len().max()\n", "tokenizer = Tokenizer(num_words=max_words, oov_token='')\n", "\n", "tokenizer.fit_on_texts(X_train)\n", "tokenizer.fit_on_texts(X_test)\n", "\n", "train_sequences = tokenizer.texts_to_sequences(X_train)\n", "test_sequences = tokenizer.texts_to_sequences(X_test)\n", "\n", "padded_train = pad_sequences(train_sequences, maxlen=5, padding='post', truncating='post')\n", "padded_test = pad_sequences(test_sequences, maxlen=5, padding='post', truncating='post')\n" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "

Model definitions

" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "

Model 1

\n", "

A model consisting of an Embedding layer, an LSTM layer, a GlobalAveragePooling1D layer and several Dense layers; a parameter-count check follows the summary below.

" ] }, { "cell_type": "code", "execution_count": 14, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Model: \"sequential_1\"\n", "_________________________________________________________________\n", " Layer (type) Output Shape Param # \n", "=================================================================\n", " embedding_1 (Embedding) (None, None, 16) 160000 \n", " \n", " lstm_1 (LSTM) (None, None, 64) 20736 \n", " \n", " global_average_pooling1d_1 (None, 64) 0 \n", " (GlobalAveragePooling1D) \n", " \n", " dense (Dense) (None, 254) 16510 \n", " \n", " dense_1 (Dense) (None, 128) 32640 \n", " \n", " dense_2 (Dense) (None, 4) 516 \n", " \n", "=================================================================\n", "Total params: 230,402\n", "Trainable params: 230,402\n", "Non-trainable params: 0\n", "_________________________________________________________________\n" ] } ], "source": [ "model_1 = Sequential()\n", "\n", "model_1.add(Embedding(input_dim=10000, output_dim=16))\n", "model_1.add(LSTM(units=64, return_sequences=True))\n", "model_1.add(GlobalAveragePooling1D())\n", "model_1.add(Dense(254, activation='relu'))\n", "model_1.add(Dense(128, activation='relu'))\n", "model_1.add(Dense(4, activation='softmax'))\n", "\n", "model_1.compile(optimizer='adam', loss='categorical_crossentropy', \n", " metrics=['accuracy', \n", " tf.keras.metrics.Precision(name='precision'),\n", " tf.keras.metrics.Recall(name='recall'),\n", " tf.keras.metrics.AUC(name='auc'),\n", " tf.keras.metrics.TruePositives(name='tp'),\n", " tf.keras.metrics.FalsePositives(name='fp'),\n", " tf.keras.metrics.TrueNegatives(name='tn'),\n", " tf.keras.metrics.FalseNegatives(name='fn')])\n", "\n", "model_1.summary()" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "

Model 2

\n", "

A convolutional model with an Embedding layer and a single 1D convolutional layer; a shape check on the convolution output follows the summary below.

" ] }, { "cell_type": "code", "execution_count": 15, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Model: \"sequential_2\"\n", "_________________________________________________________________\n", " Layer (type) Output Shape Param # \n", "=================================================================\n", " embedding_2 (Embedding) (None, None, 16) 160000 \n", " \n", " conv1d (Conv1D) (None, None, 128) 10368 \n", " \n", " global_max_pooling1d (Globa (None, 128) 0 \n", " lMaxPooling1D) \n", " \n", " dense_3 (Dense) (None, 4) 516 \n", " \n", "=================================================================\n", "Total params: 170,884\n", "Trainable params: 170,884\n", "Non-trainable params: 0\n", "_________________________________________________________________\n" ] } ], "source": [ "model_2 = Sequential()\n", "\n", "model_2.add(Embedding(input_dim=10000, output_dim=16))\n", "model_2.add(Conv1D(filters=128, kernel_size=5, activation='relu'))\n", "model_2.add(GlobalMaxPooling1D())\n", "model_2.add(Dense(4, activation='softmax'))\n", "model_2.compile(optimizer='adam', loss='categorical_crossentropy', \n", " metrics=['accuracy', \n", " tf.keras.metrics.Precision(name='precision'),\n", " tf.keras.metrics.Recall(name='recall'),\n", " tf.keras.metrics.AUC(name='auc'),\n", " tf.keras.metrics.TruePositives(name='tp'),\n", " tf.keras.metrics.FalsePositives(name='fp'),\n", " tf.keras.metrics.TrueNegatives(name='tn'),\n", " tf.keras.metrics.FalseNegatives(name='fn')])\n", "\n", "model_2.summary()" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "

Model 3

\n", "

A recurrent model with an Embedding layer and a single GRU layer; the GRU parameter count is verified after the summary below.

" ] }, { "cell_type": "code", "execution_count": 23, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Model: \"sequential_4\"\n", "_________________________________________________________________\n", " Layer (type) Output Shape Param # \n", "=================================================================\n", " embedding_4 (Embedding) (None, None, 16) 160000 \n", " \n", " gru_1 (GRU) (None, 64) 15744 \n", " \n", " dense_5 (Dense) (None, 4) 260 \n", " \n", "=================================================================\n", "Total params: 176,004\n", "Trainable params: 176,004\n", "Non-trainable params: 0\n", "_________________________________________________________________\n" ] } ], "source": [ "model_3 = Sequential()\n", "\n", "model_3.add(Embedding(input_dim=10000, output_dim=16))\n", "model_3.add(GRU(units=64))\n", "model_3.add(Dense(units=4, activation='softmax'))\n", "model_3.compile(optimizer='adam', loss='categorical_crossentropy', \n", " metrics=['accuracy', \n", " tf.keras.metrics.Precision(name='precision'),\n", " tf.keras.metrics.Recall(name='recall'),\n", " tf.keras.metrics.AUC(name='auc'),\n", " tf.keras.metrics.TruePositives(name='tp'),\n", " tf.keras.metrics.FalsePositives(name='fp'),\n", " tf.keras.metrics.TrueNegatives(name='tn'),\n", " tf.keras.metrics.FalseNegatives(name='fn')])\n", "\n", "model_3.summary()" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "

Training and validation of the models

" ] }, { "cell_type": "code", "execution_count": 25, "metadata": {}, "outputs": [], "source": [ "def checkpoint(model_name):\n", " return ModelCheckpoint(filepath='best_model_'+ model_name +'.h5', monitor='val_accuracy', save_best_only=True)\n", "\n", "early_stopping = EarlyStopping(monitor='val_loss', patience=5)\n", "reduce_lr = ReduceLROnPlateau(monitor='val_loss', factor=0.2, patience=3, min_lr=0.0001)" ] }, { "cell_type": "code", "execution_count": 26, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Epoch 1/100\n", "30/30 [==============================] - 1s 47ms/step - loss: 0.3951 - accuracy: 0.8490 - precision: 0.8730 - recall: 0.8248 - auc: 0.9727 - tp: 64673.0000 - fp: 9412.0000 - tn: 225824.0000 - fn: 13739.0000 - val_loss: 0.5155 - val_accuracy: 0.8170 - val_precision: 0.8458 - val_recall: 0.7902 - val_auc: 0.9559 - val_tp: 15490.0000 - val_fp: 2823.0000 - val_tn: 55986.0000 - val_fn: 4113.0000 - lr: 2.0000e-04\n", "Epoch 2/100\n", "30/30 [==============================] - 1s 41ms/step - loss: 0.3950 - accuracy: 0.8484 - precision: 0.8742 - recall: 0.8232 - auc: 0.9727 - tp: 64545.0000 - fp: 9290.0000 - tn: 225946.0000 - fn: 13867.0000 - val_loss: 0.5165 - val_accuracy: 0.8174 - val_precision: 0.8442 - val_recall: 0.7933 - val_auc: 0.9558 - val_tp: 15552.0000 - val_fp: 2871.0000 - val_tn: 55938.0000 - val_fn: 4051.0000 - lr: 2.0000e-04\n", "Epoch 3/100\n", "30/30 [==============================] - 1s 40ms/step - loss: 0.3944 - accuracy: 0.8490 - precision: 0.8732 - recall: 0.8252 - auc: 0.9727 - tp: 64706.0000 - fp: 9392.0000 - tn: 225844.0000 - fn: 13706.0000 - val_loss: 0.5172 - val_accuracy: 0.8171 - val_precision: 0.8456 - val_recall: 0.7905 - val_auc: 0.9557 - val_tp: 15496.0000 - val_fp: 2829.0000 - val_tn: 55980.0000 - val_fn: 4107.0000 - lr: 2.0000e-04\n", "Epoch 4/100\n", "30/30 [==============================] - 1s 45ms/step - loss: 0.3938 - accuracy: 0.8494 - precision: 0.8744 - recall: 0.8242 - auc: 0.9728 - tp: 64624.0000 - fp: 9286.0000 - tn: 225950.0000 - fn: 13788.0000 - val_loss: 0.5176 - val_accuracy: 0.8169 - val_precision: 0.8424 - val_recall: 0.7944 - val_auc: 0.9558 - val_tp: 15573.0000 - val_fp: 2914.0000 - val_tn: 55895.0000 - val_fn: 4030.0000 - lr: 2.0000e-04\n", "Epoch 5/100\n", "30/30 [==============================] - 1s 45ms/step - loss: 0.3924 - accuracy: 0.8496 - precision: 0.8735 - recall: 0.8261 - auc: 0.9730 - tp: 64774.0000 - fp: 9377.0000 - tn: 225859.0000 - fn: 13638.0000 - val_loss: 0.5172 - val_accuracy: 0.8171 - val_precision: 0.8435 - val_recall: 0.7935 - val_auc: 0.9557 - val_tp: 15555.0000 - val_fp: 2886.0000 - val_tn: 55923.0000 - val_fn: 4048.0000 - lr: 1.0000e-04\n", "Epoch 6/100\n", "30/30 [==============================] - 1s 44ms/step - loss: 0.3923 - accuracy: 0.8496 - precision: 0.8736 - recall: 0.8257 - auc: 0.9730 - tp: 64747.0000 - fp: 9369.0000 - tn: 225867.0000 - fn: 13665.0000 - val_loss: 0.5176 - val_accuracy: 0.8168 - val_precision: 0.8436 - val_recall: 0.7939 - val_auc: 0.9557 - val_tp: 15563.0000 - val_fp: 2886.0000 - val_tn: 55923.0000 - val_fn: 4040.0000 - lr: 1.0000e-04\n" ] } ], "source": [ "history = model_1.fit(padded_train, y_train,\n", " steps_per_epoch = 30,\n", " epochs = 100,\n", " validation_split=0.2,\n", " verbose = 1,\n", " validation_steps = 50,\n", " callbacks=[checkpoint(\"1\"), early_stopping, reduce_lr], \n", " )" ] }, { "cell_type": "code", "execution_count": 45, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Loss: 
0.5143917202949524\n", "Accuracy: 0.8162341117858887\n", "Precision: 0.8397026062011719\n", "Recall: 0.7926868796348572\n", "AUC: 0.9560295343399048\n", "True Positives: 19424.0\n", "False Positives: 3708.0\n", "True Negatives: 69804.0\n", "False Negatives: 5080.0\n" ] } ], "source": [ "score = model_1.evaluate(padded_test, y_test, verbose=0)\n", "\n", "print(\"Loss:\", score[0])\n", "print(\"Accuracy:\", score[1])\n", "print(\"Precision:\", score[2])\n", "print(\"Recall:\", score[3])\n", "print(\"AUC:\", score[4])\n", "print(\"True Positives:\", score[5])\n", "print(\"False Positives:\", score[6])\n", "print(\"True Negatives:\", score[7])\n", "print(\"False Negatives:\", score[8])" ] }, { "cell_type": "code", "execution_count": 49, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Epoch 1/100\n", "30/30 [==============================] - 1s 40ms/step - loss: 0.3013 - accuracy: 0.8852 - precision: 0.9036 - recall: 0.8683 - auc: 0.9840 - tp: 68085.0000 - fp: 7263.0000 - tn: 227973.0000 - fn: 10327.0000 - val_loss: 0.5586 - val_accuracy: 0.8162 - val_precision: 0.8363 - val_recall: 0.8018 - val_auc: 0.9532 - val_tp: 15717.0000 - val_fp: 3076.0000 - val_tn: 55733.0000 - val_fn: 3886.0000 - lr: 1.0000e-04\n", "Epoch 2/100\n", "30/30 [==============================] - 1s 33ms/step - loss: 0.3011 - accuracy: 0.8855 - precision: 0.9035 - recall: 0.8686 - auc: 0.9840 - tp: 68112.0000 - fp: 7272.0000 - tn: 227964.0000 - fn: 10300.0000 - val_loss: 0.5590 - val_accuracy: 0.8162 - val_precision: 0.8359 - val_recall: 0.8020 - val_auc: 0.9532 - val_tp: 15722.0000 - val_fp: 3087.0000 - val_tn: 55722.0000 - val_fn: 3881.0000 - lr: 1.0000e-04\n", "Epoch 3/100\n", "30/30 [==============================] - 1s 36ms/step - loss: 0.3008 - accuracy: 0.8855 - precision: 0.9037 - recall: 0.8684 - auc: 0.9840 - tp: 68090.0000 - fp: 7254.0000 - tn: 227982.0000 - fn: 10322.0000 - val_loss: 0.5597 - val_accuracy: 0.8164 - val_precision: 0.8361 - val_recall: 0.8016 - val_auc: 0.9531 - val_tp: 15713.0000 - val_fp: 3080.0000 - val_tn: 55729.0000 - val_fn: 3890.0000 - lr: 1.0000e-04\n", "Epoch 4/100\n", "30/30 [==============================] - 1s 34ms/step - loss: 0.3006 - accuracy: 0.8857 - precision: 0.9037 - recall: 0.8688 - auc: 0.9840 - tp: 68127.0000 - fp: 7260.0000 - tn: 227976.0000 - fn: 10285.0000 - val_loss: 0.5602 - val_accuracy: 0.8163 - val_precision: 0.8358 - val_recall: 0.8017 - val_auc: 0.9531 - val_tp: 15715.0000 - val_fp: 3088.0000 - val_tn: 55721.0000 - val_fn: 3888.0000 - lr: 1.0000e-04\n", "Epoch 5/100\n", "30/30 [==============================] - 1s 40ms/step - loss: 0.3003 - accuracy: 0.8856 - precision: 0.9037 - recall: 0.8690 - auc: 0.9841 - tp: 68143.0000 - fp: 7259.0000 - tn: 227977.0000 - fn: 10269.0000 - val_loss: 0.5607 - val_accuracy: 0.8161 - val_precision: 0.8356 - val_recall: 0.8018 - val_auc: 0.9530 - val_tp: 15717.0000 - val_fp: 3092.0000 - val_tn: 55717.0000 - val_fn: 3886.0000 - lr: 1.0000e-04\n", "Epoch 6/100\n", "30/30 [==============================] - 2s 56ms/step - loss: 0.3000 - accuracy: 0.8861 - precision: 0.9037 - recall: 0.8693 - auc: 0.9841 - tp: 68160.0000 - fp: 7267.0000 - tn: 227969.0000 - fn: 10252.0000 - val_loss: 0.5614 - val_accuracy: 0.8161 - val_precision: 0.8354 - val_recall: 0.8016 - val_auc: 0.9530 - val_tp: 15713.0000 - val_fp: 3097.0000 - val_tn: 55712.0000 - val_fn: 3890.0000 - lr: 1.0000e-04\n" ] } ], "source": [ "history = model_2.fit(padded_train, y_train,\n", " steps_per_epoch = 30,\n", " epochs = 100,\n", " 
validation_split=0.2,\n", " verbose = 1,\n", " validation_steps = 50,\n", " callbacks=[checkpoint(\"2\"), early_stopping, reduce_lr], \n", " )" ] }, { "cell_type": "code", "execution_count": 50, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Loss: 0.5664545297622681\n", "Accuracy: 0.8139079213142395\n", "Precision: 0.8310044407844543\n", "Recall: 0.7984818816184998\n", "AUC: 0.9523454308509827\n", "True Positives: 19566.0\n", "False Positives: 3979.0\n", "True Negatives: 69533.0\n", "False Negatives: 4938.0\n" ] } ], "source": [ "score = model_2.evaluate(padded_test, y_test, verbose=0)\n", "\n", "print(\"Loss:\", score[0])\n", "print(\"Accuracy:\", score[1])\n", "print(\"Precision:\", score[2])\n", "print(\"Recall:\", score[3])\n", "print(\"AUC:\", score[4])\n", "print(\"True Positives:\", score[5])\n", "print(\"False Positives:\", score[6])\n", "print(\"True Negatives:\", score[7])\n", "print(\"False Negatives:\", score[8])" ] }, { "cell_type": "code", "execution_count": 28, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Epoch 1/100\n", "30/30 [==============================] - 1s 43ms/step - loss: 0.3993 - accuracy: 0.8482 - precision: 0.8744 - recall: 0.8214 - auc: 0.9722 - tp: 64409.0000 - fp: 9252.0000 - tn: 225984.0000 - fn: 14003.0000 - val_loss: 0.4974 - val_accuracy: 0.8173 - val_precision: 0.8476 - val_recall: 0.7934 - val_auc: 0.9581 - val_tp: 15553.0000 - val_fp: 2796.0000 - val_tn: 56013.0000 - val_fn: 4050.0000 - lr: 2.0000e-04\n", "Epoch 2/100\n", "30/30 [==============================] - 1s 39ms/step - loss: 0.3986 - accuracy: 0.8482 - precision: 0.8742 - recall: 0.8219 - auc: 0.9722 - tp: 64443.0000 - fp: 9272.0000 - tn: 225964.0000 - fn: 13969.0000 - val_loss: 0.4981 - val_accuracy: 0.8167 - val_precision: 0.8492 - val_recall: 0.7896 - val_auc: 0.9580 - val_tp: 15479.0000 - val_fp: 2749.0000 - val_tn: 56060.0000 - val_fn: 4124.0000 - lr: 2.0000e-04\n", "Epoch 3/100\n", "30/30 [==============================] - 1s 42ms/step - loss: 0.3982 - accuracy: 0.8486 - precision: 0.8749 - recall: 0.8209 - auc: 0.9723 - tp: 64366.0000 - fp: 9204.0000 - tn: 226032.0000 - fn: 14046.0000 - val_loss: 0.4987 - val_accuracy: 0.8157 - val_precision: 0.8468 - val_recall: 0.7926 - val_auc: 0.9580 - val_tp: 15537.0000 - val_fp: 2811.0000 - val_tn: 55998.0000 - val_fn: 4066.0000 - lr: 2.0000e-04\n", "Epoch 4/100\n", "30/30 [==============================] - 1s 33ms/step - loss: 0.3976 - accuracy: 0.8488 - precision: 0.8760 - recall: 0.8207 - auc: 0.9724 - tp: 64354.0000 - fp: 9111.0000 - tn: 226125.0000 - fn: 14058.0000 - val_loss: 0.4988 - val_accuracy: 0.8161 - val_precision: 0.8468 - val_recall: 0.7929 - val_auc: 0.9580 - val_tp: 15543.0000 - val_fp: 2811.0000 - val_tn: 55998.0000 - val_fn: 4060.0000 - lr: 2.0000e-04\n", "Epoch 5/100\n", "30/30 [==============================] - 1s 39ms/step - loss: 0.3965 - accuracy: 0.8492 - precision: 0.8751 - recall: 0.8225 - auc: 0.9725 - tp: 64493.0000 - fp: 9206.0000 - tn: 226030.0000 - fn: 13919.0000 - val_loss: 0.4986 - val_accuracy: 0.8176 - val_precision: 0.8478 - val_recall: 0.7936 - val_auc: 0.9580 - val_tp: 15556.0000 - val_fp: 2792.0000 - val_tn: 56017.0000 - val_fn: 4047.0000 - lr: 1.0000e-04\n", "Epoch 6/100\n", "30/30 [==============================] - 1s 35ms/step - loss: 0.3963 - accuracy: 0.8493 - precision: 0.8747 - recall: 0.8235 - auc: 0.9726 - tp: 64574.0000 - fp: 9254.0000 - tn: 225982.0000 - fn: 13838.0000 - val_loss: 0.4986 - val_accuracy: 0.8174 - 
val_precision: 0.8474 - val_recall: 0.7938 - val_auc: 0.9580 - val_tp: 15560.0000 - val_fp: 2802.0000 - val_tn: 56007.0000 - val_fn: 4043.0000 - lr: 1.0000e-04\n" ] } ], "source": [ "history = model_3.fit(padded_train, y_train,\n", " steps_per_epoch = 30,\n", " epochs = 100,\n", " validation_split=0.2,\n", " verbose = 1,\n", " validation_steps = 50,\n", " callbacks=[checkpoint(\"3\"), early_stopping, reduce_lr], \n", " )" ] }, { "cell_type": "code", "execution_count": 48, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Loss: 0.5007005929946899\n", "Accuracy: 0.816642165184021\n", "Precision: 0.8432700037956238\n", "Recall: 0.7930949926376343\n", "AUC: 0.9578227400779724\n", "True Positives: 19434.0\n", "False Positives: 3612.0\n", "True Negatives: 69900.0\n", "False Negatives: 5070.0\n" ] } ], "source": [ "score = model_3.evaluate(padded_test, y_test, verbose=0)\n", "\n", "print(\"Loss:\", score[0])\n", "print(\"Accuracy:\", score[1])\n", "print(\"Precision:\", score[2])\n", "print(\"Recall:\", score[3])\n", "print(\"AUC:\", score[4])\n", "print(\"True Positives:\", score[5])\n", "print(\"False Positives:\", score[6])\n", "print(\"True Negatives:\", score[7])\n", "print(\"False Negatives:\", score[8])" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "
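The evaluation cells above report only aggregate metrics. To see which subject a trained model actually assigns to a question, the softmax output can be mapped back to the one-hot column order used for `y`; a minimal sketch using `model_3` and the fitted `tokenizer` (the names `sample`, `seq` and `pred_labels` are illustrative):

```python
import numpy as np

class_names = ['Biology', 'Chemistry', 'Maths', 'Physics']  # order of the Subject_* columns

sample = X_test.iloc[:5]
seq = pad_sequences(tokenizer.texts_to_sequences(sample), maxlen=5,
                    padding='post', truncating='post')
probs = model_3.predict(seq)
pred_labels = [class_names[i] for i in np.argmax(probs, axis=1)]
print(pred_labels)
```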

Experiments

\n", "

Training and validation of model 2 over all epochs

" ] }, { "cell_type": "code", "execution_count": 51, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Epoch 1/100\n", "30/30 [==============================] - 1s 42ms/step - loss: 0.2998 - accuracy: 0.8859 - precision: 0.9040 - recall: 0.8694 - auc: 0.9841 - tp: 68169.0000 - fp: 7240.0000 - tn: 227996.0000 - fn: 10243.0000 - val_loss: 0.5619 - val_accuracy: 0.8164 - val_precision: 0.8352 - val_recall: 0.8020 - val_auc: 0.9529 - val_tp: 15722.0000 - val_fp: 3102.0000 - val_tn: 55707.0000 - val_fn: 3881.0000 - lr: 1.0000e-04\n", "Epoch 2/100\n", "30/30 [==============================] - 1s 37ms/step - loss: 0.2996 - accuracy: 0.8859 - precision: 0.9039 - recall: 0.8695 - auc: 0.9841 - tp: 68178.0000 - fp: 7252.0000 - tn: 227984.0000 - fn: 10234.0000 - val_loss: 0.5626 - val_accuracy: 0.8164 - val_precision: 0.8357 - val_recall: 0.8019 - val_auc: 0.9528 - val_tp: 15719.0000 - val_fp: 3091.0000 - val_tn: 55718.0000 - val_fn: 3884.0000 - lr: 1.0000e-04\n", "Epoch 3/100\n", "30/30 [==============================] - 1s 41ms/step - loss: 0.2994 - accuracy: 0.8860 - precision: 0.9041 - recall: 0.8691 - auc: 0.9842 - tp: 68144.0000 - fp: 7225.0000 - tn: 228011.0000 - fn: 10268.0000 - val_loss: 0.5631 - val_accuracy: 0.8162 - val_precision: 0.8349 - val_recall: 0.8017 - val_auc: 0.9528 - val_tp: 15716.0000 - val_fp: 3108.0000 - val_tn: 55701.0000 - val_fn: 3887.0000 - lr: 1.0000e-04\n", "Epoch 4/100\n", "30/30 [==============================] - 1s 39ms/step - loss: 0.2991 - accuracy: 0.8862 - precision: 0.9040 - recall: 0.8694 - auc: 0.9842 - tp: 68169.0000 - fp: 7242.0000 - tn: 227994.0000 - fn: 10243.0000 - val_loss: 0.5638 - val_accuracy: 0.8163 - val_precision: 0.8351 - val_recall: 0.8019 - val_auc: 0.9528 - val_tp: 15720.0000 - val_fp: 3105.0000 - val_tn: 55704.0000 - val_fn: 3883.0000 - lr: 1.0000e-04\n", "Epoch 5/100\n", "30/30 [==============================] - 1s 36ms/step - loss: 0.2989 - accuracy: 0.8863 - precision: 0.9043 - recall: 0.8698 - auc: 0.9842 - tp: 68201.0000 - fp: 7218.0000 - tn: 228018.0000 - fn: 10211.0000 - val_loss: 0.5643 - val_accuracy: 0.8164 - val_precision: 0.8353 - val_recall: 0.8022 - val_auc: 0.9528 - val_tp: 15725.0000 - val_fp: 3100.0000 - val_tn: 55709.0000 - val_fn: 3878.0000 - lr: 1.0000e-04\n", "Epoch 6/100\n", "30/30 [==============================] - 1s 38ms/step - loss: 0.2986 - accuracy: 0.8863 - precision: 0.9042 - recall: 0.8699 - auc: 0.9842 - tp: 68207.0000 - fp: 7226.0000 - tn: 228010.0000 - fn: 10205.0000 - val_loss: 0.5650 - val_accuracy: 0.8163 - val_precision: 0.8345 - val_recall: 0.8020 - val_auc: 0.9526 - val_tp: 15722.0000 - val_fp: 3117.0000 - val_tn: 55692.0000 - val_fn: 3881.0000 - lr: 1.0000e-04\n", "Epoch 7/100\n", "30/30 [==============================] - 1s 38ms/step - loss: 0.2984 - accuracy: 0.8866 - precision: 0.9044 - recall: 0.8700 - auc: 0.9843 - tp: 68216.0000 - fp: 7214.0000 - tn: 228022.0000 - fn: 10196.0000 - val_loss: 0.5655 - val_accuracy: 0.8160 - val_precision: 0.8350 - val_recall: 0.8019 - val_auc: 0.9526 - val_tp: 15719.0000 - val_fp: 3106.0000 - val_tn: 55703.0000 - val_fn: 3884.0000 - lr: 1.0000e-04\n", "Epoch 8/100\n", "30/30 [==============================] - 1s 46ms/step - loss: 0.2982 - accuracy: 0.8866 - precision: 0.9044 - recall: 0.8701 - auc: 0.9843 - tp: 68230.0000 - fp: 7213.0000 - tn: 228023.0000 - fn: 10182.0000 - val_loss: 0.5660 - val_accuracy: 0.8165 - val_precision: 0.8351 - val_recall: 0.8023 - val_auc: 0.9526 - val_tp: 15727.0000 - val_fp: 3105.0000 - val_tn: 
55704.0000 - val_fn: 3876.0000 - lr: 1.0000e-04\n", "Epoch 9/100\n", "30/30 [==============================] - 1s 43ms/step - loss: 0.2980 - accuracy: 0.8865 - precision: 0.9044 - recall: 0.8701 - auc: 0.9843 - tp: 68229.0000 - fp: 7216.0000 - tn: 228020.0000 - fn: 10183.0000 - val_loss: 0.5667 - val_accuracy: 0.8160 - val_precision: 0.8344 - val_recall: 0.8021 - val_auc: 0.9525 - val_tp: 15724.0000 - val_fp: 3120.0000 - val_tn: 55689.0000 - val_fn: 3879.0000 - lr: 1.0000e-04\n", "Epoch 10/100\n", "30/30 [==============================] - 1s 42ms/step - loss: 0.2977 - accuracy: 0.8864 - precision: 0.9043 - recall: 0.8700 - auc: 0.9843 - tp: 68217.0000 - fp: 7222.0000 - tn: 228014.0000 - fn: 10195.0000 - val_loss: 0.5673 - val_accuracy: 0.8157 - val_precision: 0.8344 - val_recall: 0.8022 - val_auc: 0.9525 - val_tp: 15725.0000 - val_fp: 3122.0000 - val_tn: 55687.0000 - val_fn: 3878.0000 - lr: 1.0000e-04\n", "Epoch 11/100\n", "30/30 [==============================] - 1s 40ms/step - loss: 0.2975 - accuracy: 0.8870 - precision: 0.9046 - recall: 0.8702 - auc: 0.9843 - tp: 68236.0000 - fp: 7199.0000 - tn: 228037.0000 - fn: 10176.0000 - val_loss: 0.5678 - val_accuracy: 0.8162 - val_precision: 0.8349 - val_recall: 0.8020 - val_auc: 0.9525 - val_tp: 15722.0000 - val_fp: 3109.0000 - val_tn: 55700.0000 - val_fn: 3881.0000 - lr: 1.0000e-04\n", "Epoch 12/100\n", "30/30 [==============================] - 1s 49ms/step - loss: 0.2973 - accuracy: 0.8869 - precision: 0.9047 - recall: 0.8703 - auc: 0.9844 - tp: 68241.0000 - fp: 7188.0000 - tn: 228048.0000 - fn: 10171.0000 - val_loss: 0.5684 - val_accuracy: 0.8160 - val_precision: 0.8347 - val_recall: 0.8022 - val_auc: 0.9524 - val_tp: 15725.0000 - val_fp: 3115.0000 - val_tn: 55694.0000 - val_fn: 3878.0000 - lr: 1.0000e-04\n", "Epoch 13/100\n", "30/30 [==============================] - 2s 55ms/step - loss: 0.2971 - accuracy: 0.8869 - precision: 0.9047 - recall: 0.8707 - auc: 0.9844 - tp: 68270.0000 - fp: 7194.0000 - tn: 228042.0000 - fn: 10142.0000 - val_loss: 0.5690 - val_accuracy: 0.8160 - val_precision: 0.8348 - val_recall: 0.8026 - val_auc: 0.9524 - val_tp: 15734.0000 - val_fp: 3114.0000 - val_tn: 55695.0000 - val_fn: 3869.0000 - lr: 1.0000e-04\n", "Epoch 14/100\n", "30/30 [==============================] - 1s 40ms/step - loss: 0.2968 - accuracy: 0.8870 - precision: 0.9047 - recall: 0.8704 - auc: 0.9844 - tp: 68252.0000 - fp: 7189.0000 - tn: 228047.0000 - fn: 10160.0000 - val_loss: 0.5696 - val_accuracy: 0.8163 - val_precision: 0.8345 - val_recall: 0.8020 - val_auc: 0.9523 - val_tp: 15722.0000 - val_fp: 3117.0000 - val_tn: 55692.0000 - val_fn: 3881.0000 - lr: 1.0000e-04\n", "Epoch 15/100\n", "30/30 [==============================] - 1s 41ms/step - loss: 0.2966 - accuracy: 0.8870 - precision: 0.9046 - recall: 0.8706 - auc: 0.9844 - tp: 68265.0000 - fp: 7203.0000 - tn: 228033.0000 - fn: 10147.0000 - val_loss: 0.5702 - val_accuracy: 0.8160 - val_precision: 0.8346 - val_recall: 0.8017 - val_auc: 0.9523 - val_tp: 15716.0000 - val_fp: 3114.0000 - val_tn: 55695.0000 - val_fn: 3887.0000 - lr: 1.0000e-04\n", "Epoch 16/100\n", "30/30 [==============================] - 1s 39ms/step - loss: 0.2964 - accuracy: 0.8873 - precision: 0.9050 - recall: 0.8707 - auc: 0.9844 - tp: 68275.0000 - fp: 7165.0000 - tn: 228071.0000 - fn: 10137.0000 - val_loss: 0.5708 - val_accuracy: 0.8160 - val_precision: 0.8347 - val_recall: 0.8021 - val_auc: 0.9522 - val_tp: 15723.0000 - val_fp: 3113.0000 - val_tn: 55696.0000 - val_fn: 3880.0000 - lr: 1.0000e-04\n", "Epoch 17/100\n", "30/30 
[==============================] - 1s 37ms/step - loss: 0.2962 - accuracy: 0.8871 - precision: 0.9046 - recall: 0.8706 - auc: 0.9845 - tp: 68265.0000 - fp: 7196.0000 - tn: 228040.0000 - fn: 10147.0000 - val_loss: 0.5713 - val_accuracy: 0.8160 - val_precision: 0.8343 - val_recall: 0.8022 - val_auc: 0.9522 - val_tp: 15725.0000 - val_fp: 3123.0000 - val_tn: 55686.0000 - val_fn: 3878.0000 - lr: 1.0000e-04\n", "Epoch 18/100\n", "30/30 [==============================] - 1s 35ms/step - loss: 0.2960 - accuracy: 0.8871 - precision: 0.9048 - recall: 0.8710 - auc: 0.9845 - tp: 68294.0000 - fp: 7186.0000 - tn: 228050.0000 - fn: 10118.0000 - val_loss: 0.5719 - val_accuracy: 0.8160 - val_precision: 0.8342 - val_recall: 0.8019 - val_auc: 0.9522 - val_tp: 15720.0000 - val_fp: 3124.0000 - val_tn: 55685.0000 - val_fn: 3883.0000 - lr: 1.0000e-04\n", "Epoch 19/100\n", "30/30 [==============================] - 1s 36ms/step - loss: 0.2958 - accuracy: 0.8871 - precision: 0.9048 - recall: 0.8708 - auc: 0.9845 - tp: 68284.0000 - fp: 7183.0000 - tn: 228053.0000 - fn: 10128.0000 - val_loss: 0.5724 - val_accuracy: 0.8157 - val_precision: 0.8341 - val_recall: 0.8022 - val_auc: 0.9521 - val_tp: 15725.0000 - val_fp: 3128.0000 - val_tn: 55681.0000 - val_fn: 3878.0000 - lr: 1.0000e-04\n", "Epoch 20/100\n", "30/30 [==============================] - 1s 35ms/step - loss: 0.2955 - accuracy: 0.8873 - precision: 0.9049 - recall: 0.8709 - auc: 0.9845 - tp: 68291.0000 - fp: 7174.0000 - tn: 228062.0000 - fn: 10121.0000 - val_loss: 0.5731 - val_accuracy: 0.8160 - val_precision: 0.8342 - val_recall: 0.8021 - val_auc: 0.9521 - val_tp: 15724.0000 - val_fp: 3125.0000 - val_tn: 55684.0000 - val_fn: 3879.0000 - lr: 1.0000e-04\n", "Epoch 21/100\n", "30/30 [==============================] - 1s 35ms/step - loss: 0.2954 - accuracy: 0.8875 - precision: 0.9049 - recall: 0.8711 - auc: 0.9846 - tp: 68306.0000 - fp: 7178.0000 - tn: 228058.0000 - fn: 10106.0000 - val_loss: 0.5736 - val_accuracy: 0.8156 - val_precision: 0.8338 - val_recall: 0.8018 - val_auc: 0.9520 - val_tp: 15717.0000 - val_fp: 3132.0000 - val_tn: 55677.0000 - val_fn: 3886.0000 - lr: 1.0000e-04\n", "Epoch 22/100\n", "30/30 [==============================] - 1s 37ms/step - loss: 0.2952 - accuracy: 0.8873 - precision: 0.9050 - recall: 0.8712 - auc: 0.9846 - tp: 68314.0000 - fp: 7171.0000 - tn: 228065.0000 - fn: 10098.0000 - val_loss: 0.5743 - val_accuracy: 0.8153 - val_precision: 0.8338 - val_recall: 0.8018 - val_auc: 0.9520 - val_tp: 15717.0000 - val_fp: 3133.0000 - val_tn: 55676.0000 - val_fn: 3886.0000 - lr: 1.0000e-04\n", "Epoch 23/100\n", "30/30 [==============================] - 1s 39ms/step - loss: 0.2950 - accuracy: 0.8876 - precision: 0.9051 - recall: 0.8711 - auc: 0.9846 - tp: 68302.0000 - fp: 7163.0000 - tn: 228073.0000 - fn: 10110.0000 - val_loss: 0.5748 - val_accuracy: 0.8155 - val_precision: 0.8336 - val_recall: 0.8019 - val_auc: 0.9520 - val_tp: 15719.0000 - val_fp: 3138.0000 - val_tn: 55671.0000 - val_fn: 3884.0000 - lr: 1.0000e-04\n", "Epoch 24/100\n", "30/30 [==============================] - 1s 35ms/step - loss: 0.2947 - accuracy: 0.8876 - precision: 0.9052 - recall: 0.8714 - auc: 0.9846 - tp: 68331.0000 - fp: 7159.0000 - tn: 228077.0000 - fn: 10081.0000 - val_loss: 0.5754 - val_accuracy: 0.8152 - val_precision: 0.8331 - val_recall: 0.8019 - val_auc: 0.9519 - val_tp: 15720.0000 - val_fp: 3150.0000 - val_tn: 55659.0000 - val_fn: 3883.0000 - lr: 1.0000e-04\n", "Epoch 25/100\n", "30/30 [==============================] - 1s 37ms/step - loss: 0.2945 - accuracy: 0.8875 
- precision: 0.9050 - recall: 0.8714 - auc: 0.9846 - tp: 68329.0000 - fp: 7170.0000 - tn: 228066.0000 - fn: 10083.0000 - val_loss: 0.5759 - val_accuracy: 0.8155 - val_precision: 0.8333 - val_recall: 0.8020 - val_auc: 0.9518 - val_tp: 15722.0000 - val_fp: 3145.0000 - val_tn: 55664.0000 - val_fn: 3881.0000 - lr: 1.0000e-04\n", "Epoch 26/100\n", "30/30 [==============================] - 1s 35ms/step - loss: 0.2943 - accuracy: 0.8876 - precision: 0.9051 - recall: 0.8715 - auc: 0.9847 - tp: 68333.0000 - fp: 7165.0000 - tn: 228071.0000 - fn: 10079.0000 - val_loss: 0.5766 - val_accuracy: 0.8154 - val_precision: 0.8335 - val_recall: 0.8019 - val_auc: 0.9518 - val_tp: 15719.0000 - val_fp: 3141.0000 - val_tn: 55668.0000 - val_fn: 3884.0000 - lr: 1.0000e-04\n", "Epoch 27/100\n", "30/30 [==============================] - 1s 35ms/step - loss: 0.2941 - accuracy: 0.8875 - precision: 0.9053 - recall: 0.8714 - auc: 0.9847 - tp: 68332.0000 - fp: 7146.0000 - tn: 228090.0000 - fn: 10080.0000 - val_loss: 0.5771 - val_accuracy: 0.8151 - val_precision: 0.8334 - val_recall: 0.8017 - val_auc: 0.9517 - val_tp: 15716.0000 - val_fp: 3142.0000 - val_tn: 55667.0000 - val_fn: 3887.0000 - lr: 1.0000e-04\n", "Epoch 28/100\n", "30/30 [==============================] - 1s 39ms/step - loss: 0.2939 - accuracy: 0.8877 - precision: 0.9052 - recall: 0.8716 - auc: 0.9847 - tp: 68345.0000 - fp: 7159.0000 - tn: 228077.0000 - fn: 10067.0000 - val_loss: 0.5777 - val_accuracy: 0.8152 - val_precision: 0.8334 - val_recall: 0.8019 - val_auc: 0.9517 - val_tp: 15720.0000 - val_fp: 3142.0000 - val_tn: 55667.0000 - val_fn: 3883.0000 - lr: 1.0000e-04\n", "Epoch 29/100\n", "30/30 [==============================] - 1s 34ms/step - loss: 0.2937 - accuracy: 0.8876 - precision: 0.9051 - recall: 0.8717 - auc: 0.9847 - tp: 68350.0000 - fp: 7164.0000 - tn: 228072.0000 - fn: 10062.0000 - val_loss: 0.5782 - val_accuracy: 0.8156 - val_precision: 0.8334 - val_recall: 0.8019 - val_auc: 0.9517 - val_tp: 15720.0000 - val_fp: 3142.0000 - val_tn: 55667.0000 - val_fn: 3883.0000 - lr: 1.0000e-04\n", "Epoch 30/100\n", "30/30 [==============================] - 1s 34ms/step - loss: 0.2935 - accuracy: 0.8877 - precision: 0.9055 - recall: 0.8717 - auc: 0.9847 - tp: 68348.0000 - fp: 7132.0000 - tn: 228104.0000 - fn: 10064.0000 - val_loss: 0.5788 - val_accuracy: 0.8153 - val_precision: 0.8336 - val_recall: 0.8015 - val_auc: 0.9516 - val_tp: 15712.0000 - val_fp: 3136.0000 - val_tn: 55673.0000 - val_fn: 3891.0000 - lr: 1.0000e-04\n", "Epoch 31/100\n", "30/30 [==============================] - 1s 36ms/step - loss: 0.2933 - accuracy: 0.8881 - precision: 0.9054 - recall: 0.8720 - auc: 0.9847 - tp: 68374.0000 - fp: 7142.0000 - tn: 228094.0000 - fn: 10038.0000 - val_loss: 0.5796 - val_accuracy: 0.8147 - val_precision: 0.8333 - val_recall: 0.8019 - val_auc: 0.9515 - val_tp: 15719.0000 - val_fp: 3145.0000 - val_tn: 55664.0000 - val_fn: 3884.0000 - lr: 1.0000e-04\n", "Epoch 32/100\n", "30/30 [==============================] - 1s 37ms/step - loss: 0.2932 - accuracy: 0.8880 - precision: 0.9054 - recall: 0.8718 - auc: 0.9848 - tp: 68363.0000 - fp: 7147.0000 - tn: 228089.0000 - fn: 10049.0000 - val_loss: 0.5800 - val_accuracy: 0.8153 - val_precision: 0.8333 - val_recall: 0.8020 - val_auc: 0.9514 - val_tp: 15721.0000 - val_fp: 3144.0000 - val_tn: 55665.0000 - val_fn: 3882.0000 - lr: 1.0000e-04\n", "Epoch 33/100\n", "30/30 [==============================] - 1s 35ms/step - loss: 0.2929 - accuracy: 0.8882 - precision: 0.9055 - recall: 0.8719 - auc: 0.9848 - tp: 68366.0000 - fp: 
7132.0000 - tn: 228104.0000 - fn: 10046.0000 - val_loss: 0.5806 - val_accuracy: 0.8153 - val_precision: 0.8329 - val_recall: 0.8014 - val_auc: 0.9515 - val_tp: 15710.0000 - val_fp: 3151.0000 - val_tn: 55658.0000 - val_fn: 3893.0000 - lr: 1.0000e-04\n", "Epoch 34/100\n", "30/30 [==============================] - 1s 37ms/step - loss: 0.2927 - accuracy: 0.8882 - precision: 0.9054 - recall: 0.8721 - auc: 0.9848 - tp: 68384.0000 - fp: 7146.0000 - tn: 228090.0000 - fn: 10028.0000 - val_loss: 0.5811 - val_accuracy: 0.8152 - val_precision: 0.8331 - val_recall: 0.8020 - val_auc: 0.9514 - val_tp: 15721.0000 - val_fp: 3150.0000 - val_tn: 55659.0000 - val_fn: 3882.0000 - lr: 1.0000e-04\n", "Epoch 35/100\n", "30/30 [==============================] - 1s 34ms/step - loss: 0.2926 - accuracy: 0.8880 - precision: 0.9054 - recall: 0.8721 - auc: 0.9848 - tp: 68383.0000 - fp: 7143.0000 - tn: 228093.0000 - fn: 10029.0000 - val_loss: 0.5817 - val_accuracy: 0.8151 - val_precision: 0.8329 - val_recall: 0.8015 - val_auc: 0.9514 - val_tp: 15712.0000 - val_fp: 3152.0000 - val_tn: 55657.0000 - val_fn: 3891.0000 - lr: 1.0000e-04\n", "Epoch 36/100\n", "30/30 [==============================] - 1s 36ms/step - loss: 0.2923 - accuracy: 0.8883 - precision: 0.9057 - recall: 0.8723 - auc: 0.9848 - tp: 68401.0000 - fp: 7118.0000 - tn: 228118.0000 - fn: 10011.0000 - val_loss: 0.5823 - val_accuracy: 0.8152 - val_precision: 0.8334 - val_recall: 0.8014 - val_auc: 0.9513 - val_tp: 15710.0000 - val_fp: 3140.0000 - val_tn: 55669.0000 - val_fn: 3893.0000 - lr: 1.0000e-04\n", "Epoch 37/100\n", "30/30 [==============================] - 1s 36ms/step - loss: 0.2922 - accuracy: 0.8883 - precision: 0.9056 - recall: 0.8720 - auc: 0.9849 - tp: 68378.0000 - fp: 7128.0000 - tn: 228108.0000 - fn: 10034.0000 - val_loss: 0.5829 - val_accuracy: 0.8153 - val_precision: 0.8330 - val_recall: 0.8018 - val_auc: 0.9513 - val_tp: 15717.0000 - val_fp: 3152.0000 - val_tn: 55657.0000 - val_fn: 3886.0000 - lr: 1.0000e-04\n", "Epoch 38/100\n", "30/30 [==============================] - 1s 36ms/step - loss: 0.2920 - accuracy: 0.8883 - precision: 0.9056 - recall: 0.8725 - auc: 0.9849 - tp: 68415.0000 - fp: 7133.0000 - tn: 228103.0000 - fn: 9997.0000 - val_loss: 0.5835 - val_accuracy: 0.8152 - val_precision: 0.8329 - val_recall: 0.8011 - val_auc: 0.9513 - val_tp: 15704.0000 - val_fp: 3150.0000 - val_tn: 55659.0000 - val_fn: 3899.0000 - lr: 1.0000e-04\n", "Epoch 39/100\n", "30/30 [==============================] - 1s 39ms/step - loss: 0.2918 - accuracy: 0.8886 - precision: 0.9056 - recall: 0.8724 - auc: 0.9849 - tp: 68409.0000 - fp: 7133.0000 - tn: 228103.0000 - fn: 10003.0000 - val_loss: 0.5840 - val_accuracy: 0.8153 - val_precision: 0.8334 - val_recall: 0.8016 - val_auc: 0.9512 - val_tp: 15714.0000 - val_fp: 3141.0000 - val_tn: 55668.0000 - val_fn: 3889.0000 - lr: 1.0000e-04\n", "Epoch 40/100\n", "30/30 [==============================] - 1s 42ms/step - loss: 0.2916 - accuracy: 0.8883 - precision: 0.9053 - recall: 0.8725 - auc: 0.9849 - tp: 68412.0000 - fp: 7159.0000 - tn: 228077.0000 - fn: 10000.0000 - val_loss: 0.5846 - val_accuracy: 0.8149 - val_precision: 0.8332 - val_recall: 0.8014 - val_auc: 0.9511 - val_tp: 15710.0000 - val_fp: 3145.0000 - val_tn: 55664.0000 - val_fn: 3893.0000 - lr: 1.0000e-04\n", "Epoch 41/100\n", "30/30 [==============================] - 1s 36ms/step - loss: 0.2915 - accuracy: 0.8885 - precision: 0.9053 - recall: 0.8725 - auc: 0.9849 - tp: 68414.0000 - fp: 7156.0000 - tn: 228080.0000 - fn: 9998.0000 - val_loss: 0.5852 - val_accuracy: 
0.8146 - val_precision: 0.8326 - val_recall: 0.8014 - val_auc: 0.9511 - val_tp: 15709.0000 - val_fp: 3159.0000 - val_tn: 55650.0000 - val_fn: 3894.0000 - lr: 1.0000e-04\n", "Epoch 42/100\n", "30/30 [==============================] - 1s 38ms/step - loss: 0.2913 - accuracy: 0.8883 - precision: 0.9058 - recall: 0.8724 - auc: 0.9850 - tp: 68406.0000 - fp: 7117.0000 - tn: 228119.0000 - fn: 10006.0000 - val_loss: 0.5856 - val_accuracy: 0.8148 - val_precision: 0.8329 - val_recall: 0.8017 - val_auc: 0.9511 - val_tp: 15716.0000 - val_fp: 3153.0000 - val_tn: 55656.0000 - val_fn: 3887.0000 - lr: 1.0000e-04\n", "Epoch 43/100\n", "30/30 [==============================] - 1s 43ms/step - loss: 0.2911 - accuracy: 0.8883 - precision: 0.9054 - recall: 0.8725 - auc: 0.9850 - tp: 68417.0000 - fp: 7149.0000 - tn: 228087.0000 - fn: 9995.0000 - val_loss: 0.5863 - val_accuracy: 0.8148 - val_precision: 0.8328 - val_recall: 0.8015 - val_auc: 0.9510 - val_tp: 15712.0000 - val_fp: 3155.0000 - val_tn: 55654.0000 - val_fn: 3891.0000 - lr: 1.0000e-04\n", "Epoch 44/100\n", "30/30 [==============================] - 1s 49ms/step - loss: 0.2909 - accuracy: 0.8885 - precision: 0.9057 - recall: 0.8727 - auc: 0.9850 - tp: 68427.0000 - fp: 7128.0000 - tn: 228108.0000 - fn: 9985.0000 - val_loss: 0.5868 - val_accuracy: 0.8147 - val_precision: 0.8331 - val_recall: 0.8016 - val_auc: 0.9510 - val_tp: 15714.0000 - val_fp: 3148.0000 - val_tn: 55661.0000 - val_fn: 3889.0000 - lr: 1.0000e-04\n", "Epoch 45/100\n", "30/30 [==============================] - 1s 36ms/step - loss: 0.2907 - accuracy: 0.8888 - precision: 0.9059 - recall: 0.8732 - auc: 0.9850 - tp: 68472.0000 - fp: 7115.0000 - tn: 228121.0000 - fn: 9940.0000 - val_loss: 0.5874 - val_accuracy: 0.8148 - val_precision: 0.8327 - val_recall: 0.8011 - val_auc: 0.9509 - val_tp: 15704.0000 - val_fp: 3156.0000 - val_tn: 55653.0000 - val_fn: 3899.0000 - lr: 1.0000e-04\n", "Epoch 46/100\n", "30/30 [==============================] - 1s 35ms/step - loss: 0.2906 - accuracy: 0.8888 - precision: 0.9056 - recall: 0.8730 - auc: 0.9850 - tp: 68457.0000 - fp: 7135.0000 - tn: 228101.0000 - fn: 9955.0000 - val_loss: 0.5881 - val_accuracy: 0.8147 - val_precision: 0.8321 - val_recall: 0.8010 - val_auc: 0.9509 - val_tp: 15702.0000 - val_fp: 3168.0000 - val_tn: 55641.0000 - val_fn: 3901.0000 - lr: 1.0000e-04\n", "Epoch 47/100\n", "30/30 [==============================] - 1s 39ms/step - loss: 0.2903 - accuracy: 0.8887 - precision: 0.9059 - recall: 0.8729 - auc: 0.9850 - tp: 68449.0000 - fp: 7110.0000 - tn: 228126.0000 - fn: 9963.0000 - val_loss: 0.5885 - val_accuracy: 0.8145 - val_precision: 0.8327 - val_recall: 0.8014 - val_auc: 0.9509 - val_tp: 15710.0000 - val_fp: 3156.0000 - val_tn: 55653.0000 - val_fn: 3893.0000 - lr: 1.0000e-04\n", "Epoch 48/100\n", "30/30 [==============================] - 1s 34ms/step - loss: 0.2901 - accuracy: 0.8887 - precision: 0.9060 - recall: 0.8731 - auc: 0.9851 - tp: 68460.0000 - fp: 7106.0000 - tn: 228130.0000 - fn: 9952.0000 - val_loss: 0.5892 - val_accuracy: 0.8147 - val_precision: 0.8324 - val_recall: 0.8011 - val_auc: 0.9508 - val_tp: 15704.0000 - val_fp: 3163.0000 - val_tn: 55646.0000 - val_fn: 3899.0000 - lr: 1.0000e-04\n", "Epoch 49/100\n", "30/30 [==============================] - 1s 37ms/step - loss: 0.2900 - accuracy: 0.8887 - precision: 0.9058 - recall: 0.8730 - auc: 0.9851 - tp: 68453.0000 - fp: 7115.0000 - tn: 228121.0000 - fn: 9959.0000 - val_loss: 0.5897 - val_accuracy: 0.8146 - val_precision: 0.8327 - val_recall: 0.8012 - val_auc: 0.9508 - val_tp: 
15706.0000 - val_fp: 3156.0000 - val_tn: 55653.0000 - val_fn: 3897.0000 - lr: 1.0000e-04\n", "Epoch 50/100\n", "30/30 [==============================] - 1s 42ms/step - loss: 0.2898 - accuracy: 0.8889 - precision: 0.9061 - recall: 0.8728 - auc: 0.9851 - tp: 68441.0000 - fp: 7092.0000 - tn: 228144.0000 - fn: 9971.0000 - val_loss: 0.5903 - val_accuracy: 0.8145 - val_precision: 0.8328 - val_recall: 0.8011 - val_auc: 0.9508 - val_tp: 15704.0000 - val_fp: 3153.0000 - val_tn: 55656.0000 - val_fn: 3899.0000 - lr: 1.0000e-04\n", "Epoch 51/100\n", "30/30 [==============================] - 1s 38ms/step - loss: 0.2897 - accuracy: 0.8888 - precision: 0.9059 - recall: 0.8729 - auc: 0.9851 - tp: 68449.0000 - fp: 7108.0000 - tn: 228128.0000 - fn: 9963.0000 - val_loss: 0.5908 - val_accuracy: 0.8147 - val_precision: 0.8323 - val_recall: 0.8009 - val_auc: 0.9508 - val_tp: 15701.0000 - val_fp: 3163.0000 - val_tn: 55646.0000 - val_fn: 3902.0000 - lr: 1.0000e-04\n", "Epoch 52/100\n", "30/30 [==============================] - 1s 41ms/step - loss: 0.2895 - accuracy: 0.8890 - precision: 0.9061 - recall: 0.8732 - auc: 0.9851 - tp: 68467.0000 - fp: 7094.0000 - tn: 228142.0000 - fn: 9945.0000 - val_loss: 0.5915 - val_accuracy: 0.8145 - val_precision: 0.8321 - val_recall: 0.8013 - val_auc: 0.9506 - val_tp: 15708.0000 - val_fp: 3169.0000 - val_tn: 55640.0000 - val_fn: 3895.0000 - lr: 1.0000e-04\n", "Epoch 53/100\n", "30/30 [==============================] - 1s 43ms/step - loss: 0.2893 - accuracy: 0.8888 - precision: 0.9060 - recall: 0.8735 - auc: 0.9852 - tp: 68491.0000 - fp: 7105.0000 - tn: 228131.0000 - fn: 9921.0000 - val_loss: 0.5920 - val_accuracy: 0.8142 - val_precision: 0.8315 - val_recall: 0.8010 - val_auc: 0.9506 - val_tp: 15702.0000 - val_fp: 3183.0000 - val_tn: 55626.0000 - val_fn: 3901.0000 - lr: 1.0000e-04\n", "Epoch 54/100\n", "30/30 [==============================] - 2s 51ms/step - loss: 0.2891 - accuracy: 0.8891 - precision: 0.9063 - recall: 0.8735 - auc: 0.9852 - tp: 68493.0000 - fp: 7084.0000 - tn: 228152.0000 - fn: 9919.0000 - val_loss: 0.5925 - val_accuracy: 0.8143 - val_precision: 0.8321 - val_recall: 0.8014 - val_auc: 0.9506 - val_tp: 15709.0000 - val_fp: 3170.0000 - val_tn: 55639.0000 - val_fn: 3894.0000 - lr: 1.0000e-04\n", "Epoch 55/100\n", "30/30 [==============================] - 1s 35ms/step - loss: 0.2889 - accuracy: 0.8895 - precision: 0.9061 - recall: 0.8736 - auc: 0.9852 - tp: 68500.0000 - fp: 7096.0000 - tn: 228140.0000 - fn: 9912.0000 - val_loss: 0.5930 - val_accuracy: 0.8143 - val_precision: 0.8325 - val_recall: 0.8006 - val_auc: 0.9505 - val_tp: 15695.0000 - val_fp: 3157.0000 - val_tn: 55652.0000 - val_fn: 3908.0000 - lr: 1.0000e-04\n", "Epoch 56/100\n", "30/30 [==============================] - 1s 40ms/step - loss: 0.2888 - accuracy: 0.8892 - precision: 0.9062 - recall: 0.8737 - auc: 0.9852 - tp: 68509.0000 - fp: 7091.0000 - tn: 228145.0000 - fn: 9903.0000 - val_loss: 0.5936 - val_accuracy: 0.8143 - val_precision: 0.8318 - val_recall: 0.8008 - val_auc: 0.9505 - val_tp: 15698.0000 - val_fp: 3175.0000 - val_tn: 55634.0000 - val_fn: 3905.0000 - lr: 1.0000e-04\n", "Epoch 57/100\n", "30/30 [==============================] - 1s 41ms/step - loss: 0.2887 - accuracy: 0.8891 - precision: 0.9060 - recall: 0.8735 - auc: 0.9852 - tp: 68496.0000 - fp: 7105.0000 - tn: 228131.0000 - fn: 9916.0000 - val_loss: 0.5941 - val_accuracy: 0.8143 - val_precision: 0.8320 - val_recall: 0.8008 - val_auc: 0.9505 - val_tp: 15698.0000 - val_fp: 3170.0000 - val_tn: 55639.0000 - val_fn: 3905.0000 - lr: 
1.0000e-04\n", "Epoch 58/100\n", "30/30 [==============================] - 1s 40ms/step - loss: 0.2884 - accuracy: 0.8893 - precision: 0.9063 - recall: 0.8735 - auc: 0.9852 - tp: 68493.0000 - fp: 7079.0000 - tn: 228157.0000 - fn: 9919.0000 - val_loss: 0.5947 - val_accuracy: 0.8141 - val_precision: 0.8319 - val_recall: 0.8008 - val_auc: 0.9504 - val_tp: 15698.0000 - val_fp: 3171.0000 - val_tn: 55638.0000 - val_fn: 3905.0000 - lr: 1.0000e-04\n", "Epoch 59/100\n", "30/30 [==============================] - 1s 38ms/step - loss: 0.2884 - accuracy: 0.8894 - precision: 0.9062 - recall: 0.8737 - auc: 0.9852 - tp: 68511.0000 - fp: 7095.0000 - tn: 228141.0000 - fn: 9901.0000 - val_loss: 0.5953 - val_accuracy: 0.8145 - val_precision: 0.8317 - val_recall: 0.8005 - val_auc: 0.9504 - val_tp: 15693.0000 - val_fp: 3175.0000 - val_tn: 55634.0000 - val_fn: 3910.0000 - lr: 1.0000e-04\n", "Epoch 60/100\n", "30/30 [==============================] - 1s 35ms/step - loss: 0.2881 - accuracy: 0.8894 - precision: 0.9063 - recall: 0.8737 - auc: 0.9853 - tp: 68512.0000 - fp: 7084.0000 - tn: 228152.0000 - fn: 9900.0000 - val_loss: 0.5959 - val_accuracy: 0.8142 - val_precision: 0.8316 - val_recall: 0.8007 - val_auc: 0.9503 - val_tp: 15696.0000 - val_fp: 3179.0000 - val_tn: 55630.0000 - val_fn: 3907.0000 - lr: 1.0000e-04\n", "Epoch 61/100\n", "30/30 [==============================] - 1s 36ms/step - loss: 0.2880 - accuracy: 0.8896 - precision: 0.9063 - recall: 0.8740 - auc: 0.9853 - tp: 68529.0000 - fp: 7089.0000 - tn: 228147.0000 - fn: 9883.0000 - val_loss: 0.5965 - val_accuracy: 0.8140 - val_precision: 0.8321 - val_recall: 0.8001 - val_auc: 0.9502 - val_tp: 15685.0000 - val_fp: 3165.0000 - val_tn: 55644.0000 - val_fn: 3918.0000 - lr: 1.0000e-04\n", "Epoch 62/100\n", "30/30 [==============================] - 1s 37ms/step - loss: 0.2878 - accuracy: 0.8896 - precision: 0.9062 - recall: 0.8739 - auc: 0.9853 - tp: 68523.0000 - fp: 7093.0000 - tn: 228143.0000 - fn: 9889.0000 - val_loss: 0.5970 - val_accuracy: 0.8137 - val_precision: 0.8318 - val_recall: 0.8005 - val_auc: 0.9503 - val_tp: 15692.0000 - val_fp: 3173.0000 - val_tn: 55636.0000 - val_fn: 3911.0000 - lr: 1.0000e-04\n", "Epoch 63/100\n", "30/30 [==============================] - 1s 38ms/step - loss: 0.2877 - accuracy: 0.8895 - precision: 0.9063 - recall: 0.8739 - auc: 0.9853 - tp: 68525.0000 - fp: 7085.0000 - tn: 228151.0000 - fn: 9887.0000 - val_loss: 0.5976 - val_accuracy: 0.8138 - val_precision: 0.8319 - val_recall: 0.7999 - val_auc: 0.9502 - val_tp: 15681.0000 - val_fp: 3168.0000 - val_tn: 55641.0000 - val_fn: 3922.0000 - lr: 1.0000e-04\n", "Epoch 64/100\n", "30/30 [==============================] - 1s 36ms/step - loss: 0.2875 - accuracy: 0.8897 - precision: 0.9065 - recall: 0.8742 - auc: 0.9853 - tp: 68545.0000 - fp: 7069.0000 - tn: 228167.0000 - fn: 9867.0000 - val_loss: 0.5981 - val_accuracy: 0.8138 - val_precision: 0.8317 - val_recall: 0.8001 - val_auc: 0.9502 - val_tp: 15685.0000 - val_fp: 3175.0000 - val_tn: 55634.0000 - val_fn: 3918.0000 - lr: 1.0000e-04\n", "Epoch 65/100\n", "30/30 [==============================] - 1s 35ms/step - loss: 0.2873 - accuracy: 0.8899 - precision: 0.9065 - recall: 0.8742 - auc: 0.9854 - tp: 68546.0000 - fp: 7067.0000 - tn: 228169.0000 - fn: 9866.0000 - val_loss: 0.5986 - val_accuracy: 0.8141 - val_precision: 0.8316 - val_recall: 0.8002 - val_auc: 0.9501 - val_tp: 15687.0000 - val_fp: 3176.0000 - val_tn: 55633.0000 - val_fn: 3916.0000 - lr: 1.0000e-04\n", "Epoch 66/100\n", "30/30 [==============================] - 1s 40ms/step - 
loss: 0.2872 - accuracy: 0.8898 - precision: 0.9064 - recall: 0.8742 - auc: 0.9854 - tp: 68544.0000 - fp: 7078.0000 - tn: 228158.0000 - fn: 9868.0000 - val_loss: 0.5993 - val_accuracy: 0.8137 - val_precision: 0.8309 - val_recall: 0.8006 - val_auc: 0.9501 - val_tp: 15694.0000 - val_fp: 3193.0000 - val_tn: 55616.0000 - val_fn: 3909.0000 - lr: 1.0000e-04\n", "Epoch 67/100\n", "30/30 [==============================] - 1s 41ms/step - loss: 0.2871 - accuracy: 0.8898 - precision: 0.9064 - recall: 0.8743 - auc: 0.9854 - tp: 68554.0000 - fp: 7079.0000 - tn: 228157.0000 - fn: 9858.0000 - val_loss: 0.5997 - val_accuracy: 0.8135 - val_precision: 0.8313 - val_recall: 0.8005 - val_auc: 0.9500 - val_tp: 15693.0000 - val_fp: 3185.0000 - val_tn: 55624.0000 - val_fn: 3910.0000 - lr: 1.0000e-04\n", "Epoch 68/100\n", "30/30 [==============================] - 1s 35ms/step - loss: 0.2869 - accuracy: 0.8900 - precision: 0.9064 - recall: 0.8744 - auc: 0.9854 - tp: 68567.0000 - fp: 7079.0000 - tn: 228157.0000 - fn: 9845.0000 - val_loss: 0.6003 - val_accuracy: 0.8137 - val_precision: 0.8314 - val_recall: 0.7999 - val_auc: 0.9500 - val_tp: 15681.0000 - val_fp: 3180.0000 - val_tn: 55629.0000 - val_fn: 3922.0000 - lr: 1.0000e-04\n", "Epoch 69/100\n", "30/30 [==============================] - 1s 33ms/step - loss: 0.2868 - accuracy: 0.8901 - precision: 0.9067 - recall: 0.8746 - auc: 0.9854 - tp: 68579.0000 - fp: 7055.0000 - tn: 228181.0000 - fn: 9833.0000 - val_loss: 0.6010 - val_accuracy: 0.8139 - val_precision: 0.8313 - val_recall: 0.8002 - val_auc: 0.9499 - val_tp: 15687.0000 - val_fp: 3183.0000 - val_tn: 55626.0000 - val_fn: 3916.0000 - lr: 1.0000e-04\n", "Epoch 70/100\n", "30/30 [==============================] - 1s 36ms/step - loss: 0.2866 - accuracy: 0.8902 - precision: 0.9065 - recall: 0.8745 - auc: 0.9854 - tp: 68570.0000 - fp: 7074.0000 - tn: 228162.0000 - fn: 9842.0000 - val_loss: 0.6015 - val_accuracy: 0.8137 - val_precision: 0.8310 - val_recall: 0.8004 - val_auc: 0.9499 - val_tp: 15690.0000 - val_fp: 3190.0000 - val_tn: 55619.0000 - val_fn: 3913.0000 - lr: 1.0000e-04\n", "Epoch 71/100\n", "30/30 [==============================] - 1s 34ms/step - loss: 0.2864 - accuracy: 0.8900 - precision: 0.9066 - recall: 0.8743 - auc: 0.9854 - tp: 68559.0000 - fp: 7064.0000 - tn: 228172.0000 - fn: 9853.0000 - val_loss: 0.6020 - val_accuracy: 0.8139 - val_precision: 0.8310 - val_recall: 0.8001 - val_auc: 0.9498 - val_tp: 15685.0000 - val_fp: 3190.0000 - val_tn: 55619.0000 - val_fn: 3918.0000 - lr: 1.0000e-04\n", "Epoch 72/100\n", "30/30 [==============================] - 1s 33ms/step - loss: 0.2863 - accuracy: 0.8903 - precision: 0.9069 - recall: 0.8747 - auc: 0.9854 - tp: 68587.0000 - fp: 7040.0000 - tn: 228196.0000 - fn: 9825.0000 - val_loss: 0.6024 - val_accuracy: 0.8137 - val_precision: 0.8311 - val_recall: 0.8007 - val_auc: 0.9498 - val_tp: 15697.0000 - val_fp: 3190.0000 - val_tn: 55619.0000 - val_fn: 3906.0000 - lr: 1.0000e-04\n", "Epoch 73/100\n", "30/30 [==============================] - 1s 33ms/step - loss: 0.2861 - accuracy: 0.8904 - precision: 0.9069 - recall: 0.8749 - auc: 0.9855 - tp: 68606.0000 - fp: 7042.0000 - tn: 228194.0000 - fn: 9806.0000 - val_loss: 0.6031 - val_accuracy: 0.8139 - val_precision: 0.8306 - val_recall: 0.8007 - val_auc: 0.9497 - val_tp: 15696.0000 - val_fp: 3201.0000 - val_tn: 55608.0000 - val_fn: 3907.0000 - lr: 1.0000e-04\n", "Epoch 74/100\n", "30/30 [==============================] - 1s 40ms/step - loss: 0.2860 - accuracy: 0.8905 - precision: 0.9070 - recall: 0.8744 - auc: 0.9855 - tp: 
68560.0000 - fp: 7029.0000 - tn: 228207.0000 - fn: 9852.0000 - val_loss: 0.6036 - val_accuracy: 0.8139 - val_precision: 0.8311 - val_recall: 0.7999 - val_auc: 0.9496 - val_tp: 15681.0000 - val_fp: 3187.0000 - val_tn: 55622.0000 - val_fn: 3922.0000 - lr: 1.0000e-04\n", "Epoch 75/100\n", "30/30 [==============================] - 1s 42ms/step - loss: 0.2858 - accuracy: 0.8906 - precision: 0.9070 - recall: 0.8748 - auc: 0.9855 - tp: 68595.0000 - fp: 7037.0000 - tn: 228199.0000 - fn: 9817.0000 - val_loss: 0.6042 - val_accuracy: 0.8139 - val_precision: 0.8305 - val_recall: 0.8002 - val_auc: 0.9495 - val_tp: 15687.0000 - val_fp: 3201.0000 - val_tn: 55608.0000 - val_fn: 3916.0000 - lr: 1.0000e-04\n", "Epoch 76/100\n", "30/30 [==============================] - 1s 34ms/step - loss: 0.2857 - accuracy: 0.8907 - precision: 0.9069 - recall: 0.8751 - auc: 0.9855 - tp: 68618.0000 - fp: 7042.0000 - tn: 228194.0000 - fn: 9794.0000 - val_loss: 0.6047 - val_accuracy: 0.8139 - val_precision: 0.8312 - val_recall: 0.8001 - val_auc: 0.9495 - val_tp: 15684.0000 - val_fp: 3184.0000 - val_tn: 55625.0000 - val_fn: 3919.0000 - lr: 1.0000e-04\n", "Epoch 77/100\n", "30/30 [==============================] - 1s 41ms/step - loss: 0.2856 - accuracy: 0.8907 - precision: 0.9070 - recall: 0.8750 - auc: 0.9855 - tp: 68611.0000 - fp: 7039.0000 - tn: 228197.0000 - fn: 9801.0000 - val_loss: 0.6053 - val_accuracy: 0.8135 - val_precision: 0.8305 - val_recall: 0.8001 - val_auc: 0.9495 - val_tp: 15685.0000 - val_fp: 3202.0000 - val_tn: 55607.0000 - val_fn: 3918.0000 - lr: 1.0000e-04\n", "Epoch 78/100\n", "30/30 [==============================] - 1s 40ms/step - loss: 0.2854 - accuracy: 0.8910 - precision: 0.9069 - recall: 0.8753 - auc: 0.9855 - tp: 68634.0000 - fp: 7045.0000 - tn: 228191.0000 - fn: 9778.0000 - val_loss: 0.6058 - val_accuracy: 0.8133 - val_precision: 0.8304 - val_recall: 0.8004 - val_auc: 0.9494 - val_tp: 15691.0000 - val_fp: 3204.0000 - val_tn: 55605.0000 - val_fn: 3912.0000 - lr: 1.0000e-04\n", "Epoch 79/100\n", "30/30 [==============================] - 1s 38ms/step - loss: 0.2852 - accuracy: 0.8908 - precision: 0.9072 - recall: 0.8752 - auc: 0.9855 - tp: 68624.0000 - fp: 7017.0000 - tn: 228219.0000 - fn: 9788.0000 - val_loss: 0.6063 - val_accuracy: 0.8133 - val_precision: 0.8303 - val_recall: 0.8004 - val_auc: 0.9494 - val_tp: 15691.0000 - val_fp: 3208.0000 - val_tn: 55601.0000 - val_fn: 3912.0000 - lr: 1.0000e-04\n", "Epoch 80/100\n", "30/30 [==============================] - 1s 38ms/step - loss: 0.2851 - accuracy: 0.8908 - precision: 0.9071 - recall: 0.8751 - auc: 0.9855 - tp: 68621.0000 - fp: 7030.0000 - tn: 228206.0000 - fn: 9791.0000 - val_loss: 0.6070 - val_accuracy: 0.8134 - val_precision: 0.8304 - val_recall: 0.8005 - val_auc: 0.9493 - val_tp: 15692.0000 - val_fp: 3206.0000 - val_tn: 55603.0000 - val_fn: 3911.0000 - lr: 1.0000e-04\n", "Epoch 81/100\n", "30/30 [==============================] - 1s 38ms/step - loss: 0.2849 - accuracy: 0.8910 - precision: 0.9071 - recall: 0.8757 - auc: 0.9856 - tp: 68666.0000 - fp: 7035.0000 - tn: 228201.0000 - fn: 9746.0000 - val_loss: 0.6074 - val_accuracy: 0.8131 - val_precision: 0.8300 - val_recall: 0.8001 - val_auc: 0.9492 - val_tp: 15684.0000 - val_fp: 3212.0000 - val_tn: 55597.0000 - val_fn: 3919.0000 - lr: 1.0000e-04\n", "Epoch 82/100\n", "30/30 [==============================] - 1s 39ms/step - loss: 0.2848 - accuracy: 0.8909 - precision: 0.9070 - recall: 0.8754 - auc: 0.9856 - tp: 68643.0000 - fp: 7038.0000 - tn: 228198.0000 - fn: 9769.0000 - val_loss: 0.6080 - 
val_accuracy: 0.8134 - val_precision: 0.8307 - val_recall: 0.8000 - val_auc: 0.9492 - val_tp: 15682.0000 - val_fp: 3195.0000 - val_tn: 55614.0000 - val_fn: 3921.0000 - lr: 1.0000e-04\n", "Epoch 83/100\n", "30/30 [==============================] - 1s 38ms/step - loss: 0.2847 - accuracy: 0.8910 - precision: 0.9071 - recall: 0.8756 - auc: 0.9856 - tp: 68654.0000 - fp: 7030.0000 - tn: 228206.0000 - fn: 9758.0000 - val_loss: 0.6085 - val_accuracy: 0.8132 - val_precision: 0.8299 - val_recall: 0.8003 - val_auc: 0.9492 - val_tp: 15689.0000 - val_fp: 3216.0000 - val_tn: 55593.0000 - val_fn: 3914.0000 - lr: 1.0000e-04\n", "Epoch 84/100\n", "30/30 [==============================] - 1s 40ms/step - loss: 0.2845 - accuracy: 0.8911 - precision: 0.9074 - recall: 0.8756 - auc: 0.9856 - tp: 68660.0000 - fp: 7010.0000 - tn: 228226.0000 - fn: 9752.0000 - val_loss: 0.6090 - val_accuracy: 0.8131 - val_precision: 0.8304 - val_recall: 0.8003 - val_auc: 0.9491 - val_tp: 15688.0000 - val_fp: 3203.0000 - val_tn: 55606.0000 - val_fn: 3915.0000 - lr: 1.0000e-04\n", "Epoch 85/100\n", "30/30 [==============================] - 1s 38ms/step - loss: 0.2844 - accuracy: 0.8912 - precision: 0.9073 - recall: 0.8758 - auc: 0.9856 - tp: 68677.0000 - fp: 7016.0000 - tn: 228220.0000 - fn: 9735.0000 - val_loss: 0.6096 - val_accuracy: 0.8132 - val_precision: 0.8305 - val_recall: 0.8003 - val_auc: 0.9491 - val_tp: 15689.0000 - val_fp: 3202.0000 - val_tn: 55607.0000 - val_fn: 3914.0000 - lr: 1.0000e-04\n", "Epoch 86/100\n", "30/30 [==============================] - 1s 37ms/step - loss: 0.2842 - accuracy: 0.8913 - precision: 0.9074 - recall: 0.8757 - auc: 0.9856 - tp: 68668.0000 - fp: 7006.0000 - tn: 228230.0000 - fn: 9744.0000 - val_loss: 0.6101 - val_accuracy: 0.8132 - val_precision: 0.8307 - val_recall: 0.8000 - val_auc: 0.9491 - val_tp: 15682.0000 - val_fp: 3196.0000 - val_tn: 55613.0000 - val_fn: 3921.0000 - lr: 1.0000e-04\n", "Epoch 87/100\n", "30/30 [==============================] - 1s 36ms/step - loss: 0.2841 - accuracy: 0.8913 - precision: 0.9073 - recall: 0.8758 - auc: 0.9856 - tp: 68677.0000 - fp: 7017.0000 - tn: 228219.0000 - fn: 9735.0000 - val_loss: 0.6107 - val_accuracy: 0.8131 - val_precision: 0.8296 - val_recall: 0.7999 - val_auc: 0.9490 - val_tp: 15681.0000 - val_fp: 3221.0000 - val_tn: 55588.0000 - val_fn: 3922.0000 - lr: 1.0000e-04\n", "Epoch 88/100\n", "30/30 [==============================] - 1s 32ms/step - loss: 0.2840 - accuracy: 0.8913 - precision: 0.9076 - recall: 0.8757 - auc: 0.9857 - tp: 68662.0000 - fp: 6992.0000 - tn: 228244.0000 - fn: 9750.0000 - val_loss: 0.6112 - val_accuracy: 0.8132 - val_precision: 0.8304 - val_recall: 0.7999 - val_auc: 0.9490 - val_tp: 15681.0000 - val_fp: 3203.0000 - val_tn: 55606.0000 - val_fn: 3922.0000 - lr: 1.0000e-04\n", "Epoch 89/100\n", "30/30 [==============================] - 1s 33ms/step - loss: 0.2839 - accuracy: 0.8915 - precision: 0.9074 - recall: 0.8760 - auc: 0.9857 - tp: 68692.0000 - fp: 7012.0000 - tn: 228224.0000 - fn: 9720.0000 - val_loss: 0.6119 - val_accuracy: 0.8130 - val_precision: 0.8299 - val_recall: 0.7998 - val_auc: 0.9490 - val_tp: 15679.0000 - val_fp: 3214.0000 - val_tn: 55595.0000 - val_fn: 3924.0000 - lr: 1.0000e-04\n", "Epoch 90/100\n", "30/30 [==============================] - 1s 32ms/step - loss: 0.2837 - accuracy: 0.8914 - precision: 0.9075 - recall: 0.8761 - auc: 0.9857 - tp: 68696.0000 - fp: 6998.0000 - tn: 228238.0000 - fn: 9716.0000 - val_loss: 0.6123 - val_accuracy: 0.8131 - val_precision: 0.8298 - val_recall: 0.8003 - val_auc: 0.9489 - 
val_tp: 15689.0000 - val_fp: 3217.0000 - val_tn: 55592.0000 - val_fn: 3914.0000 - lr: 1.0000e-04\n", "Epoch 91/100\n", "30/30 [==============================] - 1s 34ms/step - loss: 0.2836 - accuracy: 0.8916 - precision: 0.9076 - recall: 0.8761 - auc: 0.9857 - tp: 68697.0000 - fp: 6994.0000 - tn: 228242.0000 - fn: 9715.0000 - val_loss: 0.6129 - val_accuracy: 0.8133 - val_precision: 0.8301 - val_recall: 0.8003 - val_auc: 0.9489 - val_tp: 15688.0000 - val_fp: 3212.0000 - val_tn: 55597.0000 - val_fn: 3915.0000 - lr: 1.0000e-04\n", "Epoch 92/100\n", "30/30 [==============================] - 1s 34ms/step - loss: 0.2834 - accuracy: 0.8916 - precision: 0.9078 - recall: 0.8760 - auc: 0.9857 - tp: 68690.0000 - fp: 6980.0000 - tn: 228256.0000 - fn: 9722.0000 - val_loss: 0.6134 - val_accuracy: 0.8130 - val_precision: 0.8301 - val_recall: 0.8004 - val_auc: 0.9488 - val_tp: 15691.0000 - val_fp: 3211.0000 - val_tn: 55598.0000 - val_fn: 3912.0000 - lr: 1.0000e-04\n", "Epoch 93/100\n", "30/30 [==============================] - 1s 35ms/step - loss: 0.2833 - accuracy: 0.8915 - precision: 0.9073 - recall: 0.8761 - auc: 0.9857 - tp: 68698.0000 - fp: 7021.0000 - tn: 228215.0000 - fn: 9714.0000 - val_loss: 0.6141 - val_accuracy: 0.8129 - val_precision: 0.8300 - val_recall: 0.8002 - val_auc: 0.9488 - val_tp: 15687.0000 - val_fp: 3214.0000 - val_tn: 55595.0000 - val_fn: 3916.0000 - lr: 1.0000e-04\n", "Epoch 94/100\n", "30/30 [==============================] - 1s 35ms/step - loss: 0.2832 - accuracy: 0.8919 - precision: 0.9074 - recall: 0.8765 - auc: 0.9857 - tp: 68730.0000 - fp: 7017.0000 - tn: 228219.0000 - fn: 9682.0000 - val_loss: 0.6144 - val_accuracy: 0.8132 - val_precision: 0.8301 - val_recall: 0.8005 - val_auc: 0.9488 - val_tp: 15692.0000 - val_fp: 3212.0000 - val_tn: 55597.0000 - val_fn: 3911.0000 - lr: 1.0000e-04\n", "Epoch 95/100\n", "30/30 [==============================] - 1s 38ms/step - loss: 0.2830 - accuracy: 0.8919 - precision: 0.9076 - recall: 0.8766 - auc: 0.9857 - tp: 68739.0000 - fp: 7000.0000 - tn: 228236.0000 - fn: 9673.0000 - val_loss: 0.6150 - val_accuracy: 0.8132 - val_precision: 0.8301 - val_recall: 0.8008 - val_auc: 0.9487 - val_tp: 15698.0000 - val_fp: 3212.0000 - val_tn: 55597.0000 - val_fn: 3905.0000 - lr: 1.0000e-04\n", "Epoch 96/100\n", "30/30 [==============================] - 1s 41ms/step - loss: 0.2829 - accuracy: 0.8917 - precision: 0.9075 - recall: 0.8763 - auc: 0.9857 - tp: 68714.0000 - fp: 7001.0000 - tn: 228235.0000 - fn: 9698.0000 - val_loss: 0.6155 - val_accuracy: 0.8130 - val_precision: 0.8302 - val_recall: 0.8005 - val_auc: 0.9487 - val_tp: 15692.0000 - val_fp: 3209.0000 - val_tn: 55600.0000 - val_fn: 3911.0000 - lr: 1.0000e-04\n", "Epoch 97/100\n", "30/30 [==============================] - 1s 37ms/step - loss: 0.2828 - accuracy: 0.8916 - precision: 0.9074 - recall: 0.8764 - auc: 0.9858 - tp: 68723.0000 - fp: 7014.0000 - tn: 228222.0000 - fn: 9689.0000 - val_loss: 0.6161 - val_accuracy: 0.8132 - val_precision: 0.8299 - val_recall: 0.8007 - val_auc: 0.9487 - val_tp: 15697.0000 - val_fp: 3217.0000 - val_tn: 55592.0000 - val_fn: 3906.0000 - lr: 1.0000e-04\n", "Epoch 98/100\n", "30/30 [==============================] - 1s 33ms/step - loss: 0.2827 - accuracy: 0.8918 - precision: 0.9075 - recall: 0.8768 - auc: 0.9858 - tp: 68749.0000 - fp: 7008.0000 - tn: 228228.0000 - fn: 9663.0000 - val_loss: 0.6167 - val_accuracy: 0.8132 - val_precision: 0.8300 - val_recall: 0.8005 - val_auc: 0.9486 - val_tp: 15693.0000 - val_fp: 3215.0000 - val_tn: 55594.0000 - val_fn: 3910.0000 - lr: 
1.0000e-04\n", "Epoch 99/100\n", "30/30 [==============================] - 1s 36ms/step - loss: 0.2825 - accuracy: 0.8918 - precision: 0.9077 - recall: 0.8764 - auc: 0.9858 - tp: 68721.0000 - fp: 6984.0000 - tn: 228252.0000 - fn: 9691.0000 - val_loss: 0.6173 - val_accuracy: 0.8129 - val_precision: 0.8301 - val_recall: 0.8001 - val_auc: 0.9485 - val_tp: 15685.0000 - val_fp: 3211.0000 - val_tn: 55598.0000 - val_fn: 3918.0000 - lr: 1.0000e-04\n", "Epoch 100/100\n", "30/30 [==============================] - 1s 39ms/step - loss: 0.2824 - accuracy: 0.8920 - precision: 0.9076 - recall: 0.8767 - auc: 0.9858 - tp: 68743.0000 - fp: 6996.0000 - tn: 228240.0000 - fn: 9669.0000 - val_loss: 0.6178 - val_accuracy: 0.8134 - val_precision: 0.8305 - val_recall: 0.8003 - val_auc: 0.9485 - val_tp: 15689.0000 - val_fp: 3203.0000 - val_tn: 55606.0000 - val_fn: 3914.0000 - lr: 1.0000e-04\n" ] } ], "source": [ "history = model_2.fit(padded_train, y_train,\n", " steps_per_epoch = 30,\n", " epochs = 100,\n", " validation_split=0.2,\n", " verbose = 1,\n", " validation_steps = 50,\n", " callbacks=[checkpoint(\"2_all_epochs\"), reduce_lr], \n", " )" ] }, { "cell_type": "code", "execution_count": 53, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Loss: 0.6283017992973328\n", "Accuracy: 0.8093372583389282\n", "Precision: 0.8246207237243652\n", "Recall: 0.7963189482688904\n", "AUC: 0.9471717476844788\n", "True Positives: 19513.0\n", "False Positives: 4150.0\n", "True Negatives: 69362.0\n", "False Negatives: 4991.0\n" ] } ], "source": [ "score = model_2.evaluate(padded_test, y_test, verbose=0)\n", "\n", "print(\"Loss:\", score[0])\n", "print(\"Accuracy:\", score[1])\n", "print(\"Precision:\", score[2])\n", "print(\"Recall:\", score[3])\n", "print(\"AUC:\", score[4])\n", "print(\"True Positives:\", score[5])\n", "print(\"False Positives:\", score[6])\n", "print(\"True Negatives:\", score[7])\n", "print(\"False Negatives:\", score[8])" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "

Fine-tuning of model 2
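
The next cell tunes model 2 with Optuna, searching over `num_filters`, `kernel_size` and the learning rate (the warning in its output shows a deprecated `suggest_loguniform('learning_rate', 1e-4, 1e-2)` call). Since the objective function itself is not visible in this excerpt, the snippet below is only a minimal sketch of how such an objective can be written with the non-deprecated `suggest_float(..., log=True)` API. The `build_model_2` helper, the embedding size and the integer search ranges are illustrative assumptions; `padded_train`, `y_train`, `padded_test` and `y_test` are the arrays prepared earlier in the notebook.

```python
import optuna
from keras.models import Sequential
from keras.layers import Embedding, Conv1D, GlobalMaxPooling1D, Dense
from keras.optimizers import Adam

def build_model_2(num_filters, kernel_size, learning_rate,
                  vocab_size=10000, n_classes=4):
    # vocab_size is a placeholder -- it should match the Tokenizer's num_words;
    # n_classes = 4 subjects (Biology, Chemistry, Maths, Physics).
    model = Sequential([
        Embedding(vocab_size, 64),
        Conv1D(num_filters, kernel_size, activation="relu"),
        GlobalMaxPooling1D(),
        Dense(n_classes, activation="softmax"),
    ])
    model.compile(loss="categorical_crossentropy",
                  optimizer=Adam(learning_rate=learning_rate),
                  metrics=["accuracy"])
    return model

def objective(trial):
    # Only the learning-rate range (1e-4 .. 1e-2) comes from the logged
    # warning; the integer ranges below are illustrative assumptions.
    num_filters = trial.suggest_int("num_filters", 16, 64)
    kernel_size = trial.suggest_int("kernel_size", 3, 7)
    learning_rate = trial.suggest_float("learning_rate", 1e-4, 1e-2, log=True)

    model = build_model_2(num_filters, kernel_size, learning_rate)
    model.fit(padded_train, y_train, epochs=10,
              validation_split=0.2, verbose=0)
    # Return the metric Optuna should maximise; evaluate() returns
    # [loss, accuracy] in the order the metrics were compiled above.
    return model.evaluate(padded_test, y_test, verbose=0)[1]

study = optuna.create_study(direction="maximize")
study.optimize(objective, n_trials=10)
print(study.best_params)
```

The actual tuning cell that follows logs precision, recall, AUC and the confusion-matrix counts per epoch as well; the sketch keeps only accuracy to stay short, which is why it indexes `evaluate()` at position 1.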

" ] }, { "cell_type": "code", "execution_count": 64, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "[I 2024-06-08 13:54:59,853] A new study created in memory with name: no-name-2054c194-0d55-4969-91e6-f0f12af54a54\n", "C:\\Users\\Michał\\AppData\\Local\\Temp\\ipykernel_33252\\265862631.py:5: FutureWarning: suggest_loguniform has been deprecated in v3.0.0. This feature will be removed in v6.0.0. See https://github.com/optuna/optuna/releases/tag/v3.0.0. Use suggest_float(..., log=True) instead.\n", " learning_rate = trial.suggest_loguniform('learning_rate', 1e-4, 1e-2)\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 1/100\n", "30/30 [==============================] - 3s 50ms/step - loss: 1.3636 - accuracy: 0.4293 - precision: 0.0000e+00 - recall: 0.0000e+00 - auc: 0.6770 - tp: 0.0000e+00 - fp: 0.0000e+00 - tn: 235236.0000 - fn: 78412.0000 - val_loss: 1.3366 - val_accuracy: 0.5192 - val_precision: 0.0000e+00 - val_recall: 0.0000e+00 - val_auc: 0.7490 - val_tp: 0.0000e+00 - val_fp: 0.0000e+00 - val_tn: 58809.0000 - val_fn: 19603.0000 - lr: 5.8385e-04\n", "Epoch 2/100\n", "30/30 [==============================] - 1s 22ms/step - loss: 1.2935 - accuracy: 0.5733 - precision: 0.0000e+00 - recall: 0.0000e+00 - auc: 0.7779 - tp: 0.0000e+00 - fp: 0.0000e+00 - tn: 235236.0000 - fn: 78412.0000 - val_loss: 1.2388 - val_accuracy: 0.6064 - val_precision: 0.0000e+00 - val_recall: 0.0000e+00 - val_auc: 0.8067 - val_tp: 0.0000e+00 - val_fp: 0.0000e+00 - val_tn: 58809.0000 - val_fn: 19603.0000 - lr: 5.8385e-04\n", "Epoch 3/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 1.1618 - accuracy: 0.6429 - precision: 0.8046 - recall: 0.0279 - auc: 0.8282 - tp: 2190.0000 - fp: 532.0000 - tn: 234704.0000 - fn: 76222.0000 - val_loss: 1.0777 - val_accuracy: 0.6686 - val_precision: 0.8556 - val_recall: 0.1137 - val_auc: 0.8483 - val_tp: 2228.0000 - val_fp: 376.0000 - val_tn: 58433.0000 - val_fn: 17375.0000 - lr: 5.8385e-04\n", "Epoch 4/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.9811 - accuracy: 0.6952 - precision: 0.8830 - recall: 0.2839 - auc: 0.8698 - tp: 22263.0000 - fp: 2950.0000 - tn: 232286.0000 - fn: 56149.0000 - val_loss: 0.8973 - val_accuracy: 0.7031 - val_precision: 0.8776 - val_recall: 0.4413 - val_auc: 0.8845 - val_tp: 8650.0000 - val_fp: 1206.0000 - val_tn: 57603.0000 - val_fn: 10953.0000 - lr: 5.8385e-04\n", "Epoch 5/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.8165 - accuracy: 0.7212 - precision: 0.8668 - recall: 0.5415 - auc: 0.9023 - tp: 42458.0000 - fp: 6524.0000 - tn: 228712.0000 - fn: 35954.0000 - val_loss: 0.7649 - val_accuracy: 0.7247 - val_precision: 0.8524 - val_recall: 0.5987 - val_auc: 0.9109 - val_tp: 11737.0000 - val_fp: 2033.0000 - val_tn: 56776.0000 - val_fn: 7866.0000 - lr: 5.8385e-04\n", "Epoch 6/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.7070 - accuracy: 0.7398 - precision: 0.8521 - recall: 0.6408 - auc: 0.9237 - tp: 50249.0000 - fp: 8720.0000 - tn: 226516.0000 - fn: 28163.0000 - val_loss: 0.6861 - val_accuracy: 0.7409 - val_precision: 0.8417 - val_recall: 0.6552 - val_auc: 0.9260 - val_tp: 12844.0000 - val_fp: 2416.0000 - val_tn: 56393.0000 - val_fn: 6759.0000 - lr: 5.8385e-04\n", "Epoch 7/100\n", "30/30 [==============================] - 1s 22ms/step - loss: 0.6396 - accuracy: 0.7590 - precision: 0.8509 - recall: 0.6783 - auc: 0.9362 - tp: 53189.0000 - fp: 9320.0000 - tn: 225916.0000 - fn: 25223.0000 - val_loss: 0.6371 - 
val_accuracy: 0.7571 - val_precision: 0.8406 - val_recall: 0.6789 - val_auc: 0.9348 - val_tp: 13309.0000 - val_fp: 2523.0000 - val_tn: 56286.0000 - val_fn: 6294.0000 - lr: 5.8385e-04\n", "Epoch 8/100\n", "30/30 [==============================] - 1s 30ms/step - loss: 0.5939 - accuracy: 0.7798 - precision: 0.8534 - recall: 0.6967 - auc: 0.9441 - tp: 54633.0000 - fp: 9385.0000 - tn: 225851.0000 - fn: 23779.0000 - val_loss: 0.6025 - val_accuracy: 0.7744 - val_precision: 0.8446 - val_recall: 0.6905 - val_auc: 0.9408 - val_tp: 13535.0000 - val_fp: 2491.0000 - val_tn: 56318.0000 - val_fn: 6068.0000 - lr: 5.8385e-04\n", "Epoch 9/100\n", "30/30 [==============================] - 1s 22ms/step - loss: 0.5587 - accuracy: 0.7991 - precision: 0.8592 - recall: 0.7070 - auc: 0.9499 - tp: 55439.0000 - fp: 9088.0000 - tn: 226148.0000 - fn: 22973.0000 - val_loss: 0.5757 - val_accuracy: 0.7881 - val_precision: 0.8508 - val_recall: 0.6991 - val_auc: 0.9452 - val_tp: 13705.0000 - val_fp: 2404.0000 - val_tn: 56405.0000 - val_fn: 5898.0000 - lr: 5.8385e-04\n", "Epoch 10/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.5295 - accuracy: 0.8121 - precision: 0.8657 - recall: 0.7255 - auc: 0.9545 - tp: 56886.0000 - fp: 8828.0000 - tn: 226408.0000 - fn: 21526.0000 - val_loss: 0.5537 - val_accuracy: 0.7978 - val_precision: 0.8530 - val_recall: 0.7263 - val_auc: 0.9488 - val_tp: 14238.0000 - val_fp: 2453.0000 - val_tn: 56356.0000 - val_fn: 5365.0000 - lr: 5.8385e-04\n", "Epoch 11/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.5049 - accuracy: 0.8224 - precision: 0.8693 - recall: 0.7534 - auc: 0.9582 - tp: 59073.0000 - fp: 8884.0000 - tn: 226352.0000 - fn: 19339.0000 - val_loss: 0.5360 - val_accuracy: 0.8058 - val_precision: 0.8549 - val_recall: 0.7467 - val_auc: 0.9516 - val_tp: 14637.0000 - val_fp: 2484.0000 - val_tn: 56325.0000 - val_fn: 4966.0000 - lr: 5.8385e-04\n", "Epoch 12/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.4840 - accuracy: 0.8288 - precision: 0.8712 - recall: 0.7749 - auc: 0.9612 - tp: 60761.0000 - fp: 8982.0000 - tn: 226254.0000 - fn: 17651.0000 - val_loss: 0.5220 - val_accuracy: 0.8099 - val_precision: 0.8562 - val_recall: 0.7591 - val_auc: 0.9537 - val_tp: 14880.0000 - val_fp: 2500.0000 - val_tn: 56309.0000 - val_fn: 4723.0000 - lr: 5.8385e-04\n", "Epoch 13/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.4669 - accuracy: 0.8337 - precision: 0.8727 - recall: 0.7866 - auc: 0.9636 - tp: 61675.0000 - fp: 8995.0000 - tn: 226241.0000 - fn: 16737.0000 - val_loss: 0.5112 - val_accuracy: 0.8128 - val_precision: 0.8535 - val_recall: 0.7701 - val_auc: 0.9554 - val_tp: 15097.0000 - val_fp: 2591.0000 - val_tn: 56218.0000 - val_fn: 4506.0000 - lr: 5.8385e-04\n", "Epoch 14/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.4530 - accuracy: 0.8369 - precision: 0.8739 - recall: 0.7957 - auc: 0.9655 - tp: 62391.0000 - fp: 8999.0000 - tn: 226237.0000 - fn: 16021.0000 - val_loss: 0.5031 - val_accuracy: 0.8147 - val_precision: 0.8531 - val_recall: 0.7750 - val_auc: 0.9566 - val_tp: 15192.0000 - val_fp: 2615.0000 - val_tn: 56194.0000 - val_fn: 4411.0000 - lr: 5.8385e-04\n", "Epoch 15/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.4415 - accuracy: 0.8405 - precision: 0.8740 - recall: 0.8029 - auc: 0.9670 - tp: 62959.0000 - fp: 9078.0000 - tn: 226158.0000 - fn: 15453.0000 - val_loss: 0.4969 - val_accuracy: 0.8166 - val_precision: 0.8536 - val_recall: 0.7782 - val_auc: 0.9576 
- val_tp: 15256.0000 - val_fp: 2617.0000 - val_tn: 56192.0000 - val_fn: 4347.0000 - lr: 5.8385e-04\n", "Epoch 16/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.4319 - accuracy: 0.8429 - precision: 0.8755 - recall: 0.8067 - auc: 0.9682 - tp: 63256.0000 - fp: 8994.0000 - tn: 226242.0000 - fn: 15156.0000 - val_loss: 0.4923 - val_accuracy: 0.8172 - val_precision: 0.8547 - val_recall: 0.7778 - val_auc: 0.9582 - val_tp: 15247.0000 - val_fp: 2591.0000 - val_tn: 56218.0000 - val_fn: 4356.0000 - lr: 5.8385e-04\n", "Epoch 17/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.4237 - accuracy: 0.8455 - precision: 0.8756 - recall: 0.8129 - auc: 0.9693 - tp: 63740.0000 - fp: 9052.0000 - tn: 226184.0000 - fn: 14672.0000 - val_loss: 0.4889 - val_accuracy: 0.8191 - val_precision: 0.8527 - val_recall: 0.7842 - val_auc: 0.9588 - val_tp: 15372.0000 - val_fp: 2655.0000 - val_tn: 56154.0000 - val_fn: 4231.0000 - lr: 5.8385e-04\n", "Epoch 18/100\n", "30/30 [==============================] - 1s 22ms/step - loss: 0.4167 - accuracy: 0.8474 - precision: 0.8775 - recall: 0.8144 - auc: 0.9703 - tp: 63861.0000 - fp: 8918.0000 - tn: 226318.0000 - fn: 14551.0000 - val_loss: 0.4862 - val_accuracy: 0.8198 - val_precision: 0.8546 - val_recall: 0.7842 - val_auc: 0.9592 - val_tp: 15373.0000 - val_fp: 2616.0000 - val_tn: 56193.0000 - val_fn: 4230.0000 - lr: 5.8385e-04\n", "Epoch 19/100\n", "30/30 [==============================] - 1s 22ms/step - loss: 0.4106 - accuracy: 0.8489 - precision: 0.8783 - recall: 0.8176 - auc: 0.9710 - tp: 64111.0000 - fp: 8881.0000 - tn: 226355.0000 - fn: 14301.0000 - val_loss: 0.4839 - val_accuracy: 0.8214 - val_precision: 0.8552 - val_recall: 0.7844 - val_auc: 0.9595 - val_tp: 15376.0000 - val_fp: 2603.0000 - val_tn: 56206.0000 - val_fn: 4227.0000 - lr: 5.8385e-04\n", "Epoch 20/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.4049 - accuracy: 0.8505 - precision: 0.8781 - recall: 0.8218 - auc: 0.9718 - tp: 64442.0000 - fp: 8947.0000 - tn: 226289.0000 - fn: 13970.0000 - val_loss: 0.4828 - val_accuracy: 0.8225 - val_precision: 0.8527 - val_recall: 0.7927 - val_auc: 0.9598 - val_tp: 15540.0000 - val_fp: 2684.0000 - val_tn: 56125.0000 - val_fn: 4063.0000 - lr: 5.8385e-04\n", "Epoch 21/100\n", "30/30 [==============================] - 1s 25ms/step - loss: 0.3997 - accuracy: 0.8522 - precision: 0.8790 - recall: 0.8252 - auc: 0.9724 - tp: 64703.0000 - fp: 8905.0000 - tn: 226331.0000 - fn: 13709.0000 - val_loss: 0.4812 - val_accuracy: 0.8228 - val_precision: 0.8514 - val_recall: 0.7935 - val_auc: 0.9601 - val_tp: 15555.0000 - val_fp: 2715.0000 - val_tn: 56094.0000 - val_fn: 4048.0000 - lr: 5.8385e-04\n", "Epoch 22/100\n", "30/30 [==============================] - 1s 22ms/step - loss: 0.3949 - accuracy: 0.8539 - precision: 0.8798 - recall: 0.8273 - auc: 0.9730 - tp: 64867.0000 - fp: 8863.0000 - tn: 226373.0000 - fn: 13545.0000 - val_loss: 0.4810 - val_accuracy: 0.8228 - val_precision: 0.8520 - val_recall: 0.7950 - val_auc: 0.9601 - val_tp: 15585.0000 - val_fp: 2708.0000 - val_tn: 56101.0000 - val_fn: 4018.0000 - lr: 5.8385e-04\n", "Epoch 23/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3905 - accuracy: 0.8550 - precision: 0.8811 - recall: 0.8286 - auc: 0.9736 - tp: 64976.0000 - fp: 8772.0000 - tn: 226464.0000 - fn: 13436.0000 - val_loss: 0.4805 - val_accuracy: 0.8235 - val_precision: 0.8520 - val_recall: 0.7946 - val_auc: 0.9602 - val_tp: 15577.0000 - val_fp: 2705.0000 - val_tn: 56104.0000 - val_fn: 4026.0000 
- lr: 5.8385e-04\n", "Epoch 24/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3862 - accuracy: 0.8565 - precision: 0.8819 - recall: 0.8309 - auc: 0.9741 - tp: 65153.0000 - fp: 8727.0000 - tn: 226509.0000 - fn: 13259.0000 - val_loss: 0.4800 - val_accuracy: 0.8234 - val_precision: 0.8517 - val_recall: 0.7952 - val_auc: 0.9603 - val_tp: 15588.0000 - val_fp: 2715.0000 - val_tn: 56094.0000 - val_fn: 4015.0000 - lr: 5.8385e-04\n", "Epoch 25/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3823 - accuracy: 0.8579 - precision: 0.8838 - recall: 0.8321 - auc: 0.9747 - tp: 65250.0000 - fp: 8583.0000 - tn: 226653.0000 - fn: 13162.0000 - val_loss: 0.4802 - val_accuracy: 0.8231 - val_precision: 0.8504 - val_recall: 0.7977 - val_auc: 0.9604 - val_tp: 15638.0000 - val_fp: 2751.0000 - val_tn: 56058.0000 - val_fn: 3965.0000 - lr: 5.8385e-04\n", "Epoch 26/100\n", "30/30 [==============================] - 1s 18ms/step - loss: 0.3784 - accuracy: 0.8594 - precision: 0.8839 - recall: 0.8344 - auc: 0.9751 - tp: 65430.0000 - fp: 8595.0000 - tn: 226641.0000 - fn: 12982.0000 - val_loss: 0.4804 - val_accuracy: 0.8235 - val_precision: 0.8514 - val_recall: 0.7975 - val_auc: 0.9604 - val_tp: 15633.0000 - val_fp: 2729.0000 - val_tn: 56080.0000 - val_fn: 3970.0000 - lr: 5.8385e-04\n", "Epoch 27/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3748 - accuracy: 0.8604 - precision: 0.8847 - recall: 0.8359 - auc: 0.9756 - tp: 65548.0000 - fp: 8541.0000 - tn: 226695.0000 - fn: 12864.0000 - val_loss: 0.4812 - val_accuracy: 0.8242 - val_precision: 0.8510 - val_recall: 0.7975 - val_auc: 0.9603 - val_tp: 15634.0000 - val_fp: 2738.0000 - val_tn: 56071.0000 - val_fn: 3969.0000 - lr: 5.8385e-04\n", "Epoch 28/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3702 - accuracy: 0.8622 - precision: 0.8863 - recall: 0.8381 - auc: 0.9762 - tp: 65720.0000 - fp: 8430.0000 - tn: 226806.0000 - fn: 12692.0000 - val_loss: 0.4811 - val_accuracy: 0.8238 - val_precision: 0.8510 - val_recall: 0.7972 - val_auc: 0.9604 - val_tp: 15627.0000 - val_fp: 2737.0000 - val_tn: 56072.0000 - val_fn: 3976.0000 - lr: 1.1677e-04\n", "Epoch 29/100\n", "30/30 [==============================] - 1s 23ms/step - loss: 0.3694 - accuracy: 0.8625 - precision: 0.8864 - recall: 0.8386 - auc: 0.9763 - tp: 65760.0000 - fp: 8427.0000 - tn: 226809.0000 - fn: 12652.0000 - val_loss: 0.4812 - val_accuracy: 0.8238 - val_precision: 0.8509 - val_recall: 0.7974 - val_auc: 0.9603 - val_tp: 15632.0000 - val_fp: 2740.0000 - val_tn: 56069.0000 - val_fn: 3971.0000 - lr: 1.1677e-04\n", "Epoch 30/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3687 - accuracy: 0.8628 - precision: 0.8866 - recall: 0.8390 - auc: 0.9764 - tp: 65787.0000 - fp: 8412.0000 - tn: 226824.0000 - fn: 12625.0000 - val_loss: 0.4813 - val_accuracy: 0.8240 - val_precision: 0.8505 - val_recall: 0.7971 - val_auc: 0.9603 - val_tp: 15626.0000 - val_fp: 2746.0000 - val_tn: 56063.0000 - val_fn: 3977.0000 - lr: 1.1677e-04\n", "Epoch 31/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3680 - accuracy: 0.8632 - precision: 0.8869 - recall: 0.8392 - auc: 0.9765 - tp: 65805.0000 - fp: 8393.0000 - tn: 226843.0000 - fn: 12607.0000 - val_loss: 0.4814 - val_accuracy: 0.8236 - val_precision: 0.8505 - val_recall: 0.7975 - val_auc: 0.9603 - val_tp: 15634.0000 - val_fp: 2749.0000 - val_tn: 56060.0000 - val_fn: 3969.0000 - lr: 1.0000e-04\n", "Epoch 32/100\n", "30/30 [==============================] - 
1s 19ms/step - loss: 0.3674 - accuracy: 0.8633 - precision: 0.8872 - recall: 0.8397 - auc: 0.9765 - tp: 65843.0000 - fp: 8372.0000 - tn: 226864.0000 - fn: 12569.0000 - val_loss: 0.4816 - val_accuracy: 0.8240 - val_precision: 0.8502 - val_recall: 0.7972 - val_auc: 0.9603 - val_tp: 15628.0000 - val_fp: 2754.0000 - val_tn: 56055.0000 - val_fn: 3975.0000 - lr: 1.0000e-04\n", "Epoch 33/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3668 - accuracy: 0.8634 - precision: 0.8869 - recall: 0.8398 - auc: 0.9766 - tp: 65853.0000 - fp: 8398.0000 - tn: 226838.0000 - fn: 12559.0000 - val_loss: 0.4817 - val_accuracy: 0.8241 - val_precision: 0.8502 - val_recall: 0.7971 - val_auc: 0.9603 - val_tp: 15626.0000 - val_fp: 2754.0000 - val_tn: 56055.0000 - val_fn: 3977.0000 - lr: 1.0000e-04\n", "Epoch 34/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3662 - accuracy: 0.8637 - precision: 0.8873 - recall: 0.8400 - auc: 0.9767 - tp: 65867.0000 - fp: 8362.0000 - tn: 226874.0000 - fn: 12545.0000 - val_loss: 0.4819 - val_accuracy: 0.8240 - val_precision: 0.8501 - val_recall: 0.7969 - val_auc: 0.9603 - val_tp: 15621.0000 - val_fp: 2755.0000 - val_tn: 56054.0000 - val_fn: 3982.0000 - lr: 1.0000e-04\n", "Epoch 35/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3657 - accuracy: 0.8638 - precision: 0.8874 - recall: 0.8402 - auc: 0.9768 - tp: 65885.0000 - fp: 8359.0000 - tn: 226877.0000 - fn: 12527.0000 - val_loss: 0.4821 - val_accuracy: 0.8238 - val_precision: 0.8499 - val_recall: 0.7972 - val_auc: 0.9603 - val_tp: 15628.0000 - val_fp: 2759.0000 - val_tn: 56050.0000 - val_fn: 3975.0000 - lr: 1.0000e-04\n", "Epoch 36/100\n", "30/30 [==============================] - 1s 23ms/step - loss: 0.3651 - accuracy: 0.8639 - precision: 0.8877 - recall: 0.8403 - auc: 0.9768 - tp: 65892.0000 - fp: 8334.0000 - tn: 226902.0000 - fn: 12520.0000 - val_loss: 0.4822 - val_accuracy: 0.8235 - val_precision: 0.8496 - val_recall: 0.7970 - val_auc: 0.9603 - val_tp: 15624.0000 - val_fp: 2766.0000 - val_tn: 56043.0000 - val_fn: 3979.0000 - lr: 1.0000e-04\n", "Epoch 37/100\n", "30/30 [==============================] - 1s 23ms/step - loss: 0.3645 - accuracy: 0.8642 - precision: 0.8876 - recall: 0.8408 - auc: 0.9769 - tp: 65926.0000 - fp: 8351.0000 - tn: 226885.0000 - fn: 12486.0000 - val_loss: 0.4824 - val_accuracy: 0.8239 - val_precision: 0.8500 - val_recall: 0.7972 - val_auc: 0.9603 - val_tp: 15628.0000 - val_fp: 2757.0000 - val_tn: 56052.0000 - val_fn: 3975.0000 - lr: 1.0000e-04\n", "Epoch 38/100\n", "30/30 [==============================] - 1s 25ms/step - loss: 0.3640 - accuracy: 0.8645 - precision: 0.8879 - recall: 0.8407 - auc: 0.9770 - tp: 65923.0000 - fp: 8323.0000 - tn: 226913.0000 - fn: 12489.0000 - val_loss: 0.4825 - val_accuracy: 0.8238 - val_precision: 0.8500 - val_recall: 0.7972 - val_auc: 0.9603 - val_tp: 15627.0000 - val_fp: 2758.0000 - val_tn: 56051.0000 - val_fn: 3976.0000 - lr: 1.0000e-04\n", "Epoch 39/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3634 - accuracy: 0.8647 - precision: 0.8881 - recall: 0.8411 - auc: 0.9770 - tp: 65949.0000 - fp: 8311.0000 - tn: 226925.0000 - fn: 12463.0000 - val_loss: 0.4827 - val_accuracy: 0.8236 - val_precision: 0.8495 - val_recall: 0.7974 - val_auc: 0.9603 - val_tp: 15632.0000 - val_fp: 2770.0000 - val_tn: 56039.0000 - val_fn: 3971.0000 - lr: 1.0000e-04\n", "Epoch 40/100\n", "30/30 [==============================] - 1s 22ms/step - loss: 0.3628 - accuracy: 0.8648 - precision: 0.8881 - recall: 
0.8413 - auc: 0.9771 - tp: 65969.0000 - fp: 8314.0000 - tn: 226922.0000 - fn: 12443.0000 - val_loss: 0.4829 - val_accuracy: 0.8236 - val_precision: 0.8497 - val_recall: 0.7976 - val_auc: 0.9602 - val_tp: 15636.0000 - val_fp: 2765.0000 - val_tn: 56044.0000 - val_fn: 3967.0000 - lr: 1.0000e-04\n", "Epoch 41/100\n", "30/30 [==============================] - 1s 26ms/step - loss: 0.3623 - accuracy: 0.8650 - precision: 0.8884 - recall: 0.8416 - auc: 0.9772 - tp: 65992.0000 - fp: 8291.0000 - tn: 226945.0000 - fn: 12420.0000 - val_loss: 0.4832 - val_accuracy: 0.8238 - val_precision: 0.8497 - val_recall: 0.7980 - val_auc: 0.9602 - val_tp: 15643.0000 - val_fp: 2768.0000 - val_tn: 56041.0000 - val_fn: 3960.0000 - lr: 1.0000e-04\n", "Epoch 42/100\n", "30/30 [==============================] - 1s 22ms/step - loss: 0.3618 - accuracy: 0.8652 - precision: 0.8885 - recall: 0.8416 - auc: 0.9772 - tp: 65988.0000 - fp: 8281.0000 - tn: 226955.0000 - fn: 12424.0000 - val_loss: 0.4833 - val_accuracy: 0.8239 - val_precision: 0.8496 - val_recall: 0.7981 - val_auc: 0.9602 - val_tp: 15646.0000 - val_fp: 2769.0000 - val_tn: 56040.0000 - val_fn: 3957.0000 - lr: 1.0000e-04\n", "Epoch 43/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3613 - accuracy: 0.8652 - precision: 0.8885 - recall: 0.8423 - auc: 0.9773 - tp: 66044.0000 - fp: 8287.0000 - tn: 226949.0000 - fn: 12368.0000 - val_loss: 0.4835 - val_accuracy: 0.8238 - val_precision: 0.8493 - val_recall: 0.7981 - val_auc: 0.9602 - val_tp: 15646.0000 - val_fp: 2777.0000 - val_tn: 56032.0000 - val_fn: 3957.0000 - lr: 1.0000e-04\n", "Epoch 44/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3607 - accuracy: 0.8657 - precision: 0.8890 - recall: 0.8422 - auc: 0.9773 - tp: 66036.0000 - fp: 8246.0000 - tn: 226990.0000 - fn: 12376.0000 - val_loss: 0.4837 - val_accuracy: 0.8241 - val_precision: 0.8493 - val_recall: 0.7980 - val_auc: 0.9601 - val_tp: 15644.0000 - val_fp: 2775.0000 - val_tn: 56034.0000 - val_fn: 3959.0000 - lr: 1.0000e-04\n", "Epoch 45/100\n", "30/30 [==============================] - 1s 22ms/step - loss: 0.3602 - accuracy: 0.8657 - precision: 0.8889 - recall: 0.8423 - auc: 0.9774 - tp: 66048.0000 - fp: 8251.0000 - tn: 226985.0000 - fn: 12364.0000 - val_loss: 0.4839 - val_accuracy: 0.8241 - val_precision: 0.8495 - val_recall: 0.7978 - val_auc: 0.9602 - val_tp: 15639.0000 - val_fp: 2770.0000 - val_tn: 56039.0000 - val_fn: 3964.0000 - lr: 1.0000e-04\n", "Epoch 46/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3596 - accuracy: 0.8657 - precision: 0.8890 - recall: 0.8427 - auc: 0.9775 - tp: 66077.0000 - fp: 8248.0000 - tn: 226988.0000 - fn: 12335.0000 - val_loss: 0.4842 - val_accuracy: 0.8236 - val_precision: 0.8494 - val_recall: 0.7980 - val_auc: 0.9601 - val_tp: 15643.0000 - val_fp: 2774.0000 - val_tn: 56035.0000 - val_fn: 3960.0000 - lr: 1.0000e-04\n", "Epoch 47/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3591 - accuracy: 0.8660 - precision: 0.8894 - recall: 0.8429 - auc: 0.9775 - tp: 66090.0000 - fp: 8217.0000 - tn: 227019.0000 - fn: 12322.0000 - val_loss: 0.4844 - val_accuracy: 0.8238 - val_precision: 0.8492 - val_recall: 0.7979 - val_auc: 0.9601 - val_tp: 15641.0000 - val_fp: 2777.0000 - val_tn: 56032.0000 - val_fn: 3962.0000 - lr: 1.0000e-04\n", "Epoch 48/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3586 - accuracy: 0.8662 - precision: 0.8897 - recall: 0.8430 - auc: 0.9776 - tp: 66104.0000 - fp: 8197.0000 - tn: 227039.0000 - fn: 
12308.0000 - val_loss: 0.4847 - val_accuracy: 0.8238 - val_precision: 0.8493 - val_recall: 0.7983 - val_auc: 0.9601 - val_tp: 15650.0000 - val_fp: 2778.0000 - val_tn: 56031.0000 - val_fn: 3953.0000 - lr: 1.0000e-04\n", "Epoch 49/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3580 - accuracy: 0.8663 - precision: 0.8896 - recall: 0.8432 - auc: 0.9777 - tp: 66120.0000 - fp: 8206.0000 - tn: 227030.0000 - fn: 12292.0000 - val_loss: 0.4849 - val_accuracy: 0.8236 - val_precision: 0.8495 - val_recall: 0.7983 - val_auc: 0.9601 - val_tp: 15650.0000 - val_fp: 2773.0000 - val_tn: 56036.0000 - val_fn: 3953.0000 - lr: 1.0000e-04\n", "Epoch 50/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3575 - accuracy: 0.8666 - precision: 0.8897 - recall: 0.8437 - auc: 0.9777 - tp: 66159.0000 - fp: 8206.0000 - tn: 227030.0000 - fn: 12253.0000 - val_loss: 0.4851 - val_accuracy: 0.8239 - val_precision: 0.8494 - val_recall: 0.7982 - val_auc: 0.9601 - val_tp: 15648.0000 - val_fp: 2775.0000 - val_tn: 56034.0000 - val_fn: 3955.0000 - lr: 1.0000e-04\n", "Epoch 51/100\n", "30/30 [==============================] - 1s 23ms/step - loss: 0.3570 - accuracy: 0.8667 - precision: 0.8899 - recall: 0.8439 - auc: 0.9778 - tp: 66175.0000 - fp: 8184.0000 - tn: 227052.0000 - fn: 12237.0000 - val_loss: 0.4854 - val_accuracy: 0.8238 - val_precision: 0.8492 - val_recall: 0.7986 - val_auc: 0.9600 - val_tp: 15654.0000 - val_fp: 2779.0000 - val_tn: 56030.0000 - val_fn: 3949.0000 - lr: 1.0000e-04\n", "Epoch 52/100\n", "30/30 [==============================] - 1s 30ms/step - loss: 0.3565 - accuracy: 0.8670 - precision: 0.8898 - recall: 0.8441 - auc: 0.9778 - tp: 66190.0000 - fp: 8195.0000 - tn: 227041.0000 - fn: 12222.0000 - val_loss: 0.4856 - val_accuracy: 0.8236 - val_precision: 0.8491 - val_recall: 0.7988 - val_auc: 0.9600 - val_tp: 15658.0000 - val_fp: 2783.0000 - val_tn: 56026.0000 - val_fn: 3945.0000 - lr: 1.0000e-04\n", "Epoch 53/100\n", "30/30 [==============================] - 1s 24ms/step - loss: 0.3560 - accuracy: 0.8672 - precision: 0.8899 - recall: 0.8443 - auc: 0.9779 - tp: 66205.0000 - fp: 8189.0000 - tn: 227047.0000 - fn: 12207.0000 - val_loss: 0.4858 - val_accuracy: 0.8236 - val_precision: 0.8488 - val_recall: 0.7990 - val_auc: 0.9600 - val_tp: 15662.0000 - val_fp: 2789.0000 - val_tn: 56020.0000 - val_fn: 3941.0000 - lr: 1.0000e-04\n", "Epoch 54/100\n", "30/30 [==============================] - 1s 25ms/step - loss: 0.3555 - accuracy: 0.8674 - precision: 0.8902 - recall: 0.8445 - auc: 0.9780 - tp: 66217.0000 - fp: 8165.0000 - tn: 227071.0000 - fn: 12195.0000 - val_loss: 0.4860 - val_accuracy: 0.8240 - val_precision: 0.8489 - val_recall: 0.7992 - val_auc: 0.9600 - val_tp: 15667.0000 - val_fp: 2788.0000 - val_tn: 56021.0000 - val_fn: 3936.0000 - lr: 1.0000e-04\n", "Epoch 55/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3550 - accuracy: 0.8675 - precision: 0.8902 - recall: 0.8448 - auc: 0.9780 - tp: 66244.0000 - fp: 8167.0000 - tn: 227069.0000 - fn: 12168.0000 - val_loss: 0.4863 - val_accuracy: 0.8239 - val_precision: 0.8486 - val_recall: 0.7992 - val_auc: 0.9599 - val_tp: 15666.0000 - val_fp: 2794.0000 - val_tn: 56015.0000 - val_fn: 3937.0000 - lr: 1.0000e-04\n", "Epoch 56/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3545 - accuracy: 0.8677 - precision: 0.8902 - recall: 0.8448 - auc: 0.9781 - tp: 66239.0000 - fp: 8171.0000 - tn: 227065.0000 - fn: 12173.0000 - val_loss: 0.4866 - val_accuracy: 0.8238 - val_precision: 0.8487 - 
val_recall: 0.7993 - val_auc: 0.9599 - val_tp: 15668.0000 - val_fp: 2794.0000 - val_tn: 56015.0000 - val_fn: 3935.0000 - lr: 1.0000e-04\n", "Epoch 57/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3540 - accuracy: 0.8679 - precision: 0.8904 - recall: 0.8451 - auc: 0.9781 - tp: 66266.0000 - fp: 8156.0000 - tn: 227080.0000 - fn: 12146.0000 - val_loss: 0.4869 - val_accuracy: 0.8234 - val_precision: 0.8484 - val_recall: 0.7991 - val_auc: 0.9599 - val_tp: 15665.0000 - val_fp: 2799.0000 - val_tn: 56010.0000 - val_fn: 3938.0000 - lr: 1.0000e-04\n", "Epoch 58/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3535 - accuracy: 0.8683 - precision: 0.8904 - recall: 0.8455 - auc: 0.9782 - tp: 66297.0000 - fp: 8159.0000 - tn: 227077.0000 - fn: 12115.0000 - val_loss: 0.4872 - val_accuracy: 0.8235 - val_precision: 0.8481 - val_recall: 0.7992 - val_auc: 0.9598 - val_tp: 15667.0000 - val_fp: 2807.0000 - val_tn: 56002.0000 - val_fn: 3936.0000 - lr: 1.0000e-04\n", "Epoch 59/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3530 - accuracy: 0.8682 - precision: 0.8906 - recall: 0.8457 - auc: 0.9783 - tp: 66313.0000 - fp: 8143.0000 - tn: 227093.0000 - fn: 12099.0000 - val_loss: 0.4873 - val_accuracy: 0.8233 - val_precision: 0.8478 - val_recall: 0.7990 - val_auc: 0.9598 - val_tp: 15663.0000 - val_fp: 2812.0000 - val_tn: 55997.0000 - val_fn: 3940.0000 - lr: 1.0000e-04\n", "Epoch 60/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3525 - accuracy: 0.8683 - precision: 0.8907 - recall: 0.8461 - auc: 0.9783 - tp: 66341.0000 - fp: 8137.0000 - tn: 227099.0000 - fn: 12071.0000 - val_loss: 0.4876 - val_accuracy: 0.8234 - val_precision: 0.8481 - val_recall: 0.7993 - val_auc: 0.9597 - val_tp: 15669.0000 - val_fp: 2806.0000 - val_tn: 56003.0000 - val_fn: 3934.0000 - lr: 1.0000e-04\n", "Epoch 61/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3520 - accuracy: 0.8683 - precision: 0.8908 - recall: 0.8463 - auc: 0.9784 - tp: 66361.0000 - fp: 8139.0000 - tn: 227097.0000 - fn: 12051.0000 - val_loss: 0.4880 - val_accuracy: 0.8235 - val_precision: 0.8481 - val_recall: 0.7994 - val_auc: 0.9597 - val_tp: 15670.0000 - val_fp: 2807.0000 - val_tn: 56002.0000 - val_fn: 3933.0000 - lr: 1.0000e-04\n", "Epoch 62/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3515 - accuracy: 0.8685 - precision: 0.8909 - recall: 0.8465 - auc: 0.9784 - tp: 66376.0000 - fp: 8130.0000 - tn: 227106.0000 - fn: 12036.0000 - val_loss: 0.4882 - val_accuracy: 0.8234 - val_precision: 0.8479 - val_recall: 0.7992 - val_auc: 0.9597 - val_tp: 15667.0000 - val_fp: 2811.0000 - val_tn: 55998.0000 - val_fn: 3936.0000 - lr: 1.0000e-04\n", "Epoch 63/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3510 - accuracy: 0.8688 - precision: 0.8910 - recall: 0.8468 - auc: 0.9785 - tp: 66403.0000 - fp: 8121.0000 - tn: 227115.0000 - fn: 12009.0000 - val_loss: 0.4885 - val_accuracy: 0.8235 - val_precision: 0.8480 - val_recall: 0.7992 - val_auc: 0.9596 - val_tp: 15666.0000 - val_fp: 2809.0000 - val_tn: 56000.0000 - val_fn: 3937.0000 - lr: 1.0000e-04\n", "Epoch 64/100\n", "30/30 [==============================] - 1s 23ms/step - loss: 0.3506 - accuracy: 0.8688 - precision: 0.8909 - recall: 0.8468 - auc: 0.9786 - tp: 66397.0000 - fp: 8128.0000 - tn: 227108.0000 - fn: 12015.0000 - val_loss: 0.4888 - val_accuracy: 0.8233 - val_precision: 0.8478 - val_recall: 0.7993 - val_auc: 0.9596 - val_tp: 15668.0000 - val_fp: 2812.0000 - 
val_tn: 55997.0000 - val_fn: 3935.0000 - lr: 1.0000e-04\n", "Epoch 65/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3501 - accuracy: 0.8689 - precision: 0.8912 - recall: 0.8470 - auc: 0.9786 - tp: 66413.0000 - fp: 8105.0000 - tn: 227131.0000 - fn: 11999.0000 - val_loss: 0.4890 - val_accuracy: 0.8234 - val_precision: 0.8477 - val_recall: 0.7990 - val_auc: 0.9596 - val_tp: 15663.0000 - val_fp: 2813.0000 - val_tn: 55996.0000 - val_fn: 3940.0000 - lr: 1.0000e-04\n", "Epoch 66/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3496 - accuracy: 0.8692 - precision: 0.8912 - recall: 0.8475 - auc: 0.9787 - tp: 66454.0000 - fp: 8117.0000 - tn: 227119.0000 - fn: 11958.0000 - val_loss: 0.4894 - val_accuracy: 0.8238 - val_precision: 0.8477 - val_recall: 0.7997 - val_auc: 0.9595 - val_tp: 15677.0000 - val_fp: 2817.0000 - val_tn: 55992.0000 - val_fn: 3926.0000 - lr: 1.0000e-04\n", "Epoch 67/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3492 - accuracy: 0.8692 - precision: 0.8912 - recall: 0.8477 - auc: 0.9787 - tp: 66472.0000 - fp: 8118.0000 - tn: 227118.0000 - fn: 11940.0000 - val_loss: 0.4896 - val_accuracy: 0.8236 - val_precision: 0.8478 - val_recall: 0.7993 - val_auc: 0.9596 - val_tp: 15669.0000 - val_fp: 2814.0000 - val_tn: 55995.0000 - val_fn: 3934.0000 - lr: 1.0000e-04\n", "Epoch 68/100\n", "30/30 [==============================] - 1s 18ms/step - loss: 0.3487 - accuracy: 0.8695 - precision: 0.8916 - recall: 0.8477 - auc: 0.9788 - tp: 66471.0000 - fp: 8085.0000 - tn: 227151.0000 - fn: 11941.0000 - val_loss: 0.4900 - val_accuracy: 0.8239 - val_precision: 0.8476 - val_recall: 0.7996 - val_auc: 0.9595 - val_tp: 15674.0000 - val_fp: 2818.0000 - val_tn: 55991.0000 - val_fn: 3929.0000 - lr: 1.0000e-04\n", "Epoch 69/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3482 - accuracy: 0.8697 - precision: 0.8916 - recall: 0.8480 - auc: 0.9788 - tp: 66492.0000 - fp: 8083.0000 - tn: 227153.0000 - fn: 11920.0000 - val_loss: 0.4902 - val_accuracy: 0.8236 - val_precision: 0.8473 - val_recall: 0.7995 - val_auc: 0.9595 - val_tp: 15673.0000 - val_fp: 2825.0000 - val_tn: 55984.0000 - val_fn: 3930.0000 - lr: 1.0000e-04\n", "Epoch 70/100\n", "30/30 [==============================] - 1s 18ms/step - loss: 0.3478 - accuracy: 0.8697 - precision: 0.8914 - recall: 0.8481 - auc: 0.9789 - tp: 66504.0000 - fp: 8103.0000 - tn: 227133.0000 - fn: 11908.0000 - val_loss: 0.4905 - val_accuracy: 0.8238 - val_precision: 0.8474 - val_recall: 0.7997 - val_auc: 0.9595 - val_tp: 15676.0000 - val_fp: 2823.0000 - val_tn: 55986.0000 - val_fn: 3927.0000 - lr: 1.0000e-04\n", "Epoch 71/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3473 - accuracy: 0.8700 - precision: 0.8919 - recall: 0.8484 - auc: 0.9789 - tp: 66525.0000 - fp: 8060.0000 - tn: 227176.0000 - fn: 11887.0000 - val_loss: 0.4909 - val_accuracy: 0.8235 - val_precision: 0.8473 - val_recall: 0.7996 - val_auc: 0.9594 - val_tp: 15675.0000 - val_fp: 2824.0000 - val_tn: 55985.0000 - val_fn: 3928.0000 - lr: 1.0000e-04\n", "Epoch 72/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3469 - accuracy: 0.8703 - precision: 0.8920 - recall: 0.8488 - auc: 0.9790 - tp: 66554.0000 - fp: 8060.0000 - tn: 227176.0000 - fn: 11858.0000 - val_loss: 0.4911 - val_accuracy: 0.8239 - val_precision: 0.8474 - val_recall: 0.7995 - val_auc: 0.9594 - val_tp: 15673.0000 - val_fp: 2822.0000 - val_tn: 55987.0000 - val_fn: 3930.0000 - lr: 1.0000e-04\n", "Epoch 73/100\n", 
"30/30 [==============================] - 1s 21ms/step - loss: 0.3464 - accuracy: 0.8702 - precision: 0.8920 - recall: 0.8490 - auc: 0.9790 - tp: 66573.0000 - fp: 8061.0000 - tn: 227175.0000 - fn: 11839.0000 - val_loss: 0.4915 - val_accuracy: 0.8241 - val_precision: 0.8474 - val_recall: 0.8000 - val_auc: 0.9594 - val_tp: 15682.0000 - val_fp: 2824.0000 - val_tn: 55985.0000 - val_fn: 3921.0000 - lr: 1.0000e-04\n", "Epoch 74/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3460 - accuracy: 0.8705 - precision: 0.8921 - recall: 0.8491 - auc: 0.9791 - tp: 66582.0000 - fp: 8051.0000 - tn: 227185.0000 - fn: 11830.0000 - val_loss: 0.4918 - val_accuracy: 0.8237 - val_precision: 0.8470 - val_recall: 0.8003 - val_auc: 0.9593 - val_tp: 15689.0000 - val_fp: 2835.0000 - val_tn: 55974.0000 - val_fn: 3914.0000 - lr: 1.0000e-04\n", "Epoch 75/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3455 - accuracy: 0.8708 - precision: 0.8926 - recall: 0.8491 - auc: 0.9791 - tp: 66578.0000 - fp: 8013.0000 - tn: 227223.0000 - fn: 11834.0000 - val_loss: 0.4921 - val_accuracy: 0.8240 - val_precision: 0.8475 - val_recall: 0.8002 - val_auc: 0.9593 - val_tp: 15687.0000 - val_fp: 2823.0000 - val_tn: 55986.0000 - val_fn: 3916.0000 - lr: 1.0000e-04\n", "Epoch 76/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3451 - accuracy: 0.8710 - precision: 0.8925 - recall: 0.8494 - auc: 0.9792 - tp: 66605.0000 - fp: 8024.0000 - tn: 227212.0000 - fn: 11807.0000 - val_loss: 0.4924 - val_accuracy: 0.8241 - val_precision: 0.8472 - val_recall: 0.8002 - val_auc: 0.9592 - val_tp: 15686.0000 - val_fp: 2829.0000 - val_tn: 55980.0000 - val_fn: 3917.0000 - lr: 1.0000e-04\n", "Epoch 77/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3447 - accuracy: 0.8709 - precision: 0.8927 - recall: 0.8497 - auc: 0.9792 - tp: 66627.0000 - fp: 8007.0000 - tn: 227229.0000 - fn: 11785.0000 - val_loss: 0.4928 - val_accuracy: 0.8242 - val_precision: 0.8474 - val_recall: 0.8000 - val_auc: 0.9592 - val_tp: 15682.0000 - val_fp: 2823.0000 - val_tn: 55986.0000 - val_fn: 3921.0000 - lr: 1.0000e-04\n", "Epoch 78/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3442 - accuracy: 0.8713 - precision: 0.8929 - recall: 0.8499 - auc: 0.9793 - tp: 66645.0000 - fp: 7993.0000 - tn: 227243.0000 - fn: 11767.0000 - val_loss: 0.4932 - val_accuracy: 0.8241 - val_precision: 0.8476 - val_recall: 0.8004 - val_auc: 0.9591 - val_tp: 15690.0000 - val_fp: 2821.0000 - val_tn: 55988.0000 - val_fn: 3913.0000 - lr: 1.0000e-04\n", "Epoch 79/100\n", "30/30 [==============================] - 1s 18ms/step - loss: 0.3437 - accuracy: 0.8713 - precision: 0.8931 - recall: 0.8504 - auc: 0.9793 - tp: 66680.0000 - fp: 7982.0000 - tn: 227254.0000 - fn: 11732.0000 - val_loss: 0.4934 - val_accuracy: 0.8240 - val_precision: 0.8474 - val_recall: 0.8005 - val_auc: 0.9591 - val_tp: 15693.0000 - val_fp: 2827.0000 - val_tn: 55982.0000 - val_fn: 3910.0000 - lr: 1.0000e-04\n", "Epoch 80/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3433 - accuracy: 0.8715 - precision: 0.8932 - recall: 0.8504 - auc: 0.9794 - tp: 66680.0000 - fp: 7977.0000 - tn: 227259.0000 - fn: 11732.0000 - val_loss: 0.4938 - val_accuracy: 0.8241 - val_precision: 0.8474 - val_recall: 0.8011 - val_auc: 0.9591 - val_tp: 15703.0000 - val_fp: 2827.0000 - val_tn: 55982.0000 - val_fn: 3900.0000 - lr: 1.0000e-04\n", "Epoch 81/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3429 - accuracy: 
0.8715 - precision: 0.8933 - recall: 0.8507 - auc: 0.9794 - tp: 66709.0000 - fp: 7965.0000 - tn: 227271.0000 - fn: 11703.0000 - val_loss: 0.4941 - val_accuracy: 0.8240 - val_precision: 0.8473 - val_recall: 0.8008 - val_auc: 0.9590 - val_tp: 15698.0000 - val_fp: 2829.0000 - val_tn: 55980.0000 - val_fn: 3905.0000 - lr: 1.0000e-04\n", "Epoch 82/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3425 - accuracy: 0.8718 - precision: 0.8936 - recall: 0.8508 - auc: 0.9795 - tp: 66716.0000 - fp: 7947.0000 - tn: 227289.0000 - fn: 11696.0000 - val_loss: 0.4945 - val_accuracy: 0.8238 - val_precision: 0.8474 - val_recall: 0.8006 - val_auc: 0.9589 - val_tp: 15695.0000 - val_fp: 2827.0000 - val_tn: 55982.0000 - val_fn: 3908.0000 - lr: 1.0000e-04\n", "Epoch 83/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3420 - accuracy: 0.8721 - precision: 0.8933 - recall: 0.8510 - auc: 0.9795 - tp: 66726.0000 - fp: 7973.0000 - tn: 227263.0000 - fn: 11686.0000 - val_loss: 0.4948 - val_accuracy: 0.8237 - val_precision: 0.8468 - val_recall: 0.8008 - val_auc: 0.9589 - val_tp: 15698.0000 - val_fp: 2840.0000 - val_tn: 55969.0000 - val_fn: 3905.0000 - lr: 1.0000e-04\n", "Epoch 84/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3416 - accuracy: 0.8724 - precision: 0.8934 - recall: 0.8513 - auc: 0.9796 - tp: 66749.0000 - fp: 7962.0000 - tn: 227274.0000 - fn: 11663.0000 - val_loss: 0.4951 - val_accuracy: 0.8236 - val_precision: 0.8469 - val_recall: 0.8006 - val_auc: 0.9589 - val_tp: 15694.0000 - val_fp: 2838.0000 - val_tn: 55971.0000 - val_fn: 3909.0000 - lr: 1.0000e-04\n", "Epoch 85/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3412 - accuracy: 0.8723 - precision: 0.8932 - recall: 0.8516 - auc: 0.9796 - tp: 66778.0000 - fp: 7981.0000 - tn: 227255.0000 - fn: 11634.0000 - val_loss: 0.4955 - val_accuracy: 0.8241 - val_precision: 0.8466 - val_recall: 0.8007 - val_auc: 0.9588 - val_tp: 15696.0000 - val_fp: 2843.0000 - val_tn: 55966.0000 - val_fn: 3907.0000 - lr: 1.0000e-04\n", "Epoch 86/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3408 - accuracy: 0.8725 - precision: 0.8936 - recall: 0.8517 - auc: 0.9797 - tp: 66785.0000 - fp: 7951.0000 - tn: 227285.0000 - fn: 11627.0000 - val_loss: 0.4959 - val_accuracy: 0.8239 - val_precision: 0.8469 - val_recall: 0.8008 - val_auc: 0.9588 - val_tp: 15699.0000 - val_fp: 2837.0000 - val_tn: 55972.0000 - val_fn: 3904.0000 - lr: 1.0000e-04\n", "Epoch 87/100\n", "30/30 [==============================] - 1s 22ms/step - loss: 0.3403 - accuracy: 0.8728 - precision: 0.8936 - recall: 0.8519 - auc: 0.9797 - tp: 66803.0000 - fp: 7952.0000 - tn: 227284.0000 - fn: 11609.0000 - val_loss: 0.4961 - val_accuracy: 0.8238 - val_precision: 0.8467 - val_recall: 0.8007 - val_auc: 0.9588 - val_tp: 15697.0000 - val_fp: 2843.0000 - val_tn: 55966.0000 - val_fn: 3906.0000 - lr: 1.0000e-04\n", "Epoch 88/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3399 - accuracy: 0.8730 - precision: 0.8938 - recall: 0.8520 - auc: 0.9798 - tp: 66806.0000 - fp: 7940.0000 - tn: 227296.0000 - fn: 11606.0000 - val_loss: 0.4965 - val_accuracy: 0.8235 - val_precision: 0.8469 - val_recall: 0.8006 - val_auc: 0.9588 - val_tp: 15694.0000 - val_fp: 2838.0000 - val_tn: 55971.0000 - val_fn: 3909.0000 - lr: 1.0000e-04\n", "Epoch 89/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3395 - accuracy: 0.8730 - precision: 0.8936 - recall: 0.8524 - auc: 0.9798 - tp: 66835.0000 - fp: 
7962.0000 - tn: 227274.0000 - fn: 11577.0000 - val_loss: 0.4969 - val_accuracy: 0.8236 - val_precision: 0.8466 - val_recall: 0.8010 - val_auc: 0.9587 - val_tp: 15702.0000 - val_fp: 2845.0000 - val_tn: 55964.0000 - val_fn: 3901.0000 - lr: 1.0000e-04\n", "Epoch 90/100\n", "30/30 [==============================] - 1s 22ms/step - loss: 0.3391 - accuracy: 0.8733 - precision: 0.8939 - recall: 0.8524 - auc: 0.9799 - tp: 66841.0000 - fp: 7937.0000 - tn: 227299.0000 - fn: 11571.0000 - val_loss: 0.4972 - val_accuracy: 0.8233 - val_precision: 0.8468 - val_recall: 0.8005 - val_auc: 0.9587 - val_tp: 15693.0000 - val_fp: 2839.0000 - val_tn: 55970.0000 - val_fn: 3910.0000 - lr: 1.0000e-04\n", "Epoch 91/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3387 - accuracy: 0.8733 - precision: 0.8939 - recall: 0.8527 - auc: 0.9799 - tp: 66862.0000 - fp: 7935.0000 - tn: 227301.0000 - fn: 11550.0000 - val_loss: 0.4976 - val_accuracy: 0.8232 - val_precision: 0.8463 - val_recall: 0.8009 - val_auc: 0.9587 - val_tp: 15701.0000 - val_fp: 2851.0000 - val_tn: 55958.0000 - val_fn: 3902.0000 - lr: 1.0000e-04\n", "Epoch 92/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3383 - accuracy: 0.8733 - precision: 0.8939 - recall: 0.8529 - auc: 0.9800 - tp: 66878.0000 - fp: 7934.0000 - tn: 227302.0000 - fn: 11534.0000 - val_loss: 0.4980 - val_accuracy: 0.8230 - val_precision: 0.8462 - val_recall: 0.8012 - val_auc: 0.9586 - val_tp: 15705.0000 - val_fp: 2854.0000 - val_tn: 55955.0000 - val_fn: 3898.0000 - lr: 1.0000e-04\n", "Epoch 93/100\n", "30/30 [==============================] - 1s 18ms/step - loss: 0.3379 - accuracy: 0.8735 - precision: 0.8939 - recall: 0.8530 - auc: 0.9800 - tp: 66886.0000 - fp: 7937.0000 - tn: 227299.0000 - fn: 11526.0000 - val_loss: 0.4983 - val_accuracy: 0.8230 - val_precision: 0.8463 - val_recall: 0.8013 - val_auc: 0.9586 - val_tp: 15707.0000 - val_fp: 2852.0000 - val_tn: 55957.0000 - val_fn: 3896.0000 - lr: 1.0000e-04\n", "Epoch 94/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3375 - accuracy: 0.8739 - precision: 0.8945 - recall: 0.8533 - auc: 0.9801 - tp: 66908.0000 - fp: 7895.0000 - tn: 227341.0000 - fn: 11504.0000 - val_loss: 0.4986 - val_accuracy: 0.8234 - val_precision: 0.8462 - val_recall: 0.8012 - val_auc: 0.9586 - val_tp: 15706.0000 - val_fp: 2855.0000 - val_tn: 55954.0000 - val_fn: 3897.0000 - lr: 1.0000e-04\n", "Epoch 95/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3371 - accuracy: 0.8739 - precision: 0.8943 - recall: 0.8536 - auc: 0.9801 - tp: 66931.0000 - fp: 7909.0000 - tn: 227327.0000 - fn: 11481.0000 - val_loss: 0.4990 - val_accuracy: 0.8231 - val_precision: 0.8461 - val_recall: 0.8014 - val_auc: 0.9586 - val_tp: 15710.0000 - val_fp: 2857.0000 - val_tn: 55952.0000 - val_fn: 3893.0000 - lr: 1.0000e-04\n", "Epoch 96/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3367 - accuracy: 0.8740 - precision: 0.8943 - recall: 0.8538 - auc: 0.9801 - tp: 66951.0000 - fp: 7914.0000 - tn: 227322.0000 - fn: 11461.0000 - val_loss: 0.4994 - val_accuracy: 0.8230 - val_precision: 0.8461 - val_recall: 0.8013 - val_auc: 0.9585 - val_tp: 15708.0000 - val_fp: 2857.0000 - val_tn: 55952.0000 - val_fn: 3895.0000 - lr: 1.0000e-04\n", "Epoch 97/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3363 - accuracy: 0.8740 - precision: 0.8944 - recall: 0.8539 - auc: 0.9802 - tp: 66956.0000 - fp: 7908.0000 - tn: 227328.0000 - fn: 11456.0000 - val_loss: 0.4998 - val_accuracy: 
0.8230 - val_precision: 0.8460 - val_recall: 0.8014 - val_auc: 0.9585 - val_tp: 15710.0000 - val_fp: 2859.0000 - val_tn: 55950.0000 - val_fn: 3893.0000 - lr: 1.0000e-04\n", "Epoch 98/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3359 - accuracy: 0.8744 - precision: 0.8947 - recall: 0.8540 - auc: 0.9802 - tp: 66966.0000 - fp: 7880.0000 - tn: 227356.0000 - fn: 11446.0000 - val_loss: 0.5001 - val_accuracy: 0.8227 - val_precision: 0.8458 - val_recall: 0.8012 - val_auc: 0.9585 - val_tp: 15706.0000 - val_fp: 2863.0000 - val_tn: 55946.0000 - val_fn: 3897.0000 - lr: 1.0000e-04\n", "Epoch 99/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3355 - accuracy: 0.8747 - precision: 0.8948 - recall: 0.8541 - auc: 0.9803 - tp: 66972.0000 - fp: 7874.0000 - tn: 227362.0000 - fn: 11440.0000 - val_loss: 0.5005 - val_accuracy: 0.8228 - val_precision: 0.8456 - val_recall: 0.8012 - val_auc: 0.9584 - val_tp: 15706.0000 - val_fp: 2867.0000 - val_tn: 55942.0000 - val_fn: 3897.0000 - lr: 1.0000e-04\n", "Epoch 100/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3352 - accuracy: 0.8747 - precision: 0.8950 - recall: 0.8542 - auc: 0.9803 - tp: 66979.0000 - fp: 7860.0000 - tn: 227376.0000 - fn: 11433.0000 - val_loss: 0.5009 - val_accuracy: 0.8225 - val_precision: 0.8458 - val_recall: 0.8013 - val_auc: 0.9584 - val_tp: 15707.0000 - val_fp: 2864.0000 - val_tn: 55945.0000 - val_fn: 3896.0000 - lr: 1.0000e-04\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2024-06-08 13:56:09,227] Trial 0 finished with value: 0.819294810295105 and parameters: {'num_filters': 37, 'kernel_size': 5, 'learning_rate': 0.0005838498821337493}. Best is trial 0 with value: 0.819294810295105.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Loss: 0.5012595057487488\n", "Accuracy: 0.819294810295105\n", "Precision: 0.840825080871582\n", "Recall: 0.7984818816184998\n", "AUC: 0.9585517644882202\n", "True Positives: 19566.0\n", "False Positives: 3704.0\n", "True Negatives: 69808.0\n", "False Negatives: 4938.0\n", "Epoch 1/100\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "C:\\Users\\Michał\\AppData\\Local\\Temp\\ipykernel_33252\\265862631.py:5: FutureWarning: suggest_loguniform has been deprecated in v3.0.0. This feature will be removed in v6.0.0. See https://github.com/optuna/optuna/releases/tag/v3.0.0. 
Use suggest_float(..., log=True) instead.\n", " learning_rate = trial.suggest_loguniform('learning_rate', 1e-4, 1e-2)\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "30/30 [==============================] - 4s 56ms/step - loss: 1.3706 - accuracy: 0.3701 - precision: 0.0000e+00 - recall: 0.0000e+00 - auc: 0.6488 - tp: 0.0000e+00 - fp: 0.0000e+00 - tn: 235236.0000 - fn: 78412.0000 - val_loss: 1.3552 - val_accuracy: 0.4174 - val_precision: 0.0000e+00 - val_recall: 0.0000e+00 - val_auc: 0.7013 - val_tp: 0.0000e+00 - val_fp: 0.0000e+00 - val_tn: 58809.0000 - val_fn: 19603.0000 - lr: 3.9476e-04\n", "Epoch 2/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 1.3383 - accuracy: 0.4456 - precision: 0.0000e+00 - recall: 0.0000e+00 - auc: 0.7218 - tp: 0.0000e+00 - fp: 0.0000e+00 - tn: 235236.0000 - fn: 78412.0000 - val_loss: 1.3198 - val_accuracy: 0.4690 - val_precision: 0.0000e+00 - val_recall: 0.0000e+00 - val_auc: 0.7383 - val_tp: 0.0000e+00 - val_fp: 0.0000e+00 - val_tn: 58809.0000 - val_fn: 19603.0000 - lr: 3.9476e-04\n", "Epoch 3/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 1.2942 - accuracy: 0.5060 - precision: 0.0000e+00 - recall: 0.0000e+00 - auc: 0.7569 - tp: 0.0000e+00 - fp: 0.0000e+00 - tn: 235236.0000 - fn: 78412.0000 - val_loss: 1.2662 - val_accuracy: 0.5320 - val_precision: 0.0000e+00 - val_recall: 0.0000e+00 - val_auc: 0.7717 - val_tp: 0.0000e+00 - val_fp: 0.0000e+00 - val_tn: 58809.0000 - val_fn: 19603.0000 - lr: 3.9476e-04\n", "Epoch 4/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 1.2268 - accuracy: 0.5696 - precision: 1.0000 - recall: 1.5304e-04 - auc: 0.7928 - tp: 12.0000 - fp: 0.0000e+00 - tn: 235236.0000 - fn: 78400.0000 - val_loss: 1.1863 - val_accuracy: 0.5910 - val_precision: 1.0000 - val_recall: 0.0031 - val_auc: 0.8066 - val_tp: 60.0000 - val_fp: 0.0000e+00 - val_tn: 58809.0000 - val_fn: 19543.0000 - lr: 3.9476e-04\n", "Epoch 5/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 1.1322 - accuracy: 0.6257 - precision: 0.8558 - recall: 0.0701 - auc: 0.8271 - tp: 5495.0000 - fp: 926.0000 - tn: 234310.0000 - fn: 72917.0000 - val_loss: 1.0824 - val_accuracy: 0.6424 - val_precision: 0.8364 - val_recall: 0.1429 - val_auc: 0.8398 - val_tp: 2802.0000 - val_fp: 548.0000 - val_tn: 58261.0000 - val_fn: 16801.0000 - lr: 3.9476e-04\n", "Epoch 6/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 1.0205 - accuracy: 0.6677 - precision: 0.8492 - recall: 0.2287 - auc: 0.8574 - tp: 17930.0000 - fp: 3184.0000 - tn: 232052.0000 - fn: 60482.0000 - val_loss: 0.9704 - val_accuracy: 0.6801 - val_precision: 0.8603 - val_recall: 0.3417 - val_auc: 0.8674 - val_tp: 6699.0000 - val_fp: 1088.0000 - val_tn: 57721.0000 - val_fn: 12904.0000 - lr: 3.9476e-04\n", "Epoch 7/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.9099 - accuracy: 0.6970 - precision: 0.8548 - recall: 0.4412 - auc: 0.8834 - tp: 34597.0000 - fp: 5877.0000 - tn: 229359.0000 - fn: 43815.0000 - val_loss: 0.8702 - val_accuracy: 0.7002 - val_precision: 0.8425 - val_recall: 0.5035 - val_auc: 0.8904 - val_tp: 9870.0000 - val_fp: 1845.0000 - val_tn: 56964.0000 - val_fn: 9733.0000 - lr: 3.9476e-04\n", "Epoch 8/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.8178 - accuracy: 0.7139 - precision: 0.8409 - recall: 0.5585 - auc: 0.9036 - tp: 43795.0000 - fp: 8284.0000 - tn: 226952.0000 - fn: 34617.0000 - val_loss: 0.7945 - val_accuracy: 0.7102 - val_precision: 0.8284 - val_recall: 
0.5824 - val_auc: 0.9056 - val_tp: 11416.0000 - val_fp: 2365.0000 - val_tn: 56444.0000 - val_fn: 8187.0000 - lr: 3.9476e-04\n", "Epoch 9/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.7506 - accuracy: 0.7249 - precision: 0.8313 - recall: 0.6147 - auc: 0.9159 - tp: 48198.0000 - fp: 9780.0000 - tn: 225456.0000 - fn: 30214.0000 - val_loss: 0.7420 - val_accuracy: 0.7214 - val_precision: 0.8223 - val_recall: 0.6235 - val_auc: 0.9148 - val_tp: 12222.0000 - val_fp: 2642.0000 - val_tn: 56167.0000 - val_fn: 7381.0000 - lr: 3.9476e-04\n", "Epoch 10/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.7026 - accuracy: 0.7358 - precision: 0.8305 - recall: 0.6470 - auc: 0.9241 - tp: 50732.0000 - fp: 10351.0000 - tn: 224885.0000 - fn: 27680.0000 - val_loss: 0.7045 - val_accuracy: 0.7314 - val_precision: 0.8206 - val_recall: 0.6476 - val_auc: 0.9213 - val_tp: 12694.0000 - val_fp: 2775.0000 - val_tn: 56034.0000 - val_fn: 6909.0000 - lr: 3.9476e-04\n", "Epoch 11/100\n", "30/30 [==============================] - 1s 18ms/step - loss: 0.6666 - accuracy: 0.7475 - precision: 0.8323 - recall: 0.6651 - auc: 0.9304 - tp: 52149.0000 - fp: 10508.0000 - tn: 224728.0000 - fn: 26263.0000 - val_loss: 0.6757 - val_accuracy: 0.7429 - val_precision: 0.8234 - val_recall: 0.6611 - val_auc: 0.9264 - val_tp: 12959.0000 - val_fp: 2780.0000 - val_tn: 56029.0000 - val_fn: 6644.0000 - lr: 3.9476e-04\n", "Epoch 12/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.6376 - accuracy: 0.7609 - precision: 0.8361 - recall: 0.6792 - auc: 0.9353 - tp: 53261.0000 - fp: 10441.0000 - tn: 224795.0000 - fn: 25151.0000 - val_loss: 0.6519 - val_accuracy: 0.7554 - val_precision: 0.8279 - val_recall: 0.6743 - val_auc: 0.9307 - val_tp: 13219.0000 - val_fp: 2748.0000 - val_tn: 56061.0000 - val_fn: 6384.0000 - lr: 3.9476e-04\n", "Epoch 13/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.6127 - accuracy: 0.7736 - precision: 0.8411 - recall: 0.6945 - auc: 0.9398 - tp: 54454.0000 - fp: 10288.0000 - tn: 224948.0000 - fn: 23958.0000 - val_loss: 0.6314 - val_accuracy: 0.7639 - val_precision: 0.8327 - val_recall: 0.6897 - val_auc: 0.9344 - val_tp: 13520.0000 - val_fp: 2716.0000 - val_tn: 56093.0000 - val_fn: 6083.0000 - lr: 3.9476e-04\n", "Epoch 14/100\n", "30/30 [==============================] - 1s 22ms/step - loss: 0.5909 - accuracy: 0.7842 - precision: 0.8459 - recall: 0.7107 - auc: 0.9435 - tp: 55730.0000 - fp: 10151.0000 - tn: 225085.0000 - fn: 22682.0000 - val_loss: 0.6136 - val_accuracy: 0.7729 - val_precision: 0.8367 - val_recall: 0.7038 - val_auc: 0.9377 - val_tp: 13797.0000 - val_fp: 2693.0000 - val_tn: 56116.0000 - val_fn: 5806.0000 - lr: 3.9476e-04\n", "Epoch 15/100\n", "30/30 [==============================] - 1s 22ms/step - loss: 0.5713 - accuracy: 0.7935 - precision: 0.8502 - recall: 0.7249 - auc: 0.9469 - tp: 56837.0000 - fp: 10017.0000 - tn: 225219.0000 - fn: 21575.0000 - val_loss: 0.5976 - val_accuracy: 0.7795 - val_precision: 0.8396 - val_recall: 0.7155 - val_auc: 0.9405 - val_tp: 14026.0000 - val_fp: 2680.0000 - val_tn: 56129.0000 - val_fn: 5577.0000 - lr: 3.9476e-04\n", "Epoch 16/100\n", "30/30 [==============================] - 1s 18ms/step - loss: 0.5538 - accuracy: 0.8005 - precision: 0.8536 - recall: 0.7365 - auc: 0.9499 - tp: 57751.0000 - fp: 9902.0000 - tn: 225334.0000 - fn: 20661.0000 - val_loss: 0.5838 - val_accuracy: 0.7844 - val_precision: 0.8417 - val_recall: 0.7270 - val_auc: 0.9429 - val_tp: 14252.0000 - val_fp: 2680.0000 - val_tn: 
56129.0000 - val_fn: 5351.0000 - lr: 3.9476e-04\n", "Epoch 17/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.5383 - accuracy: 0.8068 - precision: 0.8563 - recall: 0.7472 - auc: 0.9524 - tp: 58591.0000 - fp: 9830.0000 - tn: 225406.0000 - fn: 19821.0000 - val_loss: 0.5720 - val_accuracy: 0.7892 - val_precision: 0.8435 - val_recall: 0.7353 - val_auc: 0.9450 - val_tp: 14415.0000 - val_fp: 2675.0000 - val_tn: 56134.0000 - val_fn: 5188.0000 - lr: 3.9476e-04\n", "Epoch 18/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.5246 - accuracy: 0.8110 - precision: 0.8583 - recall: 0.7562 - auc: 0.9546 - tp: 59297.0000 - fp: 9787.0000 - tn: 225449.0000 - fn: 19115.0000 - val_loss: 0.5615 - val_accuracy: 0.7922 - val_precision: 0.8457 - val_recall: 0.7425 - val_auc: 0.9468 - val_tp: 14556.0000 - val_fp: 2655.0000 - val_tn: 56154.0000 - val_fn: 5047.0000 - lr: 3.9476e-04\n", "Epoch 19/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.5125 - accuracy: 0.8143 - precision: 0.8600 - recall: 0.7635 - auc: 0.9564 - tp: 59868.0000 - fp: 9745.0000 - tn: 225491.0000 - fn: 18544.0000 - val_loss: 0.5525 - val_accuracy: 0.7954 - val_precision: 0.8461 - val_recall: 0.7479 - val_auc: 0.9483 - val_tp: 14661.0000 - val_fp: 2666.0000 - val_tn: 56143.0000 - val_fn: 4942.0000 - lr: 3.9476e-04\n", "Epoch 20/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.5017 - accuracy: 0.8175 - precision: 0.8614 - recall: 0.7701 - auc: 0.9581 - tp: 60384.0000 - fp: 9718.0000 - tn: 225518.0000 - fn: 18028.0000 - val_loss: 0.5449 - val_accuracy: 0.7984 - val_precision: 0.8466 - val_recall: 0.7526 - val_auc: 0.9496 - val_tp: 14754.0000 - val_fp: 2673.0000 - val_tn: 56136.0000 - val_fn: 4849.0000 - lr: 3.9476e-04\n", "Epoch 21/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.4923 - accuracy: 0.8209 - precision: 0.8627 - recall: 0.7759 - auc: 0.9594 - tp: 60843.0000 - fp: 9680.0000 - tn: 225556.0000 - fn: 17569.0000 - val_loss: 0.5384 - val_accuracy: 0.8007 - val_precision: 0.8483 - val_recall: 0.7563 - val_auc: 0.9506 - val_tp: 14825.0000 - val_fp: 2651.0000 - val_tn: 56158.0000 - val_fn: 4778.0000 - lr: 3.9476e-04\n", "Epoch 22/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.4839 - accuracy: 0.8236 - precision: 0.8644 - recall: 0.7806 - auc: 0.9607 - tp: 61207.0000 - fp: 9601.0000 - tn: 225635.0000 - fn: 17205.0000 - val_loss: 0.5326 - val_accuracy: 0.8028 - val_precision: 0.8481 - val_recall: 0.7600 - val_auc: 0.9515 - val_tp: 14899.0000 - val_fp: 2668.0000 - val_tn: 56141.0000 - val_fn: 4704.0000 - lr: 3.9476e-04\n", "Epoch 23/100\n", "30/30 [==============================] - 1s 22ms/step - loss: 0.4764 - accuracy: 0.8260 - precision: 0.8649 - recall: 0.7847 - auc: 0.9618 - tp: 61526.0000 - fp: 9610.0000 - tn: 225626.0000 - fn: 16886.0000 - val_loss: 0.5278 - val_accuracy: 0.8042 - val_precision: 0.8489 - val_recall: 0.7637 - val_auc: 0.9524 - val_tp: 14971.0000 - val_fp: 2665.0000 - val_tn: 56144.0000 - val_fn: 4632.0000 - lr: 3.9476e-04\n", "Epoch 24/100\n", "30/30 [==============================] - 1s 25ms/step - loss: 0.4697 - accuracy: 0.8279 - precision: 0.8659 - recall: 0.7885 - auc: 0.9627 - tp: 61825.0000 - fp: 9574.0000 - tn: 225662.0000 - fn: 16587.0000 - val_loss: 0.5235 - val_accuracy: 0.8059 - val_precision: 0.8496 - val_recall: 0.7658 - val_auc: 0.9531 - val_tp: 15012.0000 - val_fp: 2657.0000 - val_tn: 56152.0000 - val_fn: 4591.0000 - lr: 3.9476e-04\n", "Epoch 25/100\n", "30/30 
[==============================] - 1s 21ms/step - loss: 0.4637 - accuracy: 0.8301 - precision: 0.8668 - recall: 0.7914 - auc: 0.9636 - tp: 62053.0000 - fp: 9533.0000 - tn: 225703.0000 - fn: 16359.0000 - val_loss: 0.5200 - val_accuracy: 0.8069 - val_precision: 0.8495 - val_recall: 0.7685 - val_auc: 0.9536 - val_tp: 15064.0000 - val_fp: 2668.0000 - val_tn: 56141.0000 - val_fn: 4539.0000 - lr: 3.9476e-04\n", "Epoch 26/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.4582 - accuracy: 0.8316 - precision: 0.8676 - recall: 0.7945 - auc: 0.9644 - tp: 62299.0000 - fp: 9510.0000 - tn: 225726.0000 - fn: 16113.0000 - val_loss: 0.5168 - val_accuracy: 0.8083 - val_precision: 0.8498 - val_recall: 0.7709 - val_auc: 0.9542 - val_tp: 15111.0000 - val_fp: 2670.0000 - val_tn: 56139.0000 - val_fn: 4492.0000 - lr: 3.9476e-04\n", "Epoch 27/100\n", "30/30 [==============================] - 1s 22ms/step - loss: 0.4532 - accuracy: 0.8336 - precision: 0.8682 - recall: 0.7968 - auc: 0.9650 - tp: 62482.0000 - fp: 9487.0000 - tn: 225749.0000 - fn: 15930.0000 - val_loss: 0.5141 - val_accuracy: 0.8090 - val_precision: 0.8492 - val_recall: 0.7720 - val_auc: 0.9546 - val_tp: 15133.0000 - val_fp: 2687.0000 - val_tn: 56122.0000 - val_fn: 4470.0000 - lr: 3.9476e-04\n", "Epoch 28/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.4486 - accuracy: 0.8348 - precision: 0.8691 - recall: 0.7989 - auc: 0.9657 - tp: 62642.0000 - fp: 9436.0000 - tn: 225800.0000 - fn: 15770.0000 - val_loss: 0.5117 - val_accuracy: 0.8111 - val_precision: 0.8501 - val_recall: 0.7742 - val_auc: 0.9550 - val_tp: 15177.0000 - val_fp: 2677.0000 - val_tn: 56132.0000 - val_fn: 4426.0000 - lr: 3.9476e-04\n", "Epoch 29/100\n", "30/30 [==============================] - 1s 23ms/step - loss: 0.4444 - accuracy: 0.8362 - precision: 0.8700 - recall: 0.8012 - auc: 0.9663 - tp: 62826.0000 - fp: 9389.0000 - tn: 225847.0000 - fn: 15586.0000 - val_loss: 0.5096 - val_accuracy: 0.8115 - val_precision: 0.8501 - val_recall: 0.7759 - val_auc: 0.9554 - val_tp: 15210.0000 - val_fp: 2681.0000 - val_tn: 56128.0000 - val_fn: 4393.0000 - lr: 3.9476e-04\n", "Epoch 30/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.4406 - accuracy: 0.8374 - precision: 0.8706 - recall: 0.8027 - auc: 0.9668 - tp: 62944.0000 - fp: 9356.0000 - tn: 225880.0000 - fn: 15468.0000 - val_loss: 0.5077 - val_accuracy: 0.8133 - val_precision: 0.8507 - val_recall: 0.7783 - val_auc: 0.9557 - val_tp: 15257.0000 - val_fp: 2677.0000 - val_tn: 56132.0000 - val_fn: 4346.0000 - lr: 3.9476e-04\n", "Epoch 31/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.4368 - accuracy: 0.8384 - precision: 0.8712 - recall: 0.8047 - auc: 0.9673 - tp: 63098.0000 - fp: 9330.0000 - tn: 225906.0000 - fn: 15314.0000 - val_loss: 0.5064 - val_accuracy: 0.8138 - val_precision: 0.8511 - val_recall: 0.7792 - val_auc: 0.9559 - val_tp: 15275.0000 - val_fp: 2673.0000 - val_tn: 56136.0000 - val_fn: 4328.0000 - lr: 3.9476e-04\n", "Epoch 32/100\n", "30/30 [==============================] - 1s 22ms/step - loss: 0.4333 - accuracy: 0.8393 - precision: 0.8716 - recall: 0.8058 - auc: 0.9678 - tp: 63188.0000 - fp: 9311.0000 - tn: 225925.0000 - fn: 15224.0000 - val_loss: 0.5048 - val_accuracy: 0.8145 - val_precision: 0.8511 - val_recall: 0.7807 - val_auc: 0.9562 - val_tp: 15304.0000 - val_fp: 2677.0000 - val_tn: 56132.0000 - val_fn: 4299.0000 - lr: 3.9476e-04\n", "Epoch 33/100\n", "30/30 [==============================] - 1s 23ms/step - loss: 0.4300 - accuracy: 0.8408 
- precision: 0.8721 - recall: 0.8074 - auc: 0.9683 - tp: 63308.0000 - fp: 9287.0000 - tn: 225949.0000 - fn: 15104.0000 - val_loss: 0.5038 - val_accuracy: 0.8143 - val_precision: 0.8513 - val_recall: 0.7810 - val_auc: 0.9563 - val_tp: 15310.0000 - val_fp: 2675.0000 - val_tn: 56134.0000 - val_fn: 4293.0000 - lr: 3.9476e-04\n", "Epoch 34/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.4270 - accuracy: 0.8408 - precision: 0.8731 - recall: 0.8086 - auc: 0.9687 - tp: 63401.0000 - fp: 9211.0000 - tn: 226025.0000 - fn: 15011.0000 - val_loss: 0.5025 - val_accuracy: 0.8155 - val_precision: 0.8514 - val_recall: 0.7819 - val_auc: 0.9566 - val_tp: 15327.0000 - val_fp: 2675.0000 - val_tn: 56134.0000 - val_fn: 4276.0000 - lr: 3.9476e-04\n", "Epoch 35/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.4239 - accuracy: 0.8420 - precision: 0.8734 - recall: 0.8100 - auc: 0.9691 - tp: 63510.0000 - fp: 9206.0000 - tn: 226030.0000 - fn: 14902.0000 - val_loss: 0.5016 - val_accuracy: 0.8159 - val_precision: 0.8509 - val_recall: 0.7836 - val_auc: 0.9568 - val_tp: 15361.0000 - val_fp: 2692.0000 - val_tn: 56117.0000 - val_fn: 4242.0000 - lr: 3.9476e-04\n", "Epoch 36/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.4211 - accuracy: 0.8428 - precision: 0.8740 - recall: 0.8120 - auc: 0.9695 - tp: 63671.0000 - fp: 9181.0000 - tn: 226055.0000 - fn: 14741.0000 - val_loss: 0.5009 - val_accuracy: 0.8161 - val_precision: 0.8517 - val_recall: 0.7838 - val_auc: 0.9569 - val_tp: 15365.0000 - val_fp: 2675.0000 - val_tn: 56134.0000 - val_fn: 4238.0000 - lr: 3.9476e-04\n", "Epoch 37/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.4185 - accuracy: 0.8437 - precision: 0.8752 - recall: 0.8117 - auc: 0.9699 - tp: 63645.0000 - fp: 9072.0000 - tn: 226164.0000 - fn: 14767.0000 - val_loss: 0.4998 - val_accuracy: 0.8166 - val_precision: 0.8519 - val_recall: 0.7856 - val_auc: 0.9571 - val_tp: 15401.0000 - val_fp: 2678.0000 - val_tn: 56131.0000 - val_fn: 4202.0000 - lr: 3.9476e-04\n", "Epoch 38/100\n", "30/30 [==============================] - 1s 32ms/step - loss: 0.4159 - accuracy: 0.8446 - precision: 0.8756 - recall: 0.8136 - auc: 0.9702 - tp: 63797.0000 - fp: 9067.0000 - tn: 226169.0000 - fn: 14615.0000 - val_loss: 0.4994 - val_accuracy: 0.8172 - val_precision: 0.8518 - val_recall: 0.7863 - val_auc: 0.9572 - val_tp: 15413.0000 - val_fp: 2681.0000 - val_tn: 56128.0000 - val_fn: 4190.0000 - lr: 3.9476e-04\n", "Epoch 39/100\n", "30/30 [==============================] - 1s 24ms/step - loss: 0.4134 - accuracy: 0.8453 - precision: 0.8761 - recall: 0.8148 - auc: 0.9705 - tp: 63890.0000 - fp: 9037.0000 - tn: 226199.0000 - fn: 14522.0000 - val_loss: 0.4989 - val_accuracy: 0.8177 - val_precision: 0.8521 - val_recall: 0.7866 - val_auc: 0.9573 - val_tp: 15420.0000 - val_fp: 2676.0000 - val_tn: 56133.0000 - val_fn: 4183.0000 - lr: 3.9476e-04\n", "Epoch 40/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.4110 - accuracy: 0.8461 - precision: 0.8769 - recall: 0.8159 - auc: 0.9709 - tp: 63974.0000 - fp: 8981.0000 - tn: 226255.0000 - fn: 14438.0000 - val_loss: 0.4985 - val_accuracy: 0.8179 - val_precision: 0.8529 - val_recall: 0.7871 - val_auc: 0.9574 - val_tp: 15429.0000 - val_fp: 2660.0000 - val_tn: 56149.0000 - val_fn: 4174.0000 - lr: 3.9476e-04\n", "Epoch 41/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.4087 - accuracy: 0.8466 - precision: 0.8767 - recall: 0.8173 - auc: 0.9712 - tp: 64087.0000 - fp: 
9016.0000 - tn: 226220.0000 - fn: 14325.0000 - val_loss: 0.4980 - val_accuracy: 0.8183 - val_precision: 0.8521 - val_recall: 0.7879 - val_auc: 0.9575 - val_tp: 15446.0000 - val_fp: 2682.0000 - val_tn: 56127.0000 - val_fn: 4157.0000 - lr: 3.9476e-04\n", "Epoch 42/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.4064 - accuracy: 0.8478 - precision: 0.8777 - recall: 0.8182 - auc: 0.9715 - tp: 64160.0000 - fp: 8944.0000 - tn: 226292.0000 - fn: 14252.0000 - val_loss: 0.4977 - val_accuracy: 0.8188 - val_precision: 0.8526 - val_recall: 0.7882 - val_auc: 0.9576 - val_tp: 15452.0000 - val_fp: 2672.0000 - val_tn: 56137.0000 - val_fn: 4151.0000 - lr: 3.9476e-04\n", "Epoch 43/100\n", "30/30 [==============================] - 1s 22ms/step - loss: 0.4043 - accuracy: 0.8485 - precision: 0.8781 - recall: 0.8194 - auc: 0.9718 - tp: 64249.0000 - fp: 8923.0000 - tn: 226313.0000 - fn: 14163.0000 - val_loss: 0.4974 - val_accuracy: 0.8192 - val_precision: 0.8529 - val_recall: 0.7886 - val_auc: 0.9577 - val_tp: 15458.0000 - val_fp: 2667.0000 - val_tn: 56142.0000 - val_fn: 4145.0000 - lr: 3.9476e-04\n", "Epoch 44/100\n", "30/30 [==============================] - 1s 23ms/step - loss: 0.4021 - accuracy: 0.8494 - precision: 0.8784 - recall: 0.8202 - auc: 0.9721 - tp: 64317.0000 - fp: 8902.0000 - tn: 226334.0000 - fn: 14095.0000 - val_loss: 0.4972 - val_accuracy: 0.8194 - val_precision: 0.8526 - val_recall: 0.7893 - val_auc: 0.9577 - val_tp: 15472.0000 - val_fp: 2675.0000 - val_tn: 56134.0000 - val_fn: 4131.0000 - lr: 3.9476e-04\n", "Epoch 45/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.4001 - accuracy: 0.8500 - precision: 0.8794 - recall: 0.8213 - auc: 0.9723 - tp: 64403.0000 - fp: 8836.0000 - tn: 226400.0000 - fn: 14009.0000 - val_loss: 0.4971 - val_accuracy: 0.8192 - val_precision: 0.8535 - val_recall: 0.7896 - val_auc: 0.9578 - val_tp: 15479.0000 - val_fp: 2657.0000 - val_tn: 56152.0000 - val_fn: 4124.0000 - lr: 3.9476e-04\n", "Epoch 46/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3981 - accuracy: 0.8503 - precision: 0.8795 - recall: 0.8218 - auc: 0.9726 - tp: 64436.0000 - fp: 8827.0000 - tn: 226409.0000 - fn: 13976.0000 - val_loss: 0.4970 - val_accuracy: 0.8189 - val_precision: 0.8529 - val_recall: 0.7899 - val_auc: 0.9579 - val_tp: 15485.0000 - val_fp: 2671.0000 - val_tn: 56138.0000 - val_fn: 4118.0000 - lr: 3.9476e-04\n", "Epoch 47/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3962 - accuracy: 0.8509 - precision: 0.8802 - recall: 0.8231 - auc: 0.9728 - tp: 64542.0000 - fp: 8784.0000 - tn: 226452.0000 - fn: 13870.0000 - val_loss: 0.4968 - val_accuracy: 0.8196 - val_precision: 0.8527 - val_recall: 0.7907 - val_auc: 0.9579 - val_tp: 15501.0000 - val_fp: 2677.0000 - val_tn: 56132.0000 - val_fn: 4102.0000 - lr: 3.9476e-04\n", "Epoch 48/100\n", "30/30 [==============================] - 1s 18ms/step - loss: 0.3942 - accuracy: 0.8515 - precision: 0.8808 - recall: 0.8246 - auc: 0.9731 - tp: 64659.0000 - fp: 8752.0000 - tn: 226484.0000 - fn: 13753.0000 - val_loss: 0.4973 - val_accuracy: 0.8193 - val_precision: 0.8526 - val_recall: 0.7915 - val_auc: 0.9579 - val_tp: 15516.0000 - val_fp: 2682.0000 - val_tn: 56127.0000 - val_fn: 4087.0000 - lr: 3.9476e-04\n", "Epoch 49/100\n", "30/30 [==============================] - 1s 22ms/step - loss: 0.3924 - accuracy: 0.8530 - precision: 0.8810 - recall: 0.8254 - auc: 0.9733 - tp: 64725.0000 - fp: 8742.0000 - tn: 226494.0000 - fn: 13687.0000 - val_loss: 0.4970 - val_accuracy: 
0.8197 - val_precision: 0.8521 - val_recall: 0.7918 - val_auc: 0.9579 - val_tp: 15521.0000 - val_fp: 2695.0000 - val_tn: 56114.0000 - val_fn: 4082.0000 - lr: 3.9476e-04\n", "Epoch 50/100\n", "30/30 [==============================] - 1s 22ms/step - loss: 0.3905 - accuracy: 0.8540 - precision: 0.8817 - recall: 0.8264 - auc: 0.9736 - tp: 64797.0000 - fp: 8694.0000 - tn: 226542.0000 - fn: 13615.0000 - val_loss: 0.4971 - val_accuracy: 0.8214 - val_precision: 0.8515 - val_recall: 0.7917 - val_auc: 0.9580 - val_tp: 15520.0000 - val_fp: 2706.0000 - val_tn: 56103.0000 - val_fn: 4083.0000 - lr: 3.9476e-04\n", "Epoch 51/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3880 - accuracy: 0.8548 - precision: 0.8829 - recall: 0.8269 - auc: 0.9739 - tp: 64841.0000 - fp: 8601.0000 - tn: 226635.0000 - fn: 13571.0000 - val_loss: 0.4971 - val_accuracy: 0.8209 - val_precision: 0.8521 - val_recall: 0.7918 - val_auc: 0.9580 - val_tp: 15521.0000 - val_fp: 2695.0000 - val_tn: 56114.0000 - val_fn: 4082.0000 - lr: 1.0000e-04\n", "Epoch 52/100\n", "30/30 [==============================] - 1s 18ms/step - loss: 0.3875 - accuracy: 0.8550 - precision: 0.8830 - recall: 0.8275 - auc: 0.9740 - tp: 64884.0000 - fp: 8596.0000 - tn: 226640.0000 - fn: 13528.0000 - val_loss: 0.4972 - val_accuracy: 0.8209 - val_precision: 0.8520 - val_recall: 0.7924 - val_auc: 0.9579 - val_tp: 15533.0000 - val_fp: 2698.0000 - val_tn: 56111.0000 - val_fn: 4070.0000 - lr: 1.0000e-04\n", "Epoch 53/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3870 - accuracy: 0.8550 - precision: 0.8830 - recall: 0.8278 - auc: 0.9740 - tp: 64906.0000 - fp: 8603.0000 - tn: 226633.0000 - fn: 13506.0000 - val_loss: 0.4972 - val_accuracy: 0.8206 - val_precision: 0.8516 - val_recall: 0.7923 - val_auc: 0.9580 - val_tp: 15532.0000 - val_fp: 2706.0000 - val_tn: 56103.0000 - val_fn: 4071.0000 - lr: 1.0000e-04\n", "Epoch 54/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3866 - accuracy: 0.8552 - precision: 0.8830 - recall: 0.8282 - auc: 0.9741 - tp: 64944.0000 - fp: 8602.0000 - tn: 226634.0000 - fn: 13468.0000 - val_loss: 0.4972 - val_accuracy: 0.8211 - val_precision: 0.8518 - val_recall: 0.7928 - val_auc: 0.9580 - val_tp: 15541.0000 - val_fp: 2703.0000 - val_tn: 56106.0000 - val_fn: 4062.0000 - lr: 1.0000e-04\n", "Epoch 55/100\n", "30/30 [==============================] - 1s 22ms/step - loss: 0.3862 - accuracy: 0.8555 - precision: 0.8831 - recall: 0.8282 - auc: 0.9741 - tp: 64939.0000 - fp: 8595.0000 - tn: 226641.0000 - fn: 13473.0000 - val_loss: 0.4973 - val_accuracy: 0.8215 - val_precision: 0.8517 - val_recall: 0.7928 - val_auc: 0.9580 - val_tp: 15541.0000 - val_fp: 2706.0000 - val_tn: 56103.0000 - val_fn: 4062.0000 - lr: 1.0000e-04\n", "Epoch 56/100\n", "30/30 [==============================] - 1s 26ms/step - loss: 0.3857 - accuracy: 0.8556 - precision: 0.8833 - recall: 0.8286 - auc: 0.9742 - tp: 64970.0000 - fp: 8586.0000 - tn: 226650.0000 - fn: 13442.0000 - val_loss: 0.4973 - val_accuracy: 0.8209 - val_precision: 0.8517 - val_recall: 0.7929 - val_auc: 0.9580 - val_tp: 15544.0000 - val_fp: 2706.0000 - val_tn: 56103.0000 - val_fn: 4059.0000 - lr: 1.0000e-04\n", "Epoch 57/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3853 - accuracy: 0.8556 - precision: 0.8833 - recall: 0.8290 - auc: 0.9743 - tp: 65000.0000 - fp: 8584.0000 - tn: 226652.0000 - fn: 13412.0000 - val_loss: 0.4974 - val_accuracy: 0.8220 - val_precision: 0.8516 - val_recall: 0.7932 - val_auc: 0.9580 - val_tp: 
15549.0000 - val_fp: 2710.0000 - val_tn: 56099.0000 - val_fn: 4054.0000 - lr: 1.0000e-04\n", "Epoch 58/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3848 - accuracy: 0.8561 - precision: 0.8836 - recall: 0.8290 - auc: 0.9743 - tp: 65003.0000 - fp: 8562.0000 - tn: 226674.0000 - fn: 13409.0000 - val_loss: 0.4974 - val_accuracy: 0.8220 - val_precision: 0.8515 - val_recall: 0.7932 - val_auc: 0.9580 - val_tp: 15549.0000 - val_fp: 2711.0000 - val_tn: 56098.0000 - val_fn: 4054.0000 - lr: 1.0000e-04\n", "Epoch 59/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3844 - accuracy: 0.8562 - precision: 0.8837 - recall: 0.8293 - auc: 0.9744 - tp: 65028.0000 - fp: 8555.0000 - tn: 226681.0000 - fn: 13384.0000 - val_loss: 0.4975 - val_accuracy: 0.8219 - val_precision: 0.8516 - val_recall: 0.7934 - val_auc: 0.9580 - val_tp: 15554.0000 - val_fp: 2710.0000 - val_tn: 56099.0000 - val_fn: 4049.0000 - lr: 1.0000e-04\n", "Epoch 60/100\n", "30/30 [==============================] - 1s 18ms/step - loss: 0.3839 - accuracy: 0.8561 - precision: 0.8836 - recall: 0.8292 - auc: 0.9744 - tp: 65017.0000 - fp: 8561.0000 - tn: 226675.0000 - fn: 13395.0000 - val_loss: 0.4976 - val_accuracy: 0.8218 - val_precision: 0.8516 - val_recall: 0.7937 - val_auc: 0.9581 - val_tp: 15559.0000 - val_fp: 2711.0000 - val_tn: 56098.0000 - val_fn: 4044.0000 - lr: 1.0000e-04\n", "Epoch 61/100\n", "30/30 [==============================] - 1s 18ms/step - loss: 0.3835 - accuracy: 0.8561 - precision: 0.8838 - recall: 0.8297 - auc: 0.9745 - tp: 65055.0000 - fp: 8555.0000 - tn: 226681.0000 - fn: 13357.0000 - val_loss: 0.4976 - val_accuracy: 0.8216 - val_precision: 0.8517 - val_recall: 0.7937 - val_auc: 0.9580 - val_tp: 15558.0000 - val_fp: 2710.0000 - val_tn: 56099.0000 - val_fn: 4045.0000 - lr: 1.0000e-04\n", "Epoch 62/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3831 - accuracy: 0.8564 - precision: 0.8838 - recall: 0.8299 - auc: 0.9746 - tp: 65076.0000 - fp: 8553.0000 - tn: 226683.0000 - fn: 13336.0000 - val_loss: 0.4977 - val_accuracy: 0.8216 - val_precision: 0.8519 - val_recall: 0.7941 - val_auc: 0.9581 - val_tp: 15567.0000 - val_fp: 2707.0000 - val_tn: 56102.0000 - val_fn: 4036.0000 - lr: 1.0000e-04\n", "Epoch 63/100\n", "30/30 [==============================] - 1s 18ms/step - loss: 0.3827 - accuracy: 0.8567 - precision: 0.8839 - recall: 0.8299 - auc: 0.9746 - tp: 65078.0000 - fp: 8546.0000 - tn: 226690.0000 - fn: 13334.0000 - val_loss: 0.4978 - val_accuracy: 0.8213 - val_precision: 0.8516 - val_recall: 0.7943 - val_auc: 0.9581 - val_tp: 15570.0000 - val_fp: 2713.0000 - val_tn: 56096.0000 - val_fn: 4033.0000 - lr: 1.0000e-04\n", "Epoch 64/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3822 - accuracy: 0.8569 - precision: 0.8839 - recall: 0.8304 - auc: 0.9746 - tp: 65116.0000 - fp: 8551.0000 - tn: 226685.0000 - fn: 13296.0000 - val_loss: 0.4979 - val_accuracy: 0.8213 - val_precision: 0.8517 - val_recall: 0.7944 - val_auc: 0.9581 - val_tp: 15572.0000 - val_fp: 2711.0000 - val_tn: 56098.0000 - val_fn: 4031.0000 - lr: 1.0000e-04\n", "Epoch 65/100\n", "30/30 [==============================] - 1s 18ms/step - loss: 0.3818 - accuracy: 0.8569 - precision: 0.8842 - recall: 0.8305 - auc: 0.9747 - tp: 65122.0000 - fp: 8532.0000 - tn: 226704.0000 - fn: 13290.0000 - val_loss: 0.4980 - val_accuracy: 0.8211 - val_precision: 0.8517 - val_recall: 0.7942 - val_auc: 0.9581 - val_tp: 15569.0000 - val_fp: 2710.0000 - val_tn: 56099.0000 - val_fn: 4034.0000 - lr: 
1.0000e-04\n", "Epoch 66/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3814 - accuracy: 0.8570 - precision: 0.8843 - recall: 0.8305 - auc: 0.9748 - tp: 65118.0000 - fp: 8523.0000 - tn: 226713.0000 - fn: 13294.0000 - val_loss: 0.4980 - val_accuracy: 0.8211 - val_precision: 0.8513 - val_recall: 0.7946 - val_auc: 0.9581 - val_tp: 15576.0000 - val_fp: 2720.0000 - val_tn: 56089.0000 - val_fn: 4027.0000 - lr: 1.0000e-04\n", "Epoch 67/100\n", "30/30 [==============================] - 1s 18ms/step - loss: 0.3810 - accuracy: 0.8573 - precision: 0.8841 - recall: 0.8310 - auc: 0.9748 - tp: 65159.0000 - fp: 8538.0000 - tn: 226698.0000 - fn: 13253.0000 - val_loss: 0.4981 - val_accuracy: 0.8213 - val_precision: 0.8515 - val_recall: 0.7947 - val_auc: 0.9581 - val_tp: 15578.0000 - val_fp: 2717.0000 - val_tn: 56092.0000 - val_fn: 4025.0000 - lr: 1.0000e-04\n", "Epoch 68/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3805 - accuracy: 0.8574 - precision: 0.8844 - recall: 0.8310 - auc: 0.9749 - tp: 65162.0000 - fp: 8516.0000 - tn: 226720.0000 - fn: 13250.0000 - val_loss: 0.4982 - val_accuracy: 0.8210 - val_precision: 0.8513 - val_recall: 0.7947 - val_auc: 0.9581 - val_tp: 15579.0000 - val_fp: 2722.0000 - val_tn: 56087.0000 - val_fn: 4024.0000 - lr: 1.0000e-04\n", "Epoch 69/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3801 - accuracy: 0.8574 - precision: 0.8844 - recall: 0.8315 - auc: 0.9749 - tp: 65197.0000 - fp: 8520.0000 - tn: 226716.0000 - fn: 13215.0000 - val_loss: 0.4983 - val_accuracy: 0.8214 - val_precision: 0.8511 - val_recall: 0.7949 - val_auc: 0.9581 - val_tp: 15582.0000 - val_fp: 2725.0000 - val_tn: 56084.0000 - val_fn: 4021.0000 - lr: 1.0000e-04\n", "Epoch 70/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3797 - accuracy: 0.8578 - precision: 0.8847 - recall: 0.8315 - auc: 0.9750 - tp: 65198.0000 - fp: 8501.0000 - tn: 226735.0000 - fn: 13214.0000 - val_loss: 0.4984 - val_accuracy: 0.8210 - val_precision: 0.8510 - val_recall: 0.7948 - val_auc: 0.9581 - val_tp: 15581.0000 - val_fp: 2728.0000 - val_tn: 56081.0000 - val_fn: 4022.0000 - lr: 1.0000e-04\n", "Epoch 71/100\n", "30/30 [==============================] - 1s 18ms/step - loss: 0.3793 - accuracy: 0.8579 - precision: 0.8847 - recall: 0.8315 - auc: 0.9750 - tp: 65200.0000 - fp: 8494.0000 - tn: 226742.0000 - fn: 13212.0000 - val_loss: 0.4985 - val_accuracy: 0.8211 - val_precision: 0.8511 - val_recall: 0.7951 - val_auc: 0.9580 - val_tp: 15587.0000 - val_fp: 2727.0000 - val_tn: 56082.0000 - val_fn: 4016.0000 - lr: 1.0000e-04\n", "Epoch 72/100\n", "30/30 [==============================] - 1s 18ms/step - loss: 0.3789 - accuracy: 0.8579 - precision: 0.8847 - recall: 0.8318 - auc: 0.9751 - tp: 65226.0000 - fp: 8500.0000 - tn: 226736.0000 - fn: 13186.0000 - val_loss: 0.4986 - val_accuracy: 0.8210 - val_precision: 0.8511 - val_recall: 0.7953 - val_auc: 0.9580 - val_tp: 15590.0000 - val_fp: 2728.0000 - val_tn: 56081.0000 - val_fn: 4013.0000 - lr: 1.0000e-04\n", "Epoch 73/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3784 - accuracy: 0.8581 - precision: 0.8848 - recall: 0.8322 - auc: 0.9751 - tp: 65254.0000 - fp: 8499.0000 - tn: 226737.0000 - fn: 13158.0000 - val_loss: 0.4987 - val_accuracy: 0.8215 - val_precision: 0.8508 - val_recall: 0.7953 - val_auc: 0.9580 - val_tp: 15591.0000 - val_fp: 2735.0000 - val_tn: 56074.0000 - val_fn: 4012.0000 - lr: 1.0000e-04\n", "Epoch 74/100\n", "30/30 [==============================] - 1s 
18ms/step - loss: 0.3780 - accuracy: 0.8585 - precision: 0.8851 - recall: 0.8323 - auc: 0.9752 - tp: 65261.0000 - fp: 8472.0000 - tn: 226764.0000 - fn: 13151.0000 - val_loss: 0.4988 - val_accuracy: 0.8211 - val_precision: 0.8507 - val_recall: 0.7953 - val_auc: 0.9580 - val_tp: 15590.0000 - val_fp: 2737.0000 - val_tn: 56072.0000 - val_fn: 4013.0000 - lr: 1.0000e-04\n", "Epoch 75/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3776 - accuracy: 0.8585 - precision: 0.8851 - recall: 0.8326 - auc: 0.9752 - tp: 65287.0000 - fp: 8478.0000 - tn: 226758.0000 - fn: 13125.0000 - val_loss: 0.4989 - val_accuracy: 0.8216 - val_precision: 0.8506 - val_recall: 0.7952 - val_auc: 0.9580 - val_tp: 15589.0000 - val_fp: 2739.0000 - val_tn: 56070.0000 - val_fn: 4014.0000 - lr: 1.0000e-04\n", "Epoch 76/100\n", "30/30 [==============================] - 1s 18ms/step - loss: 0.3772 - accuracy: 0.8586 - precision: 0.8852 - recall: 0.8326 - auc: 0.9753 - tp: 65289.0000 - fp: 8469.0000 - tn: 226767.0000 - fn: 13123.0000 - val_loss: 0.4990 - val_accuracy: 0.8216 - val_precision: 0.8509 - val_recall: 0.7954 - val_auc: 0.9580 - val_tp: 15592.0000 - val_fp: 2732.0000 - val_tn: 56077.0000 - val_fn: 4011.0000 - lr: 1.0000e-04\n", "Epoch 77/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3768 - accuracy: 0.8589 - precision: 0.8853 - recall: 0.8330 - auc: 0.9753 - tp: 65318.0000 - fp: 8464.0000 - tn: 226772.0000 - fn: 13094.0000 - val_loss: 0.4991 - val_accuracy: 0.8216 - val_precision: 0.8506 - val_recall: 0.7953 - val_auc: 0.9580 - val_tp: 15591.0000 - val_fp: 2738.0000 - val_tn: 56071.0000 - val_fn: 4012.0000 - lr: 1.0000e-04\n", "Epoch 78/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3764 - accuracy: 0.8590 - precision: 0.8852 - recall: 0.8332 - auc: 0.9754 - tp: 65336.0000 - fp: 8472.0000 - tn: 226764.0000 - fn: 13076.0000 - val_loss: 0.4993 - val_accuracy: 0.8216 - val_precision: 0.8506 - val_recall: 0.7957 - val_auc: 0.9580 - val_tp: 15599.0000 - val_fp: 2740.0000 - val_tn: 56069.0000 - val_fn: 4004.0000 - lr: 1.0000e-04\n", "Epoch 79/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3760 - accuracy: 0.8591 - precision: 0.8854 - recall: 0.8335 - auc: 0.9754 - tp: 65355.0000 - fp: 8459.0000 - tn: 226777.0000 - fn: 13057.0000 - val_loss: 0.4994 - val_accuracy: 0.8216 - val_precision: 0.8510 - val_recall: 0.7959 - val_auc: 0.9580 - val_tp: 15602.0000 - val_fp: 2732.0000 - val_tn: 56077.0000 - val_fn: 4001.0000 - lr: 1.0000e-04\n", "Epoch 80/100\n", "30/30 [==============================] - 1s 23ms/step - loss: 0.3756 - accuracy: 0.8592 - precision: 0.8855 - recall: 0.8336 - auc: 0.9755 - tp: 65367.0000 - fp: 8453.0000 - tn: 226783.0000 - fn: 13045.0000 - val_loss: 0.4995 - val_accuracy: 0.8217 - val_precision: 0.8508 - val_recall: 0.7962 - val_auc: 0.9580 - val_tp: 15608.0000 - val_fp: 2737.0000 - val_tn: 56072.0000 - val_fn: 3995.0000 - lr: 1.0000e-04\n", "Epoch 81/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3752 - accuracy: 0.8594 - precision: 0.8855 - recall: 0.8339 - auc: 0.9755 - tp: 65386.0000 - fp: 8458.0000 - tn: 226778.0000 - fn: 13026.0000 - val_loss: 0.4996 - val_accuracy: 0.8219 - val_precision: 0.8509 - val_recall: 0.7963 - val_auc: 0.9580 - val_tp: 15609.0000 - val_fp: 2736.0000 - val_tn: 56073.0000 - val_fn: 3994.0000 - lr: 1.0000e-04\n", "Epoch 82/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3748 - accuracy: 0.8593 - precision: 0.8855 - recall: 0.8342 - 
auc: 0.9756 - tp: 65411.0000 - fp: 8454.0000 - tn: 226782.0000 - fn: 13001.0000 - val_loss: 0.4997 - val_accuracy: 0.8215 - val_precision: 0.8504 - val_recall: 0.7963 - val_auc: 0.9580 - val_tp: 15609.0000 - val_fp: 2745.0000 - val_tn: 56064.0000 - val_fn: 3994.0000 - lr: 1.0000e-04\n", "Epoch 83/100\n", "30/30 [==============================] - 1s 18ms/step - loss: 0.3744 - accuracy: 0.8594 - precision: 0.8857 - recall: 0.8341 - auc: 0.9757 - tp: 65400.0000 - fp: 8444.0000 - tn: 226792.0000 - fn: 13012.0000 - val_loss: 0.4998 - val_accuracy: 0.8218 - val_precision: 0.8505 - val_recall: 0.7967 - val_auc: 0.9580 - val_tp: 15617.0000 - val_fp: 2745.0000 - val_tn: 56064.0000 - val_fn: 3986.0000 - lr: 1.0000e-04\n", "Epoch 84/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3740 - accuracy: 0.8598 - precision: 0.8856 - recall: 0.8343 - auc: 0.9757 - tp: 65422.0000 - fp: 8447.0000 - tn: 226789.0000 - fn: 12990.0000 - val_loss: 0.5000 - val_accuracy: 0.8217 - val_precision: 0.8504 - val_recall: 0.7965 - val_auc: 0.9580 - val_tp: 15613.0000 - val_fp: 2746.0000 - val_tn: 56063.0000 - val_fn: 3990.0000 - lr: 1.0000e-04\n", "Epoch 85/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3736 - accuracy: 0.8599 - precision: 0.8857 - recall: 0.8345 - auc: 0.9758 - tp: 65438.0000 - fp: 8442.0000 - tn: 226794.0000 - fn: 12974.0000 - val_loss: 0.5001 - val_accuracy: 0.8218 - val_precision: 0.8501 - val_recall: 0.7967 - val_auc: 0.9580 - val_tp: 15617.0000 - val_fp: 2753.0000 - val_tn: 56056.0000 - val_fn: 3986.0000 - lr: 1.0000e-04\n", "Epoch 86/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3732 - accuracy: 0.8601 - precision: 0.8859 - recall: 0.8348 - auc: 0.9758 - tp: 65462.0000 - fp: 8432.0000 - tn: 226804.0000 - fn: 12950.0000 - val_loss: 0.5002 - val_accuracy: 0.8221 - val_precision: 0.8504 - val_recall: 0.7966 - val_auc: 0.9580 - val_tp: 15615.0000 - val_fp: 2747.0000 - val_tn: 56062.0000 - val_fn: 3988.0000 - lr: 1.0000e-04\n", "Epoch 87/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3728 - accuracy: 0.8601 - precision: 0.8857 - recall: 0.8350 - auc: 0.9758 - tp: 65473.0000 - fp: 8447.0000 - tn: 226789.0000 - fn: 12939.0000 - val_loss: 0.5004 - val_accuracy: 0.8224 - val_precision: 0.8505 - val_recall: 0.7967 - val_auc: 0.9580 - val_tp: 15617.0000 - val_fp: 2745.0000 - val_tn: 56064.0000 - val_fn: 3986.0000 - lr: 1.0000e-04\n", "Epoch 88/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3724 - accuracy: 0.8601 - precision: 0.8861 - recall: 0.8351 - auc: 0.9759 - tp: 65479.0000 - fp: 8419.0000 - tn: 226817.0000 - fn: 12933.0000 - val_loss: 0.5005 - val_accuracy: 0.8220 - val_precision: 0.8506 - val_recall: 0.7968 - val_auc: 0.9580 - val_tp: 15619.0000 - val_fp: 2743.0000 - val_tn: 56066.0000 - val_fn: 3984.0000 - lr: 1.0000e-04\n", "Epoch 89/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3721 - accuracy: 0.8603 - precision: 0.8862 - recall: 0.8352 - auc: 0.9759 - tp: 65486.0000 - fp: 8409.0000 - tn: 226827.0000 - fn: 12926.0000 - val_loss: 0.5006 - val_accuracy: 0.8220 - val_precision: 0.8506 - val_recall: 0.7969 - val_auc: 0.9580 - val_tp: 15621.0000 - val_fp: 2743.0000 - val_tn: 56066.0000 - val_fn: 3982.0000 - lr: 1.0000e-04\n", "Epoch 90/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3717 - accuracy: 0.8604 - precision: 0.8863 - recall: 0.8355 - auc: 0.9760 - tp: 65513.0000 - fp: 8408.0000 - tn: 226828.0000 - fn: 12899.0000 - 
val_loss: 0.5008 - val_accuracy: 0.8220 - val_precision: 0.8506 - val_recall: 0.7969 - val_auc: 0.9580 - val_tp: 15621.0000 - val_fp: 2743.0000 - val_tn: 56066.0000 - val_fn: 3982.0000 - lr: 1.0000e-04\n", "Epoch 91/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3713 - accuracy: 0.8606 - precision: 0.8862 - recall: 0.8356 - auc: 0.9760 - tp: 65523.0000 - fp: 8410.0000 - tn: 226826.0000 - fn: 12889.0000 - val_loss: 0.5009 - val_accuracy: 0.8218 - val_precision: 0.8504 - val_recall: 0.7970 - val_auc: 0.9580 - val_tp: 15624.0000 - val_fp: 2748.0000 - val_tn: 56061.0000 - val_fn: 3979.0000 - lr: 1.0000e-04\n", "Epoch 92/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3709 - accuracy: 0.8606 - precision: 0.8862 - recall: 0.8358 - auc: 0.9761 - tp: 65533.0000 - fp: 8412.0000 - tn: 226824.0000 - fn: 12879.0000 - val_loss: 0.5011 - val_accuracy: 0.8222 - val_precision: 0.8500 - val_recall: 0.7971 - val_auc: 0.9579 - val_tp: 15625.0000 - val_fp: 2757.0000 - val_tn: 56052.0000 - val_fn: 3978.0000 - lr: 1.0000e-04\n", "Epoch 93/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3705 - accuracy: 0.8610 - precision: 0.8867 - recall: 0.8360 - auc: 0.9761 - tp: 65550.0000 - fp: 8373.0000 - tn: 226863.0000 - fn: 12862.0000 - val_loss: 0.5012 - val_accuracy: 0.8220 - val_precision: 0.8499 - val_recall: 0.7972 - val_auc: 0.9579 - val_tp: 15627.0000 - val_fp: 2760.0000 - val_tn: 56049.0000 - val_fn: 3976.0000 - lr: 1.0000e-04\n", "Epoch 94/100\n", "30/30 [==============================] - 1s 24ms/step - loss: 0.3701 - accuracy: 0.8610 - precision: 0.8865 - recall: 0.8360 - auc: 0.9762 - tp: 65554.0000 - fp: 8397.0000 - tn: 226839.0000 - fn: 12858.0000 - val_loss: 0.5013 - val_accuracy: 0.8223 - val_precision: 0.8501 - val_recall: 0.7971 - val_auc: 0.9579 - val_tp: 15625.0000 - val_fp: 2755.0000 - val_tn: 56054.0000 - val_fn: 3978.0000 - lr: 1.0000e-04\n", "Epoch 95/100\n", "30/30 [==============================] - 1s 22ms/step - loss: 0.3697 - accuracy: 0.8613 - precision: 0.8870 - recall: 0.8362 - auc: 0.9762 - tp: 65569.0000 - fp: 8353.0000 - tn: 226883.0000 - fn: 12843.0000 - val_loss: 0.5015 - val_accuracy: 0.8226 - val_precision: 0.8501 - val_recall: 0.7972 - val_auc: 0.9579 - val_tp: 15628.0000 - val_fp: 2756.0000 - val_tn: 56053.0000 - val_fn: 3975.0000 - lr: 1.0000e-04\n", "Epoch 96/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3693 - accuracy: 0.8611 - precision: 0.8864 - recall: 0.8364 - auc: 0.9763 - tp: 65585.0000 - fp: 8404.0000 - tn: 226832.0000 - fn: 12827.0000 - val_loss: 0.5016 - val_accuracy: 0.8223 - val_precision: 0.8497 - val_recall: 0.7971 - val_auc: 0.9579 - val_tp: 15625.0000 - val_fp: 2763.0000 - val_tn: 56046.0000 - val_fn: 3978.0000 - lr: 1.0000e-04\n", "Epoch 97/100\n", "30/30 [==============================] - 1s 22ms/step - loss: 0.3690 - accuracy: 0.8613 - precision: 0.8868 - recall: 0.8367 - auc: 0.9763 - tp: 65604.0000 - fp: 8376.0000 - tn: 226860.0000 - fn: 12808.0000 - val_loss: 0.5018 - val_accuracy: 0.8225 - val_precision: 0.8497 - val_recall: 0.7972 - val_auc: 0.9579 - val_tp: 15628.0000 - val_fp: 2764.0000 - val_tn: 56045.0000 - val_fn: 3975.0000 - lr: 1.0000e-04\n", "Epoch 98/100\n", "30/30 [==============================] - 1s 24ms/step - loss: 0.3686 - accuracy: 0.8616 - precision: 0.8870 - recall: 0.8367 - auc: 0.9764 - tp: 65607.0000 - fp: 8360.0000 - tn: 226876.0000 - fn: 12805.0000 - val_loss: 0.5019 - val_accuracy: 0.8229 - val_precision: 0.8496 - val_recall: 
0.7968 - val_auc: 0.9579 - val_tp: 15619.0000 - val_fp: 2766.0000 - val_tn: 56043.0000 - val_fn: 3984.0000 - lr: 1.0000e-04\n", "Epoch 99/100\n", "30/30 [==============================] - 1s 24ms/step - loss: 0.3682 - accuracy: 0.8619 - precision: 0.8873 - recall: 0.8368 - auc: 0.9764 - tp: 65615.0000 - fp: 8333.0000 - tn: 226903.0000 - fn: 12797.0000 - val_loss: 0.5021 - val_accuracy: 0.8229 - val_precision: 0.8499 - val_recall: 0.7972 - val_auc: 0.9579 - val_tp: 15627.0000 - val_fp: 2760.0000 - val_tn: 56049.0000 - val_fn: 3976.0000 - lr: 1.0000e-04\n", "Epoch 100/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3678 - accuracy: 0.8619 - precision: 0.8871 - recall: 0.8368 - auc: 0.9764 - tp: 65617.0000 - fp: 8348.0000 - tn: 226888.0000 - fn: 12795.0000 - val_loss: 0.5023 - val_accuracy: 0.8226 - val_precision: 0.8495 - val_recall: 0.7971 - val_auc: 0.9578 - val_tp: 15625.0000 - val_fp: 2769.0000 - val_tn: 56040.0000 - val_fn: 3978.0000 - lr: 1.0000e-04\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2024-06-08 13:57:17,306] Trial 1 finished with value: 0.817417562007904 and parameters: {'num_filters': 42, 'kernel_size': 3, 'learning_rate': 0.0003947618323958337}. Best is trial 0 with value: 0.819294810295105.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Loss: 0.5054721236228943\n", "Accuracy: 0.817417562007904\n", "Precision: 0.8435232043266296\n", "Recall: 0.7921971678733826\n", "AUC: 0.9574187994003296\n", "True Positives: 19412.0\n", "False Positives: 3601.0\n", "True Negatives: 69911.0\n", "False Negatives: 5092.0\n", "Epoch 1/100\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "C:\\Users\\Michał\\AppData\\Local\\Temp\\ipykernel_33252\\265862631.py:5: FutureWarning: suggest_loguniform has been deprecated in v3.0.0. This feature will be removed in v6.0.0. See https://github.com/optuna/optuna/releases/tag/v3.0.0. 
Use suggest_float(..., log=True) instead.\n", " learning_rate = trial.suggest_loguniform('learning_rate', 1e-4, 1e-2)\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "30/30 [==============================] - 4s 68ms/step - loss: 1.3464 - accuracy: 0.3959 - precision: 0.0000e+00 - recall: 0.0000e+00 - auc: 0.6587 - tp: 0.0000e+00 - fp: 0.0000e+00 - tn: 235236.0000 - fn: 78412.0000 - val_loss: 1.3003 - val_accuracy: 0.4699 - val_precision: 0.0000e+00 - val_recall: 0.0000e+00 - val_auc: 0.7196 - val_tp: 0.0000e+00 - val_fp: 0.0000e+00 - val_tn: 58809.0000 - val_fn: 19603.0000 - lr: 7.1779e-04\n", "Epoch 2/100\n", "30/30 [==============================] - 1s 22ms/step - loss: 1.2216 - accuracy: 0.5570 - precision: 0.7692 - recall: 0.0037 - auc: 0.7741 - tp: 290.0000 - fp: 87.0000 - tn: 235149.0000 - fn: 78122.0000 - val_loss: 1.1120 - val_accuracy: 0.6276 - val_precision: 0.8292 - val_recall: 0.0574 - val_auc: 0.8277 - val_tp: 1126.0000 - val_fp: 232.0000 - val_tn: 58577.0000 - val_fn: 18477.0000 - lr: 7.1779e-04\n", "Epoch 3/100\n", "30/30 [==============================] - 1s 23ms/step - loss: 0.9687 - accuracy: 0.6730 - precision: 0.8559 - recall: 0.3230 - auc: 0.8647 - tp: 25325.0000 - fp: 4265.0000 - tn: 230971.0000 - fn: 53087.0000 - val_loss: 0.8371 - val_accuracy: 0.6942 - val_precision: 0.8357 - val_recall: 0.5254 - val_auc: 0.8944 - val_tp: 10300.0000 - val_fp: 2025.0000 - val_tn: 56784.0000 - val_fn: 9303.0000 - lr: 7.1779e-04\n", "Epoch 4/100\n", "30/30 [==============================] - 1s 23ms/step - loss: 0.7322 - accuracy: 0.7347 - precision: 0.8367 - recall: 0.6103 - auc: 0.9187 - tp: 47852.0000 - fp: 9341.0000 - tn: 225895.0000 - fn: 30560.0000 - val_loss: 0.6716 - val_accuracy: 0.7512 - val_precision: 0.8323 - val_recall: 0.6526 - val_auc: 0.9285 - val_tp: 12792.0000 - val_fp: 2578.0000 - val_tn: 56231.0000 - val_fn: 6811.0000 - lr: 7.1779e-04\n", "Epoch 5/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.6062 - accuracy: 0.7818 - precision: 0.8461 - recall: 0.6929 - auc: 0.9416 - tp: 54333.0000 - fp: 9881.0000 - tn: 225355.0000 - fn: 24079.0000 - val_loss: 0.5958 - val_accuracy: 0.7801 - val_precision: 0.8398 - val_recall: 0.7083 - val_auc: 0.9411 - val_tp: 13885.0000 - val_fp: 2648.0000 - val_tn: 56161.0000 - val_fn: 5718.0000 - lr: 7.1779e-04\n", "Epoch 6/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.5412 - accuracy: 0.8043 - precision: 0.8539 - recall: 0.7414 - auc: 0.9517 - tp: 58133.0000 - fp: 9948.0000 - tn: 225288.0000 - fn: 20279.0000 - val_loss: 0.5551 - val_accuracy: 0.7957 - val_precision: 0.8453 - val_recall: 0.7419 - val_auc: 0.9480 - val_tp: 14544.0000 - val_fp: 2661.0000 - val_tn: 56148.0000 - val_fn: 5059.0000 - lr: 7.1779e-04\n", "Epoch 7/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.5012 - accuracy: 0.8177 - precision: 0.8598 - recall: 0.7662 - auc: 0.9579 - tp: 60077.0000 - fp: 9797.0000 - tn: 225439.0000 - fn: 18335.0000 - val_loss: 0.5307 - val_accuracy: 0.8033 - val_precision: 0.8482 - val_recall: 0.7570 - val_auc: 0.9519 - val_tp: 14839.0000 - val_fp: 2656.0000 - val_tn: 56153.0000 - val_fn: 4764.0000 - lr: 7.1779e-04\n", "Epoch 8/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.4749 - accuracy: 0.8259 - precision: 0.8651 - recall: 0.7816 - auc: 0.9618 - tp: 61290.0000 - fp: 9555.0000 - tn: 225681.0000 - fn: 17122.0000 - val_loss: 0.5162 - val_accuracy: 0.8079 - val_precision: 0.8505 - val_recall: 0.7659 - val_auc: 0.9543 - val_tp: 
15014.0000 - val_fp: 2640.0000 - val_tn: 56169.0000 - val_fn: 4589.0000 - lr: 7.1779e-04\n", "Epoch 9/100\n", "30/30 [==============================] - 1s 23ms/step - loss: 0.4563 - accuracy: 0.8321 - precision: 0.8677 - recall: 0.7919 - auc: 0.9645 - tp: 62096.0000 - fp: 9467.0000 - tn: 225769.0000 - fn: 16316.0000 - val_loss: 0.5066 - val_accuracy: 0.8127 - val_precision: 0.8498 - val_recall: 0.7765 - val_auc: 0.9558 - val_tp: 15221.0000 - val_fp: 2691.0000 - val_tn: 56118.0000 - val_fn: 4382.0000 - lr: 7.1779e-04\n", "Epoch 10/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.4425 - accuracy: 0.8368 - precision: 0.8701 - recall: 0.8007 - auc: 0.9664 - tp: 62785.0000 - fp: 9376.0000 - tn: 225860.0000 - fn: 15627.0000 - val_loss: 0.5002 - val_accuracy: 0.8149 - val_precision: 0.8534 - val_recall: 0.7768 - val_auc: 0.9568 - val_tp: 15227.0000 - val_fp: 2616.0000 - val_tn: 56193.0000 - val_fn: 4376.0000 - lr: 7.1779e-04\n", "Epoch 11/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.4315 - accuracy: 0.8404 - precision: 0.8726 - recall: 0.8056 - auc: 0.9680 - tp: 63168.0000 - fp: 9219.0000 - tn: 226017.0000 - fn: 15244.0000 - val_loss: 0.4951 - val_accuracy: 0.8158 - val_precision: 0.8494 - val_recall: 0.7872 - val_auc: 0.9577 - val_tp: 15432.0000 - val_fp: 2736.0000 - val_tn: 56073.0000 - val_fn: 4171.0000 - lr: 7.1779e-04\n", "Epoch 12/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.4220 - accuracy: 0.8437 - precision: 0.8729 - recall: 0.8127 - auc: 0.9692 - tp: 63727.0000 - fp: 9276.0000 - tn: 225960.0000 - fn: 14685.0000 - val_loss: 0.4924 - val_accuracy: 0.8185 - val_precision: 0.8516 - val_recall: 0.7873 - val_auc: 0.9582 - val_tp: 15434.0000 - val_fp: 2690.0000 - val_tn: 56119.0000 - val_fn: 4169.0000 - lr: 7.1779e-04\n", "Epoch 13/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.4142 - accuracy: 0.8456 - precision: 0.8746 - recall: 0.8159 - auc: 0.9703 - tp: 63973.0000 - fp: 9170.0000 - tn: 226066.0000 - fn: 14439.0000 - val_loss: 0.4905 - val_accuracy: 0.8201 - val_precision: 0.8543 - val_recall: 0.7877 - val_auc: 0.9586 - val_tp: 15441.0000 - val_fp: 2634.0000 - val_tn: 56175.0000 - val_fn: 4162.0000 - lr: 7.1779e-04\n", "Epoch 14/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.4074 - accuracy: 0.8480 - precision: 0.8750 - recall: 0.8208 - auc: 0.9712 - tp: 64364.0000 - fp: 9195.0000 - tn: 226041.0000 - fn: 14048.0000 - val_loss: 0.4892 - val_accuracy: 0.8212 - val_precision: 0.8517 - val_recall: 0.7922 - val_auc: 0.9588 - val_tp: 15530.0000 - val_fp: 2704.0000 - val_tn: 56105.0000 - val_fn: 4073.0000 - lr: 7.1779e-04\n", "Epoch 15/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.4013 - accuracy: 0.8499 - precision: 0.8769 - recall: 0.8229 - auc: 0.9720 - tp: 64524.0000 - fp: 9054.0000 - tn: 226182.0000 - fn: 13888.0000 - val_loss: 0.4891 - val_accuracy: 0.8213 - val_precision: 0.8511 - val_recall: 0.7943 - val_auc: 0.9589 - val_tp: 15571.0000 - val_fp: 2724.0000 - val_tn: 56085.0000 - val_fn: 4032.0000 - lr: 7.1779e-04\n", "Epoch 16/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3964 - accuracy: 0.8515 - precision: 0.8789 - recall: 0.8250 - auc: 0.9727 - tp: 64689.0000 - fp: 8915.0000 - tn: 226321.0000 - fn: 13723.0000 - val_loss: 0.4887 - val_accuracy: 0.8214 - val_precision: 0.8515 - val_recall: 0.7947 - val_auc: 0.9590 - val_tp: 15578.0000 - val_fp: 2717.0000 - val_tn: 56092.0000 - val_fn: 4025.0000 - lr: 
7.1779e-04\n", "Epoch 17/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3913 - accuracy: 0.8531 - precision: 0.8783 - recall: 0.8280 - auc: 0.9733 - tp: 64928.0000 - fp: 8998.0000 - tn: 226238.0000 - fn: 13484.0000 - val_loss: 0.4893 - val_accuracy: 0.8222 - val_precision: 0.8512 - val_recall: 0.7955 - val_auc: 0.9590 - val_tp: 15595.0000 - val_fp: 2726.0000 - val_tn: 56083.0000 - val_fn: 4008.0000 - lr: 7.1779e-04\n", "Epoch 18/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3871 - accuracy: 0.8550 - precision: 0.8799 - recall: 0.8296 - auc: 0.9739 - tp: 65054.0000 - fp: 8878.0000 - tn: 226358.0000 - fn: 13358.0000 - val_loss: 0.4908 - val_accuracy: 0.8205 - val_precision: 0.8500 - val_recall: 0.7951 - val_auc: 0.9588 - val_tp: 15586.0000 - val_fp: 2750.0000 - val_tn: 56059.0000 - val_fn: 4017.0000 - lr: 7.1779e-04\n", "Epoch 19/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3829 - accuracy: 0.8562 - precision: 0.8812 - recall: 0.8296 - auc: 0.9744 - tp: 65053.0000 - fp: 8771.0000 - tn: 226465.0000 - fn: 13359.0000 - val_loss: 0.4913 - val_accuracy: 0.8221 - val_precision: 0.8488 - val_recall: 0.7966 - val_auc: 0.9589 - val_tp: 15616.0000 - val_fp: 2781.0000 - val_tn: 56028.0000 - val_fn: 3987.0000 - lr: 7.1779e-04\n", "Epoch 20/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3770 - accuracy: 0.8585 - precision: 0.8821 - recall: 0.8351 - auc: 0.9752 - tp: 65480.0000 - fp: 8748.0000 - tn: 226488.0000 - fn: 12932.0000 - val_loss: 0.4910 - val_accuracy: 0.8222 - val_precision: 0.8492 - val_recall: 0.7968 - val_auc: 0.9589 - val_tp: 15619.0000 - val_fp: 2773.0000 - val_tn: 56036.0000 - val_fn: 3984.0000 - lr: 1.4356e-04\n", "Epoch 21/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3761 - accuracy: 0.8590 - precision: 0.8828 - recall: 0.8352 - auc: 0.9753 - tp: 65486.0000 - fp: 8697.0000 - tn: 226539.0000 - fn: 12926.0000 - val_loss: 0.4912 - val_accuracy: 0.8222 - val_precision: 0.8489 - val_recall: 0.7969 - val_auc: 0.9589 - val_tp: 15622.0000 - val_fp: 2781.0000 - val_tn: 56028.0000 - val_fn: 3981.0000 - lr: 1.4356e-04\n", "Epoch 22/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3753 - accuracy: 0.8591 - precision: 0.8828 - recall: 0.8356 - auc: 0.9754 - tp: 65522.0000 - fp: 8701.0000 - tn: 226535.0000 - fn: 12890.0000 - val_loss: 0.4915 - val_accuracy: 0.8220 - val_precision: 0.8495 - val_recall: 0.7968 - val_auc: 0.9589 - val_tp: 15620.0000 - val_fp: 2767.0000 - val_tn: 56042.0000 - val_fn: 3983.0000 - lr: 1.4356e-04\n", "Epoch 23/100\n", "30/30 [==============================] - 1s 18ms/step - loss: 0.3744 - accuracy: 0.8596 - precision: 0.8831 - recall: 0.8359 - auc: 0.9755 - tp: 65546.0000 - fp: 8676.0000 - tn: 226560.0000 - fn: 12866.0000 - val_loss: 0.4916 - val_accuracy: 0.8220 - val_precision: 0.8495 - val_recall: 0.7970 - val_auc: 0.9588 - val_tp: 15624.0000 - val_fp: 2769.0000 - val_tn: 56040.0000 - val_fn: 3979.0000 - lr: 1.0000e-04\n", "Epoch 24/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3738 - accuracy: 0.8597 - precision: 0.8834 - recall: 0.8361 - auc: 0.9756 - tp: 65559.0000 - fp: 8650.0000 - tn: 226586.0000 - fn: 12853.0000 - val_loss: 0.4918 - val_accuracy: 0.8216 - val_precision: 0.8490 - val_recall: 0.7966 - val_auc: 0.9588 - val_tp: 15615.0000 - val_fp: 2777.0000 - val_tn: 56032.0000 - val_fn: 3988.0000 - lr: 1.0000e-04\n", "Epoch 25/100\n", "30/30 [==============================] - 1s 
19ms/step - loss: 0.3733 - accuracy: 0.8598 - precision: 0.8837 - recall: 0.8361 - auc: 0.9757 - tp: 65564.0000 - fp: 8630.0000 - tn: 226606.0000 - fn: 12848.0000 - val_loss: 0.4921 - val_accuracy: 0.8220 - val_precision: 0.8492 - val_recall: 0.7973 - val_auc: 0.9588 - val_tp: 15629.0000 - val_fp: 2776.0000 - val_tn: 56033.0000 - val_fn: 3974.0000 - lr: 1.0000e-04\n", "Epoch 26/100\n", "30/30 [==============================] - 1s 25ms/step - loss: 0.3728 - accuracy: 0.8599 - precision: 0.8835 - recall: 0.8369 - auc: 0.9757 - tp: 65626.0000 - fp: 8655.0000 - tn: 226581.0000 - fn: 12786.0000 - val_loss: 0.4923 - val_accuracy: 0.8217 - val_precision: 0.8487 - val_recall: 0.7975 - val_auc: 0.9588 - val_tp: 15634.0000 - val_fp: 2788.0000 - val_tn: 56021.0000 - val_fn: 3969.0000 - lr: 1.0000e-04\n", "Epoch 27/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3723 - accuracy: 0.8602 - precision: 0.8837 - recall: 0.8371 - auc: 0.9758 - tp: 65639.0000 - fp: 8636.0000 - tn: 226600.0000 - fn: 12773.0000 - val_loss: 0.4925 - val_accuracy: 0.8217 - val_precision: 0.8484 - val_recall: 0.7973 - val_auc: 0.9588 - val_tp: 15630.0000 - val_fp: 2792.0000 - val_tn: 56017.0000 - val_fn: 3973.0000 - lr: 1.0000e-04\n", "Epoch 28/100\n", "30/30 [==============================] - 1s 22ms/step - loss: 0.3718 - accuracy: 0.8604 - precision: 0.8838 - recall: 0.8374 - auc: 0.9758 - tp: 65665.0000 - fp: 8630.0000 - tn: 226606.0000 - fn: 12747.0000 - val_loss: 0.4927 - val_accuracy: 0.8216 - val_precision: 0.8482 - val_recall: 0.7974 - val_auc: 0.9588 - val_tp: 15631.0000 - val_fp: 2798.0000 - val_tn: 56011.0000 - val_fn: 3972.0000 - lr: 1.0000e-04\n", "Epoch 29/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3713 - accuracy: 0.8604 - precision: 0.8841 - recall: 0.8372 - auc: 0.9759 - tp: 65649.0000 - fp: 8604.0000 - tn: 226632.0000 - fn: 12763.0000 - val_loss: 0.4930 - val_accuracy: 0.8218 - val_precision: 0.8482 - val_recall: 0.7975 - val_auc: 0.9588 - val_tp: 15634.0000 - val_fp: 2799.0000 - val_tn: 56010.0000 - val_fn: 3969.0000 - lr: 1.0000e-04\n", "Epoch 30/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3708 - accuracy: 0.8607 - precision: 0.8842 - recall: 0.8378 - auc: 0.9760 - tp: 65693.0000 - fp: 8605.0000 - tn: 226631.0000 - fn: 12719.0000 - val_loss: 0.4932 - val_accuracy: 0.8217 - val_precision: 0.8488 - val_recall: 0.7973 - val_auc: 0.9588 - val_tp: 15630.0000 - val_fp: 2785.0000 - val_tn: 56024.0000 - val_fn: 3973.0000 - lr: 1.0000e-04\n", "Epoch 31/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3703 - accuracy: 0.8608 - precision: 0.8841 - recall: 0.8383 - auc: 0.9760 - tp: 65730.0000 - fp: 8614.0000 - tn: 226622.0000 - fn: 12682.0000 - val_loss: 0.4935 - val_accuracy: 0.8217 - val_precision: 0.8484 - val_recall: 0.7979 - val_auc: 0.9587 - val_tp: 15642.0000 - val_fp: 2794.0000 - val_tn: 56015.0000 - val_fn: 3961.0000 - lr: 1.0000e-04\n", "Epoch 32/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3699 - accuracy: 0.8610 - precision: 0.8842 - recall: 0.8385 - auc: 0.9761 - tp: 65748.0000 - fp: 8608.0000 - tn: 226628.0000 - fn: 12664.0000 - val_loss: 0.4937 - val_accuracy: 0.8220 - val_precision: 0.8486 - val_recall: 0.7982 - val_auc: 0.9587 - val_tp: 15647.0000 - val_fp: 2792.0000 - val_tn: 56017.0000 - val_fn: 3956.0000 - lr: 1.0000e-04\n", "Epoch 33/100\n", "30/30 [==============================] - 1s 18ms/step - loss: 0.3694 - accuracy: 0.8610 - precision: 0.8845 - recall: 0.8385 - 
auc: 0.9761 - tp: 65751.0000 - fp: 8586.0000 - tn: 226650.0000 - fn: 12661.0000 - val_loss: 0.4939 - val_accuracy: 0.8219 - val_precision: 0.8486 - val_recall: 0.7982 - val_auc: 0.9587 - val_tp: 15648.0000 - val_fp: 2791.0000 - val_tn: 56018.0000 - val_fn: 3955.0000 - lr: 1.0000e-04\n", "Epoch 34/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3690 - accuracy: 0.8612 - precision: 0.8846 - recall: 0.8386 - auc: 0.9762 - tp: 65756.0000 - fp: 8578.0000 - tn: 226658.0000 - fn: 12656.0000 - val_loss: 0.4942 - val_accuracy: 0.8218 - val_precision: 0.8482 - val_recall: 0.7985 - val_auc: 0.9587 - val_tp: 15653.0000 - val_fp: 2802.0000 - val_tn: 56007.0000 - val_fn: 3950.0000 - lr: 1.0000e-04\n", "Epoch 35/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3685 - accuracy: 0.8613 - precision: 0.8844 - recall: 0.8393 - auc: 0.9762 - tp: 65814.0000 - fp: 8601.0000 - tn: 226635.0000 - fn: 12598.0000 - val_loss: 0.4944 - val_accuracy: 0.8220 - val_precision: 0.8475 - val_recall: 0.7986 - val_auc: 0.9587 - val_tp: 15654.0000 - val_fp: 2816.0000 - val_tn: 55993.0000 - val_fn: 3949.0000 - lr: 1.0000e-04\n", "Epoch 36/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3680 - accuracy: 0.8614 - precision: 0.8844 - recall: 0.8390 - auc: 0.9763 - tp: 65791.0000 - fp: 8599.0000 - tn: 226637.0000 - fn: 12621.0000 - val_loss: 0.4947 - val_accuracy: 0.8218 - val_precision: 0.8478 - val_recall: 0.7984 - val_auc: 0.9586 - val_tp: 15652.0000 - val_fp: 2810.0000 - val_tn: 55999.0000 - val_fn: 3951.0000 - lr: 1.0000e-04\n", "Epoch 37/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3675 - accuracy: 0.8614 - precision: 0.8848 - recall: 0.8394 - auc: 0.9764 - tp: 65820.0000 - fp: 8569.0000 - tn: 226667.0000 - fn: 12592.0000 - val_loss: 0.4948 - val_accuracy: 0.8221 - val_precision: 0.8479 - val_recall: 0.7979 - val_auc: 0.9586 - val_tp: 15641.0000 - val_fp: 2805.0000 - val_tn: 56004.0000 - val_fn: 3962.0000 - lr: 1.0000e-04\n", "Epoch 38/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3671 - accuracy: 0.8616 - precision: 0.8850 - recall: 0.8393 - auc: 0.9764 - tp: 65814.0000 - fp: 8555.0000 - tn: 226681.0000 - fn: 12598.0000 - val_loss: 0.4952 - val_accuracy: 0.8220 - val_precision: 0.8478 - val_recall: 0.7981 - val_auc: 0.9586 - val_tp: 15646.0000 - val_fp: 2809.0000 - val_tn: 56000.0000 - val_fn: 3957.0000 - lr: 1.0000e-04\n", "Epoch 39/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3666 - accuracy: 0.8617 - precision: 0.8850 - recall: 0.8396 - auc: 0.9765 - tp: 65834.0000 - fp: 8553.0000 - tn: 226683.0000 - fn: 12578.0000 - val_loss: 0.4954 - val_accuracy: 0.8220 - val_precision: 0.8477 - val_recall: 0.7981 - val_auc: 0.9586 - val_tp: 15645.0000 - val_fp: 2810.0000 - val_tn: 55999.0000 - val_fn: 3958.0000 - lr: 1.0000e-04\n", "Epoch 40/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3662 - accuracy: 0.8620 - precision: 0.8851 - recall: 0.8398 - auc: 0.9765 - tp: 65851.0000 - fp: 8545.0000 - tn: 226691.0000 - fn: 12561.0000 - val_loss: 0.4957 - val_accuracy: 0.8221 - val_precision: 0.8473 - val_recall: 0.7983 - val_auc: 0.9585 - val_tp: 15650.0000 - val_fp: 2820.0000 - val_tn: 55989.0000 - val_fn: 3953.0000 - lr: 1.0000e-04\n", "Epoch 41/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3657 - accuracy: 0.8622 - precision: 0.8855 - recall: 0.8400 - auc: 0.9766 - tp: 65868.0000 - fp: 8515.0000 - tn: 226721.0000 - fn: 12544.0000 - 
val_loss: 0.4959 - val_accuracy: 0.8221 - val_precision: 0.8471 - val_recall: 0.7983 - val_auc: 0.9585 - val_tp: 15650.0000 - val_fp: 2824.0000 - val_tn: 55985.0000 - val_fn: 3953.0000 - lr: 1.0000e-04\n", "Epoch 42/100\n", "30/30 [==============================] - 1s 22ms/step - loss: 0.3653 - accuracy: 0.8621 - precision: 0.8851 - recall: 0.8403 - auc: 0.9766 - tp: 65890.0000 - fp: 8552.0000 - tn: 226684.0000 - fn: 12522.0000 - val_loss: 0.4962 - val_accuracy: 0.8221 - val_precision: 0.8471 - val_recall: 0.7984 - val_auc: 0.9585 - val_tp: 15651.0000 - val_fp: 2824.0000 - val_tn: 55985.0000 - val_fn: 3952.0000 - lr: 1.0000e-04\n", "Epoch 43/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3649 - accuracy: 0.8625 - precision: 0.8855 - recall: 0.8404 - auc: 0.9767 - tp: 65895.0000 - fp: 8517.0000 - tn: 226719.0000 - fn: 12517.0000 - val_loss: 0.4965 - val_accuracy: 0.8218 - val_precision: 0.8471 - val_recall: 0.7982 - val_auc: 0.9584 - val_tp: 15648.0000 - val_fp: 2824.0000 - val_tn: 55985.0000 - val_fn: 3955.0000 - lr: 1.0000e-04\n", "Epoch 44/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3644 - accuracy: 0.8625 - precision: 0.8854 - recall: 0.8409 - auc: 0.9767 - tp: 65934.0000 - fp: 8536.0000 - tn: 226700.0000 - fn: 12478.0000 - val_loss: 0.4967 - val_accuracy: 0.8220 - val_precision: 0.8469 - val_recall: 0.7982 - val_auc: 0.9584 - val_tp: 15647.0000 - val_fp: 2828.0000 - val_tn: 55981.0000 - val_fn: 3956.0000 - lr: 1.0000e-04\n", "Epoch 45/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3640 - accuracy: 0.8627 - precision: 0.8858 - recall: 0.8408 - auc: 0.9768 - tp: 65927.0000 - fp: 8498.0000 - tn: 226738.0000 - fn: 12485.0000 - val_loss: 0.4970 - val_accuracy: 0.8220 - val_precision: 0.8467 - val_recall: 0.7984 - val_auc: 0.9584 - val_tp: 15652.0000 - val_fp: 2834.0000 - val_tn: 55975.0000 - val_fn: 3951.0000 - lr: 1.0000e-04\n", "Epoch 46/100\n", "30/30 [==============================] - 1s 22ms/step - loss: 0.3635 - accuracy: 0.8627 - precision: 0.8854 - recall: 0.8411 - auc: 0.9768 - tp: 65956.0000 - fp: 8535.0000 - tn: 226701.0000 - fn: 12456.0000 - val_loss: 0.4972 - val_accuracy: 0.8219 - val_precision: 0.8468 - val_recall: 0.7977 - val_auc: 0.9584 - val_tp: 15638.0000 - val_fp: 2830.0000 - val_tn: 55979.0000 - val_fn: 3965.0000 - lr: 1.0000e-04\n", "Epoch 47/100\n", "30/30 [==============================] - 1s 23ms/step - loss: 0.3631 - accuracy: 0.8630 - precision: 0.8857 - recall: 0.8413 - auc: 0.9769 - tp: 65968.0000 - fp: 8509.0000 - tn: 226727.0000 - fn: 12444.0000 - val_loss: 0.4975 - val_accuracy: 0.8218 - val_precision: 0.8463 - val_recall: 0.7983 - val_auc: 0.9584 - val_tp: 15649.0000 - val_fp: 2841.0000 - val_tn: 55968.0000 - val_fn: 3954.0000 - lr: 1.0000e-04\n", "Epoch 48/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3627 - accuracy: 0.8632 - precision: 0.8860 - recall: 0.8415 - auc: 0.9769 - tp: 65983.0000 - fp: 8488.0000 - tn: 226748.0000 - fn: 12429.0000 - val_loss: 0.4977 - val_accuracy: 0.8213 - val_precision: 0.8464 - val_recall: 0.7979 - val_auc: 0.9584 - val_tp: 15642.0000 - val_fp: 2838.0000 - val_tn: 55971.0000 - val_fn: 3961.0000 - lr: 1.0000e-04\n", "Epoch 49/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3622 - accuracy: 0.8631 - precision: 0.8858 - recall: 0.8416 - auc: 0.9770 - tp: 65993.0000 - fp: 8507.0000 - tn: 226729.0000 - fn: 12419.0000 - val_loss: 0.4980 - val_accuracy: 0.8219 - val_precision: 0.8461 - val_recall: 
0.7987 - val_auc: 0.9584 - val_tp: 15656.0000 - val_fp: 2847.0000 - val_tn: 55962.0000 - val_fn: 3947.0000 - lr: 1.0000e-04\n", "Epoch 50/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3618 - accuracy: 0.8633 - precision: 0.8860 - recall: 0.8418 - auc: 0.9770 - tp: 66010.0000 - fp: 8495.0000 - tn: 226741.0000 - fn: 12402.0000 - val_loss: 0.4983 - val_accuracy: 0.8218 - val_precision: 0.8457 - val_recall: 0.7981 - val_auc: 0.9583 - val_tp: 15645.0000 - val_fp: 2855.0000 - val_tn: 55954.0000 - val_fn: 3958.0000 - lr: 1.0000e-04\n", "Epoch 51/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3614 - accuracy: 0.8634 - precision: 0.8859 - recall: 0.8419 - auc: 0.9771 - tp: 66014.0000 - fp: 8504.0000 - tn: 226732.0000 - fn: 12398.0000 - val_loss: 0.4986 - val_accuracy: 0.8220 - val_precision: 0.8458 - val_recall: 0.7981 - val_auc: 0.9583 - val_tp: 15645.0000 - val_fp: 2853.0000 - val_tn: 55956.0000 - val_fn: 3958.0000 - lr: 1.0000e-04\n", "Epoch 52/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3610 - accuracy: 0.8638 - precision: 0.8864 - recall: 0.8421 - auc: 0.9771 - tp: 66029.0000 - fp: 8460.0000 - tn: 226776.0000 - fn: 12383.0000 - val_loss: 0.4988 - val_accuracy: 0.8220 - val_precision: 0.8456 - val_recall: 0.7986 - val_auc: 0.9583 - val_tp: 15655.0000 - val_fp: 2859.0000 - val_tn: 55950.0000 - val_fn: 3948.0000 - lr: 1.0000e-04\n", "Epoch 53/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3605 - accuracy: 0.8638 - precision: 0.8863 - recall: 0.8422 - auc: 0.9772 - tp: 66040.0000 - fp: 8469.0000 - tn: 226767.0000 - fn: 12372.0000 - val_loss: 0.4991 - val_accuracy: 0.8217 - val_precision: 0.8457 - val_recall: 0.7988 - val_auc: 0.9583 - val_tp: 15658.0000 - val_fp: 2857.0000 - val_tn: 55952.0000 - val_fn: 3945.0000 - lr: 1.0000e-04\n", "Epoch 54/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3601 - accuracy: 0.8638 - precision: 0.8863 - recall: 0.8429 - auc: 0.9772 - tp: 66095.0000 - fp: 8482.0000 - tn: 226754.0000 - fn: 12317.0000 - val_loss: 0.4994 - val_accuracy: 0.8218 - val_precision: 0.8456 - val_recall: 0.7984 - val_auc: 0.9583 - val_tp: 15651.0000 - val_fp: 2857.0000 - val_tn: 55952.0000 - val_fn: 3952.0000 - lr: 1.0000e-04\n", "Epoch 55/100\n", "30/30 [==============================] - 1s 22ms/step - loss: 0.3597 - accuracy: 0.8641 - precision: 0.8866 - recall: 0.8427 - auc: 0.9773 - tp: 66076.0000 - fp: 8449.0000 - tn: 226787.0000 - fn: 12336.0000 - val_loss: 0.4997 - val_accuracy: 0.8217 - val_precision: 0.8457 - val_recall: 0.7982 - val_auc: 0.9582 - val_tp: 15647.0000 - val_fp: 2854.0000 - val_tn: 55955.0000 - val_fn: 3956.0000 - lr: 1.0000e-04\n", "Epoch 56/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3593 - accuracy: 0.8641 - precision: 0.8865 - recall: 0.8429 - auc: 0.9774 - tp: 66094.0000 - fp: 8461.0000 - tn: 226775.0000 - fn: 12318.0000 - val_loss: 0.5000 - val_accuracy: 0.8218 - val_precision: 0.8455 - val_recall: 0.7980 - val_auc: 0.9582 - val_tp: 15643.0000 - val_fp: 2859.0000 - val_tn: 55950.0000 - val_fn: 3960.0000 - lr: 1.0000e-04\n", "Epoch 57/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3589 - accuracy: 0.8641 - precision: 0.8868 - recall: 0.8433 - auc: 0.9774 - tp: 66125.0000 - fp: 8445.0000 - tn: 226791.0000 - fn: 12287.0000 - val_loss: 0.5002 - val_accuracy: 0.8218 - val_precision: 0.8457 - val_recall: 0.7985 - val_auc: 0.9582 - val_tp: 15653.0000 - val_fp: 2857.0000 - val_tn: 
55952.0000 - val_fn: 3950.0000 - lr: 1.0000e-04\n", "Epoch 58/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3585 - accuracy: 0.8644 - precision: 0.8869 - recall: 0.8434 - auc: 0.9774 - tp: 66129.0000 - fp: 8435.0000 - tn: 226801.0000 - fn: 12283.0000 - val_loss: 0.5006 - val_accuracy: 0.8214 - val_precision: 0.8455 - val_recall: 0.7985 - val_auc: 0.9582 - val_tp: 15653.0000 - val_fp: 2861.0000 - val_tn: 55948.0000 - val_fn: 3950.0000 - lr: 1.0000e-04\n", "Epoch 59/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3581 - accuracy: 0.8645 - precision: 0.8864 - recall: 0.8438 - auc: 0.9775 - tp: 66162.0000 - fp: 8479.0000 - tn: 226757.0000 - fn: 12250.0000 - val_loss: 0.5008 - val_accuracy: 0.8220 - val_precision: 0.8456 - val_recall: 0.7988 - val_auc: 0.9582 - val_tp: 15658.0000 - val_fp: 2859.0000 - val_tn: 55950.0000 - val_fn: 3945.0000 - lr: 1.0000e-04\n", "Epoch 60/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3577 - accuracy: 0.8647 - precision: 0.8871 - recall: 0.8436 - auc: 0.9775 - tp: 66149.0000 - fp: 8420.0000 - tn: 226816.0000 - fn: 12263.0000 - val_loss: 0.5012 - val_accuracy: 0.8217 - val_precision: 0.8458 - val_recall: 0.7981 - val_auc: 0.9581 - val_tp: 15646.0000 - val_fp: 2852.0000 - val_tn: 55957.0000 - val_fn: 3957.0000 - lr: 1.0000e-04\n", "Epoch 61/100\n", "30/30 [==============================] - 1s 23ms/step - loss: 0.3573 - accuracy: 0.8648 - precision: 0.8871 - recall: 0.8440 - auc: 0.9776 - tp: 66178.0000 - fp: 8423.0000 - tn: 226813.0000 - fn: 12234.0000 - val_loss: 0.5014 - val_accuracy: 0.8216 - val_precision: 0.8453 - val_recall: 0.7984 - val_auc: 0.9581 - val_tp: 15652.0000 - val_fp: 2864.0000 - val_tn: 55945.0000 - val_fn: 3951.0000 - lr: 1.0000e-04\n", "Epoch 62/100\n", "30/30 [==============================] - 1s 30ms/step - loss: 0.3569 - accuracy: 0.8647 - precision: 0.8874 - recall: 0.8440 - auc: 0.9776 - tp: 66178.0000 - fp: 8400.0000 - tn: 226836.0000 - fn: 12234.0000 - val_loss: 0.5017 - val_accuracy: 0.8214 - val_precision: 0.8458 - val_recall: 0.7986 - val_auc: 0.9580 - val_tp: 15654.0000 - val_fp: 2853.0000 - val_tn: 55956.0000 - val_fn: 3949.0000 - lr: 1.0000e-04\n", "Epoch 63/100\n", "30/30 [==============================] - 1s 24ms/step - loss: 0.3565 - accuracy: 0.8649 - precision: 0.8874 - recall: 0.8443 - auc: 0.9777 - tp: 66203.0000 - fp: 8398.0000 - tn: 226838.0000 - fn: 12209.0000 - val_loss: 0.5020 - val_accuracy: 0.8217 - val_precision: 0.8456 - val_recall: 0.7987 - val_auc: 0.9580 - val_tp: 15657.0000 - val_fp: 2859.0000 - val_tn: 55950.0000 - val_fn: 3946.0000 - lr: 1.0000e-04\n", "Epoch 64/100\n", "30/30 [==============================] - 1s 25ms/step - loss: 0.3561 - accuracy: 0.8653 - precision: 0.8873 - recall: 0.8443 - auc: 0.9777 - tp: 66206.0000 - fp: 8405.0000 - tn: 226831.0000 - fn: 12206.0000 - val_loss: 0.5022 - val_accuracy: 0.8213 - val_precision: 0.8460 - val_recall: 0.7984 - val_auc: 0.9580 - val_tp: 15651.0000 - val_fp: 2850.0000 - val_tn: 55959.0000 - val_fn: 3952.0000 - lr: 1.0000e-04\n", "Epoch 65/100\n", "30/30 [==============================] - 1s 22ms/step - loss: 0.3557 - accuracy: 0.8652 - precision: 0.8876 - recall: 0.8445 - auc: 0.9778 - tp: 66219.0000 - fp: 8382.0000 - tn: 226854.0000 - fn: 12193.0000 - val_loss: 0.5025 - val_accuracy: 0.8210 - val_precision: 0.8459 - val_recall: 0.7983 - val_auc: 0.9580 - val_tp: 15649.0000 - val_fp: 2850.0000 - val_tn: 55959.0000 - val_fn: 3954.0000 - lr: 1.0000e-04\n", "Epoch 66/100\n", "30/30 
[==============================] - 1s 21ms/step - loss: 0.3553 - accuracy: 0.8652 - precision: 0.8877 - recall: 0.8447 - auc: 0.9778 - tp: 66238.0000 - fp: 8377.0000 - tn: 226859.0000 - fn: 12174.0000 - val_loss: 0.5028 - val_accuracy: 0.8214 - val_precision: 0.8456 - val_recall: 0.7986 - val_auc: 0.9580 - val_tp: 15654.0000 - val_fp: 2858.0000 - val_tn: 55951.0000 - val_fn: 3949.0000 - lr: 1.0000e-04\n", "Epoch 67/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3549 - accuracy: 0.8655 - precision: 0.8876 - recall: 0.8452 - auc: 0.9779 - tp: 66272.0000 - fp: 8395.0000 - tn: 226841.0000 - fn: 12140.0000 - val_loss: 0.5031 - val_accuracy: 0.8215 - val_precision: 0.8453 - val_recall: 0.7990 - val_auc: 0.9579 - val_tp: 15662.0000 - val_fp: 2866.0000 - val_tn: 55943.0000 - val_fn: 3941.0000 - lr: 1.0000e-04\n", "Epoch 68/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3545 - accuracy: 0.8655 - precision: 0.8878 - recall: 0.8447 - auc: 0.9779 - tp: 66234.0000 - fp: 8372.0000 - tn: 226864.0000 - fn: 12178.0000 - val_loss: 0.5034 - val_accuracy: 0.8213 - val_precision: 0.8458 - val_recall: 0.7988 - val_auc: 0.9579 - val_tp: 15658.0000 - val_fp: 2855.0000 - val_tn: 55954.0000 - val_fn: 3945.0000 - lr: 1.0000e-04\n", "Epoch 69/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3541 - accuracy: 0.8657 - precision: 0.8878 - recall: 0.8451 - auc: 0.9780 - tp: 66263.0000 - fp: 8373.0000 - tn: 226863.0000 - fn: 12149.0000 - val_loss: 0.5037 - val_accuracy: 0.8215 - val_precision: 0.8459 - val_recall: 0.7993 - val_auc: 0.9579 - val_tp: 15668.0000 - val_fp: 2854.0000 - val_tn: 55955.0000 - val_fn: 3935.0000 - lr: 1.0000e-04\n", "Epoch 70/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3537 - accuracy: 0.8657 - precision: 0.8878 - recall: 0.8453 - auc: 0.9780 - tp: 66278.0000 - fp: 8377.0000 - tn: 226859.0000 - fn: 12134.0000 - val_loss: 0.5040 - val_accuracy: 0.8214 - val_precision: 0.8457 - val_recall: 0.7991 - val_auc: 0.9579 - val_tp: 15664.0000 - val_fp: 2858.0000 - val_tn: 55951.0000 - val_fn: 3939.0000 - lr: 1.0000e-04\n", "Epoch 71/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3533 - accuracy: 0.8657 - precision: 0.8880 - recall: 0.8452 - auc: 0.9781 - tp: 66277.0000 - fp: 8362.0000 - tn: 226874.0000 - fn: 12135.0000 - val_loss: 0.5042 - val_accuracy: 0.8214 - val_precision: 0.8456 - val_recall: 0.7992 - val_auc: 0.9579 - val_tp: 15666.0000 - val_fp: 2860.0000 - val_tn: 55949.0000 - val_fn: 3937.0000 - lr: 1.0000e-04\n", "Epoch 72/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3529 - accuracy: 0.8659 - precision: 0.8883 - recall: 0.8454 - auc: 0.9781 - tp: 66287.0000 - fp: 8338.0000 - tn: 226898.0000 - fn: 12125.0000 - val_loss: 0.5046 - val_accuracy: 0.8213 - val_precision: 0.8456 - val_recall: 0.7996 - val_auc: 0.9579 - val_tp: 15674.0000 - val_fp: 2863.0000 - val_tn: 55946.0000 - val_fn: 3929.0000 - lr: 1.0000e-04\n", "Epoch 73/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3526 - accuracy: 0.8661 - precision: 0.8878 - recall: 0.8457 - auc: 0.9782 - tp: 66314.0000 - fp: 8380.0000 - tn: 226856.0000 - fn: 12098.0000 - val_loss: 0.5049 - val_accuracy: 0.8216 - val_precision: 0.8451 - val_recall: 0.7993 - val_auc: 0.9579 - val_tp: 15669.0000 - val_fp: 2871.0000 - val_tn: 55938.0000 - val_fn: 3934.0000 - lr: 1.0000e-04\n", "Epoch 74/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3522 - accuracy: 0.8663 
- precision: 0.8882 - recall: 0.8460 - auc: 0.9782 - tp: 66338.0000 - fp: 8347.0000 - tn: 226889.0000 - fn: 12074.0000 - val_loss: 0.5053 - val_accuracy: 0.8214 - val_precision: 0.8452 - val_recall: 0.7995 - val_auc: 0.9578 - val_tp: 15673.0000 - val_fp: 2870.0000 - val_tn: 55939.0000 - val_fn: 3930.0000 - lr: 1.0000e-04\n", "Epoch 75/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3518 - accuracy: 0.8665 - precision: 0.8889 - recall: 0.8456 - auc: 0.9782 - tp: 66302.0000 - fp: 8284.0000 - tn: 226952.0000 - fn: 12110.0000 - val_loss: 0.5055 - val_accuracy: 0.8213 - val_precision: 0.8453 - val_recall: 0.7996 - val_auc: 0.9578 - val_tp: 15675.0000 - val_fp: 2869.0000 - val_tn: 55940.0000 - val_fn: 3928.0000 - lr: 1.0000e-04\n", "Epoch 76/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3514 - accuracy: 0.8667 - precision: 0.8883 - recall: 0.8463 - auc: 0.9783 - tp: 66357.0000 - fp: 8342.0000 - tn: 226894.0000 - fn: 12055.0000 - val_loss: 0.5058 - val_accuracy: 0.8210 - val_precision: 0.8453 - val_recall: 0.7996 - val_auc: 0.9578 - val_tp: 15674.0000 - val_fp: 2869.0000 - val_tn: 55940.0000 - val_fn: 3929.0000 - lr: 1.0000e-04\n", "Epoch 77/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3510 - accuracy: 0.8668 - precision: 0.8887 - recall: 0.8463 - auc: 0.9783 - tp: 66361.0000 - fp: 8315.0000 - tn: 226921.0000 - fn: 12051.0000 - val_loss: 0.5061 - val_accuracy: 0.8211 - val_precision: 0.8452 - val_recall: 0.7997 - val_auc: 0.9578 - val_tp: 15676.0000 - val_fp: 2872.0000 - val_tn: 55937.0000 - val_fn: 3927.0000 - lr: 1.0000e-04\n", "Epoch 78/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3507 - accuracy: 0.8668 - precision: 0.8887 - recall: 0.8465 - auc: 0.9784 - tp: 66374.0000 - fp: 8315.0000 - tn: 226921.0000 - fn: 12038.0000 - val_loss: 0.5064 - val_accuracy: 0.8206 - val_precision: 0.8449 - val_recall: 0.7995 - val_auc: 0.9577 - val_tp: 15673.0000 - val_fp: 2878.0000 - val_tn: 55931.0000 - val_fn: 3930.0000 - lr: 1.0000e-04\n", "Epoch 79/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3503 - accuracy: 0.8673 - precision: 0.8894 - recall: 0.8464 - auc: 0.9784 - tp: 66369.0000 - fp: 8250.0000 - tn: 226986.0000 - fn: 12043.0000 - val_loss: 0.5067 - val_accuracy: 0.8207 - val_precision: 0.8447 - val_recall: 0.7995 - val_auc: 0.9577 - val_tp: 15673.0000 - val_fp: 2882.0000 - val_tn: 55927.0000 - val_fn: 3930.0000 - lr: 1.0000e-04\n", "Epoch 80/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3499 - accuracy: 0.8671 - precision: 0.8888 - recall: 0.8466 - auc: 0.9785 - tp: 66387.0000 - fp: 8303.0000 - tn: 226933.0000 - fn: 12025.0000 - val_loss: 0.5070 - val_accuracy: 0.8208 - val_precision: 0.8445 - val_recall: 0.7998 - val_auc: 0.9576 - val_tp: 15678.0000 - val_fp: 2887.0000 - val_tn: 55922.0000 - val_fn: 3925.0000 - lr: 1.0000e-04\n", "Epoch 81/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3495 - accuracy: 0.8677 - precision: 0.8891 - recall: 0.8472 - auc: 0.9785 - tp: 66430.0000 - fp: 8286.0000 - tn: 226950.0000 - fn: 11982.0000 - val_loss: 0.5073 - val_accuracy: 0.8205 - val_precision: 0.8446 - val_recall: 0.7995 - val_auc: 0.9576 - val_tp: 15673.0000 - val_fp: 2883.0000 - val_tn: 55926.0000 - val_fn: 3930.0000 - lr: 1.0000e-04\n", "Epoch 82/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3492 - accuracy: 0.8679 - precision: 0.8895 - recall: 0.8469 - auc: 0.9786 - tp: 66408.0000 - fp: 
8246.0000 - tn: 226990.0000 - fn: 12004.0000 - val_loss: 0.5077 - val_accuracy: 0.8205 - val_precision: 0.8449 - val_recall: 0.7999 - val_auc: 0.9575 - val_tp: 15680.0000 - val_fp: 2878.0000 - val_tn: 55931.0000 - val_fn: 3923.0000 - lr: 1.0000e-04\n", "Epoch 83/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3488 - accuracy: 0.8679 - precision: 0.8894 - recall: 0.8473 - auc: 0.9786 - tp: 66442.0000 - fp: 8260.0000 - tn: 226976.0000 - fn: 11970.0000 - val_loss: 0.5079 - val_accuracy: 0.8205 - val_precision: 0.8447 - val_recall: 0.8000 - val_auc: 0.9575 - val_tp: 15683.0000 - val_fp: 2884.0000 - val_tn: 55925.0000 - val_fn: 3920.0000 - lr: 1.0000e-04\n", "Epoch 84/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3484 - accuracy: 0.8678 - precision: 0.8892 - recall: 0.8477 - auc: 0.9787 - tp: 66472.0000 - fp: 8282.0000 - tn: 226954.0000 - fn: 11940.0000 - val_loss: 0.5082 - val_accuracy: 0.8205 - val_precision: 0.8447 - val_recall: 0.7995 - val_auc: 0.9575 - val_tp: 15672.0000 - val_fp: 2881.0000 - val_tn: 55928.0000 - val_fn: 3931.0000 - lr: 1.0000e-04\n", "Epoch 85/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3480 - accuracy: 0.8681 - precision: 0.8895 - recall: 0.8479 - auc: 0.9787 - tp: 66482.0000 - fp: 8259.0000 - tn: 226977.0000 - fn: 11930.0000 - val_loss: 0.5086 - val_accuracy: 0.8204 - val_precision: 0.8451 - val_recall: 0.8003 - val_auc: 0.9575 - val_tp: 15689.0000 - val_fp: 2875.0000 - val_tn: 55934.0000 - val_fn: 3914.0000 - lr: 1.0000e-04\n", "Epoch 86/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3477 - accuracy: 0.8684 - precision: 0.8898 - recall: 0.8477 - auc: 0.9787 - tp: 66469.0000 - fp: 8230.0000 - tn: 227006.0000 - fn: 11943.0000 - val_loss: 0.5089 - val_accuracy: 0.8203 - val_precision: 0.8450 - val_recall: 0.8001 - val_auc: 0.9574 - val_tp: 15684.0000 - val_fp: 2876.0000 - val_tn: 55933.0000 - val_fn: 3919.0000 - lr: 1.0000e-04\n", "Epoch 87/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3473 - accuracy: 0.8684 - precision: 0.8901 - recall: 0.8482 - auc: 0.9788 - tp: 66508.0000 - fp: 8214.0000 - tn: 227022.0000 - fn: 11904.0000 - val_loss: 0.5091 - val_accuracy: 0.8202 - val_precision: 0.8445 - val_recall: 0.7996 - val_auc: 0.9574 - val_tp: 15675.0000 - val_fp: 2887.0000 - val_tn: 55922.0000 - val_fn: 3928.0000 - lr: 1.0000e-04\n", "Epoch 88/100\n", "30/30 [==============================] - 1s 22ms/step - loss: 0.3470 - accuracy: 0.8683 - precision: 0.8897 - recall: 0.8482 - auc: 0.9788 - tp: 66509.0000 - fp: 8247.0000 - tn: 226989.0000 - fn: 11903.0000 - val_loss: 0.5096 - val_accuracy: 0.8204 - val_precision: 0.8440 - val_recall: 0.8001 - val_auc: 0.9573 - val_tp: 15685.0000 - val_fp: 2900.0000 - val_tn: 55909.0000 - val_fn: 3918.0000 - lr: 1.0000e-04\n", "Epoch 89/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3466 - accuracy: 0.8686 - precision: 0.8899 - recall: 0.8482 - auc: 0.9789 - tp: 66507.0000 - fp: 8226.0000 - tn: 227010.0000 - fn: 11905.0000 - val_loss: 0.5098 - val_accuracy: 0.8200 - val_precision: 0.8444 - val_recall: 0.7998 - val_auc: 0.9573 - val_tp: 15678.0000 - val_fp: 2890.0000 - val_tn: 55919.0000 - val_fn: 3925.0000 - lr: 1.0000e-04\n", "Epoch 90/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3462 - accuracy: 0.8689 - precision: 0.8899 - recall: 0.8486 - auc: 0.9789 - tp: 66538.0000 - fp: 8234.0000 - tn: 227002.0000 - fn: 11874.0000 - val_loss: 0.5101 - val_accuracy: 
0.8202 - val_precision: 0.8452 - val_recall: 0.7996 - val_auc: 0.9573 - val_tp: 15675.0000 - val_fp: 2872.0000 - val_tn: 55937.0000 - val_fn: 3928.0000 - lr: 1.0000e-04\n", "Epoch 91/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3458 - accuracy: 0.8689 - precision: 0.8903 - recall: 0.8485 - auc: 0.9790 - tp: 66530.0000 - fp: 8198.0000 - tn: 227038.0000 - fn: 11882.0000 - val_loss: 0.5104 - val_accuracy: 0.8203 - val_precision: 0.8445 - val_recall: 0.7998 - val_auc: 0.9572 - val_tp: 15678.0000 - val_fp: 2887.0000 - val_tn: 55922.0000 - val_fn: 3925.0000 - lr: 1.0000e-04\n", "Epoch 92/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3454 - accuracy: 0.8690 - precision: 0.8903 - recall: 0.8485 - auc: 0.9790 - tp: 66534.0000 - fp: 8200.0000 - tn: 227036.0000 - fn: 11878.0000 - val_loss: 0.5108 - val_accuracy: 0.8200 - val_precision: 0.8442 - val_recall: 0.7995 - val_auc: 0.9572 - val_tp: 15673.0000 - val_fp: 2893.0000 - val_tn: 55916.0000 - val_fn: 3930.0000 - lr: 1.0000e-04\n", "Epoch 93/100\n", "30/30 [==============================] - 1s 22ms/step - loss: 0.3451 - accuracy: 0.8691 - precision: 0.8903 - recall: 0.8485 - auc: 0.9790 - tp: 66536.0000 - fp: 8195.0000 - tn: 227041.0000 - fn: 11876.0000 - val_loss: 0.5110 - val_accuracy: 0.8198 - val_precision: 0.8445 - val_recall: 0.7996 - val_auc: 0.9572 - val_tp: 15675.0000 - val_fp: 2886.0000 - val_tn: 55923.0000 - val_fn: 3928.0000 - lr: 1.0000e-04\n", "Epoch 94/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3447 - accuracy: 0.8695 - precision: 0.8904 - recall: 0.8490 - auc: 0.9791 - tp: 66570.0000 - fp: 8198.0000 - tn: 227038.0000 - fn: 11842.0000 - val_loss: 0.5114 - val_accuracy: 0.8199 - val_precision: 0.8443 - val_recall: 0.7994 - val_auc: 0.9571 - val_tp: 15671.0000 - val_fp: 2889.0000 - val_tn: 55920.0000 - val_fn: 3932.0000 - lr: 1.0000e-04\n", "Epoch 95/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3444 - accuracy: 0.8695 - precision: 0.8906 - recall: 0.8490 - auc: 0.9791 - tp: 66574.0000 - fp: 8180.0000 - tn: 227056.0000 - fn: 11838.0000 - val_loss: 0.5117 - val_accuracy: 0.8202 - val_precision: 0.8444 - val_recall: 0.7995 - val_auc: 0.9571 - val_tp: 15673.0000 - val_fp: 2888.0000 - val_tn: 55921.0000 - val_fn: 3930.0000 - lr: 1.0000e-04\n", "Epoch 96/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3440 - accuracy: 0.8696 - precision: 0.8907 - recall: 0.8491 - auc: 0.9792 - tp: 66582.0000 - fp: 8169.0000 - tn: 227067.0000 - fn: 11830.0000 - val_loss: 0.5121 - val_accuracy: 0.8201 - val_precision: 0.8446 - val_recall: 0.7994 - val_auc: 0.9571 - val_tp: 15671.0000 - val_fp: 2883.0000 - val_tn: 55926.0000 - val_fn: 3932.0000 - lr: 1.0000e-04\n", "Epoch 97/100\n", "30/30 [==============================] - 1s 18ms/step - loss: 0.3437 - accuracy: 0.8699 - precision: 0.8909 - recall: 0.8494 - auc: 0.9792 - tp: 66607.0000 - fp: 8158.0000 - tn: 227078.0000 - fn: 11805.0000 - val_loss: 0.5123 - val_accuracy: 0.8203 - val_precision: 0.8445 - val_recall: 0.7999 - val_auc: 0.9571 - val_tp: 15681.0000 - val_fp: 2887.0000 - val_tn: 55922.0000 - val_fn: 3922.0000 - lr: 1.0000e-04\n", "Epoch 98/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3433 - accuracy: 0.8699 - precision: 0.8911 - recall: 0.8492 - auc: 0.9793 - tp: 66589.0000 - fp: 8138.0000 - tn: 227098.0000 - fn: 11823.0000 - val_loss: 0.5128 - val_accuracy: 0.8201 - val_precision: 0.8439 - val_recall: 0.7995 - val_auc: 0.9571 - val_tp: 
15673.0000 - val_fp: 2899.0000 - val_tn: 55910.0000 - val_fn: 3930.0000 - lr: 1.0000e-04\n", "Epoch 99/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3430 - accuracy: 0.8702 - precision: 0.8910 - recall: 0.8497 - auc: 0.9793 - tp: 66629.0000 - fp: 8153.0000 - tn: 227083.0000 - fn: 11783.0000 - val_loss: 0.5130 - val_accuracy: 0.8197 - val_precision: 0.8440 - val_recall: 0.7994 - val_auc: 0.9570 - val_tp: 15671.0000 - val_fp: 2897.0000 - val_tn: 55912.0000 - val_fn: 3932.0000 - lr: 1.0000e-04\n", "Epoch 100/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3426 - accuracy: 0.8700 - precision: 0.8912 - recall: 0.8497 - auc: 0.9793 - tp: 66624.0000 - fp: 8131.0000 - tn: 227105.0000 - fn: 11788.0000 - val_loss: 0.5134 - val_accuracy: 0.8198 - val_precision: 0.8441 - val_recall: 0.7997 - val_auc: 0.9570 - val_tp: 15677.0000 - val_fp: 2895.0000 - val_tn: 55914.0000 - val_fn: 3926.0000 - lr: 1.0000e-04\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2024-06-08 13:58:26,323] Trial 2 finished with value: 0.816274881362915 and parameters: {'num_filters': 124, 'kernel_size': 4, 'learning_rate': 0.0007177946981171184}. Best is trial 0 with value: 0.819294810295105.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Loss: 0.5146223902702332\n", "Accuracy: 0.816274881362915\n", "Precision: 0.8387221693992615\n", "Recall: 0.7960740923881531\n", "AUC: 0.9571151733398438\n", "True Positives: 19507.0\n", "False Positives: 3751.0\n", "True Negatives: 69761.0\n", "False Negatives: 4997.0\n", "Epoch 1/100\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "C:\\Users\\Michał\\AppData\\Local\\Temp\\ipykernel_33252\\265862631.py:5: FutureWarning: suggest_loguniform has been deprecated in v3.0.0. This feature will be removed in v6.0.0. See https://github.com/optuna/optuna/releases/tag/v3.0.0. 
Use suggest_float(..., log=True) instead.\n", " learning_rate = trial.suggest_loguniform('learning_rate', 1e-4, 1e-2)\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "30/30 [==============================] - 3s 46ms/step - loss: 1.3620 - accuracy: 0.3614 - precision: 0.0000e+00 - recall: 0.0000e+00 - auc: 0.6527 - tp: 0.0000e+00 - fp: 0.0000e+00 - tn: 235236.0000 - fn: 78412.0000 - val_loss: 1.3293 - val_accuracy: 0.4581 - val_precision: 0.0000e+00 - val_recall: 0.0000e+00 - val_auc: 0.7289 - val_tp: 0.0000e+00 - val_fp: 0.0000e+00 - val_tn: 58809.0000 - val_fn: 19603.0000 - lr: 6.1312e-04\n", "Epoch 2/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 1.2758 - accuracy: 0.5298 - precision: 0.0000e+00 - recall: 0.0000e+00 - auc: 0.7566 - tp: 0.0000e+00 - fp: 0.0000e+00 - tn: 235236.0000 - fn: 78412.0000 - val_loss: 1.1994 - val_accuracy: 0.6011 - val_precision: 0.0000e+00 - val_recall: 0.0000e+00 - val_auc: 0.8021 - val_tp: 0.0000e+00 - val_fp: 0.0000e+00 - val_tn: 58809.0000 - val_fn: 19603.0000 - lr: 6.1312e-04\n", "Epoch 3/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 1.0835 - accuracy: 0.6464 - precision: 0.8711 - recall: 0.1409 - auc: 0.8329 - tp: 11045.0000 - fp: 1634.0000 - tn: 233602.0000 - fn: 67367.0000 - val_loss: 0.9591 - val_accuracy: 0.6716 - val_precision: 0.8523 - val_recall: 0.4013 - val_auc: 0.8633 - val_tp: 7866.0000 - val_fp: 1363.0000 - val_tn: 57446.0000 - val_fn: 11737.0000 - lr: 6.1312e-04\n", "Epoch 4/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.8497 - accuracy: 0.6968 - precision: 0.8263 - recall: 0.5373 - auc: 0.8901 - tp: 42133.0000 - fp: 8858.0000 - tn: 226378.0000 - fn: 36279.0000 - val_loss: 0.7697 - val_accuracy: 0.7131 - val_precision: 0.8102 - val_recall: 0.6100 - val_auc: 0.9078 - val_tp: 11958.0000 - val_fp: 2802.0000 - val_tn: 56007.0000 - val_fn: 7645.0000 - lr: 6.1312e-04\n", "Epoch 5/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.7022 - accuracy: 0.7332 - precision: 0.8222 - recall: 0.6496 - auc: 0.9228 - tp: 50936.0000 - fp: 11017.0000 - tn: 224219.0000 - fn: 27476.0000 - val_loss: 0.6746 - val_accuracy: 0.7430 - val_precision: 0.8210 - val_recall: 0.6633 - val_auc: 0.9264 - val_tp: 13002.0000 - val_fp: 2835.0000 - val_tn: 55974.0000 - val_fn: 6601.0000 - lr: 6.1312e-04\n", "Epoch 6/100\n", "30/30 [==============================] - 1s 22ms/step - loss: 0.6217 - accuracy: 0.7683 - precision: 0.8362 - recall: 0.6883 - auc: 0.9376 - tp: 53970.0000 - fp: 10575.0000 - tn: 224661.0000 - fn: 24442.0000 - val_loss: 0.6186 - val_accuracy: 0.7702 - val_precision: 0.8347 - val_recall: 0.6983 - val_auc: 0.9366 - val_tp: 13688.0000 - val_fp: 2710.0000 - val_tn: 56099.0000 - val_fn: 5915.0000 - lr: 6.1312e-04\n", "Epoch 7/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.5682 - accuracy: 0.7929 - precision: 0.8481 - recall: 0.7240 - auc: 0.9469 - tp: 56774.0000 - fp: 10170.0000 - tn: 225066.0000 - fn: 21638.0000 - val_loss: 0.5810 - val_accuracy: 0.7844 - val_precision: 0.8418 - val_recall: 0.7247 - val_auc: 0.9433 - val_tp: 14206.0000 - val_fp: 2669.0000 - val_tn: 56140.0000 - val_fn: 5397.0000 - lr: 6.1312e-04\n", "Epoch 8/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.5297 - accuracy: 0.8069 - precision: 0.8559 - recall: 0.7490 - auc: 0.9533 - tp: 58727.0000 - fp: 9886.0000 - tn: 225350.0000 - fn: 19685.0000 - val_loss: 0.5545 - val_accuracy: 0.7924 - val_precision: 0.8459 - val_recall: 0.7416 - 
val_auc: 0.9479 - val_tp: 14537.0000 - val_fp: 2649.0000 - val_tn: 56160.0000 - val_fn: 5066.0000 - lr: 6.1312e-04\n", "Epoch 9/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.5018 - accuracy: 0.8164 - precision: 0.8605 - recall: 0.7651 - auc: 0.9577 - tp: 59991.0000 - fp: 9722.0000 - tn: 225514.0000 - fn: 18421.0000 - val_loss: 0.5368 - val_accuracy: 0.7992 - val_precision: 0.8478 - val_recall: 0.7533 - val_auc: 0.9508 - val_tp: 14766.0000 - val_fp: 2651.0000 - val_tn: 56158.0000 - val_fn: 4837.0000 - lr: 6.1312e-04\n", "Epoch 10/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.4815 - accuracy: 0.8232 - precision: 0.8643 - recall: 0.7760 - auc: 0.9608 - tp: 60850.0000 - fp: 9557.0000 - tn: 225679.0000 - fn: 17562.0000 - val_loss: 0.5253 - val_accuracy: 0.8030 - val_precision: 0.8492 - val_recall: 0.7597 - val_auc: 0.9528 - val_tp: 14892.0000 - val_fp: 2644.0000 - val_tn: 56165.0000 - val_fn: 4711.0000 - lr: 6.1312e-04\n", "Epoch 11/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.4665 - accuracy: 0.8283 - precision: 0.8661 - recall: 0.7853 - auc: 0.9630 - tp: 61580.0000 - fp: 9521.0000 - tn: 225715.0000 - fn: 16832.0000 - val_loss: 0.5177 - val_accuracy: 0.8069 - val_precision: 0.8496 - val_recall: 0.7655 - val_auc: 0.9540 - val_tp: 15006.0000 - val_fp: 2657.0000 - val_tn: 56152.0000 - val_fn: 4597.0000 - lr: 6.1312e-04\n", "Epoch 12/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.4548 - accuracy: 0.8322 - precision: 0.8687 - recall: 0.7921 - auc: 0.9647 - tp: 62113.0000 - fp: 9386.0000 - tn: 225850.0000 - fn: 16299.0000 - val_loss: 0.5117 - val_accuracy: 0.8096 - val_precision: 0.8480 - val_recall: 0.7727 - val_auc: 0.9550 - val_tp: 15148.0000 - val_fp: 2716.0000 - val_tn: 56093.0000 - val_fn: 4455.0000 - lr: 6.1312e-04\n", "Epoch 13/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.4450 - accuracy: 0.8351 - precision: 0.8695 - recall: 0.7977 - auc: 0.9660 - tp: 62551.0000 - fp: 9384.0000 - tn: 225852.0000 - fn: 15861.0000 - val_loss: 0.5075 - val_accuracy: 0.8117 - val_precision: 0.8504 - val_recall: 0.7744 - val_auc: 0.9557 - val_tp: 15181.0000 - val_fp: 2670.0000 - val_tn: 56139.0000 - val_fn: 4422.0000 - lr: 6.1312e-04\n", "Epoch 14/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.4368 - accuracy: 0.8379 - precision: 0.8715 - recall: 0.8021 - auc: 0.9672 - tp: 62898.0000 - fp: 9271.0000 - tn: 225965.0000 - fn: 15514.0000 - val_loss: 0.5045 - val_accuracy: 0.8141 - val_precision: 0.8515 - val_recall: 0.7766 - val_auc: 0.9562 - val_tp: 15223.0000 - val_fp: 2655.0000 - val_tn: 56154.0000 - val_fn: 4380.0000 - lr: 6.1312e-04\n", "Epoch 15/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.4300 - accuracy: 0.8404 - precision: 0.8732 - recall: 0.8044 - auc: 0.9682 - tp: 63077.0000 - fp: 9158.0000 - tn: 226078.0000 - fn: 15335.0000 - val_loss: 0.5026 - val_accuracy: 0.8145 - val_precision: 0.8490 - val_recall: 0.7823 - val_auc: 0.9567 - val_tp: 15336.0000 - val_fp: 2727.0000 - val_tn: 56082.0000 - val_fn: 4267.0000 - lr: 6.1312e-04\n", "Epoch 16/100\n", "30/30 [==============================] - 1s 22ms/step - loss: 0.4237 - accuracy: 0.8417 - precision: 0.8731 - recall: 0.8085 - auc: 0.9690 - tp: 63393.0000 - fp: 9216.0000 - tn: 226020.0000 - fn: 15019.0000 - val_loss: 0.5011 - val_accuracy: 0.8172 - val_precision: 0.8517 - val_recall: 0.7808 - val_auc: 0.9570 - val_tp: 15307.0000 - val_fp: 2665.0000 - val_tn: 56144.0000 - 
val_fn: 4296.0000 - lr: 6.1312e-04\n", "Epoch 17/100\n", "30/30 [==============================] - 1s 22ms/step - loss: 0.4181 - accuracy: 0.8441 - precision: 0.8753 - recall: 0.8106 - auc: 0.9698 - tp: 63560.0000 - fp: 9052.0000 - tn: 226184.0000 - fn: 14852.0000 - val_loss: 0.4999 - val_accuracy: 0.8178 - val_precision: 0.8525 - val_recall: 0.7822 - val_auc: 0.9572 - val_tp: 15334.0000 - val_fp: 2653.0000 - val_tn: 56156.0000 - val_fn: 4269.0000 - lr: 6.1312e-04\n", "Epoch 18/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.4131 - accuracy: 0.8458 - precision: 0.8754 - recall: 0.8141 - auc: 0.9704 - tp: 63837.0000 - fp: 9086.0000 - tn: 226150.0000 - fn: 14575.0000 - val_loss: 0.4991 - val_accuracy: 0.8185 - val_precision: 0.8526 - val_recall: 0.7835 - val_auc: 0.9574 - val_tp: 15359.0000 - val_fp: 2655.0000 - val_tn: 56154.0000 - val_fn: 4244.0000 - lr: 6.1312e-04\n", "Epoch 19/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.4085 - accuracy: 0.8472 - precision: 0.8773 - recall: 0.8154 - auc: 0.9711 - tp: 63935.0000 - fp: 8938.0000 - tn: 226298.0000 - fn: 14477.0000 - val_loss: 0.4989 - val_accuracy: 0.8202 - val_precision: 0.8526 - val_recall: 0.7853 - val_auc: 0.9575 - val_tp: 15394.0000 - val_fp: 2661.0000 - val_tn: 56148.0000 - val_fn: 4209.0000 - lr: 6.1312e-04\n", "Epoch 20/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.4044 - accuracy: 0.8486 - precision: 0.8774 - recall: 0.8195 - auc: 0.9716 - tp: 64255.0000 - fp: 8978.0000 - tn: 226258.0000 - fn: 14157.0000 - val_loss: 0.4988 - val_accuracy: 0.8206 - val_precision: 0.8504 - val_recall: 0.7906 - val_auc: 0.9576 - val_tp: 15498.0000 - val_fp: 2726.0000 - val_tn: 56083.0000 - val_fn: 4105.0000 - lr: 6.1312e-04\n", "Epoch 21/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.4003 - accuracy: 0.8501 - precision: 0.8779 - recall: 0.8211 - auc: 0.9722 - tp: 64381.0000 - fp: 8952.0000 - tn: 226284.0000 - fn: 14031.0000 - val_loss: 0.4993 - val_accuracy: 0.8209 - val_precision: 0.8529 - val_recall: 0.7887 - val_auc: 0.9576 - val_tp: 15461.0000 - val_fp: 2667.0000 - val_tn: 56142.0000 - val_fn: 4142.0000 - lr: 6.1312e-04\n", "Epoch 22/100\n", "30/30 [==============================] - 1s 22ms/step - loss: 0.3966 - accuracy: 0.8513 - precision: 0.8798 - recall: 0.8213 - auc: 0.9726 - tp: 64399.0000 - fp: 8800.0000 - tn: 226436.0000 - fn: 14013.0000 - val_loss: 0.5006 - val_accuracy: 0.8214 - val_precision: 0.8544 - val_recall: 0.7882 - val_auc: 0.9575 - val_tp: 15451.0000 - val_fp: 2633.0000 - val_tn: 56176.0000 - val_fn: 4152.0000 - lr: 6.1312e-04\n", "Epoch 23/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3914 - accuracy: 0.8535 - precision: 0.8812 - recall: 0.8244 - auc: 0.9734 - tp: 64646.0000 - fp: 8714.0000 - tn: 226522.0000 - fn: 13766.0000 - val_loss: 0.4998 - val_accuracy: 0.8209 - val_precision: 0.8510 - val_recall: 0.7923 - val_auc: 0.9577 - val_tp: 15532.0000 - val_fp: 2719.0000 - val_tn: 56090.0000 - val_fn: 4071.0000 - lr: 1.2262e-04\n", "Epoch 24/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3904 - accuracy: 0.8543 - precision: 0.8801 - recall: 0.8262 - auc: 0.9735 - tp: 64785.0000 - fp: 8826.0000 - tn: 226410.0000 - fn: 13627.0000 - val_loss: 0.5000 - val_accuracy: 0.8210 - val_precision: 0.8503 - val_recall: 0.7926 - val_auc: 0.9576 - val_tp: 15538.0000 - val_fp: 2735.0000 - val_tn: 56074.0000 - val_fn: 4065.0000 - lr: 1.2262e-04\n", "Epoch 25/100\n", "30/30 
[==============================] - 1s 19ms/step - loss: 0.3896 - accuracy: 0.8543 - precision: 0.8799 - recall: 0.8267 - auc: 0.9736 - tp: 64825.0000 - fp: 8844.0000 - tn: 226392.0000 - fn: 13587.0000 - val_loss: 0.5000 - val_accuracy: 0.8210 - val_precision: 0.8503 - val_recall: 0.7927 - val_auc: 0.9577 - val_tp: 15539.0000 - val_fp: 2736.0000 - val_tn: 56073.0000 - val_fn: 4064.0000 - lr: 1.2262e-04\n", "Epoch 26/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3888 - accuracy: 0.8550 - precision: 0.8802 - recall: 0.8272 - auc: 0.9737 - tp: 64862.0000 - fp: 8829.0000 - tn: 226407.0000 - fn: 13550.0000 - val_loss: 0.5002 - val_accuracy: 0.8207 - val_precision: 0.8501 - val_recall: 0.7927 - val_auc: 0.9576 - val_tp: 15539.0000 - val_fp: 2741.0000 - val_tn: 56068.0000 - val_fn: 4064.0000 - lr: 1.0000e-04\n", "Epoch 27/100\n", "30/30 [==============================] - 1s 18ms/step - loss: 0.3883 - accuracy: 0.8549 - precision: 0.8809 - recall: 0.8269 - auc: 0.9738 - tp: 64838.0000 - fp: 8763.0000 - tn: 226473.0000 - fn: 13574.0000 - val_loss: 0.5003 - val_accuracy: 0.8211 - val_precision: 0.8503 - val_recall: 0.7925 - val_auc: 0.9576 - val_tp: 15535.0000 - val_fp: 2734.0000 - val_tn: 56075.0000 - val_fn: 4068.0000 - lr: 1.0000e-04\n", "Epoch 28/100\n", "30/30 [==============================] - 1s 23ms/step - loss: 0.3877 - accuracy: 0.8554 - precision: 0.8804 - recall: 0.8278 - auc: 0.9739 - tp: 64907.0000 - fp: 8817.0000 - tn: 226419.0000 - fn: 13505.0000 - val_loss: 0.5005 - val_accuracy: 0.8211 - val_precision: 0.8501 - val_recall: 0.7928 - val_auc: 0.9576 - val_tp: 15542.0000 - val_fp: 2741.0000 - val_tn: 56068.0000 - val_fn: 4061.0000 - lr: 1.0000e-04\n", "Epoch 29/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3872 - accuracy: 0.8555 - precision: 0.8807 - recall: 0.8282 - auc: 0.9739 - tp: 64942.0000 - fp: 8798.0000 - tn: 226438.0000 - fn: 13470.0000 - val_loss: 0.5007 - val_accuracy: 0.8210 - val_precision: 0.8497 - val_recall: 0.7929 - val_auc: 0.9576 - val_tp: 15544.0000 - val_fp: 2750.0000 - val_tn: 56059.0000 - val_fn: 4059.0000 - lr: 1.0000e-04\n", "Epoch 30/100\n", "30/30 [==============================] - 1s 18ms/step - loss: 0.3866 - accuracy: 0.8556 - precision: 0.8821 - recall: 0.8267 - auc: 0.9740 - tp: 64820.0000 - fp: 8664.0000 - tn: 226572.0000 - fn: 13592.0000 - val_loss: 0.5009 - val_accuracy: 0.8213 - val_precision: 0.8499 - val_recall: 0.7932 - val_auc: 0.9576 - val_tp: 15549.0000 - val_fp: 2747.0000 - val_tn: 56062.0000 - val_fn: 4054.0000 - lr: 1.0000e-04\n", "Epoch 31/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3861 - accuracy: 0.8555 - precision: 0.8809 - recall: 0.8283 - auc: 0.9740 - tp: 64952.0000 - fp: 8779.0000 - tn: 226457.0000 - fn: 13460.0000 - val_loss: 0.5010 - val_accuracy: 0.8211 - val_precision: 0.8500 - val_recall: 0.7928 - val_auc: 0.9576 - val_tp: 15541.0000 - val_fp: 2742.0000 - val_tn: 56067.0000 - val_fn: 4062.0000 - lr: 1.0000e-04\n", "Epoch 32/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3855 - accuracy: 0.8560 - precision: 0.8810 - recall: 0.8288 - auc: 0.9741 - tp: 64988.0000 - fp: 8780.0000 - tn: 226456.0000 - fn: 13424.0000 - val_loss: 0.5012 - val_accuracy: 0.8216 - val_precision: 0.8500 - val_recall: 0.7933 - val_auc: 0.9576 - val_tp: 15551.0000 - val_fp: 2744.0000 - val_tn: 56065.0000 - val_fn: 4052.0000 - lr: 1.0000e-04\n", "Epoch 33/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3850 - accuracy: 0.8559 
- precision: 0.8812 - recall: 0.8291 - auc: 0.9742 - tp: 65014.0000 - fp: 8768.0000 - tn: 226468.0000 - fn: 13398.0000 - val_loss: 0.5013 - val_accuracy: 0.8217 - val_precision: 0.8499 - val_recall: 0.7933 - val_auc: 0.9576 - val_tp: 15551.0000 - val_fp: 2746.0000 - val_tn: 56063.0000 - val_fn: 4052.0000 - lr: 1.0000e-04\n", "Epoch 34/100\n", "30/30 [==============================] - 1s 18ms/step - loss: 0.3845 - accuracy: 0.8562 - precision: 0.8812 - recall: 0.8293 - auc: 0.9743 - tp: 65026.0000 - fp: 8770.0000 - tn: 226466.0000 - fn: 13386.0000 - val_loss: 0.5015 - val_accuracy: 0.8217 - val_precision: 0.8499 - val_recall: 0.7936 - val_auc: 0.9576 - val_tp: 15556.0000 - val_fp: 2748.0000 - val_tn: 56061.0000 - val_fn: 4047.0000 - lr: 1.0000e-04\n", "Epoch 35/100\n", "30/30 [==============================] - 1s 18ms/step - loss: 0.3839 - accuracy: 0.8565 - precision: 0.8816 - recall: 0.8294 - auc: 0.9743 - tp: 65038.0000 - fp: 8733.0000 - tn: 226503.0000 - fn: 13374.0000 - val_loss: 0.5017 - val_accuracy: 0.8216 - val_precision: 0.8499 - val_recall: 0.7935 - val_auc: 0.9576 - val_tp: 15555.0000 - val_fp: 2747.0000 - val_tn: 56062.0000 - val_fn: 4048.0000 - lr: 1.0000e-04\n", "Epoch 36/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3835 - accuracy: 0.8566 - precision: 0.8816 - recall: 0.8298 - auc: 0.9744 - tp: 65069.0000 - fp: 8741.0000 - tn: 226495.0000 - fn: 13343.0000 - val_loss: 0.5019 - val_accuracy: 0.8219 - val_precision: 0.8498 - val_recall: 0.7937 - val_auc: 0.9576 - val_tp: 15559.0000 - val_fp: 2749.0000 - val_tn: 56060.0000 - val_fn: 4044.0000 - lr: 1.0000e-04\n", "Epoch 37/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3829 - accuracy: 0.8570 - precision: 0.8818 - recall: 0.8301 - auc: 0.9745 - tp: 65087.0000 - fp: 8722.0000 - tn: 226514.0000 - fn: 13325.0000 - val_loss: 0.5021 - val_accuracy: 0.8220 - val_precision: 0.8497 - val_recall: 0.7942 - val_auc: 0.9575 - val_tp: 15569.0000 - val_fp: 2753.0000 - val_tn: 56056.0000 - val_fn: 4034.0000 - lr: 1.0000e-04\n", "Epoch 38/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3824 - accuracy: 0.8569 - precision: 0.8816 - recall: 0.8301 - auc: 0.9745 - tp: 65092.0000 - fp: 8741.0000 - tn: 226495.0000 - fn: 13320.0000 - val_loss: 0.5023 - val_accuracy: 0.8222 - val_precision: 0.8500 - val_recall: 0.7938 - val_auc: 0.9576 - val_tp: 15561.0000 - val_fp: 2746.0000 - val_tn: 56063.0000 - val_fn: 4042.0000 - lr: 1.0000e-04\n", "Epoch 39/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3819 - accuracy: 0.8572 - precision: 0.8821 - recall: 0.8305 - auc: 0.9746 - tp: 65124.0000 - fp: 8706.0000 - tn: 226530.0000 - fn: 13288.0000 - val_loss: 0.5024 - val_accuracy: 0.8225 - val_precision: 0.8501 - val_recall: 0.7942 - val_auc: 0.9575 - val_tp: 15569.0000 - val_fp: 2745.0000 - val_tn: 56064.0000 - val_fn: 4034.0000 - lr: 1.0000e-04\n", "Epoch 40/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3814 - accuracy: 0.8572 - precision: 0.8820 - recall: 0.8305 - auc: 0.9747 - tp: 65122.0000 - fp: 8713.0000 - tn: 226523.0000 - fn: 13290.0000 - val_loss: 0.5027 - val_accuracy: 0.8223 - val_precision: 0.8500 - val_recall: 0.7944 - val_auc: 0.9575 - val_tp: 15572.0000 - val_fp: 2748.0000 - val_tn: 56061.0000 - val_fn: 4031.0000 - lr: 1.0000e-04\n", "Epoch 41/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3809 - accuracy: 0.8573 - precision: 0.8823 - recall: 0.8310 - auc: 0.9747 - tp: 65159.0000 - fp: 
8693.0000 - tn: 226543.0000 - fn: 13253.0000 - val_loss: 0.5028 - val_accuracy: 0.8226 - val_precision: 0.8496 - val_recall: 0.7938 - val_auc: 0.9575 - val_tp: 15561.0000 - val_fp: 2754.0000 - val_tn: 56055.0000 - val_fn: 4042.0000 - lr: 1.0000e-04\n", "Epoch 42/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3804 - accuracy: 0.8579 - precision: 0.8824 - recall: 0.8311 - auc: 0.9748 - tp: 65170.0000 - fp: 8688.0000 - tn: 226548.0000 - fn: 13242.0000 - val_loss: 0.5031 - val_accuracy: 0.8230 - val_precision: 0.8495 - val_recall: 0.7945 - val_auc: 0.9575 - val_tp: 15574.0000 - val_fp: 2759.0000 - val_tn: 56050.0000 - val_fn: 4029.0000 - lr: 1.0000e-04\n", "Epoch 43/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3799 - accuracy: 0.8578 - precision: 0.8825 - recall: 0.8314 - auc: 0.9748 - tp: 65193.0000 - fp: 8682.0000 - tn: 226554.0000 - fn: 13219.0000 - val_loss: 0.5033 - val_accuracy: 0.8229 - val_precision: 0.8496 - val_recall: 0.7946 - val_auc: 0.9574 - val_tp: 15577.0000 - val_fp: 2758.0000 - val_tn: 56051.0000 - val_fn: 4026.0000 - lr: 1.0000e-04\n", "Epoch 44/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3794 - accuracy: 0.8582 - precision: 0.8825 - recall: 0.8316 - auc: 0.9749 - tp: 65208.0000 - fp: 8681.0000 - tn: 226555.0000 - fn: 13204.0000 - val_loss: 0.5034 - val_accuracy: 0.8235 - val_precision: 0.8493 - val_recall: 0.7945 - val_auc: 0.9575 - val_tp: 15575.0000 - val_fp: 2763.0000 - val_tn: 56046.0000 - val_fn: 4028.0000 - lr: 1.0000e-04\n", "Epoch 45/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3789 - accuracy: 0.8583 - precision: 0.8827 - recall: 0.8319 - auc: 0.9750 - tp: 65231.0000 - fp: 8666.0000 - tn: 226570.0000 - fn: 13181.0000 - val_loss: 0.5037 - val_accuracy: 0.8230 - val_precision: 0.8496 - val_recall: 0.7950 - val_auc: 0.9574 - val_tp: 15585.0000 - val_fp: 2760.0000 - val_tn: 56049.0000 - val_fn: 4018.0000 - lr: 1.0000e-04\n", "Epoch 46/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3784 - accuracy: 0.8586 - precision: 0.8830 - recall: 0.8321 - auc: 0.9750 - tp: 65250.0000 - fp: 8644.0000 - tn: 226592.0000 - fn: 13162.0000 - val_loss: 0.5039 - val_accuracy: 0.8236 - val_precision: 0.8496 - val_recall: 0.7948 - val_auc: 0.9574 - val_tp: 15580.0000 - val_fp: 2759.0000 - val_tn: 56050.0000 - val_fn: 4023.0000 - lr: 1.0000e-04\n", "Epoch 47/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3780 - accuracy: 0.8588 - precision: 0.8830 - recall: 0.8324 - auc: 0.9751 - tp: 65268.0000 - fp: 8652.0000 - tn: 226584.0000 - fn: 13144.0000 - val_loss: 0.5041 - val_accuracy: 0.8233 - val_precision: 0.8496 - val_recall: 0.7953 - val_auc: 0.9574 - val_tp: 15591.0000 - val_fp: 2760.0000 - val_tn: 56049.0000 - val_fn: 4012.0000 - lr: 1.0000e-04\n", "Epoch 48/100\n", "30/30 [==============================] - 1s 18ms/step - loss: 0.3775 - accuracy: 0.8588 - precision: 0.8832 - recall: 0.8326 - auc: 0.9751 - tp: 65286.0000 - fp: 8638.0000 - tn: 226598.0000 - fn: 13126.0000 - val_loss: 0.5043 - val_accuracy: 0.8235 - val_precision: 0.8497 - val_recall: 0.7948 - val_auc: 0.9574 - val_tp: 15581.0000 - val_fp: 2756.0000 - val_tn: 56053.0000 - val_fn: 4022.0000 - lr: 1.0000e-04\n", "Epoch 49/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3770 - accuracy: 0.8589 - precision: 0.8831 - recall: 0.8329 - auc: 0.9752 - tp: 65308.0000 - fp: 8641.0000 - tn: 226595.0000 - fn: 13104.0000 - val_loss: 0.5046 - val_accuracy: 
0.8235 - val_precision: 0.8497 - val_recall: 0.7955 - val_auc: 0.9574 - val_tp: 15594.0000 - val_fp: 2758.0000 - val_tn: 56051.0000 - val_fn: 4009.0000 - lr: 1.0000e-04\n", "Epoch 50/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3765 - accuracy: 0.8590 - precision: 0.8831 - recall: 0.8329 - auc: 0.9753 - tp: 65308.0000 - fp: 8644.0000 - tn: 226592.0000 - fn: 13104.0000 - val_loss: 0.5048 - val_accuracy: 0.8236 - val_precision: 0.8499 - val_recall: 0.7950 - val_auc: 0.9574 - val_tp: 15585.0000 - val_fp: 2753.0000 - val_tn: 56056.0000 - val_fn: 4018.0000 - lr: 1.0000e-04\n", "Epoch 51/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3761 - accuracy: 0.8593 - precision: 0.8831 - recall: 0.8331 - auc: 0.9753 - tp: 65328.0000 - fp: 8644.0000 - tn: 226592.0000 - fn: 13084.0000 - val_loss: 0.5050 - val_accuracy: 0.8233 - val_precision: 0.8495 - val_recall: 0.7953 - val_auc: 0.9573 - val_tp: 15591.0000 - val_fp: 2762.0000 - val_tn: 56047.0000 - val_fn: 4012.0000 - lr: 1.0000e-04\n", "Epoch 52/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3756 - accuracy: 0.8594 - precision: 0.8837 - recall: 0.8335 - auc: 0.9754 - tp: 65355.0000 - fp: 8603.0000 - tn: 226633.0000 - fn: 13057.0000 - val_loss: 0.5052 - val_accuracy: 0.8236 - val_precision: 0.8496 - val_recall: 0.7957 - val_auc: 0.9573 - val_tp: 15598.0000 - val_fp: 2761.0000 - val_tn: 56048.0000 - val_fn: 4005.0000 - lr: 1.0000e-04\n", "Epoch 53/100\n", "30/30 [==============================] - 1s 22ms/step - loss: 0.3751 - accuracy: 0.8594 - precision: 0.8835 - recall: 0.8341 - auc: 0.9754 - tp: 65406.0000 - fp: 8625.0000 - tn: 226611.0000 - fn: 13006.0000 - val_loss: 0.5055 - val_accuracy: 0.8238 - val_precision: 0.8495 - val_recall: 0.7960 - val_auc: 0.9573 - val_tp: 15604.0000 - val_fp: 2764.0000 - val_tn: 56045.0000 - val_fn: 3999.0000 - lr: 1.0000e-04\n", "Epoch 54/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3747 - accuracy: 0.8597 - precision: 0.8838 - recall: 0.8343 - auc: 0.9755 - tp: 65419.0000 - fp: 8601.0000 - tn: 226635.0000 - fn: 12993.0000 - val_loss: 0.5058 - val_accuracy: 0.8235 - val_precision: 0.8496 - val_recall: 0.7958 - val_auc: 0.9573 - val_tp: 15601.0000 - val_fp: 2761.0000 - val_tn: 56048.0000 - val_fn: 4002.0000 - lr: 1.0000e-04\n", "Epoch 55/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3742 - accuracy: 0.8599 - precision: 0.8839 - recall: 0.8344 - auc: 0.9756 - tp: 65430.0000 - fp: 8593.0000 - tn: 226643.0000 - fn: 12982.0000 - val_loss: 0.5060 - val_accuracy: 0.8240 - val_precision: 0.8496 - val_recall: 0.7955 - val_auc: 0.9573 - val_tp: 15594.0000 - val_fp: 2761.0000 - val_tn: 56048.0000 - val_fn: 4009.0000 - lr: 1.0000e-04\n", "Epoch 56/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3737 - accuracy: 0.8599 - precision: 0.8840 - recall: 0.8346 - auc: 0.9756 - tp: 65439.0000 - fp: 8591.0000 - tn: 226645.0000 - fn: 12973.0000 - val_loss: 0.5062 - val_accuracy: 0.8232 - val_precision: 0.8494 - val_recall: 0.7961 - val_auc: 0.9573 - val_tp: 15606.0000 - val_fp: 2766.0000 - val_tn: 56043.0000 - val_fn: 3997.0000 - lr: 1.0000e-04\n", "Epoch 57/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3733 - accuracy: 0.8603 - precision: 0.8842 - recall: 0.8353 - auc: 0.9757 - tp: 65497.0000 - fp: 8574.0000 - tn: 226662.0000 - fn: 12915.0000 - val_loss: 0.5064 - val_accuracy: 0.8235 - val_precision: 0.8496 - val_recall: 0.7965 - val_auc: 0.9572 - val_tp: 
15614.0000 - val_fp: 2764.0000 - val_tn: 56045.0000 - val_fn: 3989.0000 - lr: 1.0000e-04\n", "Epoch 58/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3728 - accuracy: 0.8604 - precision: 0.8843 - recall: 0.8352 - auc: 0.9758 - tp: 65489.0000 - fp: 8566.0000 - tn: 226670.0000 - fn: 12923.0000 - val_loss: 0.5067 - val_accuracy: 0.8235 - val_precision: 0.8494 - val_recall: 0.7961 - val_auc: 0.9572 - val_tp: 15606.0000 - val_fp: 2768.0000 - val_tn: 56041.0000 - val_fn: 3997.0000 - lr: 1.0000e-04\n", "Epoch 59/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3724 - accuracy: 0.8604 - precision: 0.8843 - recall: 0.8356 - auc: 0.9758 - tp: 65523.0000 - fp: 8569.0000 - tn: 226667.0000 - fn: 12889.0000 - val_loss: 0.5070 - val_accuracy: 0.8238 - val_precision: 0.8494 - val_recall: 0.7968 - val_auc: 0.9572 - val_tp: 15619.0000 - val_fp: 2769.0000 - val_tn: 56040.0000 - val_fn: 3984.0000 - lr: 1.0000e-04\n", "Epoch 60/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3719 - accuracy: 0.8605 - precision: 0.8846 - recall: 0.8357 - auc: 0.9759 - tp: 65531.0000 - fp: 8547.0000 - tn: 226689.0000 - fn: 12881.0000 - val_loss: 0.5071 - val_accuracy: 0.8233 - val_precision: 0.8496 - val_recall: 0.7964 - val_auc: 0.9572 - val_tp: 15611.0000 - val_fp: 2763.0000 - val_tn: 56046.0000 - val_fn: 3992.0000 - lr: 1.0000e-04\n", "Epoch 61/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3715 - accuracy: 0.8606 - precision: 0.8847 - recall: 0.8357 - auc: 0.9759 - tp: 65527.0000 - fp: 8540.0000 - tn: 226696.0000 - fn: 12885.0000 - val_loss: 0.5074 - val_accuracy: 0.8235 - val_precision: 0.8496 - val_recall: 0.7966 - val_auc: 0.9572 - val_tp: 15616.0000 - val_fp: 2765.0000 - val_tn: 56044.0000 - val_fn: 3987.0000 - lr: 1.0000e-04\n", "Epoch 62/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3710 - accuracy: 0.8608 - precision: 0.8848 - recall: 0.8362 - auc: 0.9760 - tp: 65571.0000 - fp: 8539.0000 - tn: 226697.0000 - fn: 12841.0000 - val_loss: 0.5077 - val_accuracy: 0.8232 - val_precision: 0.8494 - val_recall: 0.7967 - val_auc: 0.9572 - val_tp: 15618.0000 - val_fp: 2769.0000 - val_tn: 56040.0000 - val_fn: 3985.0000 - lr: 1.0000e-04\n", "Epoch 63/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3706 - accuracy: 0.8610 - precision: 0.8848 - recall: 0.8363 - auc: 0.9760 - tp: 65579.0000 - fp: 8537.0000 - tn: 226699.0000 - fn: 12833.0000 - val_loss: 0.5078 - val_accuracy: 0.8233 - val_precision: 0.8494 - val_recall: 0.7967 - val_auc: 0.9571 - val_tp: 15618.0000 - val_fp: 2769.0000 - val_tn: 56040.0000 - val_fn: 3985.0000 - lr: 1.0000e-04\n", "Epoch 64/100\n", "30/30 [==============================] - 1s 26ms/step - loss: 0.3702 - accuracy: 0.8612 - precision: 0.8849 - recall: 0.8363 - auc: 0.9761 - tp: 65579.0000 - fp: 8534.0000 - tn: 226702.0000 - fn: 12833.0000 - val_loss: 0.5082 - val_accuracy: 0.8235 - val_precision: 0.8493 - val_recall: 0.7964 - val_auc: 0.9571 - val_tp: 15612.0000 - val_fp: 2771.0000 - val_tn: 56038.0000 - val_fn: 3991.0000 - lr: 1.0000e-04\n", "Epoch 65/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3697 - accuracy: 0.8617 - precision: 0.8853 - recall: 0.8366 - auc: 0.9761 - tp: 65601.0000 - fp: 8501.0000 - tn: 226735.0000 - fn: 12811.0000 - val_loss: 0.5084 - val_accuracy: 0.8241 - val_precision: 0.8499 - val_recall: 0.7969 - val_auc: 0.9572 - val_tp: 15622.0000 - val_fp: 2758.0000 - val_tn: 56051.0000 - val_fn: 3981.0000 - lr: 
1.0000e-04\n", "Epoch 66/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3692 - accuracy: 0.8616 - precision: 0.8853 - recall: 0.8368 - auc: 0.9762 - tp: 65615.0000 - fp: 8497.0000 - tn: 226739.0000 - fn: 12797.0000 - val_loss: 0.5087 - val_accuracy: 0.8236 - val_precision: 0.8493 - val_recall: 0.7971 - val_auc: 0.9571 - val_tp: 15625.0000 - val_fp: 2772.0000 - val_tn: 56037.0000 - val_fn: 3978.0000 - lr: 1.0000e-04\n", "Epoch 67/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3688 - accuracy: 0.8617 - precision: 0.8852 - recall: 0.8371 - auc: 0.9762 - tp: 65639.0000 - fp: 8512.0000 - tn: 226724.0000 - fn: 12773.0000 - val_loss: 0.5089 - val_accuracy: 0.8234 - val_precision: 0.8496 - val_recall: 0.7972 - val_auc: 0.9571 - val_tp: 15628.0000 - val_fp: 2767.0000 - val_tn: 56042.0000 - val_fn: 3975.0000 - lr: 1.0000e-04\n", "Epoch 68/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3684 - accuracy: 0.8618 - precision: 0.8854 - recall: 0.8374 - auc: 0.9763 - tp: 65665.0000 - fp: 8500.0000 - tn: 226736.0000 - fn: 12747.0000 - val_loss: 0.5092 - val_accuracy: 0.8235 - val_precision: 0.8493 - val_recall: 0.7969 - val_auc: 0.9571 - val_tp: 15621.0000 - val_fp: 2772.0000 - val_tn: 56037.0000 - val_fn: 3982.0000 - lr: 1.0000e-04\n", "Epoch 69/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3679 - accuracy: 0.8620 - precision: 0.8855 - recall: 0.8375 - auc: 0.9764 - tp: 65672.0000 - fp: 8492.0000 - tn: 226744.0000 - fn: 12740.0000 - val_loss: 0.5094 - val_accuracy: 0.8235 - val_precision: 0.8496 - val_recall: 0.7970 - val_auc: 0.9571 - val_tp: 15624.0000 - val_fp: 2765.0000 - val_tn: 56044.0000 - val_fn: 3979.0000 - lr: 1.0000e-04\n", "Epoch 70/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3675 - accuracy: 0.8620 - precision: 0.8859 - recall: 0.8376 - auc: 0.9764 - tp: 65679.0000 - fp: 8461.0000 - tn: 226775.0000 - fn: 12733.0000 - val_loss: 0.5097 - val_accuracy: 0.8238 - val_precision: 0.8493 - val_recall: 0.7967 - val_auc: 0.9570 - val_tp: 15618.0000 - val_fp: 2772.0000 - val_tn: 56037.0000 - val_fn: 3985.0000 - lr: 1.0000e-04\n", "Epoch 71/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3671 - accuracy: 0.8624 - precision: 0.8858 - recall: 0.8379 - auc: 0.9764 - tp: 65703.0000 - fp: 8473.0000 - tn: 226763.0000 - fn: 12709.0000 - val_loss: 0.5100 - val_accuracy: 0.8236 - val_precision: 0.8496 - val_recall: 0.7968 - val_auc: 0.9570 - val_tp: 15620.0000 - val_fp: 2765.0000 - val_tn: 56044.0000 - val_fn: 3983.0000 - lr: 1.0000e-04\n", "Epoch 72/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3667 - accuracy: 0.8625 - precision: 0.8861 - recall: 0.8381 - auc: 0.9765 - tp: 65716.0000 - fp: 8447.0000 - tn: 226789.0000 - fn: 12696.0000 - val_loss: 0.5102 - val_accuracy: 0.8238 - val_precision: 0.8493 - val_recall: 0.7972 - val_auc: 0.9570 - val_tp: 15628.0000 - val_fp: 2772.0000 - val_tn: 56037.0000 - val_fn: 3975.0000 - lr: 1.0000e-04\n", "Epoch 73/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3662 - accuracy: 0.8625 - precision: 0.8862 - recall: 0.8383 - auc: 0.9766 - tp: 65731.0000 - fp: 8443.0000 - tn: 226793.0000 - fn: 12681.0000 - val_loss: 0.5105 - val_accuracy: 0.8236 - val_precision: 0.8493 - val_recall: 0.7975 - val_auc: 0.9570 - val_tp: 15634.0000 - val_fp: 2775.0000 - val_tn: 56034.0000 - val_fn: 3969.0000 - lr: 1.0000e-04\n", "Epoch 74/100\n", "30/30 [==============================] - 1s 
18ms/step - loss: 0.3658 - accuracy: 0.8626 - precision: 0.8861 - recall: 0.8385 - auc: 0.9766 - tp: 65751.0000 - fp: 8452.0000 - tn: 226784.0000 - fn: 12661.0000 - val_loss: 0.5108 - val_accuracy: 0.8235 - val_precision: 0.8494 - val_recall: 0.7975 - val_auc: 0.9570 - val_tp: 15634.0000 - val_fp: 2772.0000 - val_tn: 56037.0000 - val_fn: 3969.0000 - lr: 1.0000e-04\n", "Epoch 75/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3654 - accuracy: 0.8628 - precision: 0.8864 - recall: 0.8388 - auc: 0.9767 - tp: 65771.0000 - fp: 8428.0000 - tn: 226808.0000 - fn: 12641.0000 - val_loss: 0.5110 - val_accuracy: 0.8239 - val_precision: 0.8491 - val_recall: 0.7977 - val_auc: 0.9570 - val_tp: 15638.0000 - val_fp: 2779.0000 - val_tn: 56030.0000 - val_fn: 3965.0000 - lr: 1.0000e-04\n", "Epoch 76/100\n", "30/30 [==============================] - 1s 18ms/step - loss: 0.3650 - accuracy: 0.8628 - precision: 0.8863 - recall: 0.8388 - auc: 0.9767 - tp: 65773.0000 - fp: 8435.0000 - tn: 226801.0000 - fn: 12639.0000 - val_loss: 0.5113 - val_accuracy: 0.8230 - val_precision: 0.8489 - val_recall: 0.7968 - val_auc: 0.9569 - val_tp: 15619.0000 - val_fp: 2781.0000 - val_tn: 56028.0000 - val_fn: 3984.0000 - lr: 1.0000e-04\n", "Epoch 77/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3646 - accuracy: 0.8632 - precision: 0.8867 - recall: 0.8392 - auc: 0.9768 - tp: 65801.0000 - fp: 8412.0000 - tn: 226824.0000 - fn: 12611.0000 - val_loss: 0.5116 - val_accuracy: 0.8232 - val_precision: 0.8491 - val_recall: 0.7979 - val_auc: 0.9569 - val_tp: 15641.0000 - val_fp: 2779.0000 - val_tn: 56030.0000 - val_fn: 3962.0000 - lr: 1.0000e-04\n", "Epoch 78/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3642 - accuracy: 0.8630 - precision: 0.8868 - recall: 0.8392 - auc: 0.9768 - tp: 65806.0000 - fp: 8404.0000 - tn: 226832.0000 - fn: 12606.0000 - val_loss: 0.5119 - val_accuracy: 0.8233 - val_precision: 0.8487 - val_recall: 0.7976 - val_auc: 0.9569 - val_tp: 15635.0000 - val_fp: 2787.0000 - val_tn: 56022.0000 - val_fn: 3968.0000 - lr: 1.0000e-04\n", "Epoch 79/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3637 - accuracy: 0.8633 - precision: 0.8870 - recall: 0.8393 - auc: 0.9769 - tp: 65809.0000 - fp: 8385.0000 - tn: 226851.0000 - fn: 12603.0000 - val_loss: 0.5121 - val_accuracy: 0.8235 - val_precision: 0.8486 - val_recall: 0.7977 - val_auc: 0.9569 - val_tp: 15638.0000 - val_fp: 2791.0000 - val_tn: 56018.0000 - val_fn: 3965.0000 - lr: 1.0000e-04\n", "Epoch 80/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3633 - accuracy: 0.8633 - precision: 0.8870 - recall: 0.8396 - auc: 0.9769 - tp: 65831.0000 - fp: 8387.0000 - tn: 226849.0000 - fn: 12581.0000 - val_loss: 0.5124 - val_accuracy: 0.8227 - val_precision: 0.8488 - val_recall: 0.7970 - val_auc: 0.9568 - val_tp: 15624.0000 - val_fp: 2784.0000 - val_tn: 56025.0000 - val_fn: 3979.0000 - lr: 1.0000e-04\n", "Epoch 81/100\n", "30/30 [==============================] - 1s 18ms/step - loss: 0.3629 - accuracy: 0.8636 - precision: 0.8871 - recall: 0.8399 - auc: 0.9770 - tp: 65861.0000 - fp: 8381.0000 - tn: 226855.0000 - fn: 12551.0000 - val_loss: 0.5127 - val_accuracy: 0.8227 - val_precision: 0.8484 - val_recall: 0.7969 - val_auc: 0.9568 - val_tp: 15622.0000 - val_fp: 2792.0000 - val_tn: 56017.0000 - val_fn: 3981.0000 - lr: 1.0000e-04\n", "Epoch 82/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3625 - accuracy: 0.8635 - precision: 0.8870 - recall: 0.8399 - 
auc: 0.9770 - tp: 65856.0000 - fp: 8392.0000 - tn: 226844.0000 - fn: 12556.0000 - val_loss: 0.5130 - val_accuracy: 0.8230 - val_precision: 0.8486 - val_recall: 0.7976 - val_auc: 0.9568 - val_tp: 15635.0000 - val_fp: 2789.0000 - val_tn: 56020.0000 - val_fn: 3968.0000 - lr: 1.0000e-04\n", "Epoch 83/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3621 - accuracy: 0.8637 - precision: 0.8873 - recall: 0.8401 - auc: 0.9771 - tp: 65876.0000 - fp: 8367.0000 - tn: 226869.0000 - fn: 12536.0000 - val_loss: 0.5132 - val_accuracy: 0.8227 - val_precision: 0.8486 - val_recall: 0.7979 - val_auc: 0.9568 - val_tp: 15642.0000 - val_fp: 2791.0000 - val_tn: 56018.0000 - val_fn: 3961.0000 - lr: 1.0000e-04\n", "Epoch 84/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3617 - accuracy: 0.8640 - precision: 0.8874 - recall: 0.8402 - auc: 0.9771 - tp: 65881.0000 - fp: 8360.0000 - tn: 226876.0000 - fn: 12531.0000 - val_loss: 0.5135 - val_accuracy: 0.8227 - val_precision: 0.8482 - val_recall: 0.7978 - val_auc: 0.9568 - val_tp: 15640.0000 - val_fp: 2799.0000 - val_tn: 56010.0000 - val_fn: 3963.0000 - lr: 1.0000e-04\n", "Epoch 85/100\n", "30/30 [==============================] - 1s 18ms/step - loss: 0.3613 - accuracy: 0.8639 - precision: 0.8876 - recall: 0.8406 - auc: 0.9772 - tp: 65910.0000 - fp: 8346.0000 - tn: 226890.0000 - fn: 12502.0000 - val_loss: 0.5138 - val_accuracy: 0.8226 - val_precision: 0.8486 - val_recall: 0.7977 - val_auc: 0.9568 - val_tp: 15638.0000 - val_fp: 2790.0000 - val_tn: 56019.0000 - val_fn: 3965.0000 - lr: 1.0000e-04\n", "Epoch 86/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3609 - accuracy: 0.8641 - precision: 0.8877 - recall: 0.8407 - auc: 0.9772 - tp: 65924.0000 - fp: 8342.0000 - tn: 226894.0000 - fn: 12488.0000 - val_loss: 0.5141 - val_accuracy: 0.8224 - val_precision: 0.8483 - val_recall: 0.7979 - val_auc: 0.9567 - val_tp: 15642.0000 - val_fp: 2797.0000 - val_tn: 56012.0000 - val_fn: 3961.0000 - lr: 1.0000e-04\n", "Epoch 87/100\n", "30/30 [==============================] - 1s 22ms/step - loss: 0.3605 - accuracy: 0.8642 - precision: 0.8876 - recall: 0.8408 - auc: 0.9773 - tp: 65927.0000 - fp: 8350.0000 - tn: 226886.0000 - fn: 12485.0000 - val_loss: 0.5143 - val_accuracy: 0.8225 - val_precision: 0.8485 - val_recall: 0.7983 - val_auc: 0.9568 - val_tp: 15649.0000 - val_fp: 2795.0000 - val_tn: 56014.0000 - val_fn: 3954.0000 - lr: 1.0000e-04\n", "Epoch 88/100\n", "30/30 [==============================] - 1s 22ms/step - loss: 0.3601 - accuracy: 0.8644 - precision: 0.8877 - recall: 0.8412 - auc: 0.9773 - tp: 65960.0000 - fp: 8346.0000 - tn: 226890.0000 - fn: 12452.0000 - val_loss: 0.5147 - val_accuracy: 0.8225 - val_precision: 0.8480 - val_recall: 0.7983 - val_auc: 0.9567 - val_tp: 15649.0000 - val_fp: 2805.0000 - val_tn: 56004.0000 - val_fn: 3954.0000 - lr: 1.0000e-04\n", "Epoch 89/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3597 - accuracy: 0.8644 - precision: 0.8881 - recall: 0.8412 - auc: 0.9774 - tp: 65958.0000 - fp: 8310.0000 - tn: 226926.0000 - fn: 12454.0000 - val_loss: 0.5150 - val_accuracy: 0.8230 - val_precision: 0.8486 - val_recall: 0.7986 - val_auc: 0.9567 - val_tp: 15654.0000 - val_fp: 2793.0000 - val_tn: 56016.0000 - val_fn: 3949.0000 - lr: 1.0000e-04\n", "Epoch 90/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3593 - accuracy: 0.8647 - precision: 0.8878 - recall: 0.8414 - auc: 0.9774 - tp: 65977.0000 - fp: 8335.0000 - tn: 226901.0000 - fn: 12435.0000 - 
val_loss: 0.5153 - val_accuracy: 0.8226 - val_precision: 0.8482 - val_recall: 0.7982 - val_auc: 0.9566 - val_tp: 15647.0000 - val_fp: 2800.0000 - val_tn: 56009.0000 - val_fn: 3956.0000 - lr: 1.0000e-04\n", "Epoch 91/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3589 - accuracy: 0.8647 - precision: 0.8884 - recall: 0.8416 - auc: 0.9775 - tp: 65994.0000 - fp: 8287.0000 - tn: 226949.0000 - fn: 12418.0000 - val_loss: 0.5156 - val_accuracy: 0.8227 - val_precision: 0.8483 - val_recall: 0.7979 - val_auc: 0.9566 - val_tp: 15642.0000 - val_fp: 2797.0000 - val_tn: 56012.0000 - val_fn: 3961.0000 - lr: 1.0000e-04\n", "Epoch 92/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3585 - accuracy: 0.8649 - precision: 0.8884 - recall: 0.8416 - auc: 0.9775 - tp: 65989.0000 - fp: 8288.0000 - tn: 226948.0000 - fn: 12423.0000 - val_loss: 0.5159 - val_accuracy: 0.8226 - val_precision: 0.8483 - val_recall: 0.7982 - val_auc: 0.9566 - val_tp: 15648.0000 - val_fp: 2799.0000 - val_tn: 56010.0000 - val_fn: 3955.0000 - lr: 1.0000e-04\n", "Epoch 93/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3581 - accuracy: 0.8650 - precision: 0.8884 - recall: 0.8419 - auc: 0.9776 - tp: 66014.0000 - fp: 8290.0000 - tn: 226946.0000 - fn: 12398.0000 - val_loss: 0.5160 - val_accuracy: 0.8224 - val_precision: 0.8484 - val_recall: 0.7983 - val_auc: 0.9566 - val_tp: 15650.0000 - val_fp: 2797.0000 - val_tn: 56012.0000 - val_fn: 3953.0000 - lr: 1.0000e-04\n", "Epoch 94/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3577 - accuracy: 0.8653 - precision: 0.8885 - recall: 0.8421 - auc: 0.9776 - tp: 66028.0000 - fp: 8284.0000 - tn: 226952.0000 - fn: 12384.0000 - val_loss: 0.5165 - val_accuracy: 0.8227 - val_precision: 0.8479 - val_recall: 0.7987 - val_auc: 0.9566 - val_tp: 15656.0000 - val_fp: 2808.0000 - val_tn: 56001.0000 - val_fn: 3947.0000 - lr: 1.0000e-04\n", "Epoch 95/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3573 - accuracy: 0.8653 - precision: 0.8885 - recall: 0.8421 - auc: 0.9776 - tp: 66034.0000 - fp: 8284.0000 - tn: 226952.0000 - fn: 12378.0000 - val_loss: 0.5167 - val_accuracy: 0.8222 - val_precision: 0.8478 - val_recall: 0.7978 - val_auc: 0.9565 - val_tp: 15639.0000 - val_fp: 2808.0000 - val_tn: 56001.0000 - val_fn: 3964.0000 - lr: 1.0000e-04\n", "Epoch 96/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3569 - accuracy: 0.8655 - precision: 0.8887 - recall: 0.8421 - auc: 0.9777 - tp: 66031.0000 - fp: 8266.0000 - tn: 226970.0000 - fn: 12381.0000 - val_loss: 0.5171 - val_accuracy: 0.8227 - val_precision: 0.8480 - val_recall: 0.7987 - val_auc: 0.9565 - val_tp: 15656.0000 - val_fp: 2806.0000 - val_tn: 56003.0000 - val_fn: 3947.0000 - lr: 1.0000e-04\n", "Epoch 97/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3565 - accuracy: 0.8660 - precision: 0.8890 - recall: 0.8425 - auc: 0.9777 - tp: 66065.0000 - fp: 8247.0000 - tn: 226989.0000 - fn: 12347.0000 - val_loss: 0.5173 - val_accuracy: 0.8230 - val_precision: 0.8481 - val_recall: 0.7987 - val_auc: 0.9565 - val_tp: 15657.0000 - val_fp: 2804.0000 - val_tn: 56005.0000 - val_fn: 3946.0000 - lr: 1.0000e-04\n", "Epoch 98/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3561 - accuracy: 0.8658 - precision: 0.8891 - recall: 0.8426 - auc: 0.9778 - tp: 66069.0000 - fp: 8239.0000 - tn: 226997.0000 - fn: 12343.0000 - val_loss: 0.5176 - val_accuracy: 0.8228 - val_precision: 0.8482 - val_recall: 
0.7986 - val_auc: 0.9564 - val_tp: 15654.0000 - val_fp: 2802.0000 - val_tn: 56007.0000 - val_fn: 3949.0000 - lr: 1.0000e-04\n", "Epoch 99/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3558 - accuracy: 0.8659 - precision: 0.8891 - recall: 0.8425 - auc: 0.9778 - tp: 66064.0000 - fp: 8243.0000 - tn: 226993.0000 - fn: 12348.0000 - val_loss: 0.5179 - val_accuracy: 0.8227 - val_precision: 0.8476 - val_recall: 0.7982 - val_auc: 0.9564 - val_tp: 15647.0000 - val_fp: 2814.0000 - val_tn: 55995.0000 - val_fn: 3956.0000 - lr: 1.0000e-04\n", "Epoch 100/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3554 - accuracy: 0.8661 - precision: 0.8890 - recall: 0.8425 - auc: 0.9779 - tp: 66066.0000 - fp: 8250.0000 - tn: 226986.0000 - fn: 12346.0000 - val_loss: 0.5182 - val_accuracy: 0.8227 - val_precision: 0.8479 - val_recall: 0.7988 - val_auc: 0.9564 - val_tp: 15659.0000 - val_fp: 2809.0000 - val_tn: 56000.0000 - val_fn: 3944.0000 - lr: 1.0000e-04\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2024-06-08 13:59:32,822] Trial 3 finished with value: 0.8176215887069702 and parameters: {'num_filters': 156, 'kernel_size': 3, 'learning_rate': 0.0006131200612586219}. Best is trial 0 with value: 0.819294810295105.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Loss: 0.517291784286499\n", "Accuracy: 0.8176215887069702\n", "Precision: 0.8432790637016296\n", "Recall: 0.7946865558624268\n", "AUC: 0.9564386010169983\n", "True Positives: 19473.0\n", "False Positives: 3619.0\n", "True Negatives: 69893.0\n", "False Negatives: 5031.0\n", "Epoch 1/100\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "C:\\Users\\Michał\\AppData\\Local\\Temp\\ipykernel_33252\\265862631.py:5: FutureWarning: suggest_loguniform has been deprecated in v3.0.0. This feature will be removed in v6.0.0. See https://github.com/optuna/optuna/releases/tag/v3.0.0. 
Use suggest_float(..., log=True) instead.\n", " learning_rate = trial.suggest_loguniform('learning_rate', 1e-4, 1e-2)\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "30/30 [==============================] - 3s 50ms/step - loss: 1.1794 - accuracy: 0.5739 - precision: 0.8297 - recall: 0.1232 - auc: 0.7761 - tp: 9664.0000 - fp: 1984.0000 - tn: 233252.0000 - fn: 68748.0000 - val_loss: 0.8664 - val_accuracy: 0.6758 - val_precision: 0.7996 - val_recall: 0.5232 - val_auc: 0.8750 - val_tp: 10257.0000 - val_fp: 2570.0000 - val_tn: 56239.0000 - val_fn: 9346.0000 - lr: 0.0026\n", "Epoch 2/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.6760 - accuracy: 0.7388 - precision: 0.8258 - recall: 0.6619 - auc: 0.9236 - tp: 51902.0000 - fp: 10946.0000 - tn: 224290.0000 - fn: 26510.0000 - val_loss: 0.5842 - val_accuracy: 0.7770 - val_precision: 0.8348 - val_recall: 0.7061 - val_auc: 0.9421 - val_tp: 13842.0000 - val_fp: 2739.0000 - val_tn: 56070.0000 - val_fn: 5761.0000 - lr: 0.0026\n", "Epoch 3/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.5043 - accuracy: 0.8162 - precision: 0.8599 - recall: 0.7563 - auc: 0.9568 - tp: 59300.0000 - fp: 9665.0000 - tn: 225571.0000 - fn: 19112.0000 - val_loss: 0.5056 - val_accuracy: 0.8136 - val_precision: 0.8510 - val_recall: 0.7759 - val_auc: 0.9560 - val_tp: 15210.0000 - val_fp: 2664.0000 - val_tn: 56145.0000 - val_fn: 4393.0000 - lr: 0.0026\n", "Epoch 4/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.4368 - accuracy: 0.8396 - precision: 0.8701 - recall: 0.8068 - auc: 0.9670 - tp: 63264.0000 - fp: 9447.0000 - tn: 225789.0000 - fn: 15148.0000 - val_loss: 0.4854 - val_accuracy: 0.8208 - val_precision: 0.8507 - val_recall: 0.7904 - val_auc: 0.9594 - val_tp: 15495.0000 - val_fp: 2719.0000 - val_tn: 56090.0000 - val_fn: 4108.0000 - lr: 0.0026\n", "Epoch 5/100\n", "30/30 [==============================] - 1s 22ms/step - loss: 0.4057 - accuracy: 0.8494 - precision: 0.8761 - recall: 0.8224 - auc: 0.9713 - tp: 64484.0000 - fp: 9121.0000 - tn: 226115.0000 - fn: 13928.0000 - val_loss: 0.4810 - val_accuracy: 0.8220 - val_precision: 0.8525 - val_recall: 0.7944 - val_auc: 0.9603 - val_tp: 15572.0000 - val_fp: 2695.0000 - val_tn: 56114.0000 - val_fn: 4031.0000 - lr: 0.0026\n", "Epoch 6/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3856 - accuracy: 0.8560 - precision: 0.8806 - recall: 0.8309 - auc: 0.9740 - tp: 65152.0000 - fp: 8838.0000 - tn: 226398.0000 - fn: 13260.0000 - val_loss: 0.4865 - val_accuracy: 0.8211 - val_precision: 0.8538 - val_recall: 0.7912 - val_auc: 0.9596 - val_tp: 15509.0000 - val_fp: 2655.0000 - val_tn: 56154.0000 - val_fn: 4094.0000 - lr: 0.0026\n", "Epoch 7/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3702 - accuracy: 0.8631 - precision: 0.8860 - recall: 0.8391 - auc: 0.9760 - tp: 65796.0000 - fp: 8467.0000 - tn: 226769.0000 - fn: 12616.0000 - val_loss: 0.4915 - val_accuracy: 0.8225 - val_precision: 0.8515 - val_recall: 0.7935 - val_auc: 0.9592 - val_tp: 15555.0000 - val_fp: 2712.0000 - val_tn: 56097.0000 - val_fn: 4048.0000 - lr: 0.0026\n", "Epoch 8/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3573 - accuracy: 0.8664 - precision: 0.8897 - recall: 0.8439 - auc: 0.9776 - tp: 66173.0000 - fp: 8202.0000 - tn: 227034.0000 - fn: 12239.0000 - val_loss: 0.4998 - val_accuracy: 0.8209 - val_precision: 0.8455 - val_recall: 0.7980 - val_auc: 0.9584 - val_tp: 15643.0000 - val_fp: 2858.0000 - val_tn: 
55951.0000 - val_fn: 3960.0000 - lr: 0.0026\n", "Epoch 9/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3379 - accuracy: 0.8749 - precision: 0.8966 - recall: 0.8527 - auc: 0.9800 - tp: 66864.0000 - fp: 7708.0000 - tn: 227528.0000 - fn: 11548.0000 - val_loss: 0.4999 - val_accuracy: 0.8213 - val_precision: 0.8454 - val_recall: 0.8006 - val_auc: 0.9584 - val_tp: 15694.0000 - val_fp: 2871.0000 - val_tn: 55938.0000 - val_fn: 3909.0000 - lr: 5.2930e-04\n", "Epoch 10/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3342 - accuracy: 0.8763 - precision: 0.8959 - recall: 0.8558 - auc: 0.9804 - tp: 67102.0000 - fp: 7799.0000 - tn: 227437.0000 - fn: 11310.0000 - val_loss: 0.5021 - val_accuracy: 0.8209 - val_precision: 0.8448 - val_recall: 0.8015 - val_auc: 0.9584 - val_tp: 15711.0000 - val_fp: 2886.0000 - val_tn: 55923.0000 - val_fn: 3892.0000 - lr: 5.2930e-04\n", "Epoch 11/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3319 - accuracy: 0.8770 - precision: 0.8976 - recall: 0.8562 - auc: 0.9806 - tp: 67138.0000 - fp: 7662.0000 - tn: 227574.0000 - fn: 11274.0000 - val_loss: 0.5048 - val_accuracy: 0.8204 - val_precision: 0.8446 - val_recall: 0.7999 - val_auc: 0.9580 - val_tp: 15680.0000 - val_fp: 2884.0000 - val_tn: 55925.0000 - val_fn: 3923.0000 - lr: 5.2930e-04\n", "Epoch 12/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3278 - accuracy: 0.8782 - precision: 0.8986 - recall: 0.8575 - auc: 0.9811 - tp: 67236.0000 - fp: 7584.0000 - tn: 227652.0000 - fn: 11176.0000 - val_loss: 0.5053 - val_accuracy: 0.8203 - val_precision: 0.8439 - val_recall: 0.8005 - val_auc: 0.9580 - val_tp: 15693.0000 - val_fp: 2903.0000 - val_tn: 55906.0000 - val_fn: 3910.0000 - lr: 1.0586e-04\n", "Epoch 13/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3273 - accuracy: 0.8782 - precision: 0.8983 - recall: 0.8580 - auc: 0.9812 - tp: 67276.0000 - fp: 7616.0000 - tn: 227620.0000 - fn: 11136.0000 - val_loss: 0.5059 - val_accuracy: 0.8201 - val_precision: 0.8440 - val_recall: 0.8007 - val_auc: 0.9579 - val_tp: 15696.0000 - val_fp: 2901.0000 - val_tn: 55908.0000 - val_fn: 3907.0000 - lr: 1.0586e-04\n", "Epoch 14/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3268 - accuracy: 0.8783 - precision: 0.8984 - recall: 0.8582 - auc: 0.9812 - tp: 67296.0000 - fp: 7613.0000 - tn: 227623.0000 - fn: 11116.0000 - val_loss: 0.5064 - val_accuracy: 0.8202 - val_precision: 0.8435 - val_recall: 0.8006 - val_auc: 0.9579 - val_tp: 15694.0000 - val_fp: 2912.0000 - val_tn: 55897.0000 - val_fn: 3909.0000 - lr: 1.0586e-04\n", "Epoch 15/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3263 - accuracy: 0.8783 - precision: 0.8987 - recall: 0.8583 - auc: 0.9813 - tp: 67301.0000 - fp: 7587.0000 - tn: 227649.0000 - fn: 11111.0000 - val_loss: 0.5070 - val_accuracy: 0.8203 - val_precision: 0.8442 - val_recall: 0.8003 - val_auc: 0.9578 - val_tp: 15688.0000 - val_fp: 2895.0000 - val_tn: 55914.0000 - val_fn: 3915.0000 - lr: 1.0000e-04\n", "Epoch 16/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3259 - accuracy: 0.8786 - precision: 0.8989 - recall: 0.8584 - auc: 0.9813 - tp: 67307.0000 - fp: 7570.0000 - tn: 227666.0000 - fn: 11105.0000 - val_loss: 0.5076 - val_accuracy: 0.8199 - val_precision: 0.8441 - val_recall: 0.8006 - val_auc: 0.9578 - val_tp: 15695.0000 - val_fp: 2898.0000 - val_tn: 55911.0000 - val_fn: 3908.0000 - lr: 1.0000e-04\n", "Epoch 17/100\n", "30/30 
[==============================] - 1s 19ms/step - loss: 0.3255 - accuracy: 0.8785 - precision: 0.8986 - recall: 0.8585 - auc: 0.9814 - tp: 67316.0000 - fp: 7593.0000 - tn: 227643.0000 - fn: 11096.0000 - val_loss: 0.5082 - val_accuracy: 0.8197 - val_precision: 0.8439 - val_recall: 0.8006 - val_auc: 0.9577 - val_tp: 15694.0000 - val_fp: 2902.0000 - val_tn: 55907.0000 - val_fn: 3909.0000 - lr: 1.0000e-04\n", "Epoch 18/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3251 - accuracy: 0.8788 - precision: 0.8989 - recall: 0.8586 - auc: 0.9814 - tp: 67325.0000 - fp: 7570.0000 - tn: 227666.0000 - fn: 11087.0000 - val_loss: 0.5087 - val_accuracy: 0.8197 - val_precision: 0.8439 - val_recall: 0.8003 - val_auc: 0.9577 - val_tp: 15689.0000 - val_fp: 2901.0000 - val_tn: 55908.0000 - val_fn: 3914.0000 - lr: 1.0000e-04\n", "Epoch 19/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3246 - accuracy: 0.8789 - precision: 0.8993 - recall: 0.8587 - auc: 0.9815 - tp: 67332.0000 - fp: 7543.0000 - tn: 227693.0000 - fn: 11080.0000 - val_loss: 0.5093 - val_accuracy: 0.8196 - val_precision: 0.8439 - val_recall: 0.8001 - val_auc: 0.9576 - val_tp: 15685.0000 - val_fp: 2902.0000 - val_tn: 55907.0000 - val_fn: 3918.0000 - lr: 1.0000e-04\n", "Epoch 20/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3243 - accuracy: 0.8789 - precision: 0.8988 - recall: 0.8592 - auc: 0.9815 - tp: 67375.0000 - fp: 7590.0000 - tn: 227646.0000 - fn: 11037.0000 - val_loss: 0.5098 - val_accuracy: 0.8198 - val_precision: 0.8437 - val_recall: 0.8002 - val_auc: 0.9576 - val_tp: 15687.0000 - val_fp: 2906.0000 - val_tn: 55903.0000 - val_fn: 3916.0000 - lr: 1.0000e-04\n", "Epoch 21/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3238 - accuracy: 0.8793 - precision: 0.8992 - recall: 0.8591 - auc: 0.9815 - tp: 67360.0000 - fp: 7548.0000 - tn: 227688.0000 - fn: 11052.0000 - val_loss: 0.5104 - val_accuracy: 0.8198 - val_precision: 0.8437 - val_recall: 0.8006 - val_auc: 0.9575 - val_tp: 15694.0000 - val_fp: 2907.0000 - val_tn: 55902.0000 - val_fn: 3909.0000 - lr: 1.0000e-04\n", "Epoch 22/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3234 - accuracy: 0.8793 - precision: 0.8993 - recall: 0.8594 - auc: 0.9816 - tp: 67391.0000 - fp: 7545.0000 - tn: 227691.0000 - fn: 11021.0000 - val_loss: 0.5111 - val_accuracy: 0.8200 - val_precision: 0.8436 - val_recall: 0.8007 - val_auc: 0.9574 - val_tp: 15697.0000 - val_fp: 2910.0000 - val_tn: 55899.0000 - val_fn: 3906.0000 - lr: 1.0000e-04\n", "Epoch 23/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3230 - accuracy: 0.8795 - precision: 0.8996 - recall: 0.8594 - auc: 0.9816 - tp: 67386.0000 - fp: 7523.0000 - tn: 227713.0000 - fn: 11026.0000 - val_loss: 0.5116 - val_accuracy: 0.8199 - val_precision: 0.8437 - val_recall: 0.8007 - val_auc: 0.9575 - val_tp: 15696.0000 - val_fp: 2908.0000 - val_tn: 55901.0000 - val_fn: 3907.0000 - lr: 1.0000e-04\n", "Epoch 24/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3226 - accuracy: 0.8795 - precision: 0.8994 - recall: 0.8597 - auc: 0.9817 - tp: 67410.0000 - fp: 7540.0000 - tn: 227696.0000 - fn: 11002.0000 - val_loss: 0.5122 - val_accuracy: 0.8201 - val_precision: 0.8433 - val_recall: 0.8008 - val_auc: 0.9574 - val_tp: 15698.0000 - val_fp: 2917.0000 - val_tn: 55892.0000 - val_fn: 3905.0000 - lr: 1.0000e-04\n", "Epoch 25/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3223 - accuracy: 0.8794 
- precision: 0.8995 - recall: 0.8602 - auc: 0.9817 - tp: 67452.0000 - fp: 7537.0000 - tn: 227699.0000 - fn: 10960.0000 - val_loss: 0.5128 - val_accuracy: 0.8199 - val_precision: 0.8429 - val_recall: 0.8011 - val_auc: 0.9574 - val_tp: 15703.0000 - val_fp: 2926.0000 - val_tn: 55883.0000 - val_fn: 3900.0000 - lr: 1.0000e-04\n", "Epoch 26/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3219 - accuracy: 0.8796 - precision: 0.8997 - recall: 0.8603 - auc: 0.9818 - tp: 67455.0000 - fp: 7519.0000 - tn: 227717.0000 - fn: 10957.0000 - val_loss: 0.5133 - val_accuracy: 0.8197 - val_precision: 0.8428 - val_recall: 0.8008 - val_auc: 0.9573 - val_tp: 15699.0000 - val_fp: 2929.0000 - val_tn: 55880.0000 - val_fn: 3904.0000 - lr: 1.0000e-04\n", "Epoch 27/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3215 - accuracy: 0.8797 - precision: 0.8996 - recall: 0.8601 - auc: 0.9818 - tp: 67442.0000 - fp: 7524.0000 - tn: 227712.0000 - fn: 10970.0000 - val_loss: 0.5139 - val_accuracy: 0.8199 - val_precision: 0.8426 - val_recall: 0.8006 - val_auc: 0.9572 - val_tp: 15695.0000 - val_fp: 2931.0000 - val_tn: 55878.0000 - val_fn: 3908.0000 - lr: 1.0000e-04\n", "Epoch 28/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3211 - accuracy: 0.8798 - precision: 0.8998 - recall: 0.8605 - auc: 0.9818 - tp: 67476.0000 - fp: 7517.0000 - tn: 227719.0000 - fn: 10936.0000 - val_loss: 0.5145 - val_accuracy: 0.8200 - val_precision: 0.8425 - val_recall: 0.8008 - val_auc: 0.9572 - val_tp: 15699.0000 - val_fp: 2934.0000 - val_tn: 55875.0000 - val_fn: 3904.0000 - lr: 1.0000e-04\n", "Epoch 29/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3208 - accuracy: 0.8800 - precision: 0.8997 - recall: 0.8607 - auc: 0.9819 - tp: 67488.0000 - fp: 7523.0000 - tn: 227713.0000 - fn: 10924.0000 - val_loss: 0.5152 - val_accuracy: 0.8195 - val_precision: 0.8421 - val_recall: 0.8009 - val_auc: 0.9571 - val_tp: 15700.0000 - val_fp: 2943.0000 - val_tn: 55866.0000 - val_fn: 3903.0000 - lr: 1.0000e-04\n", "Epoch 30/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3204 - accuracy: 0.8800 - precision: 0.9000 - recall: 0.8608 - auc: 0.9819 - tp: 67497.0000 - fp: 7497.0000 - tn: 227739.0000 - fn: 10915.0000 - val_loss: 0.5157 - val_accuracy: 0.8191 - val_precision: 0.8419 - val_recall: 0.8005 - val_auc: 0.9571 - val_tp: 15693.0000 - val_fp: 2947.0000 - val_tn: 55862.0000 - val_fn: 3910.0000 - lr: 1.0000e-04\n", "Epoch 31/100\n", "30/30 [==============================] - 1s 19ms/step - loss: 0.3200 - accuracy: 0.8801 - precision: 0.9001 - recall: 0.8610 - auc: 0.9819 - tp: 67514.0000 - fp: 7490.0000 - tn: 227746.0000 - fn: 10898.0000 - val_loss: 0.5163 - val_accuracy: 0.8194 - val_precision: 0.8417 - val_recall: 0.8004 - val_auc: 0.9570 - val_tp: 15690.0000 - val_fp: 2951.0000 - val_tn: 55858.0000 - val_fn: 3913.0000 - lr: 1.0000e-04\n", "Epoch 32/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3197 - accuracy: 0.8803 - precision: 0.9000 - recall: 0.8615 - auc: 0.9820 - tp: 67555.0000 - fp: 7504.0000 - tn: 227732.0000 - fn: 10857.0000 - val_loss: 0.5169 - val_accuracy: 0.8196 - val_precision: 0.8417 - val_recall: 0.8012 - val_auc: 0.9570 - val_tp: 15705.0000 - val_fp: 2953.0000 - val_tn: 55856.0000 - val_fn: 3898.0000 - lr: 1.0000e-04\n", "Epoch 33/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3193 - accuracy: 0.8806 - precision: 0.9000 - recall: 0.8615 - auc: 0.9820 - tp: 67550.0000 - fp: 
7503.0000 - tn: 227733.0000 - fn: 10862.0000 - val_loss: 0.5174 - val_accuracy: 0.8193 - val_precision: 0.8416 - val_recall: 0.8007 - val_auc: 0.9569 - val_tp: 15697.0000 - val_fp: 2954.0000 - val_tn: 55855.0000 - val_fn: 3906.0000 - lr: 1.0000e-04\n", "Epoch 34/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3190 - accuracy: 0.8805 - precision: 0.9001 - recall: 0.8618 - auc: 0.9821 - tp: 67572.0000 - fp: 7503.0000 - tn: 227733.0000 - fn: 10840.0000 - val_loss: 0.5180 - val_accuracy: 0.8194 - val_precision: 0.8414 - val_recall: 0.8006 - val_auc: 0.9568 - val_tp: 15694.0000 - val_fp: 2959.0000 - val_tn: 55850.0000 - val_fn: 3909.0000 - lr: 1.0000e-04\n", "Epoch 35/100\n", "30/30 [==============================] - 1s 20ms/step - loss: 0.3186 - accuracy: 0.8806 - precision: 0.9004 - recall: 0.8622 - auc: 0.9821 - tp: 67603.0000 - fp: 7480.0000 - tn: 227756.0000 - fn: 10809.0000 - val_loss: 0.5186 - val_accuracy: 0.8192 - val_precision: 0.8416 - val_recall: 0.8005 - val_auc: 0.9568 - val_tp: 15693.0000 - val_fp: 2954.0000 - val_tn: 55855.0000 - val_fn: 3910.0000 - lr: 1.0000e-04\n", "Epoch 36/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3182 - accuracy: 0.8809 - precision: 0.9006 - recall: 0.8620 - auc: 0.9821 - tp: 67595.0000 - fp: 7461.0000 - tn: 227775.0000 - fn: 10817.0000 - val_loss: 0.5192 - val_accuracy: 0.8191 - val_precision: 0.8413 - val_recall: 0.8005 - val_auc: 0.9567 - val_tp: 15693.0000 - val_fp: 2961.0000 - val_tn: 55848.0000 - val_fn: 3910.0000 - lr: 1.0000e-04\n", "Epoch 37/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3179 - accuracy: 0.8811 - precision: 0.9006 - recall: 0.8621 - auc: 0.9822 - tp: 67602.0000 - fp: 7459.0000 - tn: 227777.0000 - fn: 10810.0000 - val_loss: 0.5197 - val_accuracy: 0.8190 - val_precision: 0.8408 - val_recall: 0.8005 - val_auc: 0.9566 - val_tp: 15692.0000 - val_fp: 2972.0000 - val_tn: 55837.0000 - val_fn: 3911.0000 - lr: 1.0000e-04\n", "Epoch 38/100\n", "30/30 [==============================] - 1s 21ms/step - loss: 0.3175 - accuracy: 0.8813 - precision: 0.9006 - recall: 0.8624 - auc: 0.9822 - tp: 67623.0000 - fp: 7463.0000 - tn: 227773.0000 - fn: 10789.0000 - val_loss: 0.5203 - val_accuracy: 0.8187 - val_precision: 0.8404 - val_recall: 0.8006 - val_auc: 0.9566 - val_tp: 15695.0000 - val_fp: 2980.0000 - val_tn: 55829.0000 - val_fn: 3908.0000 - lr: 1.0000e-04\n", "Epoch 39/100\n", "30/30 [==============================] - 1s 23ms/step - loss: 0.3172 - accuracy: 0.8814 - precision: 0.9009 - recall: 0.8628 - auc: 0.9823 - tp: 67654.0000 - fp: 7444.0000 - tn: 227792.0000 - fn: 10758.0000 - val_loss: 0.5210 - val_accuracy: 0.8185 - val_precision: 0.8406 - val_recall: 0.8003 - val_auc: 0.9565 - val_tp: 15688.0000 - val_fp: 2974.0000 - val_tn: 55835.0000 - val_fn: 3915.0000 - lr: 1.0000e-04\n", "Epoch 40/100\n", "30/30 [==============================] - 1s 32ms/step - loss: 0.3168 - accuracy: 0.8817 - precision: 0.9007 - recall: 0.8627 - auc: 0.9823 - tp: 67643.0000 - fp: 7454.0000 - tn: 227782.0000 - fn: 10769.0000 - val_loss: 0.5215 - val_accuracy: 0.8186 - val_precision: 0.8403 - val_recall: 0.8003 - val_auc: 0.9565 - val_tp: 15689.0000 - val_fp: 2981.0000 - val_tn: 55828.0000 - val_fn: 3914.0000 - lr: 1.0000e-04\n", "Epoch 41/100\n", "30/30 [==============================] - 1s 29ms/step - loss: 0.3165 - accuracy: 0.8816 - precision: 0.9008 - recall: 0.8630 - auc: 0.9823 - tp: 67669.0000 - fp: 7450.0000 - tn: 227786.0000 - fn: 10743.0000 - val_loss: 0.5221 - val_accuracy: 
0.8183 - val_precision: 0.8402 - val_recall: 0.8004 - val_auc: 0.9564 - val_tp: 15691.0000 - val_fp: 2984.0000 - val_tn: 55825.0000 - val_fn: 3912.0000 - lr: 1.0000e-04\n", "Epoch 42/100\n", "30/30 [==============================] - 1s 33ms/step - loss: 0.3161 - accuracy: 0.8819 - precision: 0.9011 - recall: 0.8636 - auc: 0.9824 - tp: 67713.0000 - fp: 7435.0000 - tn: 227801.0000 - fn: 10699.0000 - val_loss: 0.5226 - val_accuracy: 0.8184 - val_precision: 0.8399 - val_recall: 0.8010 - val_auc: 0.9564 - val_tp: 15702.0000 - val_fp: 2992.0000 - val_tn: 55817.0000 - val_fn: 3901.0000 - lr: 1.0000e-04\n", "Epoch 43/100\n", "30/30 [==============================] - 1s 34ms/step - loss: 0.3158 - accuracy: 0.8820 - precision: 0.9010 - recall: 0.8636 - auc: 0.9824 - tp: 67720.0000 - fp: 7443.0000 - tn: 227793.0000 - fn: 10692.0000 - val_loss: 0.5231 - val_accuracy: 0.8188 - val_precision: 0.8399 - val_recall: 0.8006 - val_auc: 0.9563 - val_tp: 15694.0000 - val_fp: 2992.0000 - val_tn: 55817.0000 - val_fn: 3909.0000 - lr: 1.0000e-04\n", "Epoch 44/100\n", "30/30 [==============================] - 1s 29ms/step - loss: 0.3155 - accuracy: 0.8820 - precision: 0.9012 - recall: 0.8634 - auc: 0.9824 - tp: 67701.0000 - fp: 7426.0000 - tn: 227810.0000 - fn: 10711.0000 - val_loss: 0.5238 - val_accuracy: 0.8186 - val_precision: 0.8398 - val_recall: 0.8002 - val_auc: 0.9562 - val_tp: 15686.0000 - val_fp: 2993.0000 - val_tn: 55816.0000 - val_fn: 3917.0000 - lr: 1.0000e-04\n", "Epoch 45/100\n", "30/30 [==============================] - 1s 30ms/step - loss: 0.3151 - accuracy: 0.8817 - precision: 0.9008 - recall: 0.8637 - auc: 0.9824 - tp: 67725.0000 - fp: 7454.0000 - tn: 227782.0000 - fn: 10687.0000 - val_loss: 0.5244 - val_accuracy: 0.8187 - val_precision: 0.8401 - val_recall: 0.8007 - val_auc: 0.9561 - val_tp: 15696.0000 - val_fp: 2987.0000 - val_tn: 55822.0000 - val_fn: 3907.0000 - lr: 1.0000e-04\n", "Epoch 46/100\n", "30/30 [==============================] - 1s 30ms/step - loss: 0.3148 - accuracy: 0.8824 - precision: 0.9011 - recall: 0.8635 - auc: 0.9825 - tp: 67705.0000 - fp: 7432.0000 - tn: 227804.0000 - fn: 10707.0000 - val_loss: 0.5249 - val_accuracy: 0.8182 - val_precision: 0.8395 - val_recall: 0.8009 - val_auc: 0.9561 - val_tp: 15701.0000 - val_fp: 3002.0000 - val_tn: 55807.0000 - val_fn: 3902.0000 - lr: 1.0000e-04\n", "Epoch 47/100\n", "30/30 [==============================] - 1s 33ms/step - loss: 0.3145 - accuracy: 0.8820 - precision: 0.9011 - recall: 0.8641 - auc: 0.9825 - tp: 67756.0000 - fp: 7434.0000 - tn: 227802.0000 - fn: 10656.0000 - val_loss: 0.5255 - val_accuracy: 0.8187 - val_precision: 0.8398 - val_recall: 0.8007 - val_auc: 0.9560 - val_tp: 15697.0000 - val_fp: 2995.0000 - val_tn: 55814.0000 - val_fn: 3906.0000 - lr: 1.0000e-04\n", "Epoch 48/100\n", "30/30 [==============================] - 1s 31ms/step - loss: 0.3141 - accuracy: 0.8824 - precision: 0.9014 - recall: 0.8643 - auc: 0.9826 - tp: 67770.0000 - fp: 7417.0000 - tn: 227819.0000 - fn: 10642.0000 - val_loss: 0.5260 - val_accuracy: 0.8187 - val_precision: 0.8394 - val_recall: 0.8006 - val_auc: 0.9559 - val_tp: 15694.0000 - val_fp: 3002.0000 - val_tn: 55807.0000 - val_fn: 3909.0000 - lr: 1.0000e-04\n", "Epoch 49/100\n", "30/30 [==============================] - 1s 31ms/step - loss: 0.3138 - accuracy: 0.8824 - precision: 0.9015 - recall: 0.8643 - auc: 0.9826 - tp: 67773.0000 - fp: 7408.0000 - tn: 227828.0000 - fn: 10639.0000 - val_loss: 0.5266 - val_accuracy: 0.8189 - val_precision: 0.8391 - val_recall: 0.8007 - val_auc: 0.9559 - val_tp: 
15696.0000 - val_fp: 3009.0000 - val_tn: 55800.0000 - val_fn: 3907.0000 - lr: 1.0000e-04\n", "Epoch 50/100\n", "30/30 [==============================] - 1s 31ms/step - loss: 0.3135 - accuracy: 0.8825 - precision: 0.9015 - recall: 0.8644 - auc: 0.9826 - tp: 67779.0000 - fp: 7406.0000 - tn: 227830.0000 - fn: 10633.0000 - val_loss: 0.5271 - val_accuracy: 0.8187 - val_precision: 0.8393 - val_recall: 0.8007 - val_auc: 0.9558 - val_tp: 15697.0000 - val_fp: 3006.0000 - val_tn: 55803.0000 - val_fn: 3906.0000 - lr: 1.0000e-04\n", "Epoch 51/100\n", "30/30 [==============================] - 1s 34ms/step - loss: 0.3132 - accuracy: 0.8828 - precision: 0.9017 - recall: 0.8649 - auc: 0.9827 - tp: 67821.0000 - fp: 7395.0000 - tn: 227841.0000 - fn: 10591.0000 - val_loss: 0.5277 - val_accuracy: 0.8185 - val_precision: 0.8394 - val_recall: 0.8009 - val_auc: 0.9557 - val_tp: 15700.0000 - val_fp: 3003.0000 - val_tn: 55806.0000 - val_fn: 3903.0000 - lr: 1.0000e-04\n", "Epoch 52/100\n", "30/30 [==============================] - 1s 31ms/step - loss: 0.3129 - accuracy: 0.8827 - precision: 0.9016 - recall: 0.8649 - auc: 0.9827 - tp: 67816.0000 - fp: 7400.0000 - tn: 227836.0000 - fn: 10596.0000 - val_loss: 0.5282 - val_accuracy: 0.8187 - val_precision: 0.8391 - val_recall: 0.8014 - val_auc: 0.9556 - val_tp: 15710.0000 - val_fp: 3012.0000 - val_tn: 55797.0000 - val_fn: 3893.0000 - lr: 1.0000e-04\n", "Epoch 53/100\n", "30/30 [==============================] - 1s 31ms/step - loss: 0.3126 - accuracy: 0.8829 - precision: 0.9020 - recall: 0.8649 - auc: 0.9827 - tp: 67822.0000 - fp: 7372.0000 - tn: 227864.0000 - fn: 10590.0000 - val_loss: 0.5288 - val_accuracy: 0.8183 - val_precision: 0.8388 - val_recall: 0.8008 - val_auc: 0.9556 - val_tp: 15698.0000 - val_fp: 3017.0000 - val_tn: 55792.0000 - val_fn: 3905.0000 - lr: 1.0000e-04\n", "Epoch 54/100\n", "30/30 [==============================] - 1s 32ms/step - loss: 0.3122 - accuracy: 0.8830 - precision: 0.9015 - recall: 0.8653 - auc: 0.9828 - tp: 67846.0000 - fp: 7412.0000 - tn: 227824.0000 - fn: 10566.0000 - val_loss: 0.5294 - val_accuracy: 0.8183 - val_precision: 0.8387 - val_recall: 0.8009 - val_auc: 0.9555 - val_tp: 15700.0000 - val_fp: 3019.0000 - val_tn: 55790.0000 - val_fn: 3903.0000 - lr: 1.0000e-04\n", "Epoch 55/100\n", "30/30 [==============================] - 1s 34ms/step - loss: 0.3119 - accuracy: 0.8832 - precision: 0.9020 - recall: 0.8653 - auc: 0.9828 - tp: 67853.0000 - fp: 7370.0000 - tn: 227866.0000 - fn: 10559.0000 - val_loss: 0.5300 - val_accuracy: 0.8183 - val_precision: 0.8390 - val_recall: 0.8013 - val_auc: 0.9554 - val_tp: 15708.0000 - val_fp: 3015.0000 - val_tn: 55794.0000 - val_fn: 3895.0000 - lr: 1.0000e-04\n", "Epoch 56/100\n", "30/30 [==============================] - 1s 31ms/step - loss: 0.3116 - accuracy: 0.8832 - precision: 0.9017 - recall: 0.8656 - auc: 0.9828 - tp: 67870.0000 - fp: 7395.0000 - tn: 227841.0000 - fn: 10542.0000 - val_loss: 0.5305 - val_accuracy: 0.8185 - val_precision: 0.8386 - val_recall: 0.8012 - val_auc: 0.9554 - val_tp: 15706.0000 - val_fp: 3023.0000 - val_tn: 55786.0000 - val_fn: 3897.0000 - lr: 1.0000e-04\n", "Epoch 57/100\n", "30/30 [==============================] - 1s 29ms/step - loss: 0.3114 - accuracy: 0.8833 - precision: 0.9021 - recall: 0.8656 - auc: 0.9828 - tp: 67871.0000 - fp: 7363.0000 - tn: 227873.0000 - fn: 10541.0000 - val_loss: 0.5311 - val_accuracy: 0.8181 - val_precision: 0.8388 - val_recall: 0.8012 - val_auc: 0.9553 - val_tp: 15705.0000 - val_fp: 3019.0000 - val_tn: 55790.0000 - val_fn: 3898.0000 - lr: 
1.0000e-04\n", "Epoch 58/100\n", "30/30 [==============================] - 1s 31ms/step - loss: 0.3110 - accuracy: 0.8833 - precision: 0.9020 - recall: 0.8654 - auc: 0.9829 - tp: 67857.0000 - fp: 7373.0000 - tn: 227863.0000 - fn: 10555.0000 - val_loss: 0.5316 - val_accuracy: 0.8183 - val_precision: 0.8384 - val_recall: 0.8012 - val_auc: 0.9553 - val_tp: 15705.0000 - val_fp: 3027.0000 - val_tn: 55782.0000 - val_fn: 3898.0000 - lr: 1.0000e-04\n", "Epoch 59/100\n", "30/30 [==============================] - 1s 30ms/step - loss: 0.3107 - accuracy: 0.8833 - precision: 0.9021 - recall: 0.8658 - auc: 0.9829 - tp: 67891.0000 - fp: 7368.0000 - tn: 227868.0000 - fn: 10521.0000 - val_loss: 0.5322 - val_accuracy: 0.8181 - val_precision: 0.8385 - val_recall: 0.8010 - val_auc: 0.9553 - val_tp: 15702.0000 - val_fp: 3025.0000 - val_tn: 55784.0000 - val_fn: 3901.0000 - lr: 1.0000e-04\n", "Epoch 60/100\n", "30/30 [==============================] - 1s 31ms/step - loss: 0.3105 - accuracy: 0.8836 - precision: 0.9022 - recall: 0.8659 - auc: 0.9829 - tp: 67894.0000 - fp: 7360.0000 - tn: 227876.0000 - fn: 10518.0000 - val_loss: 0.5328 - val_accuracy: 0.8187 - val_precision: 0.8387 - val_recall: 0.8008 - val_auc: 0.9553 - val_tp: 15698.0000 - val_fp: 3020.0000 - val_tn: 55789.0000 - val_fn: 3905.0000 - lr: 1.0000e-04\n", "Epoch 61/100\n", "30/30 [==============================] - 1s 29ms/step - loss: 0.3102 - accuracy: 0.8837 - precision: 0.9024 - recall: 0.8660 - auc: 0.9830 - tp: 67906.0000 - fp: 7343.0000 - tn: 227893.0000 - fn: 10506.0000 - val_loss: 0.5332 - val_accuracy: 0.8183 - val_precision: 0.8384 - val_recall: 0.8012 - val_auc: 0.9552 - val_tp: 15705.0000 - val_fp: 3028.0000 - val_tn: 55781.0000 - val_fn: 3898.0000 - lr: 1.0000e-04\n", "Epoch 62/100\n", "30/30 [==============================] - 1s 30ms/step - loss: 0.3099 - accuracy: 0.8835 - precision: 0.9022 - recall: 0.8663 - auc: 0.9830 - tp: 67926.0000 - fp: 7360.0000 - tn: 227876.0000 - fn: 10486.0000 - val_loss: 0.5339 - val_accuracy: 0.8179 - val_precision: 0.8380 - val_recall: 0.8012 - val_auc: 0.9551 - val_tp: 15705.0000 - val_fp: 3037.0000 - val_tn: 55772.0000 - val_fn: 3898.0000 - lr: 1.0000e-04\n", "Epoch 63/100\n", "30/30 [==============================] - 1s 33ms/step - loss: 0.3096 - accuracy: 0.8839 - precision: 0.9023 - recall: 0.8663 - auc: 0.9830 - tp: 67927.0000 - fp: 7357.0000 - tn: 227879.0000 - fn: 10485.0000 - val_loss: 0.5344 - val_accuracy: 0.8180 - val_precision: 0.8381 - val_recall: 0.8011 - val_auc: 0.9551 - val_tp: 15704.0000 - val_fp: 3034.0000 - val_tn: 55775.0000 - val_fn: 3899.0000 - lr: 1.0000e-04\n", "Epoch 64/100\n", "30/30 [==============================] - 1s 30ms/step - loss: 0.3092 - accuracy: 0.8839 - precision: 0.9023 - recall: 0.8666 - auc: 0.9831 - tp: 67948.0000 - fp: 7358.0000 - tn: 227878.0000 - fn: 10464.0000 - val_loss: 0.5351 - val_accuracy: 0.8183 - val_precision: 0.8379 - val_recall: 0.8012 - val_auc: 0.9550 - val_tp: 15705.0000 - val_fp: 3038.0000 - val_tn: 55771.0000 - val_fn: 3898.0000 - lr: 1.0000e-04\n", "Epoch 65/100\n", "30/30 [==============================] - 1s 30ms/step - loss: 0.3090 - accuracy: 0.8840 - precision: 0.9025 - recall: 0.8668 - auc: 0.9831 - tp: 67969.0000 - fp: 7345.0000 - tn: 227891.0000 - fn: 10443.0000 - val_loss: 0.5355 - val_accuracy: 0.8178 - val_precision: 0.8379 - val_recall: 0.8014 - val_auc: 0.9550 - val_tp: 15709.0000 - val_fp: 3039.0000 - val_tn: 55770.0000 - val_fn: 3894.0000 - lr: 1.0000e-04\n", "Epoch 66/100\n", "30/30 [==============================] - 1s 
31ms/step - loss: 0.3087 - accuracy: 0.8841 - precision: 0.9024 - recall: 0.8668 - auc: 0.9831 - tp: 67970.0000 - fp: 7348.0000 - tn: 227888.0000 - fn: 10442.0000 - val_loss: 0.5361 - val_accuracy: 0.8182 - val_precision: 0.8381 - val_recall: 0.8016 - val_auc: 0.9549 - val_tp: 15713.0000 - val_fp: 3036.0000 - val_tn: 55773.0000 - val_fn: 3890.0000 - lr: 1.0000e-04\n", "Epoch 67/100\n", "30/30 [==============================] - 1s 35ms/step - loss: 0.3084 - accuracy: 0.8841 - precision: 0.9023 - recall: 0.8671 - auc: 0.9832 - tp: 67992.0000 - fp: 7361.0000 - tn: 227875.0000 - fn: 10420.0000 - val_loss: 0.5367 - val_accuracy: 0.8176 - val_precision: 0.8375 - val_recall: 0.8011 - val_auc: 0.9549 - val_tp: 15703.0000 - val_fp: 3047.0000 - val_tn: 55762.0000 - val_fn: 3900.0000 - lr: 1.0000e-04\n", "Epoch 68/100\n", "30/30 [==============================] - 1s 31ms/step - loss: 0.3081 - accuracy: 0.8843 - precision: 0.9028 - recall: 0.8670 - auc: 0.9832 - tp: 67986.0000 - fp: 7322.0000 - tn: 227914.0000 - fn: 10426.0000 - val_loss: 0.5373 - val_accuracy: 0.8180 - val_precision: 0.8377 - val_recall: 0.8015 - val_auc: 0.9548 - val_tp: 15711.0000 - val_fp: 3044.0000 - val_tn: 55765.0000 - val_fn: 3892.0000 - lr: 1.0000e-04\n", "Epoch 69/100\n", "30/30 [==============================] - 1s 32ms/step - loss: 0.3078 - accuracy: 0.8845 - precision: 0.9026 - recall: 0.8676 - auc: 0.9832 - tp: 68027.0000 - fp: 7342.0000 - tn: 227894.0000 - fn: 10385.0000 - val_loss: 0.5377 - val_accuracy: 0.8179 - val_precision: 0.8371 - val_recall: 0.8014 - val_auc: 0.9548 - val_tp: 15709.0000 - val_fp: 3057.0000 - val_tn: 55752.0000 - val_fn: 3894.0000 - lr: 1.0000e-04\n", "Epoch 70/100\n", "30/30 [==============================] - 1s 32ms/step - loss: 0.3076 - accuracy: 0.8847 - precision: 0.9028 - recall: 0.8674 - auc: 0.9833 - tp: 68013.0000 - fp: 7323.0000 - tn: 227913.0000 - fn: 10399.0000 - val_loss: 0.5383 - val_accuracy: 0.8183 - val_precision: 0.8377 - val_recall: 0.8018 - val_auc: 0.9547 - val_tp: 15717.0000 - val_fp: 3044.0000 - val_tn: 55765.0000 - val_fn: 3886.0000 - lr: 1.0000e-04\n", "Epoch 71/100\n", "30/30 [==============================] - 1s 31ms/step - loss: 0.3073 - accuracy: 0.8845 - precision: 0.9027 - recall: 0.8675 - auc: 0.9833 - tp: 68021.0000 - fp: 7333.0000 - tn: 227903.0000 - fn: 10391.0000 - val_loss: 0.5389 - val_accuracy: 0.8182 - val_precision: 0.8375 - val_recall: 0.8015 - val_auc: 0.9546 - val_tp: 15712.0000 - val_fp: 3049.0000 - val_tn: 55760.0000 - val_fn: 3891.0000 - lr: 1.0000e-04\n", "Epoch 72/100\n", "30/30 [==============================] - 1s 40ms/step - loss: 0.3070 - accuracy: 0.8846 - precision: 0.9030 - recall: 0.8677 - auc: 0.9833 - tp: 68040.0000 - fp: 7312.0000 - tn: 227924.0000 - fn: 10372.0000 - val_loss: 0.5394 - val_accuracy: 0.8183 - val_precision: 0.8373 - val_recall: 0.8015 - val_auc: 0.9546 - val_tp: 15711.0000 - val_fp: 3053.0000 - val_tn: 55756.0000 - val_fn: 3892.0000 - lr: 1.0000e-04\n", "Epoch 73/100\n", "30/30 [==============================] - 1s 33ms/step - loss: 0.3068 - accuracy: 0.8845 - precision: 0.9027 - recall: 0.8677 - auc: 0.9833 - tp: 68037.0000 - fp: 7334.0000 - tn: 227902.0000 - fn: 10375.0000 - val_loss: 0.5400 - val_accuracy: 0.8177 - val_precision: 0.8375 - val_recall: 0.8016 - val_auc: 0.9545 - val_tp: 15714.0000 - val_fp: 3049.0000 - val_tn: 55760.0000 - val_fn: 3889.0000 - lr: 1.0000e-04\n", "Epoch 74/100\n", "30/30 [==============================] - 1s 33ms/step - loss: 0.3065 - accuracy: 0.8848 - precision: 0.9030 - recall: 0.8679 - 
auc: 0.9834 - tp: 68056.0000 - fp: 7311.0000 - tn: 227925.0000 - fn: 10356.0000 - val_loss: 0.5405 - val_accuracy: 0.8180 - val_precision: 0.8371 - val_recall: 0.8015 - val_auc: 0.9545 - val_tp: 15712.0000 - val_fp: 3057.0000 - val_tn: 55752.0000 - val_fn: 3891.0000 - lr: 1.0000e-04\n", "Epoch 75/100\n", "30/30 [==============================] - 1s 30ms/step - loss: 0.3062 - accuracy: 0.8848 - precision: 0.9029 - recall: 0.8680 - auc: 0.9834 - tp: 68058.0000 - fp: 7315.0000 - tn: 227921.0000 - fn: 10354.0000 - val_loss: 0.5411 - val_accuracy: 0.8183 - val_precision: 0.8375 - val_recall: 0.8020 - val_auc: 0.9544 - val_tp: 15722.0000 - val_fp: 3051.0000 - val_tn: 55758.0000 - val_fn: 3881.0000 - lr: 1.0000e-04\n", "Epoch 76/100\n", "30/30 [==============================] - 1s 30ms/step - loss: 0.3059 - accuracy: 0.8847 - precision: 0.9031 - recall: 0.8684 - auc: 0.9834 - tp: 68095.0000 - fp: 7310.0000 - tn: 227926.0000 - fn: 10317.0000 - val_loss: 0.5417 - val_accuracy: 0.8177 - val_precision: 0.8366 - val_recall: 0.8016 - val_auc: 0.9544 - val_tp: 15714.0000 - val_fp: 3069.0000 - val_tn: 55740.0000 - val_fn: 3889.0000 - lr: 1.0000e-04\n", "Epoch 77/100\n", "30/30 [==============================] - 1s 33ms/step - loss: 0.3057 - accuracy: 0.8849 - precision: 0.9031 - recall: 0.8684 - auc: 0.9835 - tp: 68093.0000 - fp: 7307.0000 - tn: 227929.0000 - fn: 10319.0000 - val_loss: 0.5421 - val_accuracy: 0.8180 - val_precision: 0.8369 - val_recall: 0.8018 - val_auc: 0.9544 - val_tp: 15717.0000 - val_fp: 3063.0000 - val_tn: 55746.0000 - val_fn: 3886.0000 - lr: 1.0000e-04\n", "Epoch 78/100\n", "30/30 [==============================] - 1s 30ms/step - loss: 0.3054 - accuracy: 0.8850 - precision: 0.9034 - recall: 0.8685 - auc: 0.9835 - tp: 68102.0000 - fp: 7284.0000 - tn: 227952.0000 - fn: 10310.0000 - val_loss: 0.5427 - val_accuracy: 0.8179 - val_precision: 0.8369 - val_recall: 0.8016 - val_auc: 0.9543 - val_tp: 15714.0000 - val_fp: 3063.0000 - val_tn: 55746.0000 - val_fn: 3889.0000 - lr: 1.0000e-04\n", "Epoch 79/100\n", "30/30 [==============================] - 1s 31ms/step - loss: 0.3052 - accuracy: 0.8849 - precision: 0.9034 - recall: 0.8687 - auc: 0.9835 - tp: 68116.0000 - fp: 7287.0000 - tn: 227949.0000 - fn: 10296.0000 - val_loss: 0.5431 - val_accuracy: 0.8175 - val_precision: 0.8370 - val_recall: 0.8016 - val_auc: 0.9543 - val_tp: 15713.0000 - val_fp: 3061.0000 - val_tn: 55748.0000 - val_fn: 3890.0000 - lr: 1.0000e-04\n", "Epoch 80/100\n", "30/30 [==============================] - 1s 36ms/step - loss: 0.3049 - accuracy: 0.8852 - precision: 0.9033 - recall: 0.8690 - auc: 0.9835 - tp: 68140.0000 - fp: 7292.0000 - tn: 227944.0000 - fn: 10272.0000 - val_loss: 0.5438 - val_accuracy: 0.8176 - val_precision: 0.8363 - val_recall: 0.8017 - val_auc: 0.9542 - val_tp: 15716.0000 - val_fp: 3076.0000 - val_tn: 55733.0000 - val_fn: 3887.0000 - lr: 1.0000e-04\n", "Epoch 81/100\n", "30/30 [==============================] - 1s 32ms/step - loss: 0.3047 - accuracy: 0.8852 - precision: 0.9038 - recall: 0.8688 - auc: 0.9836 - tp: 68128.0000 - fp: 7254.0000 - tn: 227982.0000 - fn: 10284.0000 - val_loss: 0.5444 - val_accuracy: 0.8181 - val_precision: 0.8366 - val_recall: 0.8017 - val_auc: 0.9542 - val_tp: 15716.0000 - val_fp: 3070.0000 - val_tn: 55739.0000 - val_fn: 3887.0000 - lr: 1.0000e-04\n", "Epoch 82/100\n", "30/30 [==============================] - 1s 32ms/step - loss: 0.3044 - accuracy: 0.8855 - precision: 0.9037 - recall: 0.8692 - auc: 0.9836 - tp: 68158.0000 - fp: 7265.0000 - tn: 227971.0000 - fn: 10254.0000 - 
val_loss: 0.5449 - val_accuracy: 0.8178 - val_precision: 0.8367 - val_recall: 0.8017 - val_auc: 0.9541 - val_tp: 15715.0000 - val_fp: 3067.0000 - val_tn: 55742.0000 - val_fn: 3888.0000 - lr: 1.0000e-04\n", "Epoch 83/100\n", "30/30 [==============================] - 1s 30ms/step - loss: 0.3042 - accuracy: 0.8854 - precision: 0.9036 - recall: 0.8689 - auc: 0.9836 - tp: 68134.0000 - fp: 7266.0000 - tn: 227970.0000 - fn: 10278.0000 - val_loss: 0.5454 - val_accuracy: 0.8176 - val_precision: 0.8365 - val_recall: 0.8014 - val_auc: 0.9541 - val_tp: 15709.0000 - val_fp: 3071.0000 - val_tn: 55738.0000 - val_fn: 3894.0000 - lr: 1.0000e-04\n", "Epoch 84/100\n", "30/30 [==============================] - 1s 32ms/step - loss: 0.3039 - accuracy: 0.8858 - precision: 0.9038 - recall: 0.8693 - auc: 0.9837 - tp: 68163.0000 - fp: 7257.0000 - tn: 227979.0000 - fn: 10249.0000 - val_loss: 0.5459 - val_accuracy: 0.8178 - val_precision: 0.8363 - val_recall: 0.8017 - val_auc: 0.9540 - val_tp: 15716.0000 - val_fp: 3077.0000 - val_tn: 55732.0000 - val_fn: 3887.0000 - lr: 1.0000e-04\n", "Epoch 85/100\n", "30/30 [==============================] - 1s 31ms/step - loss: 0.3036 - accuracy: 0.8857 - precision: 0.9039 - recall: 0.8692 - auc: 0.9837 - tp: 68156.0000 - fp: 7250.0000 - tn: 227986.0000 - fn: 10256.0000 - val_loss: 0.5464 - val_accuracy: 0.8177 - val_precision: 0.8361 - val_recall: 0.8013 - val_auc: 0.9540 - val_tp: 15708.0000 - val_fp: 3080.0000 - val_tn: 55729.0000 - val_fn: 3895.0000 - lr: 1.0000e-04\n", "Epoch 86/100\n", "30/30 [==============================] - 1s 30ms/step - loss: 0.3034 - accuracy: 0.8857 - precision: 0.9040 - recall: 0.8694 - auc: 0.9837 - tp: 68174.0000 - fp: 7238.0000 - tn: 227998.0000 - fn: 10238.0000 - val_loss: 0.5469 - val_accuracy: 0.8174 - val_precision: 0.8359 - val_recall: 0.8010 - val_auc: 0.9539 - val_tp: 15702.0000 - val_fp: 3082.0000 - val_tn: 55727.0000 - val_fn: 3901.0000 - lr: 1.0000e-04\n", "Epoch 87/100\n", "30/30 [==============================] - 1s 38ms/step - loss: 0.3031 - accuracy: 0.8860 - precision: 0.9043 - recall: 0.8696 - auc: 0.9837 - tp: 68185.0000 - fp: 7213.0000 - tn: 228023.0000 - fn: 10227.0000 - val_loss: 0.5476 - val_accuracy: 0.8178 - val_precision: 0.8363 - val_recall: 0.8015 - val_auc: 0.9538 - val_tp: 15712.0000 - val_fp: 3075.0000 - val_tn: 55734.0000 - val_fn: 3891.0000 - lr: 1.0000e-04\n", "Epoch 88/100\n", "30/30 [==============================] - 1s 32ms/step - loss: 0.3029 - accuracy: 0.8858 - precision: 0.9039 - recall: 0.8698 - auc: 0.9838 - tp: 68204.0000 - fp: 7250.0000 - tn: 227986.0000 - fn: 10208.0000 - val_loss: 0.5481 - val_accuracy: 0.8179 - val_precision: 0.8357 - val_recall: 0.8014 - val_auc: 0.9539 - val_tp: 15709.0000 - val_fp: 3089.0000 - val_tn: 55720.0000 - val_fn: 3894.0000 - lr: 1.0000e-04\n", "Epoch 89/100\n", "30/30 [==============================] - 1s 33ms/step - loss: 0.3027 - accuracy: 0.8859 - precision: 0.9040 - recall: 0.8696 - auc: 0.9838 - tp: 68186.0000 - fp: 7237.0000 - tn: 227999.0000 - fn: 10226.0000 - val_loss: 0.5487 - val_accuracy: 0.8177 - val_precision: 0.8359 - val_recall: 0.8010 - val_auc: 0.9538 - val_tp: 15702.0000 - val_fp: 3083.0000 - val_tn: 55726.0000 - val_fn: 3901.0000 - lr: 1.0000e-04\n", "Epoch 90/100\n", "30/30 [==============================] - 1s 34ms/step - loss: 0.3025 - accuracy: 0.8858 - precision: 0.9040 - recall: 0.8697 - auc: 0.9838 - tp: 68198.0000 - fp: 7241.0000 - tn: 227995.0000 - fn: 10214.0000 - val_loss: 0.5491 - val_accuracy: 0.8176 - val_precision: 0.8360 - val_recall: 
0.8011 - val_auc: 0.9537 - val_tp: 15704.0000 - val_fp: 3080.0000 - val_tn: 55729.0000 - val_fn: 3899.0000 - lr: 1.0000e-04\n", "Epoch 91/100\n", "30/30 [==============================] - 1s 30ms/step - loss: 0.3022 - accuracy: 0.8864 - precision: 0.9043 - recall: 0.8700 - auc: 0.9838 - tp: 68220.0000 - fp: 7217.0000 - tn: 228019.0000 - fn: 10192.0000 - val_loss: 0.5496 - val_accuracy: 0.8180 - val_precision: 0.8361 - val_recall: 0.8013 - val_auc: 0.9537 - val_tp: 15708.0000 - val_fp: 3079.0000 - val_tn: 55730.0000 - val_fn: 3895.0000 - lr: 1.0000e-04\n", "Epoch 92/100\n", "30/30 [==============================] - 1s 31ms/step - loss: 0.3020 - accuracy: 0.8863 - precision: 0.9043 - recall: 0.8699 - auc: 0.9838 - tp: 68213.0000 - fp: 7218.0000 - tn: 228018.0000 - fn: 10199.0000 - val_loss: 0.5502 - val_accuracy: 0.8176 - val_precision: 0.8360 - val_recall: 0.8008 - val_auc: 0.9536 - val_tp: 15699.0000 - val_fp: 3079.0000 - val_tn: 55730.0000 - val_fn: 3904.0000 - lr: 1.0000e-04\n", "Epoch 93/100\n", "30/30 [==============================] - 1s 42ms/step - loss: 0.3018 - accuracy: 0.8862 - precision: 0.9044 - recall: 0.8700 - auc: 0.9839 - tp: 68222.0000 - fp: 7213.0000 - tn: 228023.0000 - fn: 10190.0000 - val_loss: 0.5508 - val_accuracy: 0.8179 - val_precision: 0.8355 - val_recall: 0.8011 - val_auc: 0.9536 - val_tp: 15704.0000 - val_fp: 3092.0000 - val_tn: 55717.0000 - val_fn: 3899.0000 - lr: 1.0000e-04\n", "Epoch 94/100\n", "30/30 [==============================] - 1s 31ms/step - loss: 0.3015 - accuracy: 0.8866 - precision: 0.9043 - recall: 0.8702 - auc: 0.9839 - tp: 68237.0000 - fp: 7218.0000 - tn: 228018.0000 - fn: 10175.0000 - val_loss: 0.5513 - val_accuracy: 0.8173 - val_precision: 0.8360 - val_recall: 0.8012 - val_auc: 0.9535 - val_tp: 15705.0000 - val_fp: 3082.0000 - val_tn: 55727.0000 - val_fn: 3898.0000 - lr: 1.0000e-04\n", "Epoch 95/100\n", "30/30 [==============================] - 1s 29ms/step - loss: 0.3013 - accuracy: 0.8866 - precision: 0.9045 - recall: 0.8706 - auc: 0.9839 - tp: 68263.0000 - fp: 7208.0000 - tn: 228028.0000 - fn: 10149.0000 - val_loss: 0.5518 - val_accuracy: 0.8175 - val_precision: 0.8356 - val_recall: 0.8014 - val_auc: 0.9534 - val_tp: 15710.0000 - val_fp: 3090.0000 - val_tn: 55719.0000 - val_fn: 3893.0000 - lr: 1.0000e-04\n", "Epoch 96/100\n", "30/30 [==============================] - 1s 31ms/step - loss: 0.3010 - accuracy: 0.8865 - precision: 0.9045 - recall: 0.8705 - auc: 0.9839 - tp: 68258.0000 - fp: 7206.0000 - tn: 228030.0000 - fn: 10154.0000 - val_loss: 0.5524 - val_accuracy: 0.8176 - val_precision: 0.8355 - val_recall: 0.8011 - val_auc: 0.9534 - val_tp: 15704.0000 - val_fp: 3092.0000 - val_tn: 55717.0000 - val_fn: 3899.0000 - lr: 1.0000e-04\n", "Epoch 97/100\n", "30/30 [==============================] - 1s 32ms/step - loss: 0.3008 - accuracy: 0.8866 - precision: 0.9044 - recall: 0.8702 - auc: 0.9840 - tp: 68237.0000 - fp: 7209.0000 - tn: 228027.0000 - fn: 10175.0000 - val_loss: 0.5529 - val_accuracy: 0.8178 - val_precision: 0.8356 - val_recall: 0.8014 - val_auc: 0.9533 - val_tp: 15709.0000 - val_fp: 3090.0000 - val_tn: 55719.0000 - val_fn: 3894.0000 - lr: 1.0000e-04\n", "Epoch 98/100\n", "30/30 [==============================] - 1s 32ms/step - loss: 0.3006 - accuracy: 0.8870 - precision: 0.9046 - recall: 0.8707 - auc: 0.9840 - tp: 68273.0000 - fp: 7199.0000 - tn: 228037.0000 - fn: 10139.0000 - val_loss: 0.5534 - val_accuracy: 0.8177 - val_precision: 0.8354 - val_recall: 0.8014 - val_auc: 0.9533 - val_tp: 15709.0000 - val_fp: 3095.0000 - val_tn: 
55714.0000 - val_fn: 3894.0000 - lr: 1.0000e-04\n", "Epoch 99/100\n", "30/30 [==============================] - 1s 33ms/step - loss: 0.3003 - accuracy: 0.8868 - precision: 0.9046 - recall: 0.8707 - auc: 0.9840 - tp: 68273.0000 - fp: 7201.0000 - tn: 228035.0000 - fn: 10139.0000 - val_loss: 0.5539 - val_accuracy: 0.8176 - val_precision: 0.8353 - val_recall: 0.8011 - val_auc: 0.9533 - val_tp: 15703.0000 - val_fp: 3097.0000 - val_tn: 55712.0000 - val_fn: 3900.0000 - lr: 1.0000e-04\n", "Epoch 100/100\n", "30/30 [==============================] - 1s 33ms/step - loss: 0.3001 - accuracy: 0.8870 - precision: 0.9048 - recall: 0.8707 - auc: 0.9840 - tp: 68275.0000 - fp: 7185.0000 - tn: 228051.0000 - fn: 10137.0000 - val_loss: 0.5545 - val_accuracy: 0.8173 - val_precision: 0.8353 - val_recall: 0.8011 - val_auc: 0.9532 - val_tp: 15703.0000 - val_fp: 3096.0000 - val_tn: 55713.0000 - val_fn: 3900.0000 - lr: 1.0000e-04\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "[I 2024-06-08 14:01:04,384] Trial 4 finished with value: 0.815499484539032 and parameters: {'num_filters': 86, 'kernel_size': 5, 'learning_rate': 0.0026465008265824665}. Best is trial 0 with value: 0.819294810295105.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Loss: 0.558591902256012\n", "Accuracy: 0.815499484539032\n", "Precision: 0.8317180871963501\n", "Recall: 0.7985227108001709\n", "AUC: 0.95306795835495\n", "True Positives: 19567.0\n", "False Positives: 3959.0\n", "True Negatives: 69553.0\n", "False Negatives: 4937.0\n" ] } ], "source": [ "def objective(trial):\n", "    # Search space explored by Optuna\n", "    num_filters = trial.suggest_int('num_filters', 32, 256)\n", "    kernel_size = trial.suggest_int('kernel_size', 3, 5)\n", "    # NOTE: suggest_loguniform is deprecated since Optuna 3.0 (see the FutureWarning above);\n", "    # the equivalent call is trial.suggest_float('learning_rate', 1e-4, 1e-2, log=True)\n", "    learning_rate = trial.suggest_loguniform('learning_rate', 1e-4, 1e-2)\n", "\n", "    # CNN text classifier: embedding -> 1D convolution -> global max pooling -> softmax over the 4 subjects\n", "    model_2_fine_tune = Sequential([\n", "        Embedding(input_dim=10000, output_dim=16),\n", "        Conv1D(filters=num_filters, kernel_size=kernel_size, activation='relu'),\n", "        GlobalMaxPooling1D(),\n", "        Dense(4, activation='softmax')\n", "    ])\n", "\n", "    optimizer = Adam(learning_rate=learning_rate)\n", "\n", "    model_2_fine_tune.compile(optimizer=optimizer, loss='categorical_crossentropy',\n", "                              metrics=['accuracy',\n", "                                       tf.keras.metrics.Precision(name='precision'),\n", "                                       tf.keras.metrics.Recall(name='recall'),\n", "                                       tf.keras.metrics.AUC(name='auc'),\n", "                                       tf.keras.metrics.TruePositives(name='tp'),\n", "                                       tf.keras.metrics.FalsePositives(name='fp'),\n", "                                       tf.keras.metrics.TrueNegatives(name='tn'),\n", "                                       tf.keras.metrics.FalseNegatives(name='fn')])\n", "\n", "    # checkpoint() and reduce_lr are the callbacks defined earlier in the notebook\n", "    model_2_fine_tune.fit(padded_train, y_train,\n", "                          steps_per_epoch=30,\n", "                          epochs=100,\n", "                          validation_split=0.2,\n", "                          verbose=1,\n", "                          validation_steps=50,\n", "                          callbacks=[checkpoint(\"2_fine_tune\"), reduce_lr])\n", "\n", "    # Evaluate on the held-out test set and report all tracked metrics\n", "    score = model_2_fine_tune.evaluate(padded_test, y_test, verbose=0)\n", "\n", "    print(\"Loss:\", score[0])\n", "    print(\"Accuracy:\", score[1])\n", "    print(\"Precision:\", score[2])\n", "    print(\"Recall:\", score[3])\n", "    print(\"AUC:\", score[4])\n", "    print(\"True Positives:\", score[5])\n", "    print(\"False Positives:\", score[6])\n", "    print(\"True Negatives:\", score[7])\n", "    print(\"False Negatives:\", score[8])\n", "\n", "    # Optuna maximizes the returned value (test accuracy)\n", "    return score[1]\n", "\n", "study = optuna.create_study(direction='maximize')\n", "\n", "study.optimize(objective, n_trials=5)" ] },
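{ "cell_type": "markdown", "metadata": {}, "source": [ "The study above keeps the hyperparameters that maximize test accuracy in `study.best_params`. As a follow-up, the sketch below shows one possible way to rebuild a single model with those values and retrain it, assuming the `padded_train`/`padded_test` tensors, labels and imports from the earlier cells; the names `best` and `final_model` and the epoch count are illustrative and this cell was not executed as part of the original run. In newer Optuna releases, `trial.suggest_float(..., log=True)` replaces the deprecated `trial.suggest_loguniform` used in the objective." ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# Minimal sketch (not part of the original run): rebuild the CNN with the\n", "# hyperparameters chosen by Optuna and retrain it from scratch.\n", "best = study.best_params\n", "\n", "final_model = Sequential([\n", "    Embedding(input_dim=10000, output_dim=16),\n", "    Conv1D(filters=best['num_filters'], kernel_size=best['kernel_size'], activation='relu'),\n", "    GlobalMaxPooling1D(),\n", "    Dense(4, activation='softmax')\n", "])\n", "\n", "final_model.compile(optimizer=Adam(learning_rate=best['learning_rate']),\n", "                    loss='categorical_crossentropy',\n", "                    metrics=['accuracy'])\n", "\n", "# Illustrative training budget; the objective above trained for 100 epochs\n", "final_model.fit(padded_train, y_train, epochs=20, validation_split=0.2, verbose=1)\n", "\n", "# Final evaluation on the held-out test set: prints [loss, accuracy]\n", "print(final_model.evaluate(padded_test, y_test, verbose=0))" ] },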
{ "cell_type": "code", "execution_count": 65, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Best hyperparameters: {'num_filters': 37, 'kernel_size': 5, 'learning_rate': 0.0005838498821337493}\n", "Metric value for the best hyperparameters: 0.819294810295105\n" ] } ], "source": [ "print(\"Best hyperparameters:\", study.best_params)\n", "print(\"Metric value for the best hyperparameters:\", study.best_value)" ] } ], "metadata": { "kernelspec": { "display_name": "tf_gpu", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.10.14" } }, "nbformat": 4, "nbformat_minor": 2 }