2200 lines
103 KiB
Plaintext
2200 lines
103 KiB
Plaintext
{
|
||
"cells": [
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 1,
|
||
"id": "3473477b",
|
||
"metadata": {},
|
||
"outputs": [
|
||
{
|
||
"name": "stdout",
|
||
"output_type": "stream",
|
||
"text": [
|
||
"/home/gedin/.local/lib/python3.10/site-packages/requests/__init__.py:102: RequestsDependencyWarning: urllib3 (1.26.13) or chardet (5.1.0)/charset_normalizer (2.0.12) doesn't match a supported version!\n",
|
||
" warnings.warn(\"urllib3 ({}) or chardet ({})/charset_normalizer ({}) doesn't match a supported \"\n",
|
||
"titanic.zip: Skipping, found more recently modified local copy (use --force to force download)\n"
|
||
]
|
||
}
|
||
],
|
||
"source": [
|
||
"!kaggle competitions download -c titanic"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 3,
|
||
"id": "0c37c704",
|
||
"metadata": {},
|
||
"outputs": [
|
||
{
|
||
"name": "stdout",
|
||
"output_type": "stream",
|
||
"text": [
|
||
"Archive: titanic.zip\r\n",
|
||
" inflating: gender_submission.csv \r\n",
|
||
" inflating: test.csv \r\n",
|
||
" inflating: train.csv \r\n"
|
||
]
|
||
}
|
||
],
|
||
"source": [
|
||
"!unzip titanic.zip"
|
||
]
|
||
},
|
||
{
|
||
"attachments": {},
|
||
"cell_type": "markdown",
|
||
"id": "b6adfde9",
|
||
"metadata": {},
|
||
"source": [
|
||
"### Dane o pliku"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 2,
|
||
"id": "a9d9a8ee",
|
||
"metadata": {},
|
||
"outputs": [
|
||
{
|
||
"name": "stdout",
|
||
"output_type": "stream",
|
||
"text": [
|
||
"892 train.csv\n",
|
||
"419 test.csv\n"
|
||
]
|
||
}
|
||
],
|
||
"source": [
|
||
"!wc -l train.csv\n",
|
||
"!wc -l test.csv"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 4,
|
||
"id": "bf08fe16",
|
||
"metadata": {},
|
||
"outputs": [],
|
||
"source": [
|
||
"import pandas as pd"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 228,
|
||
"id": "fc59f320",
|
||
"metadata": {},
|
||
"outputs": [],
|
||
"source": [
|
||
"df = pd.read_csv(\"train.csv\")"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 47,
|
||
"id": "aa5ea30b",
|
||
"metadata": {},
|
||
"outputs": [
|
||
{
|
||
"data": {
|
||
"text/html": [
|
||
"<div>\n",
|
||
"<style scoped>\n",
|
||
" .dataframe tbody tr th:only-of-type {\n",
|
||
" vertical-align: middle;\n",
|
||
" }\n",
|
||
"\n",
|
||
" .dataframe tbody tr th {\n",
|
||
" vertical-align: top;\n",
|
||
" }\n",
|
||
"\n",
|
||
" .dataframe thead th {\n",
|
||
" text-align: right;\n",
|
||
" }\n",
|
||
"</style>\n",
|
||
"<table border=\"1\" class=\"dataframe\">\n",
|
||
" <thead>\n",
|
||
" <tr style=\"text-align: right;\">\n",
|
||
" <th></th>\n",
|
||
" <th>PassengerId</th>\n",
|
||
" <th>Survived</th>\n",
|
||
" <th>Pclass</th>\n",
|
||
" <th>Name</th>\n",
|
||
" <th>Sex</th>\n",
|
||
" <th>Age</th>\n",
|
||
" <th>SibSp</th>\n",
|
||
" <th>Parch</th>\n",
|
||
" <th>Ticket</th>\n",
|
||
" <th>Fare</th>\n",
|
||
" <th>Cabin</th>\n",
|
||
" <th>Embarked</th>\n",
|
||
" </tr>\n",
|
||
" </thead>\n",
|
||
" <tbody>\n",
|
||
" <tr>\n",
|
||
" <th>0</th>\n",
|
||
" <td>1</td>\n",
|
||
" <td>0</td>\n",
|
||
" <td>3</td>\n",
|
||
" <td>Braund, Mr. Owen Harris</td>\n",
|
||
" <td>male</td>\n",
|
||
" <td>22.0</td>\n",
|
||
" <td>1</td>\n",
|
||
" <td>0</td>\n",
|
||
" <td>A/5 21171</td>\n",
|
||
" <td>7.2500</td>\n",
|
||
" <td>NaN</td>\n",
|
||
" <td>S</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>1</th>\n",
|
||
" <td>2</td>\n",
|
||
" <td>1</td>\n",
|
||
" <td>1</td>\n",
|
||
" <td>Cumings, Mrs. John Bradley (Florence Briggs Th...</td>\n",
|
||
" <td>female</td>\n",
|
||
" <td>38.0</td>\n",
|
||
" <td>1</td>\n",
|
||
" <td>0</td>\n",
|
||
" <td>PC 17599</td>\n",
|
||
" <td>71.2833</td>\n",
|
||
" <td>C85</td>\n",
|
||
" <td>C</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>2</th>\n",
|
||
" <td>3</td>\n",
|
||
" <td>1</td>\n",
|
||
" <td>3</td>\n",
|
||
" <td>Heikkinen, Miss. Laina</td>\n",
|
||
" <td>female</td>\n",
|
||
" <td>26.0</td>\n",
|
||
" <td>0</td>\n",
|
||
" <td>0</td>\n",
|
||
" <td>STON/O2. 3101282</td>\n",
|
||
" <td>7.9250</td>\n",
|
||
" <td>NaN</td>\n",
|
||
" <td>S</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>3</th>\n",
|
||
" <td>4</td>\n",
|
||
" <td>1</td>\n",
|
||
" <td>1</td>\n",
|
||
" <td>Futrelle, Mrs. Jacques Heath (Lily May Peel)</td>\n",
|
||
" <td>female</td>\n",
|
||
" <td>35.0</td>\n",
|
||
" <td>1</td>\n",
|
||
" <td>0</td>\n",
|
||
" <td>113803</td>\n",
|
||
" <td>53.1000</td>\n",
|
||
" <td>C123</td>\n",
|
||
" <td>S</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>4</th>\n",
|
||
" <td>5</td>\n",
|
||
" <td>0</td>\n",
|
||
" <td>3</td>\n",
|
||
" <td>Allen, Mr. William Henry</td>\n",
|
||
" <td>male</td>\n",
|
||
" <td>35.0</td>\n",
|
||
" <td>0</td>\n",
|
||
" <td>0</td>\n",
|
||
" <td>373450</td>\n",
|
||
" <td>8.0500</td>\n",
|
||
" <td>NaN</td>\n",
|
||
" <td>S</td>\n",
|
||
" </tr>\n",
|
||
" </tbody>\n",
|
||
"</table>\n",
|
||
"</div>"
|
||
],
|
||
"text/plain": [
|
||
" PassengerId Survived Pclass \\\n",
|
||
"0 1 0 3 \n",
|
||
"1 2 1 1 \n",
|
||
"2 3 1 3 \n",
|
||
"3 4 1 1 \n",
|
||
"4 5 0 3 \n",
|
||
"\n",
|
||
" Name Sex Age SibSp \\\n",
|
||
"0 Braund, Mr. Owen Harris male 22.0 1 \n",
|
||
"1 Cumings, Mrs. John Bradley (Florence Briggs Th... female 38.0 1 \n",
|
||
"2 Heikkinen, Miss. Laina female 26.0 0 \n",
|
||
"3 Futrelle, Mrs. Jacques Heath (Lily May Peel) female 35.0 1 \n",
|
||
"4 Allen, Mr. William Henry male 35.0 0 \n",
|
||
"\n",
|
||
" Parch Ticket Fare Cabin Embarked \n",
|
||
"0 0 A/5 21171 7.2500 NaN S \n",
|
||
"1 0 PC 17599 71.2833 C85 C \n",
|
||
"2 0 STON/O2. 3101282 7.9250 NaN S \n",
|
||
"3 0 113803 53.1000 C123 S \n",
|
||
"4 0 373450 8.0500 NaN S "
|
||
]
|
||
},
|
||
"execution_count": 47,
|
||
"metadata": {},
|
||
"output_type": "execute_result"
|
||
}
|
||
],
|
||
"source": [
|
||
"df.head(5)"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 48,
|
||
"id": "32d4140c",
|
||
"metadata": {},
|
||
"outputs": [
|
||
{
|
||
"data": {
|
||
"text/html": [
|
||
"<div>\n",
|
||
"<style scoped>\n",
|
||
" .dataframe tbody tr th:only-of-type {\n",
|
||
" vertical-align: middle;\n",
|
||
" }\n",
|
||
"\n",
|
||
" .dataframe tbody tr th {\n",
|
||
" vertical-align: top;\n",
|
||
" }\n",
|
||
"\n",
|
||
" .dataframe thead th {\n",
|
||
" text-align: right;\n",
|
||
" }\n",
|
||
"</style>\n",
|
||
"<table border=\"1\" class=\"dataframe\">\n",
|
||
" <thead>\n",
|
||
" <tr style=\"text-align: right;\">\n",
|
||
" <th></th>\n",
|
||
" <th>PassengerId</th>\n",
|
||
" <th>Survived</th>\n",
|
||
" <th>Pclass</th>\n",
|
||
" <th>Age</th>\n",
|
||
" <th>SibSp</th>\n",
|
||
" <th>Parch</th>\n",
|
||
" <th>Fare</th>\n",
|
||
" </tr>\n",
|
||
" </thead>\n",
|
||
" <tbody>\n",
|
||
" <tr>\n",
|
||
" <th>count</th>\n",
|
||
" <td>891.000000</td>\n",
|
||
" <td>891.000000</td>\n",
|
||
" <td>891.000000</td>\n",
|
||
" <td>714.000000</td>\n",
|
||
" <td>891.000000</td>\n",
|
||
" <td>891.000000</td>\n",
|
||
" <td>891.000000</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>mean</th>\n",
|
||
" <td>446.000000</td>\n",
|
||
" <td>0.383838</td>\n",
|
||
" <td>2.308642</td>\n",
|
||
" <td>29.699118</td>\n",
|
||
" <td>0.523008</td>\n",
|
||
" <td>0.381594</td>\n",
|
||
" <td>32.204208</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>std</th>\n",
|
||
" <td>257.353842</td>\n",
|
||
" <td>0.486592</td>\n",
|
||
" <td>0.836071</td>\n",
|
||
" <td>14.526497</td>\n",
|
||
" <td>1.102743</td>\n",
|
||
" <td>0.806057</td>\n",
|
||
" <td>49.693429</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>min</th>\n",
|
||
" <td>1.000000</td>\n",
|
||
" <td>0.000000</td>\n",
|
||
" <td>1.000000</td>\n",
|
||
" <td>0.420000</td>\n",
|
||
" <td>0.000000</td>\n",
|
||
" <td>0.000000</td>\n",
|
||
" <td>0.000000</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>25%</th>\n",
|
||
" <td>223.500000</td>\n",
|
||
" <td>0.000000</td>\n",
|
||
" <td>2.000000</td>\n",
|
||
" <td>20.125000</td>\n",
|
||
" <td>0.000000</td>\n",
|
||
" <td>0.000000</td>\n",
|
||
" <td>7.910400</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>50%</th>\n",
|
||
" <td>446.000000</td>\n",
|
||
" <td>0.000000</td>\n",
|
||
" <td>3.000000</td>\n",
|
||
" <td>28.000000</td>\n",
|
||
" <td>0.000000</td>\n",
|
||
" <td>0.000000</td>\n",
|
||
" <td>14.454200</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>75%</th>\n",
|
||
" <td>668.500000</td>\n",
|
||
" <td>1.000000</td>\n",
|
||
" <td>3.000000</td>\n",
|
||
" <td>38.000000</td>\n",
|
||
" <td>1.000000</td>\n",
|
||
" <td>0.000000</td>\n",
|
||
" <td>31.000000</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>max</th>\n",
|
||
" <td>891.000000</td>\n",
|
||
" <td>1.000000</td>\n",
|
||
" <td>3.000000</td>\n",
|
||
" <td>80.000000</td>\n",
|
||
" <td>8.000000</td>\n",
|
||
" <td>6.000000</td>\n",
|
||
" <td>512.329200</td>\n",
|
||
" </tr>\n",
|
||
" </tbody>\n",
|
||
"</table>\n",
|
||
"</div>"
|
||
],
|
||
"text/plain": [
|
||
" PassengerId Survived Pclass Age SibSp \\\n",
|
||
"count 891.000000 891.000000 891.000000 714.000000 891.000000 \n",
|
||
"mean 446.000000 0.383838 2.308642 29.699118 0.523008 \n",
|
||
"std 257.353842 0.486592 0.836071 14.526497 1.102743 \n",
|
||
"min 1.000000 0.000000 1.000000 0.420000 0.000000 \n",
|
||
"25% 223.500000 0.000000 2.000000 20.125000 0.000000 \n",
|
||
"50% 446.000000 0.000000 3.000000 28.000000 0.000000 \n",
|
||
"75% 668.500000 1.000000 3.000000 38.000000 1.000000 \n",
|
||
"max 891.000000 1.000000 3.000000 80.000000 8.000000 \n",
|
||
"\n",
|
||
" Parch Fare \n",
|
||
"count 891.000000 891.000000 \n",
|
||
"mean 0.381594 32.204208 \n",
|
||
"std 0.806057 49.693429 \n",
|
||
"min 0.000000 0.000000 \n",
|
||
"25% 0.000000 7.910400 \n",
|
||
"50% 0.000000 14.454200 \n",
|
||
"75% 0.000000 31.000000 \n",
|
||
"max 6.000000 512.329200 "
|
||
]
|
||
},
|
||
"execution_count": 48,
|
||
"metadata": {},
|
||
"output_type": "execute_result"
|
||
}
|
||
],
|
||
"source": [
|
||
"df.describe()"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 49,
|
||
"id": "920ea21b",
|
||
"metadata": {},
|
||
"outputs": [
|
||
{
|
||
"data": {
|
||
"text/plain": [
|
||
"array([[<Axes: title={'center': 'Survived'}>,\n",
|
||
" <Axes: title={'center': 'Pclass'}>]], dtype=object)"
|
||
]
|
||
},
|
||
"execution_count": 49,
|
||
"metadata": {},
|
||
"output_type": "execute_result"
|
||
},
|
||
{
|
||
"data": {
|
||
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAikAAAGzCAYAAADqhoemAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAAA9uUlEQVR4nO3dfVxUdd7/8TfgMIg6oKagiWS1qZh3acq0tVvGTcZaW1RaXUZl9YvQTdms3HW9rTC3zW1btK7W1bYu17TNbsyS0by5SkijLMRy7WaXXBvYLEUlhxHO7499MFcTIAwOzBd9PR8PHjnf8z3f8/3MgTPvzpwzE2ZZliUAAADDhId6AgAAAA0hpAAAACMRUgAAgJEIKQAAwEiEFAAAYCRCCgAAMBIhBQAAGImQAgAAjERIAQAARiKkoE3deuutOuuss0Ky7bCwMM2ZMyck2wZw8v7xj38oLCxMy5cvD/VU0EYIKae4kpISXXfddUpMTFRUVJTOPPNMpaam6sknnwz11ACcJpYvX66wsDDfT1RUlM477zxNnjxZ5eXloZ4eDNYh1BNA69m2bZsuu+wy9e3bV3feeafi4+P15ZdfqqioSE888YSmTJnS5nN65plnVFtb2+bbBRB68+bNU79+/XTs2DG9/fbbWrJkidatW6ddu3YpOjo61NODgQgpp7CHH35YMTEx2rFjh2JjY/2WVVRUBGUbR48eVadOnZrd32azBWW7ANqfsWPHauTIkZKkO+64Q927d9fjjz+uV155RTfeeGOIZwcT8XbPKeyzzz7ToEGD6gUUSerZs6ekE7/H+8NrOObMmaOwsDDt3r1bN910k7p27aqLL75Yjz32mMLCwvTPf/6z3hgzZsxQZGSkvv32W0n+16R4vV5169ZNt912W731KisrFRUVpfvuu8/X5vF4NHv2bJ177rmy2+1KSEjQ/fffL4/H47eux+PRtGnT1KNHD3Xp0kVXXXWV9u3b19TTBaCNjRkzRpL0xRdfSJIOHjyoadOm6ayzzpLdblefPn10yy236Ouvv250jI8++ki33nqrzj77bEVFRSk+Pl633367Dhw44Nfv8OHDmjp1qm/snj17KjU1Ve+//76vz969e5WZman4+HhFRUWpT58+mjBhgg4dOtQK1aM5OJNyCktMTFRhYaF27dql888/P2jjXn/99frRj36kRx55RJZl6Wc/+5nuv/9+rVq1StOnT/fru2rVKqWlpalr1671xrHZbLrmmmv00ksv6emnn1ZkZKRv2csvvyyPx6MJEyZIkmpra3XVVVfp7bff1l133aWBAweqpKREixYt0t///ne9/PLLvnXvuOMOPf/887rpppt00UUX6a233lJGRkbQ6gcQHJ999pkkqXv37jpy5IguueQSffzxx7r99tt1wQUX6Ouvv9arr76qffv26YwzzmhwDJfLpc8//1y33Xab4uPjVVpaqv/+7/9WaWmpioqKFBYWJkm6++679eKLL2ry5MlKSkrSgQMH9Pbbb+vjjz/WBRdcoOrqaqWnp8vj8WjKlCmKj4/Xv/71L61du1YHDx5UTExMmz0v+B4Lp6yCggIrIiLCioiIsJxOp3X//fdb69evt6qrq319vvjiC0uStWzZsnrrS7Jmz57tezx79mxLknXjjTfW6+t0Oq0RI0b4tW3fvt2SZP3lL3/xtWVlZVmJiYm+x+vXr7ckWa+99prfuldeeaV19tln+x4/99xzVnh4uPW///u/fv2eeuopS5L1zjvvWJZlWTt37rQkWffcc49fv5tuuqlePQDaxrJlyyxJ1oYNG6x///vf1pdffmmtXLnS6t69u9WxY0dr37591qxZsyxJ1ksvvVRv/draWsuyGj5eVVVV1ev/17/+1ZJkbd261dcWExNj5eTkNDrHDz74wJJkrV69+iQqRbDxds8pLDU1VYWFhbrqqqv04YcfauHChUpPT9eZZ56pV199tcXj3n333fXaxo8fr+
LiYt//GUnSCy+8ILvdrquvvrrRscaMGaMzzjhDL7zwgq/t22+/lcvl0vjx431tq1ev1sCBAzVgwAB9/fXXvp+608WbNm2SJK1bt06S9Itf/MJvO1OnTg28UABBlZKSoh49eighIUETJkxQ586dtWbNGp155pn629/+pqFDh+qaa66pt17d2ZCGdOzY0ffvY8eO6euvv1ZycrIk+b2VExsbq3fffVf79+9vcJy6MyXr169XVVVVi+pD8BFSTnEXXnihXnrpJX377bfavn27ZsyYocOHD+u6667T7t27WzRmv3796rVdf/31Cg8P94UNy7K0evVqjR07Vg6Ho9GxOnTooMzMTL3yyiu+a0teeukleb1ev5Cyd+9elZaWqkePHn4/5513nqT/uxD4n//8p8LDw3XOOef4bad///4tqhVA8OTn58vlcmnTpk3avXu3Pv/8c6Wnp0v6z1s/LXlb+ptvvtG9996ruLg4dezYUT169PAdo75/LcnChQu1a9cuJSQkaNSoUZozZ44+//xz3/J+/fopNzdXf/rTn3TGGWcoPT1d+fn5XI8SYoSU00RkZKQuvPBCPfLII1qyZIm8Xq9Wr17d6P+h1NTUNDrW9//PpU7v3r11ySWXaNWqVZKkoqIilZWV+QWNxkyYMEGHDx/WG2+8Iek/17EMGDBAQ4cO9fWpra3V4MGD5XK5Gvy55557mtwOgNAaNWqUUlJSdOmll2rgwIEKDz/5l6AbbrhBzzzzjO6++2699NJLKigo0JtvvilJfh93cMMNN+jzzz/Xk08+qd69e+u3v/2tBg0a5DvuSNLvfvc7ffTRR/rVr36l7777Tr/4xS80aNAgLrwPIS6cPQ3V3QL41Vdf+S5oPXjwoF+fhu7Uacr48eN1zz33aM+ePXrhhRcUHR2tcePGNbneT37yE/Xq1UsvvPCCLr74Yr311lv69a9/7dfnnHPO0YcffqjLL7/8hKd+ExMTVVtbq88++8zv7MmePXsCrgdA2znnnHO0a9eugNb59ttvtXHjRs2dO1ezZs3yte/du7fB/r169dI999yje+65RxUVFbrgggv08MMPa+zYsb4+gwcP1uDBgzVz5kxt27ZNP/7xj/XUU0/poYceallhOCmcSTmFbdq0SZZl1Wuvu26jf//+cjgcOuOMM7R161a/PosXLw54e5mZmYqIiNBf//pXrV69Wj/72c+a9Rkq4eHhuu666/Taa6/pueee0/Hjx+udgbnhhhv0r3/9S88880y99b/77jsdPXpUknwHmz/84Q9+fX7/+98HXA+AtpOZmakPP/xQa9asqbesoeOYJEVERDS4/Id/7zU1NfXetunZs6d69+7te5u5srJSx48f9+szePBghYeH1/uYA7QdzqScwqZMmaKqqipdc801GjBggKqrq7Vt2za98MILOuuss3yfT3LHHXdowYIFuuOOOzRy5Eht3bpVf//73wPeXs+ePXXZZZfp8ccf1+HDh5v1Vk+d8ePH68knn9Ts2bM1ePBgDRw40G/5xIkTtWrVKt19993atGmTfvzjH6umpkaffPKJVq1apfXr12vkyJEaNmyYbrzxRi1evFiHDh3SRRddpI0bN+rTTz8NuB4AbWf69Ol68cUXdf311+v222/XiBEj9M033+jVV1/VU0895ff2bx2Hw6Gf/OQnWrhwobxer84880wVFBT4PnelzuHDh9WnTx9dd911Gjp0qDp37qwNGzZox44d+t3vfidJeuuttzR58mRdf/31Ou+883T8+HE999xzioiIUGZmZps8B2hAaG8uQmt64403rNtvv90aMGCA1blzZysyMtI699xzrSlTpljl5eW+flVVVdakSZOsmJgYq0uXLtYNN9xgVVRUNHoL8r///e9Gt/nMM89YkqwuXbpY3333Xb3lP7wFuU5tba2VkJBgSbIeeuihBseurq62Hn30UWvQoEGW3W63unbtao0YMcKaO3eudejQIV+/7777zvrFL35hde
/e3erUqZM1btw468svv+QWZCBE6m5B3rFjxwn7HThwwJo8ebJ15plnWpGRkVafPn2srKws6+uvv7Ysq+FbkPft22ddc801VmxsrBUTE2Ndf/311v79+/3+3j0ejzV9+nRr6NChVpcuXaxOnTpZQ4cOtRYvXuwb5/PPP7duv/1265xzzrGioqKsbt26WZdddpm1YcOGoD8faL4wy2rkPBoAAEAIcU0KAAAwEiEFAAAYiZACAACMREgBAABGIqQAAAAjEVIAAICR2uWHudXW1mr//v3q0qXLCT8iHUDgLMvS4cOH1bt376B8t0p7w/EFaD2BHl/aZUjZv3+/EhISQj0N4JT25Zdfqk+fPqGeRpvj+AK0vuYeX9plSOnSpYuk/xTpcDga7ef1elVQUKC0tDTZbLa2ml5QUYMZTqcaKisrlZCQ4Ps7O91wfGlfqMEMrXV8aZchpe4UrMPhaPIgEh0dLYfD0a53PDWE3ulYw+n6VgfHl/aFGszQWseX0+8NZwAA0C4QUgAAgJEIKQAAwEiEFAAAYCRCCgBjzJkzR2FhYX4/AwYM8C0/duyYcnJy1L17d3Xu3FmZmZkqLy/3G6OsrEwZGRmKjo5Wz549NX36dB0/frytSwEQBO3y7h4Ap65BgwZpw4YNvscdOvzfYWratGl6/fXXtXr1asXExGjy5Mm69tpr9c4770iSampqlJGRofj4eG3btk1fffWVbrnlFtlsNj3yyCNtXguAk0NIAWCUDh06KD4+vl77oUOHtHTpUq1YsUJjxoyRJC1btkwDBw5UUVGRkpOTVVBQoN27d2vDhg2Ki4vTsGHDNH/+fD3wwAOaM2eOIiMj27ocACeBkALAKHv37lXv3r0VFRUlp9OpvLw89e3bV8XFxfJ6vUpJSfH1HTBggPr27avCwkIlJyersLBQgwcPVlxcnK9Penq6srOzVVpaquHDh9fbnsfjkcfj8T2urKyU9J/PffB6vY3Os27ZifqYjhrMcDrVEGiNhBQAxhg9erSWL1+u/v3766uvvtLcuXN1ySWXaNeuXXK73YqMjFRsbKzfOnFxcXK73ZIkt9vtF1Dqltcta0heXp7mzp1br72goEDR0dFNztnlcjWnNKNRgxlOhxqqqqoCGo+QAsAYY8eO9f17yJAhGj16tBITE7Vq1Sp17NixVbY5Y8YM5ebm+h7XfWx3Wlpak58463K5lJqa2q4/JZQaQu90qqHuTGVzEVIAGCs2NlbnnXeePv30U6Wmpqq6uloHDx70O5tSXl7uu4YlPj5e27dv9xuj7u6fhq5zkSS73S673V6v3WazNesFo7n9TEYNZjgdagi0Pm5BBmCsI0eO6LPPPlOvXr00YsQI2Ww2bdy40bd8z549Kisrk9PplCQ5nU6VlJSooqLC18flcsnhcCgpKanN5w/g5HAmBYAx7rvvPo0bN06JiYnav3+/Zs+erYiICN14442KiYnRpEmTlJubq27dusnhcGjKlClyOp1KTk6WJKWlpSkpKUkTJ07UwoUL5Xa7NXPmTOXk5DR4tgSA2QgpAIyxb98+3XjjjTpw4IB69Oihiy++WEVFRerRo4ckadGiRQoPD1dmZqY8Ho/S09O1ePFi3/oRERFau3atsrOz5XQ61alTJ2VlZWnevHmhKgnASTgtQsr5c9bLUxOcr53/x4KMoIwDoL6VK1eecHlUVJTy8/OVn5/faJ/ExEStW7cu2FMDTklnPfh6UMaxR1haOCooQ/nhmhQAAGAkQgoAADASIQUAABiJkAIAAIxESAEAAEYipAAAACMRUgAAgJEIKQAAwEiEFAAAYCRCCgAAMBIhBQAAGImQAgAAjERIAQAARiKkAAAAIxFSAACAkQgpAADASIQUAABgJEIKAAAwEiEFAAAYiZACAACMREgBAABGIqQAAAAjEVIAAICRCCkAAMBIhBQAAGAkQgoAADASIQUAABgpoJAyZ84chYWF+f0MGDDAt/zYsWPKyclR9+7d1blzZ2
VmZqq8vNxvjLKyMmVkZCg6Olo9e/bU9OnTdfz48eBUAwAAThkdAl1h0KBB2rBhw/8N0OH/hpg2bZpef/11rV69WjExMZo8ebKuvfZavfPOO5KkmpoaZWRkKD4+Xtu2bdNXX32lW265RTabTY888kgQygEAAKeKgENKhw4dFB8fX6/90KFDWrp0qVasWKExY8ZIkpYtW6aBAweqqKhIycnJKigo0O7du7VhwwbFxcVp2LBhmj9/vh544AHNmTNHkZGRJ18RAAA4JQQcUvbu3avevXsrKipKTqdTeXl56tu3r4qLi+X1epWSkuLrO2DAAPXt21eFhYVKTk5WYWGhBg8erLi4OF+f9PR0ZWdnq7S0VMOHD29wmx6PRx6Px/e4srJSkuT1euX1ehuda90ye7gVaJlNjtlW6rbX1tsNJmowQ3NraM81Aji1BBRSRo8ereXLl6t///766quvNHfuXF1yySXatWuX3G63IiMjFRsb67dOXFyc3G63JMntdvsFlLrldcsak5eXp7lz59ZrLygoUHR0dJPznj+ytsk+zbVu3bqgjRUIl8sVku0GEzWYoakaqqqq2mgmAHBiAYWUsWPH+v49ZMgQjR49WomJiVq1apU6duwY9MnVmTFjhnJzc32PKysrlZCQoLS0NDkcjkbX83q9crlc+s174fLUhgVlLrvmpAdlnOaqqyE1NVU2m61Ntx0s1GCG5tZQd6YSAEIt4Ld7vi82NlbnnXeePv30U6Wmpqq6uloHDx70O5tSXl7uu4YlPj5e27dv9xuj7u6fhq5zqWO322W32+u122y2Zr1geGrD5KkJTkgJ1QtUc2s1GTWYoaka2nt9AE4dJ/U5KUeOHNFnn32mXr16acSIEbLZbNq4caNv+Z49e1RWVian0ylJcjqdKikpUUVFha+Py+WSw+FQUlLSyUwFAACcYgI6k3Lfffdp3LhxSkxM1P79+zV79mxFREToxhtvVExMjCZNmqTc3Fx169ZNDodDU6ZMkdPpVHJysiQpLS1NSUlJmjhxohYuXCi3262ZM2cqJyenwTMlAADg9BVQSNm3b59uvPFGHThwQD169NDFF1+soqIi9ejRQ5K0aNEihYeHKzMzUx6PR+np6Vq8eLFv/YiICK1du1bZ2dlyOp3q1KmTsrKyNG/evOBWBQAA2r2AQsrKlStPuDwqKkr5+fnKz89vtE9iYmLI7pABAADtB9/dAwAAjERIAQAARiKkAAAAIxFSAACAkQgpAADASIQUAABgJEIKAAAwEiEFAAAYiZACAACMREgBAABGIqQAAAAjEVIAAICRCCkAAMBIhBQAAGAkQgoAADASIQUAABiJkAIAAIxESAEAAEYipAAAACMRUgAAgJEIKQAAwEiEFAAAYCRCCgAAMBIhBYCRFixYoLCwME2dOtXXduzYMeXk5Kh79+7q3LmzMjMzVV5e7rdeWVmZMjIyFB0drZ49e2r69Ok6fvx4G88eQDAQUgAYZ8eOHXr66ac1ZMgQv/Zp06bptdde0+rVq7Vlyxbt379f1157rW95TU2NMjIyVF1drW3btunZZ5/V8uXLNWvWrLYuAUAQEFIAGOXIkSO6+eab9cwzz6hr166+9kOHDmnp0qV6/PHHNWbMGI0YMULLli3Ttm3bVFRUJEkqKCjQ7t279fzzz2vYsGEaO3as5s+fr/z8fFVXV4eqJAAt1CHUEwCA78vJyVFGRoZSUlL00EMP+dqLi4vl9XqVkpLiaxswYID69u2rwsJCJScnq7CwUIMHD1ZcXJyvT3p6urKzs1VaWqrhw4fX257H45HH4/E9rqyslCR5vV55vd5G51m37ER9TEcNZghlDfYIKzjjhP9nnKZqCLRGQgoAY6xcuVLvv/++duzYUW+Z2+1WZGSkYmNj/drj4uLkdrt9fb4fUOqW1y1rSF5enubOnVuvvaCgQNHR0U3O2eVyNdnHdNRghlDUsHBUcMdrqoaqqqqAxiOkADDCl19+qXvvvVcul0
tRUVFttt0ZM2YoNzfX97iyslIJCQlKS0uTw+FodD2v1yuXy6XU1FTZbLa2mGrQUYMZQlnD+XPWB2Uce7il+SNrm6yh7kxlcxFSABihuLhYFRUVuuCCC3xtNTU12rp1q/74xz9q/fr1qq6u1sGDB/3OppSXlys+Pl6SFB8fr+3bt/uNW3f3T12fH7Lb7bLb7fXabTZbs14wmtvPZNRghlDU4KkJC+p4TdUQaH1cOAvACJdffrlKSkq0c+dO38/IkSN18803+/5ts9m0ceNG3zp79uxRWVmZnE6nJMnpdKqkpEQVFRW+Pi6XSw6HQ0lJSW1eE4CTw5kUAEbo0qWLzj//fL+2Tp06qXv37r72SZMmKTc3V926dZPD4dCUKVPkdDqVnJwsSUpLS1NSUpImTpyohQsXyu12a+bMmcrJyWnwbAkAsxFSALQbixYtUnh4uDIzM+XxeJSenq7Fixf7lkdERGjt2rXKzs6W0+lUp06dlJWVpXnz5oVw1gBaipACwFibN2/2exwVFaX8/Hzl5+c3uk5iYqLWrVvXyjMD0Ba4JgUAABiJkAIAAIxESAEAAEYipAAAACMRUgAAgJEIKQAAwEiEFAAAYCRCCgAAMBIhBQAAGImQAgAAjERIAQAARiKkAAAAIxFSAACAkQgpAADASIQUAABgJEIKAAAwEiEFAAAYiZACAACMREgBAABGOqmQsmDBAoWFhWnq1Km+tmPHjiknJ0fdu3dX586dlZmZqfLycr/1ysrKlJGRoejoaPXs2VPTp0/X8ePHT2YqAADgFNPikLJjxw49/fTTGjJkiF/7tGnT9Nprr2n16tXasmWL9u/fr2uvvda3vKamRhkZGaqurta2bdv07LPPavny5Zo1a1bLqwAAAKecFoWUI0eO6Oabb9Yzzzyjrl27+toPHTqkpUuX6vHHH9eYMWM0YsQILVu2TNu2bVNRUZEkqaCgQLt379bzzz+vYcOGaezYsZo/f77y8/NVXV0dnKoAAEC716ElK+Xk5CgjI0MpKSl66KGHfO3FxcXyer1KSUnxtQ0YMEB9+/ZVYWGhkpOTVVhYqMGDBysuLs7XJz09XdnZ2SotLdXw4cPrbc/j8cjj8fgeV1ZWSpK8Xq+8Xm+j86xbZg+3WlLmCcdsK3Xba+vtBhM1mKG5NbTnGgGcWgIOKStXrtT777+vHTt21FvmdrsVGRmp2NhYv/a4uDi53W5fn+8HlLrldcsakpeXp7lz59ZrLygoUHR0dJNznj+ytsk+zbVu3bqgjRUIl8sVku0GEzWYoakaqqqq2mgmAHBiAYWUL7/8Uvfee69cLpeioqJaa071zJgxQ7m5ub7HlZWVSkhIUFpamhwOR6Preb1euVwu/ea9cHlqw4Iyl11z0oMyTnPV1ZCamiqbzdam2w4WajBDc2uoO1MJAKEWUEgpLi5WRUWFLrjgAl9bTU2Ntm7dqj/+8Y9av369qqurdfDgQb+zKeXl5YqPj5ckxcfHa/v27X7j1t39U9fnh+x2u+x2e712m83WrBcMT22YPDXBCSmheoFqbq0mowYzNFVDe68PwKkjoAtnL7/8cpWUlGjnzp2+n5EjR+rmm2/2/dtms2njxo2+dfbs2aOysjI5nU5JktPpVElJiSoqKnx9XC6XHA6HkpKSglQWAABo7wI6k9KlSxedf/75fm2dOnVS9+7dfe2TJk1Sbm6uunXrJofDoSlTpsjpdCo5OVmSlJaWpqSkJE2cOFELFy6U2+3WzJkzlZOT0+DZEgAAcHpq0d09J7Jo0SKFh4crMzNTHo9H6enpWrx4sW95RESE1q5dq+zsbDmdTnXq1ElZWVmaN29esKcCAADasZMOKZs3b/Z7HBUVpfz8fOXn5ze6TmJiYsjukgEAAO0D390DAACMREgBAABGIqQAAAAjEVIAAICRCCkAAMBIhBQAAGAkQgoAADASIQUAABiJkAIAAIxESAEAAEYipAAAACMRUgAAgJEIKQAAwEiEFAAAYC
RCCgAAMBIhBQAAGImQAgAAjERIAQAARiKkAAAAIxFSAACAkQgpAADASIQUAABgJEIKAAAwEiEFAAAYiZACAACMREgBAABGIqQAAAAjEVIAAICRCCkAAMBIhBQAAGCkDqGeAIDGnfXg60Ebyx5haeGooA3XKpYsWaIlS5boH//4hyRp0KBBmjVrlsaOHStJOnbsmH75y19q5cqV8ng8Sk9P1+LFixUXF+cbo6ysTNnZ2dq0aZM6d+6srKws5eXlqUMHDndAe8OZFADG6NOnjxYsWKDi4mK99957GjNmjK6++mqVlpZKkqZNm6bXXntNq1ev1pYtW7R//35de+21vvVramqUkZGh6upqbdu2Tc8++6yWL1+uWbNmhaokACeB/7UAYIxx48b5PX744Ye1ZMkSFRUVqU+fPlq6dKlWrFihMWPGSJKWLVumgQMHqqioSMnJySooKNDu3bu1YcMGxcXFadiwYZo/f74eeOABzZkzR5GRkaEoC0ALEVIAGKmmpkarV6/W0aNH5XQ6VVxcLK/Xq5SUFF+fAQMGqG/fviosLFRycrIKCws1ePBgv7d/0tPTlZ2drdLSUg0fPrzedjwejzwej+9xZWWlJMnr9crr9TY6v7plJ+pjOmowQyhrsEdYwRkn/D/jNFVDoDUSUgAYpaSkRE6nU8eOHVPnzp21Zs0aJSUlaefOnYqMjFRsbKxf/7i4OLndbkmS2+32Cyh1y+uWNSQvL09z586t115QUKDo6Ogm5+tyuZpTltGowQyhqCHY16k1VUNVVVVA4xFSABilf//+2rlzpw4dOqQXX3xRWVlZ2rJlS6ttb8aMGcrNzfU9rqysVEJCgtLS0uRwOBpdz+v1yuVyKTU1VTabrdXm15qowQyhrOH8OeuDMo493NL8kbVN1lB3prK5CCkAjBIZGalzzz1XkjRixAjt2LFDTzzxhMaPH6/q6modPHjQ72xKeXm54uPjJUnx8fHavn2733jl5eW+ZQ2x2+2y2+312m02W7NeMJrbz2TUYIZQ1OCpCQvqeE3VEGh93N0DwGi1tbXyeDwaMWKEbDabNm7c6Fu2Z88elZWVyel0SpKcTqdKSkpUUVHh6+NyueRwOJSUlNTmcwdwcjiTAsAYM2bM0NixY9W3b18dPnxYK1as0ObNm7V+/XrFxMRo0qRJys3NVbdu3eRwODRlyhQ5nU4lJydLktLS0pSUlKSJEydq4cKFcrvdmjlzpnJycho8WwLAbIQUAMaoqKjQLbfcoq+++koxMTEaMmSI1q9fr9TUVEnSokWLFB4erszMTL8Pc6sTERGhtWvXKjs7W06nU506dVJWVpbmzZsXqpIAnARCCgBjLF269ITLo6KilJ+fr/z8/Eb7JCYmat26dcGeGoAQ4JoUAABgJEIKAAAwEiEFAAAYiZACAACMREgBAABGIqQAAAAjEVIAAICRCCkAAMBIhBQAAGAkQgoAADASIQUAABiJkAIAAIwUUEhZsmSJhgwZIofDIYfDIafTqTfeeMO3/NixY8rJyVH37t3VuXNnZWZmqry83G+MsrIyZWRkKDo6Wj179tT06dN1/Pjx4FQDAABOGQGFlD59+mjBggUqLi7We++9pzFjxujqq69WaWmpJGnatGl67bXXtHr1am3ZskX79+/Xtdde61u/pqZGGRkZqq6u1rZt2/Tss89q+fLlmjVrVnCrAgAA7V6HQDqPGzfO7/HDDz+sJUuWqKioSH369NHSpUu1YsUKjRkzRpK0bNkyDRw4UEVFRUpOTlZBQYF2796tDRs2KC4uTsOGDdP8+fP1wAMPaM6cOYqMjAxeZQAAoF0LKKR8X01NjVavXq2jR4/K6XSquLhYXq9XKSkpvj4DBgxQ3759VVhYqOTkZBUWFmrw4MGKi4vz9UlPT1d2drZKS0s1fPjwBrfl8Xjk8Xh8jysrKyVJXq9XXq+30TnWLbOHWy0ts9Ex20rd9tp6u8FEDS1njwje727d30FTNbTn/QTg1B
JwSCkpKZHT6dSxY8fUuXNnrVmzRklJSdq5c6ciIyMVGxvr1z8uLk5ut1uS5Ha7/QJK3fK6ZY3Jy8vT3Llz67UXFBQoOjq6yTnPH1nbZJ/mWrduXdDGCoTL5QrJdoOJGgK3cFTwx2yqhqqqquBvFABaIOCQ0r9/f+3cuVOHDh3Siy++qKysLG3ZsqU15uYzY8YM5ebm+h5XVlYqISFBaWlpcjgcja7n9Xrlcrn0m/fC5akNC8pcds1JD8o4zVVXQ2pqqmw2W5tuO1iooeXOn7M+aGPZwy3NH1nbZA11ZyoBINQCDimRkZE699xzJUkjRozQjh079MQTT2j8+PGqrq7WwYMH/c6mlJeXKz4+XpIUHx+v7du3+41Xd/dPXZ+G2O122e32eu02m61ZLxie2jB5aoITUkL1ItvcWk1GDYEL1u/t9zVVQ3vfRwBOHSf9OSm1tbXyeDwaMWKEbDabNm7c6Fu2Z88elZWVyel0SpKcTqdKSkpUUVHh6+NyueRwOJSUlHSyUwEAAKeQgM6kzJgxQ2PHjlXfvn11+PBhrVixQps3b9b69esVExOjSZMmKTc3V926dZPD4dCUKVPkdDqVnJwsSUpLS1NSUpImTpyohQsXyu12a+bMmcrJyWnwTAkAADh9BRRSKioqdMstt+irr75STEyMhgwZovXr1ys1NVWStGjRIoWHhyszM1Mej0fp6elavHixb/2IiAitXbtW2dnZcjqd6tSpk7KysjRv3rzgVgUAANq9gELK0qVLT7g8KipK+fn5ys/Pb7RPYmJiyO6QAQAA7Qff3QMAAIxESAEAAEYipAAAACMRUgAAgJEIKQAAwEiEFAAAYCRCCgAAMBIhBQAAGImQAgAAjERIAQAARiKkAAAAIxFSAACAkQgpAADASIQUAABgJEIKAAAwEiEFAAAYiZACAACMREgBAABGIqQAAAAjEVIAAICRCCkAAMBIhBQAAGAkQgoAADASIQUAABiJkAIAAIxESAEAAEYipAAAACMRUgAAgJEIKQAAwEiEFAAAYKQOoZ4AALRn589ZL09NWFDG+seCjKCMA5wqOJMCAACMREgBAABGIqQAAAAjEVIAAICRCCkAAMBIhBQAAGAkQgoAADASIQWAEfLy8nThhReqS5cu6tmzp37+859rz549fn2OHTumnJwcde/eXZ07d1ZmZqbKy8v9+pSVlSkjI0PR0dHq2bOnpk+fruPHj7dlKQCChJACwAhbtmxRTk6OioqK5HK55PV6lZaWpqNHj/r6TJs2Ta+99ppWr16tLVu2aP/+/br22mt9y2tqapSRkaHq6mpt27ZNzz77rJYvX65Zs2aFoiQAJ4lPnAVghDfffNPv8fLly9WzZ08VFxfrJz/5iQ4dOqSlS5dqxYoVGjNmjCRp2bJlGjhwoIqKipScnKyCggLt3r1bGzZsUFxcnIYNG6b58+frgQce0Jw5cxQZGRmK0gC0ECEFgJEOHTokSerWrZskqbi4WF6vVykpKb4+AwYMUN++fVVYWKjk5GQVFhZq8ODBiouL8/VJT09Xdna2SktLNXz48Hrb8Xg88ng8vseVlZWSJK/XK6/X2+j86pbZw62TqLLhMdtK3fbaervBRA0nxx4RnN/fur+DpmoItEZCCgDj1NbWaurUqfrxj3+s888/X5LkdrsVGRmp2NhYv75xcXFyu92+Pt8PKHXL65Y1JC8vT3Pnzq3XXlBQoOjo6CbnOn9kbZN9mmvdunVBGysQLpcrJNsNJmpomYWjgjteUzVUVVUFNB4hBYBxcnJytGvXLr399tutvq0ZM2YoNzfX97iyslIJCQlKS0uTw+FodD2v1yuXy6XfvBcuT21wvmBw15z0oIzTXHU1pKamymaztem2g4UaTs75c9YHZRx7uKX5I2ubrKHuTGVzEVIAGGXy5Mlau3attm7dqj59+vja4+PjVV1drYMHD/qdTSkvL1d8fLyvz/bt2/3Gq7v7p67PD9ntdtnt9nrtNputWS
8YntqwoH0LcqheZJtbq8mooWWC9btbp6kaAq2Pu3sAGMGyLE2ePFlr1qzRW2+9pX79+vktHzFihGw2mzZu3Ohr27Nnj8rKyuR0OiVJTqdTJSUlqqio8PVxuVxyOBxKSkpqm0IABA1nUgAYIScnRytWrNArr7yiLl26+K4hiYmJUceOHRUTE6NJkyYpNzdX3bp1k8Ph0JQpU+R0OpWcnCxJSktLU1JSkiZOnKiFCxfK7XZr5syZysnJafBsCQCzEVIAGGHJkiWSpEsvvdSvfdmyZbr11lslSYsWLVJ4eLgyMzPl8XiUnp6uxYsX+/pGRERo7dq1ys7OltPpVKdOnZSVlaV58+a1VRkAgoiQAsAIltX0rZBRUVHKz89Xfn5+o30SExNDdpcMgODimhQAAGAkQgoAADASIQUAABiJkAIAAIwU0IWzeXl5eumll/TJJ5+oY8eOuuiii/Too4+qf//+vj7Hjh3TL3/5S61cudLv6vvvf1R1WVmZsrOztWnTJnXu3FlZWVnKy8tThw5cxwsACNz5c9YH7YPJ/rEgIyjj4OQFdCaFr1IHAABtJaBTF3yVOgAAaCsn9f4KX6Xe+vgacjOEqoZgfY261HpfpQ4AraXFIYWvUm9bfA25Gdq6hmB/jboU/K9SB4DW0uKQwleptw2+htwMoaohWF+jLrXeV6kDQGtpUUjhq9TbHl9Dboa2riHYX6MuBf+r1AGgtQR0dw9fpQ4AANpKQGdS+Cp1AADQVgIKKXyVOgAAaCsBhRS+Sh0AALQVvrsHAAAYiZACAACMREgBAABGIqQAAAAjEVIAAICRCCkAAMBIhBQAAGAkQgoAADASIQUAABiJkAIAAIxESAEAAEYipAAAACMRUgAAgJEIKQAAwEiEFAAAYCRCCgAAMBIhBQAAGImQAgAAjERIAQAARiKkAAAAIxFSAACAkQgpAADASIQUAABgJEIKAAAwEiEFAAAYiZACAACMREgBAABGIqQAAAAjEVIAAICRCCkAAMBIhBQAAGAkQgoAADASIQUAABiJkAIAAIxESAEAAEYipAAAACMRUgAAgJEIKQAAwEiEFAAAYCRCCgAAMBIhBQAAGImQAgAAjERIAQAARiKkAAAAIxFSAACAkQgpAADASIQUAMbYunWrxo0bp969eyssLEwvv/yy33LLsjRr1iz16tVLHTt2VEpKivbu3evX55tvvtHNN98sh8Oh2NhYTZo0SUeOHGnDKgAECyEFgDGOHj2qoUOHKj8/v8HlCxcu1B/+8Ac99dRTevfdd9WpUyelp6fr2LFjvj4333yzSktL5XK5tHbtWm3dulV33XVXW5UAIIg6hHoCAFBn7NixGjt2bIPLLMvS73//e82cOVNXX321JOkvf/mL4uLi9PLLL2vChAn6+OOP9eabb2rHjh0aOXKkJOnJJ5/UlVdeqccee0y9e/dus1oAnDxCCoB24YsvvpDb7VZKSoqvLSYmRqNHj1ZhYaEmTJigwsJCxcbG+gKKJKWkpCg8PFzvvvuurrnmmnrjejweeTwe3+PKykpJktfrldfrbXQ+dcvs4dZJ1/bDMdtK3fbaervBxH44OfaI4Dxvdc9/UzUEWiMhBUC74Ha7JUlxcXF+7XFxcb5lbrdbPXv29FveoUMHdevWzdfnh/Ly8jR37tx67QUFBYqOjm5yXvNH1jZr/s2xbt26oI0VCJfLFZLtBhP7oWUWjgrueE3VUFVVFdB4hBQAp7UZM2YoNzfX97iyslIJCQlKS0uTw+FodD2v1yuXy6XfvBcuT21YUOaya056UMZprroaUlNTZbPZ2nTbwcJ+ODnnz1kflHHs4Zbmj6xtsoa6M5XNRUgB0C7Ex8dLksrLy9WrVy9fe3l5uYYNG+brU1FR4bfe8ePH9c033/jW/yG73S673V6v3WazNesFw1MbJk9NcF4cQxUUmlurydgPLROs56xOUzUEWl/Ad/dwiyCAUOjXr5/i4+O1ceNGX1tlZaXeffddOZ
1OSZLT6dTBgwdVXFzs6/PWW2+ptrZWo0ePbvM5Azg5AYcUbhEE0FqOHDminTt3aufOnZL+c7Hszp07VVZWprCwME2dOlUPPfSQXn31VZWUlOiWW25R79699fOf/1ySNHDgQF1xxRW68847tX37dr3zzjuaPHmyJkyYwJ09QDsU8Ns93CIIoLW89957uuyyy3yP664VycrK0vLly3X//ffr6NGjuuuuu3Tw4EFdfPHFevPNNxUVFeVb53/+5380efJkXX755QoPD1dmZqb+8Ic/tHktAE5eUK9J4RbB4DuVbhGkhsAF6/ZAqfVuEQymSy+9VJbVeM1hYWGaN2+e5s2b12ifbt26acWKFa0xPQBtLKghhVsEW8+pcIsgNQQu2LcHSsG/RRAAWku7uLuHWwRPjVsEqSFwwbo9UGq9WwQBoLUENaRwi2DrORVuEaSGwAX79kAp+LcIAkBrCeoXDHKLIAAACJaAz6QcOXJEn376qe9x3S2C3bp1U9++fX23CP7oRz9Sv3799Jvf/KbRWwSfeuopeb1ebhEEAAD1BBxSuEUQAAC0hYBDCrcIAgCAthDUa1IAAACChZACAACMREgBAABGIqQAAAAjEVIAAICRCCkAAMBIhBQAAGAkQgoAADASIQUAABiJkAIAAIxESAEAAEYipAAAACMRUgAAgJEIKQAAwEiEFAAAYCRCCgAAMBIhBQAAGImQAgAAjERIAQAARiKkAAAAIxFSAACAkQgpAADASIQUAABgJEIKAAAwEiEFAAAYiZACAACMREgBAABGIqQAAAAjEVIAAICRCCkAAMBIhBQAAGAkQgoAADASIQUAABiJkAIAAIxESAEAAEYipAAAACMRUgAAgJEIKQAAwEiEFAAAYCRCCgAAMBIhBQAAGImQAgAAjERIAQAARiKkAAAAIxFSAACAkQgpAADASIQUAABgJEIKAAAwEiEFAAAYiZACAACMREgBAABGIqQAAAAjEVIAAICRQhpS8vPzddZZZykqKkqjR4/W9u3bQzkdAKcQji9A+xeykPLCCy8oNzdXs2fP1vvvv6+hQ4cqPT1dFRUVoZoSgFMExxfg1BCykPL444/rzjvv1G233aakpCQ99dRTio6O1p///OdQTQnAKYLjC3Bq6BCKjVZXV6u4uFgzZszwtYWHhyslJUWFhYX1+ns8Hnk8Ht/jQ4cOSZK++eYbeb3eRrfj9XpVVVWlDt5w1dSGBWXuBw4cCMo4zVVXw4EDB2Sz2dp028FCDS3X4fjR4I1Va6mqqrbJGg4fPixJsiwraNtuSxxfmu9U+ttkP7RMsI4xrXV8CUlI+frrr1VTU6O4uDi/9ri4OH3yySf1+ufl5Wnu3Ln12vv169dqc2zMGb9r800CQXNTAH0PHz6smJiYVptLa+H4gpPFfmiZ1ji+hCSkBGrGjBnKzc31Pa6trdU333yj7t27Kyys8eRcWVmphIQEffnll3I4HG0x1aCjBjOcTjVYlqXDhw+rd+/ebTi70OH4Qg2hdjrVEOjxJSQh5YwzzlBERITKy8v92svLyxUfH1+vv91ul91u92uLjY1t9vYcDke73fF1qMEMp0sN7fEMSh2OL4GjBjOcLjUEcnwJyYWzkZGRGjFihDZu3Ohrq62t1caNG+V0OkMxJQCnCI4vwKkjZG/35ObmKisrSyNHjtSoUaP0+9//XkePHtVtt90WqikBOEVwfAFODSELKePHj9e///1vzZo1S263W8OGDdObb75Z72K3k2G32zV79ux6p3LbE2owAzW0LxxfmocazEANjQuz2ut9hgAA4JTGd/cAAAAjEVIAAICRCCkAAMBIhBQAAGAkQgoAADBSuwop+fn5OuussxQVFaXRo0dr+/btJ+y/evVqDRgwQFFRURo8eLDWrVvnt9yyLM2aNUu9evVSx44dlZKSor1797ZmCQHV8Mwzz+iSSy5R165d1bVrV6WkpNTrf+uttyosLMzv54orrjCmhuXLl9ebX1RUlF
8f0/fDpZdeWq+GsLAwZWRk+Pq09X7YunWrxo0bp969eyssLEwvv/xyk+ts3rxZF1xwgex2u84991wtX768Xp9A/8ZOZa31HLelQGvYvHlzg7/rbre7bSb8A3l5ebrwwgvVpUsX9ezZUz//+c+1Z8+eJtdr6tjfllpSQ3OOm21pyZIlGjJkiO/TZJ1Op954440TrhO0fWC1EytXrrQiIyOtP//5z1Zpaal15513WrGxsVZ5eXmD/d955x0rIiLCWrhwobV7925r5syZls1ms0pKSnx9FixYYMXExFgvv/yy9eGHH1pXXXWV1a9fP+u7774zooabbrrJys/Ptz744APr448/tm699VYrJibG2rdvn69PVlaWdcUVV1hfffWV7+ebb75plfm3pIZly5ZZDofDb35ut9uvj+n74cCBA37z37VrlxUREWEtW7bM16et98O6deusX//619ZLL71kSbLWrFlzwv6ff/65FR0dbeXm5lq7d++2nnzySSsiIsJ68803fX0CfV5Oda3xHLe1QGvYtGmTJcnas2eP3+9yTU1N20z4B9LT061ly5ZZu3btsnbu3GldeeWVVt++fa0jR440uk5zjv1tqSU1NOe42ZZeffVV6/XXX7f+/ve/W3v27LF+9atfWTabzdq1a1eD/YO5D9pNSBk1apSVk5Pje1xTU2P17t3bysvLa7D/DTfcYGVkZPi1jR492vp//+//WZZlWbW1tVZ8fLz129/+1rf84MGDlt1ut/7617+2QgWB1/BDx48ft7p06WI9++yzvrasrCzr6quvDvZUGxVoDcuWLbNiYmIaHa897odFixZZXbp08TvItPV++L7mvPjcf//91qBBg/zaxo8fb6Wnp/sen+zzcioL1nMcSoGElG+//bZN5hSoiooKS5K1ZcuWRvs0dewPtebU0NRx0wRdu3a1/vSnPzW4LJj7oF283VNdXa3i4mKlpKT42sLDw5WSkqLCwsIG1yksLPTrL0np6em+/l988YXcbrdfn5iYGI0ePbrRMdu6hh+qqqqS1+tVt27d/No3b96snj17qn///srOztaBAweCOvc6La3hyJEjSkxMVEJCgq6++mqVlpb6lrXH/bB06VJNmDBBnTp18mtvq/3QEk39PQTjeTndNfUctyfDhg1Tr169lJqaqnfeeSfU0/E5dOiQJNU7Bn6f6fuhOTVIJz5uhlJNTY1Wrlypo0ePNvpdWMHcB+0ipHz99deqqamp95HWcXFxjb5X6na7T9i/7r+BjHkyWlLDDz3wwAPq3bu3386/4oor9Je//EUbN27Uo48+qi1btmjs2LGqqakJ6vylltXQv39//fnPf9Yrr7yi559/XrW1tbrooou0b98+Se1vP2zfvl27du3SHXfc4dfelvuhJRr7e6isrNR3330XlN/P011Tz3F70KtXLz311FP629/+pr/97W9KSEjQpZdeqvfffz/UU1Ntba2mTp2qH//4xzr//PMb7dfUsT+UmltDU8fNUCgpKVHnzp1lt9t19913a82aNUpKSmqwbzD3Qci+uweBWbBggVauXKnNmzf7XUA1YcIE378HDx6sIUOG6JxzztHmzZt1+eWXh2KqfpxOp1/avuiiizRw4EA9/fTTmj9/fghn1jJLly7V4MGDNWrUKL920/cD0Bz9+/dX//79fY8vuugiffbZZ1q0aJGee+65EM5MysnJ0a5du/T222+HdB4no7k1mHjc7N+/v3bu3KlDhw7pxRdfVFZWlrZs2dJoUAmWdnEm5YwzzlBERITKy8v92svLyxUfH9/gOvHx8SfsX/ffQMY8GS2poc5jjz2mBQsWqKCgQEOGDDlh37PPPltnnHGGPv3005Oe8w+dTA11bDabhg8f7ptfe9oPR48e1cqVKzVp0qQmt9Oa+6ElGvt7cDgc6tixY1D27emuqee4vRo1alTIf48nT56stWvXatOmTerTp88J+zZ17A+VQGr4oR8eN0MhMjJS5557rkaMGK
G8vDwNHTpUTzzxRIN9g7kP2kVIiYyM1IgRI7Rx40ZfW21trTZu3Njoe2JOp9OvvyS5XC5f/379+ik+Pt6vT2Vlpd59991Gx2zrGiRp4cKFmj9/vt58802NHDmyye3s27dPBw4cUK9evYIy7+9raQ3fV1NTo5KSEt/82st+kP5zS53H49F//dd/Nbmd1twPLdHU30Mw9u3prqnnuL3auXNnyH6PLcvS5MmTtWbNGr311lvq169fk+uYth9aUsMP/fC4aYLa2lp5PJ4GlwV1H7Tgot6QWLlypWW3263ly5dbu3fvtu666y4rNjbWd1vWxIkTrQcffNDX/5133rE6dOhgPfbYY9bHH39szZ49u8FbkGNjY61XXnnF+uijj6yrr7661W99DaSGBQsWWJGRkdaLL77odyva4cOHLcuyrMOHD1v33XefVVhYaH3xxRfWhg0brAsuuMD60Y9+ZB07dsyIGubOnWutX7/e+uyzz6zi4mJrwoQJVlRUlFVaWupXp8n7oc7FF19sjR8/vl57KPbD4cOHrQ8++MD64IMPLEnW448/bn3wwQfWP//5T8uyLOvBBx+0Jk6c6Otfd3vs9OnTrY8//tjKz89v8BbkEz0vp5vWeI7bWqA1LFq0yHr55ZetvXv3WiUlJda9995rhYeHWxs2bAjJ/LOzs62YmBhr8+bNfsfAqqoqX5+WHPvbUktqaM5xsy09+OCD1pYtW6wvvvjC+uijj6wHH3zQCgsLswoKChqcfzD3QbsJKZZlWU8++aTVt29fKzIy0ho1apRVVFTkW/bTn/7UysrK8uu/atUq67zzzrMiIyOtQYMGWa+//rrf8traWus3v/mNFRcXZ9ntduvyyy+39uzZY0wNiYmJlqR6P7Nnz7Ysy7KqqqqstLQ0q0ePHpbNZrMSExOtO++8s9VfVAKpYerUqb6+cXFx1pVXXmm9//77fuOZvh8sy7I++eQTS5Lvj/L7QrEf6m4V/eFP3byzsrKsn/70p/XWGTZsmBUZGWmdffbZfp/zUudEz8vpprWe47YUaA2PPvqodc4551hRUVFWt27drEsvvdR66623QjN5y2pw7pL8nteWHPvbUktqaM5xsy3dfvvtVmJiohUZGWn16NHDuvzyy/2Oha25D8Isy7ICP/8CAADQutrFNSkAAOD0Q0gBAABGIqQAAAAjEVIAAICRCCkAAMBIhBQAAGAkQgoAADASIQUAABiJkAIAAIxESAEAAEYipAAAACP9f6Os83Nuny12AAAAAElFTkSuQmCC",
|
||
"text/plain": [
|
||
"<Figure size 640x480 with 2 Axes>"
|
||
]
|
||
},
|
||
"metadata": {},
|
||
"output_type": "display_data"
|
||
}
|
||
],
|
||
"source": [
|
||
"df.hist([\"Survived\", \"Pclass\"])\n"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 50,
|
||
"id": "be20c939",
|
||
"metadata": {
|
||
"scrolled": true
|
||
},
|
||
"outputs": [
|
||
{
|
||
"data": {
|
||
"text/plain": [
|
||
"<Axes: xlabel='Embarked'>"
|
||
]
|
||
},
|
||
"execution_count": 50,
|
||
"metadata": {},
|
||
"output_type": "execute_result"
|
||
},
|
||
{
|
||
"data": {
|
||
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAigAAAGtCAYAAAA8mI9zAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAAAlK0lEQVR4nO3de3TU9Z3/8VcuZAIJMzFgZkgNmFbWJApViCVT7VYhECC0tcS1tlmMLQtdmshCVio5IiBWodQtll1utRZoV9aWs0d3wUM0xoIXhluQYwpIxcImNs4EC5kBbCaQfH9/7I/vdgQvQxLmk+T5OOd7DvP9fmbm/cXBPM9kLnGWZVkCAAAwSHysBwAAAPgoAgUAABiHQAEAAMYhUAAAgHEIFAAAYBwCBQAAGIdAAQAAxkmM9QCXo6OjQ01NTRo4cKDi4uJiPQ4AAPgMLMvS6dOnlZmZqfj4T36OpEcGSlNTk7KysmI9BgAAuAyNjY265pprPnFNjwyUgQMHSvrfE3Q6nTGeBgAAfBahUEhZWVn2z/FP0iMD5cKvdZxOJ4ECAEAP81lensGLZAEAgHEIFAAAYBwCBQAAGIdAAQAAxiFQAACAcQgUAABgHAIFAAAYh0ABAADGIVAAAIBxCBQAAGAcAgUAABiHQAEAAMYhUAAAgHEIFAAAYBwCBQAAGCcx1gP0ZtfOfyHWI/Qax5cVx3oEAMAVxDMoAADAOAQKAAAwDoECAACMQ6AAAADjECgAAMA4BAoAADAOgQIAAIxDoAAAAOMQKAAAwDgECgAAMA6BAgAAjEOgAAAA4xAoAADAOAQKAAAwDoECAACMQ6AAAADjECgAAMA4BAoAADAOgQIAAIwTdaD86U9/0t///d9r0KBB6t+/v0aMGKF9+/bZxy3L0sKFCzVkyBD1799fhYWFeueddyJu4+TJkyotLZXT6VRaWpqmT5+uM2fOdP5sAABArxBVoJw6dUq33nqr+vXrp23btunQoUP6l3/5F1111VX2muXLl2vlypVau3atdu/erZSUFBUVFam1tdVeU1paqoMHD6qmpkZbt27Vq6++qpkzZ3bdWQEAgB4tzrIs67Munj9/vt544w299tprlzxuWZYyMzP1z//8z3rggQckScFgUG63Wxs2bNA999yjw4cPKy8vT3v37lV+fr4kqbq6WpMnT9Z7772nzMzMT50jFArJ5XIpGAzK6XR+1vGvuGvnvxDrEXqN48uKYz0CAKCTovn5HdUzKP/93/+t/Px8/d3f/Z0yMjJ0880366mnnrKPHzt2TH6/X4WFhfY+l8ulMWPGyOfzSZJ8Pp/S0tLsOJGkwsJCxcfHa/fu3Ze833A4rFAoFLEBAIDeK6pA+eMf/6g1a9Zo+PDhevHFFzVr1izNnj1bGzdulCT5/X5Jktvtjrie2+22j/n9fmVkZEQcT0xMVHp6ur3mo5YuXSqXy2VvWVlZ0YwNAAB6mKgCpaOjQ6NGjdLjjz+um2++WTNnztSMGTO0du3a7ppPklRVVaVgMGhvjY2N3Xp/AAAgtqIKlCFDhigvLy9iX25urhoaGiRJHo9HkhQIBCLWBAIB+5jH41Fzc3PE8fPnz+vkyZP2mo9yOBxyOp0RGwAA6L2iCpRbb71VR44cidj3hz/8QcOGDZMkZWdny+PxqLa21j4eCoW0e/dueb1eSZLX61VLS4vq6ursNa+88oo6Ojo0ZsyYyz4RAADQeyRGs3ju3Ln68pe/rMcff1x333239uzZo5///Of6+c9/LkmKi4vTnDlz9KMf/UjDhw9Xdna2Hn74YWVmZurOO++U9L/PuEycONH+1dC5c+dUUVGhe+655zO9gwcAAPR+UQXKLbfcoueee05VVVVasmSJsrOz9eSTT6q0tNRe88Mf/lBnz57VzJkz1dLSottuu03V1dVKTk621zzzzDOqqKjQuHHjFB8fr5KSEq1cubLrzgoAAPRoUX0Oiin4HJS+h89BAYCer9
s+BwUAAOBKIFAAAIBxCBQAAGAcAgUAABiHQAEAAMYhUAAAgHEIFAAAYBwCBQAAGIdAAQAAxiFQAACAcQgUAABgHAIFAAAYh0ABAADGIVAAAIBxCBQAAGAcAgUAABiHQAEAAMYhUAAAgHEIFAAAYBwCBQAAGIdAAQAAxiFQAACAcQgUAABgHAIFAAAYh0ABAADGIVAAAIBxCBQAAGAcAgUAABiHQAEAAMYhUAAAgHEIFAAAYBwCBQAAGIdAAQAAxiFQAACAcQgUAABgHAIFAAAYh0ABAADGIVAAAIBxCBQAAGAcAgUAABiHQAEAAMYhUAAAgHEIFAAAYBwCBQAAGCeqQFm8eLHi4uIitpycHPt4a2urysvLNWjQIKWmpqqkpESBQCDiNhoaGlRcXKwBAwYoIyND8+bN0/nz57vmbAAAQK+QGO0VbrjhBr388sv/dwOJ/3cTc+fO1QsvvKDNmzfL5XKpoqJCU6dO1RtvvCFJam9vV3FxsTwej3bu3Kn3339f9957r/r166fHH3+8C04HAAD0BlEHSmJiojwez0X7g8Ggnn76aW3atEljx46VJK1fv165ubnatWuXCgoK9NJLL+nQoUN6+eWX5Xa7ddNNN+nRRx/Vgw8+qMWLFyspKanzZwQAAHq8qF+D8s477ygzM1Of//znVVpaqoaGBklSXV2dzp07p8LCQnttTk6Ohg4dKp/PJ0ny+XwaMWKE3G63vaaoqEihUEgHDx782PsMh8MKhUIRGwAA6L2iCpQxY8Zow4YNqq6u1po1a3Ts2DF95Stf0enTp+X3+5WUlKS0tLSI67jdbvn9fkmS3++PiJMLxy8c+zhLly6Vy+Wyt6ysrGjGBgAAPUxUv+KZNGmS/eeRI0dqzJgxGjZsmH7729+qf//+XT7cBVVVVaqsrLQvh0IhIgUAgF6sU28zTktL09/8zd/o6NGj8ng8amtrU0tLS8SaQCBgv2bF4/Fc9K6eC5cv9bqWCxwOh5xOZ8QGAAB6r04FypkzZ/Tuu+9qyJAhGj16tPr166fa2lr7+JEjR9TQ0CCv1ytJ8nq9qq+vV3Nzs72mpqZGTqdTeXl5nRkFAAD0IlH9iueBBx7Q1772NQ0bNkxNTU1atGiREhIS9O1vf1sul0vTp09XZWWl0tPT5XQ6df/998vr9aqgoECSNGHCBOXl5WnatGlavny5/H6/FixYoPLycjkcjm45QQAA0PNEFSjvvfeevv3tb+vPf/6zrr76at12223atWuXrr76aknSihUrFB8fr5KSEoXDYRUVFWn16tX29RMSErR161bNmjVLXq9XKSkpKisr05IlS7r2rAAAQI8WZ1mWFeshohUKheRyuRQMBo1+Pcq181+I9Qi9xvFlxbEeAQDQSdH8/Oa7eAAAgHEIFAAAYBwCBQAAGIdAAQAAxiFQAACAcQgUAABgHAIFAAAYh0ABAADGIVAAAIBxCBQAAGAcAgUAABiHQAEAAMYhUAAAgHEIFAAAYBwCBQAAGIdAAQAAxiFQAACAcQgUAABgHAIFAAAYh0ABAADGIVAAAIBxCBQAAGAcAgUAABiHQAEAAMYhUAAAgHEIFAAAYBwCBQAAGIdAAQAAxiFQAACAcQgUAABgHAIFAAAYh0ABAADGIVAAAIBxCBQAAGAcAgUAABiHQAEAAMYhUAAAgHEIFAAAYBwCBQAAGIdAAQAAxiFQAACAcQgUAABgHAIFAAAYp1OBsmzZMsXFxWnOnDn2vtbWVpWXl2vQoEFKTU1VSUmJAoFAxPUaGhpUXFysAQMGKCMjQ/PmzdP58+c7MwoAAOhFLjtQ9u7dq3Xr1mnkyJER++fOnastW7Zo8+bN2rFjh5qamjR16lT7eHt7u4qLi9XW1qadO3dq48aN2rBhgxYuXHj5ZwEAAHqVywqUM2fOqLS0VE899ZSuuuoqe38wGNTTTz+tn/70pxo7dqxGjx6t9evXa+fOndq1a5ck6aWXXtKhQ4f07//+77rppps0ad
IkPfroo1q1apXa2tq65qwAAECPdlmBUl5eruLiYhUWFkbsr6ur07lz5yL25+TkaOjQofL5fJIkn8+nESNGyO1222uKiooUCoV08ODBS95fOBxWKBSK2AAAQO+VGO0Vnn32We3fv1979+696Jjf71dSUpLS0tIi9rvdbvn9fnvNX8fJheMXjl3K0qVL9cgjj0Q7KgAA6KGiegalsbFR//RP/6RnnnlGycnJ3TXTRaqqqhQMBu2tsbHxit03AAC48qIKlLq6OjU3N2vUqFFKTExUYmKiduzYoZUrVyoxMVFut1ttbW1qaWmJuF4gEJDH45EkeTyei97Vc+HyhTUf5XA45HQ6IzYAANB7RRUo48aNU319vQ4cOGBv+fn5Ki0ttf/cr18/1dbW2tc5cuSIGhoa5PV6JUler1f19fVqbm6219TU1MjpdCovL6+LTgsAAPRkUb0GZeDAgbrxxhsj9qWkpGjQoEH2/unTp6uyslLp6elyOp26//775fV6VVBQIEmaMGGC8vLyNG3aNC1fvlx+v18LFixQeXm5HA5HF50WAADoyaJ+keynWbFiheLj41VSUqJwOKyioiKtXr3aPp6QkKCtW7dq1qxZ8nq9SklJUVlZmZYsWdLVowAAgB4qzrIsK9ZDRCsUCsnlcikYDBr9epRr578Q6xF6jePLimM9AgCgk6L5+c138QAAAOMQKAAAwDgECgAAMA6BAgAAjEOgAAAA4xAoAADAOAQKAAAwDoECAACMQ6AAAADjECgAAMA4BAoAADAOgQIAAIxDoAAAAOMQKAAAwDgECgAAMA6BAgAAjEOgAAAA4xAoAADAOAQKAAAwDoECAACMQ6AAAADjECgAAMA4BAoAADAOgQIAAIxDoAAAAOMQKAAAwDgECgAAMA6BAgAAjEOgAAAA4xAoAADAOAQKAAAwDoECAACMQ6AAAADjECgAAMA4BAoAADAOgQIAAIxDoAAAAOMQKAAAwDgECgAAMA6BAgAAjEOgAAAA4xAoAADAOAQKAAAwDoECAACME1WgrFmzRiNHjpTT6ZTT6ZTX69W2bdvs462trSovL9egQYOUmpqqkpISBQKBiNtoaGhQcXGxBgwYoIyMDM2bN0/nz5/vmrMBAAC9QlSBcs0112jZsmWqq6vTvn37NHbsWH3jG9/QwYMHJUlz587Vli1btHnzZu3YsUNNTU2aOnWqff329nYVFxerra1NO3fu1MaNG7VhwwYtXLiwa88KAAD0aHGWZVmduYH09HT95Cc/0V133aWrr75amzZt0l133SVJevvtt5Wbmyufz6eCggJt27ZNU6ZMUVNTk9xutyRp7dq1evDBB3XixAklJSV9pvsMhUJyuVwKBoNyOp2dGb9bXTv/hViP0GscX1Yc6xEAAJ0Uzc/vy34NSnt7u5599lmdPXtWXq9XdXV1OnfunAoLC+01OTk5Gjp0qHw+nyTJ5/NpxIgRdpxIUlFRkUKhkP0szKWEw2GFQqGIDQAA9F5RB0p9fb1SU1PlcDj0j//4j3ruueeUl5cnv9+vpKQkpaWlRax3u93y+/2SJL/fHxEnF45fOPZxli5dKpfLZW9ZWVnRjg0AAHqQqAPl+uuv14EDB7R7927NmjVLZWVlOnToUHfMZquqqlIwGLS3xsbGbr0/AAAQW4nRXiEpKUnXXXedJGn06NHau3evfvazn+lb3/qW2tra1NLSEvEsSiAQkMfjkSR5PB7t2bMn4vYuvMvnwppLcTgccjgc0Y4KAAB6qE5/DkpHR4fC4bBGjx6tfv36qba21j525MgRNTQ0yOv1SpK8Xq/q6+vV3Nxsr6mpqZHT6VReXl5nRwEAAL1EVM+gVFVVadKkSRo6dKhOnz6tTZs2afv27XrxxRflcrk0ffp0VVZWKj09XU6nU/fff7+8Xq8KCgokSRMmTFBeXp6mTZum5cuXy+/3a8GCBSovL+cZEgAAYIsqUJqbm3Xvvffq/fffl8vl0siRI/Xiiy9q/Pjxkq
QVK1YoPj5eJSUlCofDKioq0urVq+3rJyQkaOvWrZo1a5a8Xq9SUlJUVlamJUuWdO1ZAQCAHq3Tn4MSC3wOSt/D56AAQM93RT4HBQAAoLsQKAAAwDgECgAAMA6BAgAAjEOgAAAA4xAoAADAOAQKAAAwDoECAACMQ6AAAADjECgAAMA4BAoAADAOgQIAAIxDoAAAAOMQKAAAwDgECgAAMA6BAgAAjEOgAAAA4xAoAADAOAQKAAAwDoECAACMQ6AAAADjECgAAMA4BAoAADAOgQIAAIxDoAAAAOMQKAAAwDgECgAAMA6BAgAAjEOgAAAA4xAoAADAOAQKAAAwDoECAACMQ6AAAADjECgAAMA4BAoAADAOgQIAAIxDoAAAAOMQKAAAwDgECgAAMA6BAgAAjEOgAAAA4xAoAADAOAQKAAAwDoECAACME1WgLF26VLfccosGDhyojIwM3XnnnTpy5EjEmtbWVpWXl2vQoEFKTU1VSUmJAoFAxJqGhgYVFxdrwIABysjI0Lx583T+/PnOnw0AAOgVogqUHTt2qLy8XLt27VJNTY3OnTunCRMm6OzZs/aauXPnasuWLdq8ebN27NihpqYmTZ061T7e3t6u4uJitbW1aefOndq4caM2bNighQsXdt1ZAQCAHi3Osizrcq984sQJZWRkaMeOHfrbv/1bBYNBXX311dq0aZPuuusuSdLbb7+t3Nxc+Xw+FRQUaNu2bZoyZYqamprkdrslSWvXrtWDDz6oEydOKCkp6VPvNxQKyeVyKRgMyul0Xu743e7a+S/EeoRe4/iy4liPAADopGh+fnfqNSjBYFCSlJ6eLkmqq6vTuXPnVFhYaK/JycnR0KFD5fP5JEk+n08jRoyw40SSioqKFAqFdPDgwUveTzgcVigUitgAAEDvddmB0tHRoTlz5ujWW2/VjTfeKEny+/1KSkpSWlpaxFq32y2/32+v+es4uXD8wrFLWbp0qVwul71lZWVd7tgAAKAHuOxAKS8v1+9//3s9++yzXTnPJVVVVSkYDNpbY2Njt98nAACIncTLuVJFRYW2bt2qV199Vddcc4293+PxqK2tTS0tLRHPogQCAXk8HnvNnj17Im7vwrt8Lqz5KIfDIYfDcTmjAgCAHiiqZ1Asy1JFRYWee+45vfLKK8rOzo44Pnr0aPXr10+1tbX2viNHjqihoUFer1eS5PV6VV9fr+bmZntNTU2NnE6n8vLyOnMuAACgl4jqGZTy8nJt2rRJ//Vf/6WBAwfarxlxuVzq37+/XC6Xpk+frsrKSqWnp8vpdOr++++X1+tVQUGBJGnChAnKy8vTtGnTtHz5cvn9fi1YsEDl5eU8SwIAACRFGShr1qyRJN1+++0R+9evX6/77rtPkrRixQrFx8erpKRE4XBYRUVFWr16tb02ISFBW7du1axZs+T1epWSkqKysjItWbKkc2cCAAB6jU59Dkqs8DkofQ+fgwIAPd8V+xwUAACA7kCgAAAA4xAoAADAOAQKAAAwDoECAACMQ6AAAADjECgAAMA4BAoAADAOgQIAAIxDoAAAAONE9V08AHo+voKha/D1C0D34hkUAABgHAIFAAAYh0ABAADGIVAAAIBxCBQAAGAcAgUAABiHQAEAAMYhUAAAgHEIFAAAYBwCBQAAGIdAAQAAxiFQAACAcQgUAABgHAIFAAAYh0ABAADGIVAAAIBxCBQAAGAcAgUAABiHQAEAAMYhUAAAgHEIFAAAYBwCBQAAGIdAAQAAxiFQAACAcQgUAABgHAIFAAAYh0ABAADGIVAAAIBxCBQAAGAcAgUAABiHQAEAAMYhUAAAgHGiDpRXX31VX/va15SZmam4uDg9//zzEccty9LChQs1ZMgQ9e/fX4WFhXrnnXci1pw8eVKlpaVyOp1KS0vT9OnTdebMmU6dCAAA6D2iDpSzZ8/qi1/8olatWnXJ48uXL9fKlSu1du1a7d69WykpKSoqKlJra6u9prS0VAcPHlRNTY22bt
2qV199VTNnzrz8swAAAL1KYrRXmDRpkiZNmnTJY5Zl6cknn9SCBQv0jW98Q5L0q1/9Sm63W88//7zuueceHT58WNXV1dq7d6/y8/MlSf/6r/+qyZMn64knnlBmZmYnTgcAAPQGXfoalGPHjsnv96uwsNDe53K5NGbMGPl8PkmSz+dTWlqaHSeSVFhYqPj4eO3evfuStxsOhxUKhSI2AADQe3VpoPj9fkmS2+2O2O92u+1jfr9fGRkZEccTExOVnp5ur/mopUuXyuVy2VtWVlZXjg0AAAzTI97FU1VVpWAwaG+NjY2xHgkAAHSjLg0Uj8cjSQoEAhH7A4GAfczj8ai5uTni+Pnz53Xy5El7zUc5HA45nc6IDQAA9F5dGijZ2dnyeDyqra2194VCIe3evVter1eS5PV61dLSorq6OnvNK6+8oo6ODo0ZM6YrxwEAAD1U1O/iOXPmjI4ePWpfPnbsmA4cOKD09HQNHTpUc+bM0Y9+9CMNHz5c2dnZevjhh5WZmak777xTkpSbm6uJEydqxowZWrt2rc6dO6eKigrdc889vIMHAABIuoxA2bdvn+644w77cmVlpSSprKxMGzZs0A9/+EOdPXtWM2fOVEtLi2677TZVV1crOTnZvs4zzzyjiooKjRs3TvHx8SopKdHKlSu74HQAAEBvEGdZlhXrIaIVCoXkcrkUDAaNfj3KtfNfiPUIvcbxZcWxHqHX4HHZNXhMAtGL5ud3j3gXDwAA6FsIFAAAYBwCBQAAGIdAAQAAxiFQAACAcQgUAABgnKg/BwUAgK7EW9+7Tm96+zvPoAAAAOMQKAAAwDgECgAAMA6BAgAAjEOgAAAA4xAoAADAOAQKAAAwDoECAACMQ6AAAADjECgAAMA4BAoAADAOgQIAAIxDoAAAAOMQKAAAwDgECgAAMA6BAgAAjEOgAAAA4xAoAADAOAQKAAAwDoECAACMQ6AAAADjECgAAMA4BAoAADAOgQIAAIxDoAAAAOMQKAAAwDgECgAAMA6BAgAAjEOgAAAA4xAoAADAOAQKAAAwDoECAACMQ6AAAADjECgAAMA4BAoAADAOgQIAAIxDoAAAAOPENFBWrVqla6+9VsnJyRozZoz27NkTy3EAAIAhYhYov/nNb1RZWalFixZp//79+uIXv6iioiI1NzfHaiQAAGCImAXKT3/6U82YMUPf/e53lZeXp7Vr12rAgAH65S9/GauRAACAIRJjcadtbW2qq6tTVVWVvS8+Pl6FhYXy+XwXrQ+HwwqHw/blYDAoSQqFQt0/bCd0hD+M9Qi9hun/rXsSHpddg8dk1+Ex2XVMf1xemM+yrE9dG5NA+eCDD9Te3i632x2x3+126+23375o/dKlS/XII49ctD8rK6vbZoRZXE/GegIgEo9JmKinPC5Pnz4tl8v1iWtiEijRqqqqUmVlpX25o6NDJ0+e1KBBgxQXFxfDyXq+UCikrKwsNTY2yul0xnocgMckjMNjsutYlqXTp08rMzPzU9fGJFAGDx6shIQEBQKBiP2BQEAej+ei9Q6HQw6HI2JfWlpad47Y5zidTv7hwSg8JmEaHpNd49OeObkgJi+STUpK0ujRo1VbW2vv6+joUG1trbxebyxGAgAABonZr3gqKytVVlam/Px8felLX9KTTz6ps2fP6rvf/W6sRgIAAIaIWaB861vf0okTJ7Rw4UL5/X7ddNNNqq6uvuiFs+heDodDixYtuuhXaECs8JiEaXhMxkac9Vne6wMAAHAF8V08AADAOAQKAAAwDoECAACMQ6AAAADjECgAAHyCDz74wPjvuOmNCJQ+xOfzaevWrRH7fvWrXyk7O1sZGRmaOXNmxJcyAt3tlVdeUV5e3iX/5x8MBnXDDTfotddei8Fk6OtaWlpUXl6uwYMHy+1266qrrpLH41FVVZU+/JAvN7wSeJtxHzJp0iTdfvvtevDBByVJ9fX1GjVqlO677z7l5ubqJz/5ib7//e9r8eLFsR0UfcbXv/
513XHHHZo7d+4lj69cuVK/+93v9Nxzz13hydCXnTx5Ul6vV3/6059UWlqq3NxcSdKhQ4e0adMm5eTk6PXXX9dbb72lXbt2afbs2TGeuHciUPqQIUOGaMuWLcrPz5ckPfTQQ9qxY4def/11SdLmzZu1aNEiHTp0KJZjog8ZNmyYqqur7R8AH/X2229rwoQJamhouMKToS+bM2eOamtr9fLLL1/04aF+v18TJkzQ9ddfr5deekkrV65UWVlZjCbt3XrEtxmja5w6dSriH9uOHTs0adIk+/Itt9yixsbGWIyGPioQCKhfv34fezwxMVEnTpy4ghMB0vPPP69169Zd8pPNPR6Pli9frsmTJ2vRokXESTfiNSh9iNvt1rFjxyRJbW1t2r9/vwoKCuzjp0+f/sQfFkBX+9znPqff//73H3v8rbfe0pAhQ67gRID0/vvv64YbbvjY4zfeeKPi4+O1aNGiKzhV30Og9CGTJ0/W/Pnz9dprr6mqqkoDBgzQV77yFfv4W2+9pS984QsxnBB9zeTJk/Xwww+rtbX1omN/+ctftGjRIk2ZMiUGk6EvGzx4sI4fP/6xx48dO6aMjIwrN1AfxWtQ+pAPPvhAU6dO1euvv67U1FRt3LhR3/zmN+3j48aNU0FBgR577LEYTom+JBAIaNSoUUpISFBFRYWuv/56Sf/72pNVq1apvb1d+/fv50tEcUV973vf07vvvquamholJSVFHAuHwyoqKtLnP/95/fKXv4zRhH0DgdIHBYNBpaamKiEhIWL/yZMnlZqaetE/SKA7/c///I9mzZqlF198URf+dxQXF6eioiKtWrVK2dnZMZ4Qfc17772n/Px8ORwOlZeXKycnR5Zl6fDhw1q9erXC4bD27t2roUOHxnrUXo1AAWCEU6dO6ejRo7IsS8OHD9dVV10V65HQhx07dkw/+MEP9NJLL0WE8/jx4/Vv//Zvuu6662I8Ye9HoAAA8DFOnTqld955R5J03XXXKT09PcYT9R0ECgAAMA7v4gEAAMYhUAAAgHEIFAAAYBwCBQAAGIdAAdCtFi9erJtuuqlbbnv79u2Ki4tTS0tLl93m8ePHFRcXpwMHDnTZbQKIHoECwHbfffcpLi7uom3ixImxHg1AH8O3GQOIMHHiRK1fvz5in8PhiNE0H+/cuXOxHgFAN+IZFAARHA6HPB5PxHbhU13j4uK0bt06TZkyRQMGDFBubq58Pp+OHj2q22+/XSkpKfryl7+sd99996LbXbdunbKysjRgwADdfffdCgaD9rG9e/dq/PjxGjx4sFwul7761a9q//79EdePi4vTmjVr9PWvf10pKSmX/M6oDz/8UJMmTdKtt95q/9rnF7/4hXJzc5WcnKycnBytXr064jp79uzRzTffrOTkZOXn5+vNN9/s7F8hgC5AoACIyqOPPqp7771XBw4cUE5Ojr7zne/o+9//vqqqqrRv3z5ZlqWKioqI6xw9elS//e1vtWXLFlVXV+vNN9/UD37wA/v46dOnVVZWptdff127du3S8OHDNXnyZJ0+fTridhYvXqxvfvObqq+v1/e+972IYy0tLRo/frw6OjpUU1OjtLQ0PfPMM1q4cKEee+wxHT58WI8//rgefvhhbdy4UZJ05swZTZkyRXl5eaqrq9PixYv1wAMPdNPfHICoWADw/5WVlVkJCQlWSkpKxPbYY49ZlmVZkqwFCxbY630+nyXJevrpp+19//Ef/2ElJyfblxctWmQlJCRY7733nr1v27ZtVnx8vPX+++9fco729nZr4MCB1pYtW+x9kqw5c+ZErPvd735nSbIOHz5sjRw50iopKbHC4bB9/Atf+IK1adOmiOs8+uijltfrtSzLstatW2cNGjTI+stf/mIfX7NmjSXJevPNNz/17wtA9+E1KAAi3HHHHVqzZk3Evr/+/pGRI0faf3a73ZKkESNGROxrbW1VKBSS0+mUJA0dOlSf+9zn7DVer1cdHR06cuSIPB6PAoGAFi
xYoO3bt6u5uVnt7e368MMP1dDQEDFHfn7+JWceP368vvSlL+k3v/mN/S3dZ8+e1bvvvqvp06drxowZ9trz58/L5XJJkg4fPqyRI0cqOTk5YjYAsUegAIiQkpLyid/U2q9fP/vPcXFxH7uvo6PjM99nWVmZ/vznP+tnP/uZhg0bJofDIa/Xq7a2totmu5Ti4mL953/+pw4dOmTH0pkzZyRJTz31lMaMGROx/kLEADAXgQKg2zU0NKipqUmZmZmSpF27dik+Pl7XX3+9JOmNN97Q6tWrNXnyZElSY2OjPvjgg898+8uWLVNqaqrGjRun7du3Ky8vT263W5mZmfrjH/+o0tLSS14vNzdXv/71r9Xa2mo/i7Jr167OnCqALkKgAIgQDofl9/sj9iUmJmrw4MGXfZvJyckqKyvTE088oVAopNmzZ+vuu++Wx+ORJA0fPly//vWvlZ+fr1AopHnz5ql///5R3ccTTzyh9vZ2jR07Vtu3b1dOTo4eeeQRzZ49Wy6XSxMnTlQ4HNa+fft06tQpVVZW6jvf+Y4eeughzZgxQ1VVVTp+/LieeOKJyz5PAF2Hd/EAiFBdXa0hQ4ZEbLfddlunbvO6667T1KlTNXnyZE2YMEEjR46MeLvv008/rVOnTmnUqFGaNm2aZs+erYyMjKjvZ8WKFbr77rs1duxY/eEPf9A//MM/6Be/+IXWr1+vESNG6Ktf/ao2bNig7OxsSVJqaqq2bNmi+vp63XzzzXrooYf04x//uFPnCqBrxFmWZcV6CAAAgL/GMygAAMA4BAoAADAOgQIAAIxDoAAAAOMQKAAAwDgECgAAMA6BAgAAjEOgAAAA4xAoAADAOAQKAAAwDoECAACM8/8A7glORwdGc0UAAAAASUVORK5CYII=",
|
||
"text/plain": [
|
||
"<Figure size 640x480 with 1 Axes>"
|
||
]
|
||
},
|
||
"metadata": {},
|
||
"output_type": "display_data"
|
||
}
|
||
],
|
||
"source": [
|
||
"# Passenger counts per port of embarkation; this categorical column\n",
"# will later be converted to a one-hot encoding.\n",
"embarked = df.value_counts(\"Embarked\")\n",
"embarked.plot.bar()"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 51,
|
||
"id": "8286046e",
|
||
"metadata": {
|
||
"scrolled": true
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"# df.dropna()\n",
|
||
"#df.fillna()"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 229,
|
||
"id": "1ed8c693",
|
||
"metadata": {},
|
||
"outputs": [],
|
||
"source": [
|
||
"# Min-max scale the numeric columns to [0, 1].\n",
"columns_to_normalize = ['Age', 'Fare']\n",
"for colname in columns_to_normalize:\n",
"    col_min, col_max = df[colname].min(), df[colname].max()\n",
"    # Guard against a constant column: max == min would divide by zero.\n",
"    if col_max > col_min:\n",
"        df[colname] = (df[colname] - col_min) / (col_max - col_min)"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 230,
|
||
"id": "d5a0fa72",
|
||
"metadata": {},
|
||
"outputs": [
|
||
{
|
||
"data": {
|
||
"text/html": [
|
||
"<div>\n",
|
||
"<style scoped>\n",
|
||
" .dataframe tbody tr th:only-of-type {\n",
|
||
" vertical-align: middle;\n",
|
||
" }\n",
|
||
"\n",
|
||
" .dataframe tbody tr th {\n",
|
||
" vertical-align: top;\n",
|
||
" }\n",
|
||
"\n",
|
||
" .dataframe thead th {\n",
|
||
" text-align: right;\n",
|
||
" }\n",
|
||
"</style>\n",
|
||
"<table border=\"1\" class=\"dataframe\">\n",
|
||
" <thead>\n",
|
||
" <tr style=\"text-align: right;\">\n",
|
||
" <th></th>\n",
|
||
" <th>PassengerId</th>\n",
|
||
" <th>Survived</th>\n",
|
||
" <th>Pclass</th>\n",
|
||
" <th>Name</th>\n",
|
||
" <th>Sex</th>\n",
|
||
" <th>Age</th>\n",
|
||
" <th>SibSp</th>\n",
|
||
" <th>Parch</th>\n",
|
||
" <th>Ticket</th>\n",
|
||
" <th>Fare</th>\n",
|
||
" <th>Cabin</th>\n",
|
||
" <th>Embarked</th>\n",
|
||
" </tr>\n",
|
||
" </thead>\n",
|
||
" <tbody>\n",
|
||
" <tr>\n",
|
||
" <th>0</th>\n",
|
||
" <td>1</td>\n",
|
||
" <td>0</td>\n",
|
||
" <td>3</td>\n",
|
||
" <td>Braund, Mr. Owen Harris</td>\n",
|
||
" <td>male</td>\n",
|
||
" <td>0.271174</td>\n",
|
||
" <td>1</td>\n",
|
||
" <td>0</td>\n",
|
||
" <td>A/5 21171</td>\n",
|
||
" <td>0.014151</td>\n",
|
||
" <td>NaN</td>\n",
|
||
" <td>S</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>1</th>\n",
|
||
" <td>2</td>\n",
|
||
" <td>1</td>\n",
|
||
" <td>1</td>\n",
|
||
" <td>Cumings, Mrs. John Bradley (Florence Briggs Th...</td>\n",
|
||
" <td>female</td>\n",
|
||
" <td>0.472229</td>\n",
|
||
" <td>1</td>\n",
|
||
" <td>0</td>\n",
|
||
" <td>PC 17599</td>\n",
|
||
" <td>0.139136</td>\n",
|
||
" <td>C85</td>\n",
|
||
" <td>C</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>2</th>\n",
|
||
" <td>3</td>\n",
|
||
" <td>1</td>\n",
|
||
" <td>3</td>\n",
|
||
" <td>Heikkinen, Miss. Laina</td>\n",
|
||
" <td>female</td>\n",
|
||
" <td>0.321438</td>\n",
|
||
" <td>0</td>\n",
|
||
" <td>0</td>\n",
|
||
" <td>STON/O2. 3101282</td>\n",
|
||
" <td>0.015469</td>\n",
|
||
" <td>NaN</td>\n",
|
||
" <td>S</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>3</th>\n",
|
||
" <td>4</td>\n",
|
||
" <td>1</td>\n",
|
||
" <td>1</td>\n",
|
||
" <td>Futrelle, Mrs. Jacques Heath (Lily May Peel)</td>\n",
|
||
" <td>female</td>\n",
|
||
" <td>0.434531</td>\n",
|
||
" <td>1</td>\n",
|
||
" <td>0</td>\n",
|
||
" <td>113803</td>\n",
|
||
" <td>0.103644</td>\n",
|
||
" <td>C123</td>\n",
|
||
" <td>S</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>4</th>\n",
|
||
" <td>5</td>\n",
|
||
" <td>0</td>\n",
|
||
" <td>3</td>\n",
|
||
" <td>Allen, Mr. William Henry</td>\n",
|
||
" <td>male</td>\n",
|
||
" <td>0.434531</td>\n",
|
||
" <td>0</td>\n",
|
||
" <td>0</td>\n",
|
||
" <td>373450</td>\n",
|
||
" <td>0.015713</td>\n",
|
||
" <td>NaN</td>\n",
|
||
" <td>S</td>\n",
|
||
" </tr>\n",
|
||
" </tbody>\n",
|
||
"</table>\n",
|
||
"</div>"
|
||
],
|
||
"text/plain": [
|
||
" PassengerId Survived Pclass \\\n",
|
||
"0 1 0 3 \n",
|
||
"1 2 1 1 \n",
|
||
"2 3 1 3 \n",
|
||
"3 4 1 1 \n",
|
||
"4 5 0 3 \n",
|
||
"\n",
|
||
" Name Sex Age SibSp \\\n",
|
||
"0 Braund, Mr. Owen Harris male 0.271174 1 \n",
|
||
"1 Cumings, Mrs. John Bradley (Florence Briggs Th... female 0.472229 1 \n",
|
||
"2 Heikkinen, Miss. Laina female 0.321438 0 \n",
|
||
"3 Futrelle, Mrs. Jacques Heath (Lily May Peel) female 0.434531 1 \n",
|
||
"4 Allen, Mr. William Henry male 0.434531 0 \n",
|
||
"\n",
|
||
" Parch Ticket Fare Cabin Embarked \n",
|
||
"0 0 A/5 21171 0.014151 NaN S \n",
|
||
"1 0 PC 17599 0.139136 C85 C \n",
|
||
"2 0 STON/O2. 3101282 0.015469 NaN S \n",
|
||
"3 0 113803 0.103644 C123 S \n",
|
||
"4 0 373450 0.015713 NaN S "
|
||
]
|
||
},
|
||
"execution_count": 230,
|
||
"metadata": {},
|
||
"output_type": "execute_result"
|
||
}
|
||
],
|
||
"source": [
|
||
"df.head(5)"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 52,
|
||
"id": "e6ffda37",
|
||
"metadata": {},
|
||
"outputs": [],
|
||
"source": [
|
||
"import pandas as pd\n",
|
||
"df = pd.read_csv(\"train.csv\")\n"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": null,
|
||
"id": "9f7c33a0",
|
||
"metadata": {},
|
||
"outputs": [],
|
||
"source": [
|
||
"# e19191c5.uam.onmicrosoft.com@emea.teams.ms"
|
||
]
|
||
},
|
||
{
|
||
"attachments": {},
|
||
"cell_type": "markdown",
|
||
"id": "54dd7eaa",
|
||
"metadata": {},
|
||
"source": [
|
||
"## lab 5 ml"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 231,
|
||
"id": "ec55ac92",
|
||
"metadata": {},
|
||
"outputs": [
|
||
{
|
||
"name": "stdout",
|
||
"output_type": "stream",
|
||
"text": [
|
||
"Index(['PassengerId', 'Survived', 'Pclass', 'Name', 'Sex', 'Age', 'SibSp',\n",
|
||
" 'Parch', 'Ticket', 'Fare', 'Cabin', 'Embarked'],\n",
|
||
" dtype='object')\n"
|
||
]
|
||
}
|
||
],
|
||
"source": [
|
||
"#data\n",
|
||
"cols = df.columns\n",
|
||
"print(cols)"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": null,
|
||
"id": "40225042",
|
||
"metadata": {},
|
||
"outputs": [],
|
||
"source": []
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 232,
|
||
"id": "11850862",
|
||
"metadata": {},
|
||
"outputs": [],
|
||
"source": [
|
||
"import numpy as np\n",
|
||
"import torch\n",
|
||
"from torch import nn\n",
|
||
"from torch.autograd import Variable\n",
|
||
"from sklearn.datasets import load_iris\n",
|
||
"from sklearn.model_selection import train_test_split\n",
|
||
"from sklearn.metrics import accuracy_score\n",
|
||
"from keras.utils import to_categorical\n",
|
||
"import torch.nn.functional as F"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 259,
|
||
"id": "cfecc11c",
|
||
"metadata": {},
|
||
"outputs": [],
|
||
"source": [
|
||
"class Model(nn.Module):\n",
"    \"\"\"Simple 3-layer MLP classifier: input_dim -> 50 -> 20 -> 2.\"\"\"\n",
"\n",
"    def __init__(self, input_dim):\n",
"        super(Model, self).__init__()\n",
"        self.layer1 = nn.Linear(input_dim, 50)\n",
"        self.layer2 = nn.Linear(50, 20)\n",
"        self.layer3 = nn.Linear(20, 2)\n",
"\n",
"    def forward(self, x):\n",
"        x = F.relu(self.layer1(x))\n",
"        x = F.relu(self.layer2(x))\n",
"        # dim=1 made explicit: the implicit-dim form is deprecated in torch\n",
"        # and dim=1 is what torch infers for 2-D input, so behavior is unchanged.\n",
"        # NOTE(review): this softmax output is fed to nn.CrossEntropyLoss below,\n",
"        # which applies log-softmax itself; returning raw logits would be the\n",
"        # conventional fix, but that changes the forward() contract.\n",
"        x = F.softmax(self.layer3(x), dim=1)\n",
"        return x"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 235,
|
||
"id": "0af12074",
|
||
"metadata": {},
|
||
"outputs": [
|
||
{
|
||
"name": "stderr",
|
||
"output_type": "stream",
|
||
"text": [
|
||
"/tmp/ipykernel_7802/1323642195.py:6: SettingWithCopyWarning: \n",
|
||
"A value is trying to be set on a copy of a slice from a DataFrame\n",
|
||
"\n",
|
||
"See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy\n",
|
||
" X['Sex'].replace(['female', 'male'], [0,1], inplace=True)\n"
|
||
]
|
||
},
|
||
{
|
||
"data": {
|
||
"text/html": [
|
||
"<div>\n",
|
||
"<style scoped>\n",
|
||
" .dataframe tbody tr th:only-of-type {\n",
|
||
" vertical-align: middle;\n",
|
||
" }\n",
|
||
"\n",
|
||
" .dataframe tbody tr th {\n",
|
||
" vertical-align: top;\n",
|
||
" }\n",
|
||
"\n",
|
||
" .dataframe thead th {\n",
|
||
" text-align: right;\n",
|
||
" }\n",
|
||
"</style>\n",
|
||
"<table border=\"1\" class=\"dataframe\">\n",
|
||
" <thead>\n",
|
||
" <tr style=\"text-align: right;\">\n",
|
||
" <th></th>\n",
|
||
" <th>Pclass</th>\n",
|
||
" <th>Sex</th>\n",
|
||
" <th>Age</th>\n",
|
||
" <th>SibSp</th>\n",
|
||
" <th>Fare</th>\n",
|
||
" </tr>\n",
|
||
" </thead>\n",
|
||
" <tbody>\n",
|
||
" <tr>\n",
|
||
" <th>1</th>\n",
|
||
" <td>1</td>\n",
|
||
" <td>0</td>\n",
|
||
" <td>0.472229</td>\n",
|
||
" <td>1</td>\n",
|
||
" <td>0.139136</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>3</th>\n",
|
||
" <td>1</td>\n",
|
||
" <td>0</td>\n",
|
||
" <td>0.434531</td>\n",
|
||
" <td>1</td>\n",
|
||
" <td>0.103644</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>6</th>\n",
|
||
" <td>1</td>\n",
|
||
" <td>1</td>\n",
|
||
" <td>0.673285</td>\n",
|
||
" <td>0</td>\n",
|
||
" <td>0.101229</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>10</th>\n",
|
||
" <td>3</td>\n",
|
||
" <td>0</td>\n",
|
||
" <td>0.044986</td>\n",
|
||
" <td>1</td>\n",
|
||
" <td>0.032596</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>11</th>\n",
|
||
" <td>1</td>\n",
|
||
" <td>0</td>\n",
|
||
" <td>0.723549</td>\n",
|
||
" <td>0</td>\n",
|
||
" <td>0.051822</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>...</th>\n",
|
||
" <td>...</td>\n",
|
||
" <td>...</td>\n",
|
||
" <td>...</td>\n",
|
||
" <td>...</td>\n",
|
||
" <td>...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>871</th>\n",
|
||
" <td>1</td>\n",
|
||
" <td>0</td>\n",
|
||
" <td>0.585323</td>\n",
|
||
" <td>1</td>\n",
|
||
" <td>0.102579</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>872</th>\n",
|
||
" <td>1</td>\n",
|
||
" <td>1</td>\n",
|
||
" <td>0.409399</td>\n",
|
||
" <td>0</td>\n",
|
||
" <td>0.009759</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>879</th>\n",
|
||
" <td>1</td>\n",
|
||
" <td>0</td>\n",
|
||
" <td>0.698417</td>\n",
|
||
" <td>0</td>\n",
|
||
" <td>0.162314</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>887</th>\n",
|
||
" <td>1</td>\n",
|
||
" <td>0</td>\n",
|
||
" <td>0.233476</td>\n",
|
||
" <td>0</td>\n",
|
||
" <td>0.058556</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>889</th>\n",
|
||
" <td>1</td>\n",
|
||
" <td>1</td>\n",
|
||
" <td>0.321438</td>\n",
|
||
" <td>0</td>\n",
|
||
" <td>0.058556</td>\n",
|
||
" </tr>\n",
|
||
" </tbody>\n",
|
||
"</table>\n",
|
||
"<p>183 rows × 5 columns</p>\n",
|
||
"</div>"
|
||
],
|
||
"text/plain": [
|
||
" Pclass Sex Age SibSp Fare\n",
|
||
"1 1 0 0.472229 1 0.139136\n",
|
||
"3 1 0 0.434531 1 0.103644\n",
|
||
"6 1 1 0.673285 0 0.101229\n",
|
||
"10 3 0 0.044986 1 0.032596\n",
|
||
"11 1 0 0.723549 0 0.051822\n",
|
||
".. ... ... ... ... ...\n",
|
||
"871 1 0 0.585323 1 0.102579\n",
|
||
"872 1 1 0.409399 0 0.009759\n",
|
||
"879 1 0 0.698417 0 0.162314\n",
|
||
"887 1 0 0.233476 0 0.058556\n",
|
||
"889 1 1 0.321438 0 0.058556\n",
|
||
"\n",
|
||
"[183 rows x 5 columns]"
|
||
]
|
||
},
|
||
"execution_count": 235,
|
||
"metadata": {},
|
||
"output_type": "execute_result"
|
||
}
|
||
],
|
||
"source": [
|
||
"df = df.dropna()\n",
"# .copy() gives X its own data so the assignment below is not a chained\n",
"# write into a slice of df (the original replace(..., inplace=True) raised\n",
"# SettingWithCopyWarning); the resulting values are identical.\n",
"X = df[['Pclass', 'Sex', 'Age', 'SibSp', 'Fare']].copy()\n",
"Y = df[['Survived']]\n",
"\n",
"X.loc[:, 'Sex'] = X['Sex'].replace(['female', 'male'], [0, 1])\n",
"\n",
"X"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 236,
|
||
"id": "591bfb44",
|
||
"metadata": {},
|
||
"outputs": [
|
||
{
|
||
"name": "stdout",
|
||
"output_type": "stream",
|
||
"text": [
|
||
"[1 1 0 1 1 1 1 0 1 0 0 1 0 1 0 0 1 0 0 0 1 0 1 0 0 0 1 0 0 0 1 1 1 1 0 1 1\n",
|
||
" 1 1 1 0 1 0 0 1 0 0 1 1 0 1 1 0 0 1 1 1 1 1 1 1 1 1 1 1 0 0 0 1 0 1 1 1 1\n",
|
||
" 1 1 1 0 1 1 1 1 1 1 0 1 0 1 1 0 1 0 1 0 1 1 1 0 0 1 0 1 0 1 0 1 1 1 0 1 1\n",
|
||
" 0 1 1 1 1 1 1 1 0 1 1 1 1 1 1 1 0 1 1 1 1 1 1 0 0 0 1 1 1 1 0 0 1 1 1 1 1\n",
|
||
" 0 1 1 1 1 1 0 1 0 0 1 1 1 1 0 1 1 0 0 1 1 0 1 1 1 1 1 1 1 0 1 0 1 1 1]\n"
|
||
]
|
||
}
|
||
],
|
||
"source": [
|
||
"from sklearn.preprocessing import LabelEncoder\n",
"# Flatten the (n, 1) target frame to a 1-D vector and integer-encode it.\n",
"Y = np.ravel(Y)\n",
"Y = LabelEncoder().fit_transform(Y)\n",
"print(Y)"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 237,
|
||
"id": "8a7cac39",
|
||
"metadata": {},
|
||
"outputs": [],
|
||
"source": [
|
||
"X_train, X_test, Y_train, Y_test = train_test_split(X,Y, random_state=42, shuffle=True)"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 260,
|
||
"id": "93454e63",
|
||
"metadata": {
|
||
"scrolled": false
|
||
},
|
||
"outputs": [],
|
||
"source": [
|
||
"# Convert training data to torch tensors; CrossEntropyLoss expects\n",
"# integer class labels, hence dtype=torch.long for the targets.\n",
"Xt = torch.tensor(X_train.values, dtype=torch.float32)\n",
"Yt = torch.tensor(Y_train, dtype=torch.long)"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 261,
|
||
"id": "3aac198b",
|
||
"metadata": {
|
||
"scrolled": true
|
||
},
|
||
"outputs": [
|
||
{
|
||
"data": {
|
||
"text/plain": [
|
||
"torch.Size([137])"
|
||
]
|
||
},
|
||
"execution_count": 261,
|
||
"metadata": {},
|
||
"output_type": "execute_result"
|
||
}
|
||
],
|
||
"source": [
|
||
"Yt.shape"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 262,
|
||
"id": "27591bf8",
|
||
"metadata": {},
|
||
"outputs": [],
|
||
"source": [
|
||
"# Instantiate the network sized to the feature count, with Adam + CE loss.\n",
"model = Model(Xt.shape[1])\n",
"optimizer = torch.optim.Adam(model.parameters(), lr=0.01)\n",
"loss_fn = nn.CrossEntropyLoss()\n",
"epochs = 500\n",
"\n",
"\n",
"def print_(loss):\n",
"    # Report the loss value for the current training epoch.\n",
"    print(\"The loss calculated: \", loss)\n"
|
||
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 263,
|
||
"id": "9d700f25",
|
||
"metadata": {},
|
||
"outputs": [
|
||
{
|
||
"name": "stdout",
|
||
"output_type": "stream",
|
||
"text": [
|
||
"Epoch # 1\n",
|
||
"The loss calculated: 0.6927047371864319\n",
|
||
"Epoch # 2\n",
|
||
"The loss calculated: 0.6760580539703369\n",
|
||
"Epoch # 3\n",
|
||
"The loss calculated: 0.6577760577201843\n",
|
||
"Epoch # 4\n",
|
||
"The loss calculated: 0.6410418152809143\n",
|
||
"Epoch # 5\n",
|
||
"The loss calculated: 0.6274042725563049\n",
|
||
"Epoch # 6\n",
|
||
"The loss calculated: 0.6176177263259888\n",
|
||
"Epoch # 7\n",
|
||
"The loss calculated: 0.6114543676376343\n",
|
||
"Epoch # 8\n",
|
||
"The loss calculated: 0.6079199314117432\n",
|
||
"Epoch # 9\n",
|
||
"The loss calculated: 0.6057404279708862\n",
|
||
"Epoch # 10\n",
|
||
"The loss calculated: 0.6039658188819885\n",
|
||
"Epoch # 11\n",
|
||
"The loss calculated: 0.6018784046173096\n",
|
||
"Epoch # 12\n",
|
||
"The loss calculated: 0.5988859534263611\n",
|
||
"Epoch # 13\n",
|
||
"The loss calculated: 0.5944192409515381\n",
|
||
"Epoch # 14\n",
|
||
"The loss calculated: 0.58795166015625\n",
|
||
"Epoch # 15\n",
|
||
"The loss calculated: 0.5793240666389465\n",
|
||
"Epoch # 16\n",
|
||
"The loss calculated: 0.569113552570343\n",
|
||
"Epoch # 17\n",
|
||
"The loss calculated: 0.5591343641281128\n",
|
||
"Epoch # 18\n",
|
||
"The loss calculated: 0.5525994300842285\n",
|
||
"Epoch # 19\n",
|
||
"The loss calculated: 0.549091637134552\n",
|
||
"Epoch # 20\n",
|
||
"The loss calculated: 0.5478854775428772\n",
|
||
"Epoch # 21\n",
|
||
"The loss calculated: 0.5459576845169067\n",
|
||
"Epoch # 22\n",
|
||
"The loss calculated: 0.5430701971054077\n",
|
||
"Epoch # 23\n",
|
||
"The loss calculated: 0.5398197174072266\n",
|
||
"Epoch # 24\n",
|
||
"The loss calculated: 0.5366366505622864\n",
|
||
"Epoch # 25\n",
|
||
"The loss calculated: 0.5338087677955627\n",
|
||
"Epoch # 26\n",
|
||
"The loss calculated: 0.5315443873405457\n",
|
||
"Epoch # 27\n",
|
||
"The loss calculated: 0.5298702716827393\n",
|
||
"Epoch # 28\n",
|
||
"The loss calculated: 0.5285016894340515\n",
|
||
"Epoch # 29\n",
|
||
"The loss calculated: 0.5272928476333618\n",
|
||
"Epoch # 30\n",
|
||
"The loss calculated: 0.5261989235877991\n",
|
||
"Epoch # 31\n",
|
||
"The loss calculated: 0.5251137018203735\n",
|
||
"Epoch # 32\n",
|
||
"The loss calculated: 0.5238412618637085\n",
|
||
"Epoch # 33\n",
|
||
"The loss calculated: 0.5226505398750305\n",
|
||
"Epoch # 34\n",
|
||
"The loss calculated: 0.5215187072753906\n",
|
||
"Epoch # 35\n",
|
||
"The loss calculated: 0.5204036235809326\n",
|
||
"Epoch # 36\n",
|
||
"The loss calculated: 0.5194926857948303\n",
|
||
"Epoch # 37\n",
|
||
"The loss calculated: 0.5188320875167847\n",
|
||
"Epoch # 38\n",
|
||
"The loss calculated: 0.5182497501373291\n",
|
||
"Epoch # 39\n",
|
||
"The loss calculated: 0.5176616907119751\n",
|
||
"Epoch # 40\n",
|
||
"The loss calculated: 0.5170402526855469\n",
|
||
"Epoch # 41\n",
|
||
"The loss calculated: 0.5162948369979858\n",
|
||
"Epoch # 42\n",
|
||
"The loss calculated: 0.5155003070831299\n",
|
||
"Epoch # 43\n",
|
||
"The loss calculated: 0.51481693983078\n",
|
||
"Epoch # 44\n",
|
||
"The loss calculated: 0.5142836570739746\n",
|
||
"Epoch # 45\n",
|
||
"The loss calculated: 0.5137770771980286\n",
|
||
"Epoch # 46\n",
|
||
"The loss calculated: 0.5132609009742737\n",
|
||
"Epoch # 47\n",
|
||
"The loss calculated: 0.5126983523368835\n",
|
||
"Epoch # 48\n",
|
||
"The loss calculated: 0.5120936036109924\n",
|
||
"Epoch # 49\n",
|
||
"The loss calculated: 0.5116094350814819\n",
|
||
"Epoch # 50\n",
|
||
"The loss calculated: 0.5111839175224304\n",
|
||
"Epoch # 51\n",
|
||
"The loss calculated: 0.5106979608535767\n",
|
||
"Epoch # 52\n",
|
||
"The loss calculated: 0.5101208686828613\n",
|
||
"Epoch # 53\n",
|
||
"The loss calculated: 0.5095392465591431\n",
|
||
"Epoch # 54\n",
|
||
"The loss calculated: 0.5090041756629944\n",
|
||
"Epoch # 55\n",
|
||
"The loss calculated: 0.5083613395690918\n",
|
||
"Epoch # 56\n",
|
||
"The loss calculated: 0.5075969099998474\n",
|
||
"Epoch # 57\n",
|
||
"The loss calculated: 0.5067813992500305\n",
|
||
"Epoch # 58\n",
|
||
"The loss calculated: 0.5060149431228638\n",
|
||
"Epoch # 59\n",
|
||
"The loss calculated: 0.5052304863929749\n",
|
||
"Epoch # 60\n",
|
||
"The loss calculated: 0.5044183135032654\n",
|
||
"Epoch # 61\n",
|
||
"The loss calculated: 0.5035461187362671\n",
|
||
"Epoch # 62\n",
|
||
"The loss calculated: 0.5025045871734619\n",
|
||
"Epoch # 63\n",
|
||
"The loss calculated: 0.5014879107475281\n",
|
||
"Epoch # 64\n",
|
||
"The loss calculated: 0.5006436705589294\n",
|
||
"Epoch # 65\n",
|
||
"The loss calculated: 0.499641090631485\n",
|
||
"Epoch # 66\n",
|
||
"The loss calculated: 0.4986647367477417\n",
|
||
"Epoch # 67\n",
|
||
"The loss calculated: 0.497800350189209\n",
|
||
"Epoch # 68\n",
|
||
"The loss calculated: 0.49712076783180237\n",
|
||
"Epoch # 69\n",
|
||
"The loss calculated: 0.49643078446388245\n",
|
||
"Epoch # 70\n",
|
||
"The loss calculated: 0.4957447350025177\n",
|
||
"Epoch # 71\n",
|
||
"The loss calculated: 0.4950644075870514\n",
|
||
"Epoch # 72\n",
|
||
"The loss calculated: 0.4944438636302948\n",
|
||
"Epoch # 73\n",
|
||
"The loss calculated: 0.4937107563018799\n",
|
||
"Epoch # 74\n",
|
||
"The loss calculated: 0.49320393800735474\n",
|
||
"Epoch # 75\n",
|
||
"The loss calculated: 0.49250030517578125\n",
|
||
"Epoch # 76\n",
|
||
"The loss calculated: 0.49141865968704224\n",
|
||
"Epoch # 77\n",
|
||
"The loss calculated: 0.49071067571640015\n",
|
||
"Epoch # 78\n",
|
||
"The loss calculated: 0.4899919629096985\n",
|
||
"Epoch # 79\n",
|
||
"The loss calculated: 0.48904943466186523\n",
|
||
"Epoch # 80\n",
|
||
"The loss calculated: 0.4885300099849701\n",
|
||
"Epoch # 81\n",
|
||
"The loss calculated: 0.48774540424346924\n",
|
||
"Epoch # 82\n",
|
||
"The loss calculated: 0.48720788955688477\n",
|
||
"Epoch # 83\n",
|
||
"The loss calculated: 0.4868374466896057\n",
|
||
"Epoch # 84\n",
|
||
"The loss calculated: 0.48623406887054443\n",
|
||
"Epoch # 85\n",
|
||
"The loss calculated: 0.48583683371543884\n",
|
||
"Epoch # 86\n",
|
||
"The loss calculated: 0.48502254486083984\n",
|
||
"Epoch # 87\n",
|
||
"The loss calculated: 0.4844677746295929\n",
|
||
"Epoch # 88\n",
|
||
"The loss calculated: 0.48361340165138245\n",
|
||
"Epoch # 89\n",
|
||
"The loss calculated: 0.4827542304992676\n",
|
||
"Epoch # 90\n",
|
||
"The loss calculated: 0.4817808270454407\n",
|
||
"Epoch # 91\n",
|
||
"The loss calculated: 0.4809269607067108\n",
|
||
"Epoch # 92\n",
|
||
"The loss calculated: 0.4804893136024475\n",
|
||
"Epoch # 93\n",
|
||
"The loss calculated: 0.48043856024742126\n",
|
||
"Epoch # 94\n",
|
||
"The loss calculated: 0.4801830053329468\n",
|
||
"Epoch # 95\n",
|
||
"The loss calculated: 0.479977011680603\n",
|
||
"Epoch # 96\n",
|
||
"The loss calculated: 0.47945544123649597\n",
|
||
"Epoch # 97\n",
|
||
"The loss calculated: 0.47897064685821533\n",
|
||
"Epoch # 98\n",
|
||
"The loss calculated: 0.4786403775215149\n",
|
||
"Epoch # 99\n",
|
||
"The loss calculated: 0.47828078269958496\n",
|
||
"Epoch # 100\n",
|
||
"The loss calculated: 0.47804537415504456\n",
|
||
"Epoch # 101\n",
|
||
"The loss calculated: 0.4777425527572632\n",
|
||
"Epoch # 102\n",
|
||
"The loss calculated: 0.4773750603199005\n",
|
||
"Epoch # 103\n",
|
||
"The loss calculated: 0.4768853187561035\n",
|
||
"Epoch # 104\n",
|
||
"The loss calculated: 0.4766947627067566\n",
|
||
"Epoch # 105\n",
|
||
"The loss calculated: 0.47633618116378784\n",
|
||
"Epoch # 106\n",
|
||
"The loss calculated: 0.47610870003700256\n",
|
||
"Epoch # 107\n",
|
||
"The loss calculated: 0.47584590315818787\n",
|
||
"Epoch # 108\n",
|
||
"The loss calculated: 0.47565311193466187\n",
|
||
"Epoch # 109\n",
|
||
"The loss calculated: 0.475361168384552\n",
|
||
"Epoch # 110\n",
|
||
"The loss calculated: 0.475079208612442\n",
|
||
"Epoch # 111\n",
|
||
"The loss calculated: 0.47482433915138245\n",
|
||
"Epoch # 112\n",
|
||
"The loss calculated: 0.47465214133262634\n",
|
||
"Epoch # 113\n"
|
||
]
|
||
},
|
||
{
|
||
"name": "stderr",
|
||
"output_type": "stream",
|
||
"text": [
|
||
"/tmp/ipykernel_7802/3372075492.py:11: UserWarning: Implicit dimension choice for softmax has been deprecated. Change the call to include dim=X as an argument.\n",
|
||
" x = F.softmax(self.layer3(x))\n"
|
||
]
|
||
},
|
||
{
|
||
"name": "stdout",
|
||
"output_type": "stream",
|
||
"text": [
|
||
"The loss calculated: 0.4745003283023834\n",
|
||
"Epoch # 114\n",
|
||
"The loss calculated: 0.47428470849990845\n",
|
||
"Epoch # 115\n",
|
||
"The loss calculated: 0.47402113676071167\n",
|
||
"Epoch # 116\n",
|
||
"The loss calculated: 0.4738253355026245\n",
|
||
"Epoch # 117\n",
|
||
"The loss calculated: 0.47366538643836975\n",
|
||
"Epoch # 118\n",
|
||
"The loss calculated: 0.47345176339149475\n",
|
||
"Epoch # 119\n",
|
||
"The loss calculated: 0.47328999638557434\n",
|
||
"Epoch # 120\n",
|
||
"The loss calculated: 0.47304701805114746\n",
|
||
"Epoch # 121\n",
|
||
"The loss calculated: 0.47283679246902466\n",
|
||
"Epoch # 122\n",
|
||
"The loss calculated: 0.47269734740257263\n",
|
||
"Epoch # 123\n",
|
||
"The loss calculated: 0.47256502509117126\n",
|
||
"Epoch # 124\n",
|
||
"The loss calculated: 0.4723707437515259\n",
|
||
"Epoch # 125\n",
|
||
"The loss calculated: 0.4721546471118927\n",
|
||
"Epoch # 126\n",
|
||
"The loss calculated: 0.4719236493110657\n",
|
||
"Epoch # 127\n",
|
||
"The loss calculated: 0.4718014895915985\n",
|
||
"Epoch # 128\n",
|
||
"The loss calculated: 0.4715701937675476\n",
|
||
"Epoch # 129\n",
|
||
"The loss calculated: 0.47162505984306335\n",
|
||
"Epoch # 130\n",
|
||
"The loss calculated: 0.47140219807624817\n",
|
||
"Epoch # 131\n",
|
||
"The loss calculated: 0.47120794653892517\n",
|
||
"Epoch # 132\n",
|
||
"The loss calculated: 0.47121524810791016\n",
|
||
"Epoch # 133\n",
|
||
"The loss calculated: 0.4708421230316162\n",
|
||
"Epoch # 134\n",
|
||
"The loss calculated: 0.47080597281455994\n",
|
||
"Epoch # 135\n",
|
||
"The loss calculated: 0.470735102891922\n",
|
||
"Epoch # 136\n",
|
||
"The loss calculated: 0.47046154737472534\n",
|
||
"Epoch # 137\n",
|
||
"The loss calculated: 0.4704940617084503\n",
|
||
"Epoch # 138\n",
|
||
"The loss calculated: 0.4704982340335846\n",
|
||
"Epoch # 139\n",
|
||
"The loss calculated: 0.470112144947052\n",
|
||
"Epoch # 140\n",
|
||
"The loss calculated: 0.4701041877269745\n",
|
||
"Epoch # 141\n",
|
||
"The loss calculated: 0.47008904814720154\n",
|
||
"Epoch # 142\n",
|
||
"The loss calculated: 0.4698803722858429\n",
|
||
"Epoch # 143\n",
|
||
"The loss calculated: 0.46982747316360474\n",
|
||
"Epoch # 144\n",
|
||
"The loss calculated: 0.469696044921875\n",
|
||
"Epoch # 145\n",
|
||
"The loss calculated: 0.46962815523147583\n",
|
||
"Epoch # 146\n",
|
||
"The loss calculated: 0.469440758228302\n",
|
||
"Epoch # 147\n",
|
||
"The loss calculated: 0.46939632296562195\n",
|
||
"Epoch # 148\n",
|
||
"The loss calculated: 0.4695526957511902\n",
|
||
"Epoch # 149\n",
|
||
"The loss calculated: 0.4697006046772003\n",
|
||
"Epoch # 150\n",
|
||
"The loss calculated: 0.4692654609680176\n",
|
||
"Epoch # 151\n",
|
||
"The loss calculated: 0.4700072407722473\n",
|
||
"Epoch # 152\n",
|
||
"The loss calculated: 0.4690340757369995\n",
|
||
"Epoch # 153\n",
|
||
"The loss calculated: 0.47001826763153076\n",
|
||
"Epoch # 154\n",
|
||
"The loss calculated: 0.46880584955215454\n",
|
||
"Epoch # 155\n",
|
||
"The loss calculated: 0.46919724345207214\n",
|
||
"Epoch # 156\n",
|
||
"The loss calculated: 0.4687418043613434\n",
|
||
"Epoch # 157\n",
|
||
"The loss calculated: 0.4687948226928711\n",
|
||
"Epoch # 158\n",
|
||
"The loss calculated: 0.46873044967651367\n",
|
||
"Epoch # 159\n",
|
||
"The loss calculated: 0.46848490834236145\n",
|
||
"Epoch # 160\n",
|
||
"The loss calculated: 0.4686104953289032\n",
|
||
"Epoch # 161\n",
|
||
"The loss calculated: 0.4683172404766083\n",
|
||
"Epoch # 162\n",
|
||
"The loss calculated: 0.46831050515174866\n",
|
||
"Epoch # 163\n",
|
||
"The loss calculated: 0.46828699111938477\n",
|
||
"Epoch # 164\n",
|
||
"The loss calculated: 0.46824583411216736\n",
|
||
"Epoch # 165\n",
|
||
"The loss calculated: 0.468075156211853\n",
|
||
"Epoch # 166\n",
|
||
"The loss calculated: 0.46814292669296265\n",
|
||
"Epoch # 167\n",
|
||
"The loss calculated: 0.46796467900276184\n",
|
||
"Epoch # 168\n",
|
||
"The loss calculated: 0.46802079677581787\n",
|
||
"Epoch # 169\n",
|
||
"The loss calculated: 0.46778491139411926\n",
|
||
"Epoch # 170\n",
|
||
"The loss calculated: 0.4679405093193054\n",
|
||
"Epoch # 171\n",
|
||
"The loss calculated: 0.46800506114959717\n",
|
||
"Epoch # 172\n",
|
||
"The loss calculated: 0.467818945646286\n",
|
||
"Epoch # 173\n",
|
||
"The loss calculated: 0.4678487181663513\n",
|
||
"Epoch # 174\n",
|
||
"The loss calculated: 0.46776196360588074\n",
|
||
"Epoch # 175\n",
|
||
"The loss calculated: 0.46756404638290405\n",
|
||
"Epoch # 176\n",
|
||
"The loss calculated: 0.4682294726371765\n",
|
||
"Epoch # 177\n",
|
||
"The loss calculated: 0.46777990460395813\n",
|
||
"Epoch # 178\n",
|
||
"The loss calculated: 0.4677632451057434\n",
|
||
"Epoch # 179\n",
|
||
"The loss calculated: 0.46777427196502686\n",
|
||
"Epoch # 180\n",
|
||
"The loss calculated: 0.46746954321861267\n",
|
||
"Epoch # 181\n",
|
||
"The loss calculated: 0.4676474630832672\n",
|
||
"Epoch # 182\n",
|
||
"The loss calculated: 0.46711796522140503\n",
|
||
"Epoch # 183\n",
|
||
"The loss calculated: 0.4677950441837311\n",
|
||
"Epoch # 184\n",
|
||
"The loss calculated: 0.46725085377693176\n",
|
||
"Epoch # 185\n",
|
||
"The loss calculated: 0.4676659107208252\n",
|
||
"Epoch # 186\n",
|
||
"The loss calculated: 0.4672679901123047\n",
|
||
"Epoch # 187\n",
|
||
"The loss calculated: 0.46727195382118225\n",
|
||
"Epoch # 188\n",
|
||
"The loss calculated: 0.466960608959198\n",
|
||
"Epoch # 189\n",
|
||
"The loss calculated: 0.46708735823631287\n",
|
||
"Epoch # 190\n",
|
||
"The loss calculated: 0.4671291708946228\n",
|
||
"Epoch # 191\n",
|
||
"The loss calculated: 0.46684736013412476\n",
|
||
"Epoch # 192\n",
|
||
"The loss calculated: 0.4667331576347351\n",
|
||
"Epoch # 193\n",
|
||
"The loss calculated: 0.46685370802879333\n",
|
||
"Epoch # 194\n",
|
||
"The loss calculated: 0.4668591618537903\n",
|
||
"Epoch # 195\n",
|
||
"The loss calculated: 0.46671974658966064\n",
|
||
"Epoch # 196\n",
|
||
"The loss calculated: 0.46653658151626587\n",
|
||
"Epoch # 197\n",
|
||
"The loss calculated: 0.46659478545188904\n",
|
||
"Epoch # 198\n",
|
||
"The loss calculated: 0.4665440022945404\n",
|
||
"Epoch # 199\n",
|
||
"The loss calculated: 0.4664462208747864\n",
|
||
"Epoch # 200\n",
|
||
"The loss calculated: 0.466394305229187\n",
|
||
"Epoch # 201\n",
|
||
"The loss calculated: 0.4665300250053406\n",
|
||
"Epoch # 202\n",
|
||
"The loss calculated: 0.4664006531238556\n",
|
||
"Epoch # 203\n",
|
||
"The loss calculated: 0.46651187539100647\n",
|
||
"Epoch # 204\n",
|
||
"The loss calculated: 0.4662490487098694\n",
|
||
"Epoch # 205\n",
|
||
"The loss calculated: 0.46683457493782043\n",
|
||
"Epoch # 206\n",
|
||
"The loss calculated: 0.46636930108070374\n",
|
||
"Epoch # 207\n",
|
||
"The loss calculated: 0.4663969576358795\n",
|
||
"Epoch # 208\n",
|
||
"The loss calculated: 0.46641668677330017\n",
|
||
"Epoch # 209\n",
|
||
"The loss calculated: 0.46628400683403015\n",
|
||
"Epoch # 210\n",
|
||
"The loss calculated: 0.4664050042629242\n",
|
||
"Epoch # 211\n",
|
||
"The loss calculated: 0.4661887586116791\n",
|
||
"Epoch # 212\n",
|
||
"The loss calculated: 0.4660308063030243\n",
|
||
"Epoch # 213\n",
|
||
"The loss calculated: 0.4661027491092682\n",
|
||
"Epoch # 214\n",
|
||
"The loss calculated: 0.4660954177379608\n",
|
||
"Epoch # 215\n",
|
||
"The loss calculated: 0.4658938944339752\n",
|
||
"Epoch # 216\n",
|
||
"The loss calculated: 0.4660359025001526\n",
|
||
"Epoch # 217\n",
|
||
"The loss calculated: 0.46567121148109436\n",
|
||
"Epoch # 218\n",
|
||
"The loss calculated: 0.4657202959060669\n",
|
||
"Epoch # 219\n",
|
||
"The loss calculated: 0.4657045900821686\n",
|
||
"Epoch # 220\n",
|
||
"The loss calculated: 0.4655347168445587\n",
|
||
"Epoch # 221\n",
|
||
"The loss calculated: 0.4654804468154907\n",
|
||
"Epoch # 222\n",
|
||
"The loss calculated: 0.4656883180141449\n",
|
||
"Epoch # 223\n",
|
||
"The loss calculated: 0.46542859077453613\n",
|
||
"Epoch # 224\n",
|
||
"The loss calculated: 0.46529003977775574\n",
|
||
"Epoch # 225\n",
|
||
"The loss calculated: 0.46543607115745544\n",
|
||
"Epoch # 226\n",
|
||
"The loss calculated: 0.46531468629837036\n",
|
||
"Epoch # 227\n",
|
||
"The loss calculated: 0.4653342068195343\n",
|
||
"Epoch # 228\n",
|
||
"The loss calculated: 0.46527451276779175\n",
|
||
"Epoch # 229\n",
|
||
"The loss calculated: 0.4652668535709381\n",
|
||
"Epoch # 230\n",
|
||
"The loss calculated: 0.46513044834136963\n",
|
||
"Epoch # 231\n",
|
||
"The loss calculated: 0.4650672972202301\n",
|
||
"Epoch # 232\n",
|
||
"The loss calculated: 0.46511510014533997\n",
|
||
"Epoch # 233\n",
|
||
"The loss calculated: 0.4647628366947174\n",
|
||
"Epoch # 234\n",
|
||
"The loss calculated: 0.4647744596004486\n",
|
||
"Epoch # 235\n",
|
||
"The loss calculated: 0.4648566246032715\n",
|
||
"Epoch # 236\n",
|
||
"The loss calculated: 0.4646404981613159\n",
|
||
"Epoch # 237\n",
|
||
"The loss calculated: 0.4645318388938904\n",
|
||
"Epoch # 238\n",
|
||
"The loss calculated: 0.46459120512008667\n",
|
||
"Epoch # 239\n",
|
||
"The loss calculated: 0.46454647183418274\n",
|
||
"Epoch # 240\n",
|
||
"The loss calculated: 0.46439239382743835\n",
|
||
"Epoch # 241\n",
|
||
"The loss calculated: 0.464549720287323\n",
|
||
"Epoch # 242\n",
|
||
"The loss calculated: 0.4642981290817261\n",
|
||
"Epoch # 243\n",
|
||
"The loss calculated: 0.4640815258026123\n",
|
||
"Epoch # 244\n",
|
||
"The loss calculated: 0.4640815258026123\n",
|
||
"Epoch # 245\n",
|
||
"The loss calculated: 0.4638811945915222\n",
|
||
"Epoch # 246\n",
|
||
"The loss calculated: 0.46409285068511963\n",
|
||
"Epoch # 247\n",
|
||
"The loss calculated: 0.46399882435798645\n",
|
||
"Epoch # 248\n",
|
||
"The loss calculated: 0.4639054536819458\n",
|
||
"Epoch # 249\n",
|
||
"The loss calculated: 0.46384960412979126\n",
|
||
"Epoch # 250\n",
|
||
"The loss calculated: 0.46365633606910706\n",
|
||
"Epoch # 251\n",
|
||
"The loss calculated: 0.4635387361049652\n",
|
||
"Epoch # 252\n",
|
||
"The loss calculated: 0.46366339921951294\n",
|
||
"Epoch # 253\n",
|
||
"The loss calculated: 0.4635831415653229\n",
|
||
"Epoch # 254\n",
|
||
"The loss calculated: 0.46347707509994507\n",
|
||
"Epoch # 255\n",
|
||
"The loss calculated: 0.4633452892303467\n",
|
||
"Epoch # 256\n",
|
||
"The loss calculated: 0.4634377658367157\n",
|
||
"Epoch # 257\n",
|
||
"The loss calculated: 0.46325498819351196\n",
|
||
"Epoch # 258\n",
|
||
"The loss calculated: 0.46343502402305603\n",
|
||
"Epoch # 259\n",
|
||
"The loss calculated: 0.46319177746772766\n",
|
||
"Epoch # 260\n",
|
||
"The loss calculated: 0.4631631076335907\n",
|
||
"Epoch # 261\n",
|
||
"The loss calculated: 0.4630383253097534\n",
|
||
"Epoch # 262\n",
|
||
"The loss calculated: 0.4629758596420288\n",
|
||
"Epoch # 263\n",
|
||
"The loss calculated: 0.46284860372543335\n",
|
||
"Epoch # 264\n",
|
||
"The loss calculated: 0.46269962191581726\n",
|
||
"Epoch # 265\n",
|
||
"The loss calculated: 0.4628857374191284\n",
|
||
"Epoch # 266\n",
|
||
"The loss calculated: 0.4627268314361572\n",
|
||
"Epoch # 267\n",
|
||
"The loss calculated: 0.46238410472869873\n",
|
||
"Epoch # 268\n",
|
||
"The loss calculated: 0.4622679352760315\n",
|
||
"Epoch # 269\n",
|
||
"The loss calculated: 0.46253955364227295\n",
|
||
"Epoch # 270\n",
|
||
"The loss calculated: 0.46243607997894287\n",
|
||
"Epoch # 271\n",
|
||
"The loss calculated: 0.4622651934623718\n",
|
||
"Epoch # 272\n",
|
||
"The loss calculated: 0.4621260166168213\n",
|
||
"Epoch # 273\n",
|
||
"The loss calculated: 0.4619852304458618\n",
|
||
"Epoch # 274\n",
|
||
"The loss calculated: 0.4621600806713104\n",
|
||
"Epoch # 275\n",
|
||
"The loss calculated: 0.46188268065452576\n",
|
||
"Epoch # 276\n",
|
||
"The loss calculated: 0.4619770050048828\n",
|
||
"Epoch # 277\n",
|
||
"The loss calculated: 0.4617985486984253\n",
|
||
"Epoch # 278\n",
|
||
"The loss calculated: 0.46143385767936707\n",
|
||
"Epoch # 279\n",
|
||
"The loss calculated: 0.4618164002895355\n",
|
||
"Epoch # 280\n",
|
||
"The loss calculated: 0.461500883102417\n",
|
||
"Epoch # 281\n",
|
||
"The loss calculated: 0.4614565372467041\n",
|
||
"Epoch # 282\n",
|
||
"The loss calculated: 0.4613018035888672\n",
|
||
"Epoch # 283\n",
|
||
"The loss calculated: 0.4612286388874054\n",
|
||
"Epoch # 284\n",
|
||
"The loss calculated: 0.4610031545162201\n",
|
||
"Epoch # 285\n",
|
||
"The loss calculated: 0.4609623849391937\n",
|
||
"Epoch # 286\n",
|
||
"The loss calculated: 0.4608198404312134\n",
|
||
"Epoch # 287\n",
|
||
"The loss calculated: 0.46074378490448\n",
|
||
"Epoch # 288\n",
|
||
"The loss calculated: 0.46068280935287476\n",
|
||
"Epoch # 289\n",
|
||
"The loss calculated: 0.46061643958091736\n",
|
||
"Epoch # 290\n",
|
||
"The loss calculated: 0.4604104459285736\n",
|
||
"Epoch # 291\n",
|
||
"The loss calculated: 0.4607124626636505\n",
|
||
"Epoch # 292\n",
|
||
"The loss calculated: 0.4607458710670471\n",
|
||
"Epoch # 293\n",
|
||
"The loss calculated: 0.4601185619831085\n",
|
||
"Epoch # 294\n",
|
||
"The loss calculated: 0.460267573595047\n",
|
||
"Epoch # 295\n",
|
||
"The loss calculated: 0.4605766832828522\n",
|
||
"Epoch # 296\n",
|
||
"The loss calculated: 0.46028855443000793\n",
|
||
"Epoch # 297\n",
|
||
"The loss calculated: 0.4599803388118744\n",
|
||
"Epoch # 298\n",
|
||
"The loss calculated: 0.4600617587566376\n",
|
||
"Epoch # 299\n",
|
||
"The loss calculated: 0.46000462770462036\n",
|
||
"Epoch # 300\n",
|
||
"The loss calculated: 0.4595383405685425\n",
|
||
"Epoch # 301\n",
|
||
"The loss calculated: 0.4598424732685089\n",
|
||
"Epoch # 302\n",
|
||
"The loss calculated: 0.4597552418708801\n",
|
||
"Epoch # 303\n",
|
||
"The loss calculated: 0.45939505100250244\n",
|
||
"Epoch # 304\n",
|
||
"The loss calculated: 0.459394633769989\n",
|
||
"Epoch # 305\n",
|
||
"The loss calculated: 0.4592142403125763\n",
|
||
"Epoch # 306\n",
|
||
"The loss calculated: 0.4591156244277954\n",
|
||
"Epoch # 307\n",
|
||
"The loss calculated: 0.4590142071247101\n",
|
||
"Epoch # 308\n",
|
||
"The loss calculated: 0.45902881026268005\n",
|
||
"Epoch # 309\n",
|
||
"The loss calculated: 0.4590888023376465\n",
|
||
"Epoch # 310\n",
|
||
"The loss calculated: 0.45860469341278076\n",
|
||
"Epoch # 311\n",
|
||
"The loss calculated: 0.45852038264274597\n",
|
||
"Epoch # 312\n",
|
||
"The loss calculated: 0.4585433900356293\n",
|
||
"Epoch # 313\n",
|
||
"The loss calculated: 0.4586207866668701\n",
|
||
"Epoch # 314\n",
|
||
"The loss calculated: 0.45869746804237366\n",
|
||
"Epoch # 315\n",
|
||
"The loss calculated: 0.4585130214691162\n",
|
||
"Epoch # 316\n",
|
||
"The loss calculated: 0.45780810713768005\n",
|
||
"Epoch # 317\n",
|
||
"The loss calculated: 0.4584527313709259\n",
|
||
"Epoch # 318\n",
|
||
"The loss calculated: 0.4584985375404358\n",
|
||
"Epoch # 319\n",
|
||
"The loss calculated: 0.4577976167201996\n",
|
||
"Epoch # 320\n",
|
||
"The loss calculated: 0.4578183591365814\n",
|
||
"Epoch # 321\n",
|
||
"The loss calculated: 0.45760011672973633\n",
|
||
"Epoch # 322\n",
|
||
"The loss calculated: 0.4573518931865692\n",
|
||
"Epoch # 323\n",
|
||
"The loss calculated: 0.45755714178085327\n",
|
||
"Epoch # 324\n",
|
||
"The loss calculated: 0.4574785828590393\n",
|
||
"Epoch # 325\n",
|
||
"The loss calculated: 0.4572897255420685\n",
|
||
"Epoch # 326\n",
|
||
"The loss calculated: 0.45682093501091003\n",
|
||
"Epoch # 327\n",
|
||
"The loss calculated: 0.4571937322616577\n",
|
||
"Epoch # 328\n",
|
||
"The loss calculated: 0.45755869150161743\n",
|
||
"Epoch # 329\n",
|
||
"The loss calculated: 0.45663607120513916\n",
|
||
"Epoch # 330\n",
|
||
"The loss calculated: 0.4570084810256958\n",
|
||
"Epoch # 331\n",
|
||
"The loss calculated: 0.45761099457740784\n",
|
||
"Epoch # 332\n",
|
||
"The loss calculated: 0.456558495759964\n",
|
||
"Epoch # 333\n",
|
||
"The loss calculated: 0.45620036125183105\n",
|
||
"Epoch # 334\n",
|
||
"The loss calculated: 0.4563443958759308\n",
|
||
"Epoch # 335\n",
|
||
"The loss calculated: 0.45647644996643066\n",
|
||
"Epoch # 336\n",
|
||
"The loss calculated: 0.45592716336250305\n",
|
||
"Epoch # 337\n",
|
||
"The loss calculated: 0.455634742975235\n",
|
||
"Epoch # 338\n",
|
||
"The loss calculated: 0.4558946192264557\n",
|
||
"Epoch # 339\n",
|
||
"The loss calculated: 0.45598289370536804\n",
|
||
"Epoch # 340\n",
|
||
"The loss calculated: 0.4554951786994934\n",
|
||
"Epoch # 341\n",
|
||
"The loss calculated: 0.4554195702075958\n",
|
||
"Epoch # 342\n",
|
||
"The loss calculated: 0.4554871618747711\n",
|
||
"Epoch # 343\n",
|
||
"The loss calculated: 0.4549509584903717\n",
|
||
"Epoch # 344\n",
|
||
"The loss calculated: 0.4548693597316742\n",
|
||
"Epoch # 345\n",
|
||
"The loss calculated: 0.4558226466178894\n",
|
||
"Epoch # 346\n",
|
||
"The loss calculated: 0.45509448647499084\n",
|
||
"Epoch # 347\n",
|
||
"The loss calculated: 0.45454123616218567\n",
|
||
"Epoch # 348\n",
|
||
"The loss calculated: 0.4553173780441284\n",
|
||
"Epoch # 349\n",
|
||
"The loss calculated: 0.4548755884170532\n"
|
||
]
|
||
},
|
||
{
|
||
"name": "stdout",
|
||
"output_type": "stream",
|
||
"text": [
|
||
"Epoch # 350\n",
|
||
"The loss calculated: 0.45442134141921997\n",
|
||
"Epoch # 351\n",
|
||
"The loss calculated: 0.4545627236366272\n",
|
||
"Epoch # 352\n",
|
||
"The loss calculated: 0.4543512463569641\n",
|
||
"Epoch # 353\n",
|
||
"The loss calculated: 0.4541962146759033\n",
|
||
"Epoch # 354\n",
|
||
"The loss calculated: 0.4540751874446869\n",
|
||
"Epoch # 355\n",
|
||
"The loss calculated: 0.45386749505996704\n",
|
||
"Epoch # 356\n",
|
||
"The loss calculated: 0.4536762833595276\n",
|
||
"Epoch # 357\n",
|
||
"The loss calculated: 0.4532167911529541\n",
|
||
"Epoch # 358\n",
|
||
"The loss calculated: 0.4538520872592926\n",
|
||
"Epoch # 359\n",
|
||
"The loss calculated: 0.45413821935653687\n",
|
||
"Epoch # 360\n",
|
||
"The loss calculated: 0.45311087369918823\n",
|
||
"Epoch # 361\n",
|
||
"The loss calculated: 0.45335227251052856\n",
|
||
"Epoch # 362\n",
|
||
"The loss calculated: 0.45350611209869385\n",
|
||
"Epoch # 363\n",
|
||
"The loss calculated: 0.45265665650367737\n",
|
||
"Epoch # 364\n",
|
||
"The loss calculated: 0.4524100124835968\n",
|
||
"Epoch # 365\n",
|
||
"The loss calculated: 0.4523312449455261\n",
|
||
"Epoch # 366\n",
|
||
"The loss calculated: 0.4522554874420166\n",
|
||
"Epoch # 367\n",
|
||
"The loss calculated: 0.4523703455924988\n",
|
||
"Epoch # 368\n",
|
||
"The loss calculated: 0.4521876573562622\n",
|
||
"Epoch # 369\n",
|
||
"The loss calculated: 0.4517895579338074\n",
|
||
"Epoch # 370\n",
|
||
"The loss calculated: 0.4517730474472046\n",
|
||
"Epoch # 371\n",
|
||
"The loss calculated: 0.4515615999698639\n",
|
||
"Epoch # 372\n",
|
||
"The loss calculated: 0.45157772302627563\n",
|
||
"Epoch # 373\n",
|
||
"The loss calculated: 0.4515098035335541\n",
|
||
"Epoch # 374\n",
|
||
"The loss calculated: 0.45118868350982666\n",
|
||
"Epoch # 375\n",
|
||
"The loss calculated: 0.45117509365081787\n",
|
||
"Epoch # 376\n",
|
||
"The loss calculated: 0.45118534564971924\n",
|
||
"Epoch # 377\n",
|
||
"The loss calculated: 0.45082926750183105\n",
|
||
"Epoch # 378\n",
|
||
"The loss calculated: 0.4507909119129181\n",
|
||
"Epoch # 379\n",
|
||
"The loss calculated: 0.45116591453552246\n",
|
||
"Epoch # 380\n",
|
||
"The loss calculated: 0.45066720247268677\n",
|
||
"Epoch # 381\n",
|
||
"The loss calculated: 0.45026636123657227\n",
|
||
"Epoch # 382\n",
|
||
"The loss calculated: 0.4510788321495056\n",
|
||
"Epoch # 383\n",
|
||
"The loss calculated: 0.4512375593185425\n",
|
||
"Epoch # 384\n",
|
||
"The loss calculated: 0.450232595205307\n",
|
||
"Epoch # 385\n",
|
||
"The loss calculated: 0.44986671209335327\n",
|
||
"Epoch # 386\n",
|
||
"The loss calculated: 0.4502098262310028\n",
|
||
"Epoch # 387\n",
|
||
"The loss calculated: 0.4510081112384796\n",
|
||
"Epoch # 388\n",
|
||
"The loss calculated: 0.4499610960483551\n",
|
||
"Epoch # 389\n",
|
||
"The loss calculated: 0.44945529103279114\n",
|
||
"Epoch # 390\n",
|
||
"The loss calculated: 0.45030856132507324\n",
|
||
"Epoch # 391\n",
|
||
"The loss calculated: 0.4493928849697113\n",
|
||
"Epoch # 392\n",
|
||
"The loss calculated: 0.4490446448326111\n",
|
||
"Epoch # 393\n",
|
||
"The loss calculated: 0.4496527910232544\n",
|
||
"Epoch # 394\n",
|
||
"The loss calculated: 0.44922882318496704\n",
|
||
"Epoch # 395\n",
|
||
"The loss calculated: 0.4484827220439911\n",
|
||
"Epoch # 396\n",
|
||
"The loss calculated: 0.44952288269996643\n",
|
||
"Epoch # 397\n",
|
||
"The loss calculated: 0.4490470588207245\n",
|
||
"Epoch # 398\n",
|
||
"The loss calculated: 0.44837456941604614\n",
|
||
"Epoch # 399\n",
|
||
"The loss calculated: 0.44843804836273193\n",
|
||
"Epoch # 400\n",
|
||
"The loss calculated: 0.44825857877731323\n",
|
||
"Epoch # 401\n",
|
||
"The loss calculated: 0.4478710889816284\n",
|
||
"Epoch # 402\n",
|
||
"The loss calculated: 0.4478342533111572\n",
|
||
"Epoch # 403\n",
|
||
"The loss calculated: 0.44727033376693726\n",
|
||
"Epoch # 404\n",
|
||
"The loss calculated: 0.4474068582057953\n",
|
||
"Epoch # 405\n",
|
||
"The loss calculated: 0.4473791718482971\n",
|
||
"Epoch # 406\n",
|
||
"The loss calculated: 0.4471847414970398\n",
|
||
"Epoch # 407\n",
|
||
"The loss calculated: 0.44691354036331177\n",
|
||
"Epoch # 408\n",
|
||
"The loss calculated: 0.44677817821502686\n",
|
||
"Epoch # 409\n",
|
||
"The loss calculated: 0.4468446969985962\n",
|
||
"Epoch # 410\n",
|
||
"The loss calculated: 0.4465027153491974\n",
|
||
"Epoch # 411\n",
|
||
"The loss calculated: 0.44606125354766846\n",
|
||
"Epoch # 412\n",
|
||
"The loss calculated: 0.44594869017601013\n",
|
||
"Epoch # 413\n",
|
||
"The loss calculated: 0.4456939101219177\n",
|
||
"Epoch # 414\n",
|
||
"The loss calculated: 0.445888489484787\n",
|
||
"Epoch # 415\n",
|
||
"The loss calculated: 0.4455548822879791\n",
|
||
"Epoch # 416\n",
|
||
"The loss calculated: 0.44548290967941284\n",
|
||
"Epoch # 417\n",
|
||
"The loss calculated: 0.44544851779937744\n",
|
||
"Epoch # 418\n",
|
||
"The loss calculated: 0.44522538781166077\n",
|
||
"Epoch # 419\n",
|
||
"The loss calculated: 0.44501474499702454\n",
|
||
"Epoch # 420\n",
|
||
"The loss calculated: 0.4449530839920044\n",
|
||
"Epoch # 421\n",
|
||
"The loss calculated: 0.4445208013057709\n",
|
||
"Epoch # 422\n",
|
||
"The loss calculated: 0.4444122314453125\n",
|
||
"Epoch # 423\n",
|
||
"The loss calculated: 0.44473087787628174\n",
|
||
"Epoch # 424\n",
|
||
"The loss calculated: 0.4442698359489441\n",
|
||
"Epoch # 425\n",
|
||
"The loss calculated: 0.44399431347846985\n",
|
||
"Epoch # 426\n",
|
||
"The loss calculated: 0.4437970817089081\n",
|
||
"Epoch # 427\n",
|
||
"The loss calculated: 0.44364386796951294\n",
|
||
"Epoch # 428\n",
|
||
"The loss calculated: 0.4437081217765808\n",
|
||
"Epoch # 429\n",
|
||
"The loss calculated: 0.4436897039413452\n",
|
||
"Epoch # 430\n",
|
||
"The loss calculated: 0.44336003065109253\n",
|
||
"Epoch # 431\n",
|
||
"The loss calculated: 0.4430985748767853\n",
|
||
"Epoch # 432\n",
|
||
"The loss calculated: 0.44310933351516724\n",
|
||
"Epoch # 433\n",
|
||
"The loss calculated: 0.4428543746471405\n",
|
||
"Epoch # 434\n",
|
||
"The loss calculated: 0.44258877635002136\n",
|
||
"Epoch # 435\n",
|
||
"The loss calculated: 0.4427826404571533\n",
|
||
"Epoch # 436\n",
|
||
"The loss calculated: 0.44258812069892883\n",
|
||
"Epoch # 437\n",
|
||
"The loss calculated: 0.442533403635025\n",
|
||
"Epoch # 438\n",
|
||
"The loss calculated: 0.44270434975624084\n",
|
||
"Epoch # 439\n",
|
||
"The loss calculated: 0.4427698850631714\n",
|
||
"Epoch # 440\n",
|
||
"The loss calculated: 0.44257086515426636\n",
|
||
"Epoch # 441\n",
|
||
"The loss calculated: 0.4425719976425171\n",
|
||
"Epoch # 442\n",
|
||
"The loss calculated: 0.4420627951622009\n",
|
||
"Epoch # 443\n",
|
||
"The loss calculated: 0.4421764612197876\n",
|
||
"Epoch # 444\n",
|
||
"The loss calculated: 0.44193679094314575\n",
|
||
"Epoch # 445\n",
|
||
"The loss calculated: 0.44186508655548096\n",
|
||
"Epoch # 446\n",
|
||
"The loss calculated: 0.44136378169059753\n",
|
||
"Epoch # 447\n",
|
||
"The loss calculated: 0.44126731157302856\n",
|
||
"Epoch # 448\n",
|
||
"The loss calculated: 0.44119781255722046\n",
|
||
"Epoch # 449\n",
|
||
"The loss calculated: 0.4413573145866394\n",
|
||
"Epoch # 450\n",
|
||
"The loss calculated: 0.4411191940307617\n",
|
||
"Epoch # 451\n",
|
||
"The loss calculated: 0.4407786428928375\n",
|
||
"Epoch # 452\n",
|
||
"The loss calculated: 0.4407300055027008\n",
|
||
"Epoch # 453\n",
|
||
"The loss calculated: 0.4404629170894623\n",
|
||
"Epoch # 454\n",
|
||
"The loss calculated: 0.44039714336395264\n",
|
||
"Epoch # 455\n",
|
||
"The loss calculated: 0.44031772017478943\n",
|
||
"Epoch # 456\n",
|
||
"The loss calculated: 0.44058850407600403\n",
|
||
"Epoch # 457\n",
|
||
"The loss calculated: 0.44026416540145874\n",
|
||
"Epoch # 458\n",
|
||
"The loss calculated: 0.4401347041130066\n",
|
||
"Epoch # 459\n",
|
||
"The loss calculated: 0.44020867347717285\n",
|
||
"Epoch # 460\n",
|
||
"The loss calculated: 0.43979671597480774\n",
|
||
"Epoch # 461\n",
|
||
"The loss calculated: 0.44035604596138\n",
|
||
"Epoch # 462\n",
|
||
"The loss calculated: 0.4401366412639618\n",
|
||
"Epoch # 463\n",
|
||
"The loss calculated: 0.4404027760028839\n",
|
||
"Epoch # 464\n",
|
||
"The loss calculated: 0.439935564994812\n",
|
||
"Epoch # 465\n",
|
||
"The loss calculated: 0.4399685561656952\n",
|
||
"Epoch # 466\n",
|
||
"The loss calculated: 0.4409003257751465\n",
|
||
"Epoch # 467\n",
|
||
"The loss calculated: 0.43949607014656067\n",
|
||
"Epoch # 468\n",
|
||
"The loss calculated: 0.4398217797279358\n",
|
||
"Epoch # 469\n",
|
||
"The loss calculated: 0.43998679518699646\n",
|
||
"Epoch # 470\n",
|
||
"The loss calculated: 0.4403824508190155\n",
|
||
"Epoch # 471\n",
|
||
"The loss calculated: 0.43901607394218445\n",
|
||
"Epoch # 472\n",
|
||
"The loss calculated: 0.44028377532958984\n",
|
||
"Epoch # 473\n",
|
||
"The loss calculated: 0.4426659643650055\n",
|
||
"Epoch # 474\n",
|
||
"The loss calculated: 0.44038379192352295\n",
|
||
"Epoch # 475\n",
|
||
"The loss calculated: 0.4395928978919983\n",
|
||
"Epoch # 476\n",
|
||
"The loss calculated: 0.44086745381355286\n",
|
||
"Epoch # 477\n",
|
||
"The loss calculated: 0.43867841362953186\n",
|
||
"Epoch # 478\n",
|
||
"The loss calculated: 0.4390256404876709\n",
|
||
"Epoch # 479\n",
|
||
"The loss calculated: 0.4390667676925659\n",
|
||
"Epoch # 480\n",
|
||
"The loss calculated: 0.4384021759033203\n",
|
||
"Epoch # 481\n",
|
||
"The loss calculated: 0.4385366439819336\n",
|
||
"Epoch # 482\n",
|
||
"The loss calculated: 0.4384676516056061\n",
|
||
"Epoch # 483\n",
|
||
"The loss calculated: 0.4386775493621826\n",
|
||
"Epoch # 484\n",
|
||
"The loss calculated: 0.43819159269332886\n",
|
||
"Epoch # 485\n",
|
||
"The loss calculated: 0.4379732608795166\n",
|
||
"Epoch # 486\n",
|
||
"The loss calculated: 0.4379722476005554\n",
|
||
"Epoch # 487\n",
|
||
"The loss calculated: 0.4376266896724701\n",
|
||
"Epoch # 488\n",
|
||
"The loss calculated: 0.4373808205127716\n",
|
||
"Epoch # 489\n",
|
||
"The loss calculated: 0.43826723098754883\n",
|
||
"Epoch # 490\n",
|
||
"The loss calculated: 0.4379383623600006\n",
|
||
"Epoch # 491\n",
|
||
"The loss calculated: 0.4372965395450592\n",
|
||
"Epoch # 492\n",
|
||
"The loss calculated: 0.4375162422657013\n",
|
||
"Epoch # 493\n",
|
||
"The loss calculated: 0.43795913457870483\n",
|
||
"Epoch # 494\n",
|
||
"The loss calculated: 0.43740007281303406\n",
|
||
"Epoch # 495\n",
|
||
"The loss calculated: 0.43741703033447266\n",
|
||
"Epoch # 496\n",
|
||
"The loss calculated: 0.4373546838760376\n",
|
||
"Epoch # 497\n",
|
||
"The loss calculated: 0.4368191957473755\n",
|
||
"Epoch # 498\n",
|
||
"The loss calculated: 0.4367024898529053\n",
|
||
"Epoch # 499\n",
|
||
"The loss calculated: 0.43679192662239075\n",
|
||
"Epoch # 500\n",
|
||
"The loss calculated: 0.436893105506897\n"
|
||
]
|
||
}
|
||
],
|
||
"source": [
 "# Full-batch training loop: one forward pass, loss evaluation, and\n",
 "# optimizer step over the entire training set per epoch (no mini-batching,\n",
 "# so no DataLoader is needed -- the previous unused import was removed).\n",
 "for epoch in range(1, epochs+1):\n",
 "    print(\"Epoch #\", epoch)\n",
 "    y_pred = model(Xt)\n",
 "    loss = loss_fn(y_pred, Yt)\n",
 "    print_(loss.item())  # helper defined earlier; prints \"The loss calculated: ...\"\n",
 "\n",
 "    # Standard PyTorch update: clear stale gradients, backpropagate, step.\n",
 "    optimizer.zero_grad()\n",
 "    loss.backward()\n",
 "    optimizer.step()"
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 264,
|
||
"id": "45d76c95",
|
||
"metadata": {},
|
||
"outputs": [],
|
||
"source": [
 "# Convert the held-out feature matrix to a float32 tensor for inference,\n",
 "# matching the dtype the model was trained with.\n",
 "x_test = torch.tensor(X_test.values, dtype=torch.float32)"
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 271,
|
||
"id": "5e98206b",
|
||
"metadata": {},
|
||
"outputs": [
|
||
{
|
||
"name": "stderr",
|
||
"output_type": "stream",
|
||
"text": [
|
||
"/tmp/ipykernel_7802/3372075492.py:11: UserWarning: Implicit dimension choice for softmax has been deprecated. Change the call to include dim=X as an argument.\n",
|
||
" x = F.softmax(self.layer3(x))\n"
|
||
]
|
||
}
|
||
],
|
||
"source": [
 "# Forward pass on the test tensor to get per-class probabilities.\n",
 "# NOTE(review): this emits a UserWarning because the model's forward uses\n",
 "# F.softmax(...) without an explicit dim; it should be dim=1 in the model\n",
 "# definition (defined in an earlier cell).\n",
 "pred = model(x_test)"
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 272,
|
||
"id": "35d64340",
|
||
"metadata": {},
|
||
"outputs": [
|
||
{
|
||
"data": {
|
||
"text/plain": [
|
||
"array([[1.3141002e-01, 8.6859006e-01],\n",
|
||
" [3.0172759e-16, 1.0000000e+00],\n",
|
||
" [5.9731257e-21, 1.0000000e+00],\n",
|
||
" [8.7287611e-01, 1.2712391e-01],\n",
|
||
" [3.3298880e-01, 6.6701120e-01],\n",
|
||
" [9.9992323e-01, 7.6730175e-05],\n",
|
||
" [6.9742590e-01, 3.0257410e-01],\n",
|
||
" [1.8122771e-10, 1.0000000e+00],\n",
|
||
" [8.1137923e-18, 1.0000000e+00],\n",
|
||
" [9.9391985e-01, 6.0801902e-03],\n",
|
||
" [9.9800962e-01, 1.9904438e-03],\n",
|
||
" [1.4347603e-12, 1.0000000e+00],\n",
|
||
" [8.8945550e-01, 1.1054446e-01],\n",
|
||
" [5.3068206e-19, 1.0000000e+00],\n",
|
||
" [4.4245785e-01, 5.5754209e-01],\n",
|
||
" [3.9323148e-01, 6.0676849e-01],\n",
|
||
" [5.0538932e-23, 1.0000000e+00],\n",
|
||
" [6.8482041e-01, 3.1517953e-01],\n",
|
||
" [9.9650586e-01, 3.4941665e-03],\n",
|
||
" [3.6827392e-24, 1.0000000e+00],\n",
|
||
" [3.4629088e-12, 1.0000000e+00],\n",
|
||
" [2.4781654e-11, 1.0000000e+00],\n",
|
||
" [8.4075117e-01, 1.5924890e-01],\n",
|
||
" [9.9999881e-01, 1.2382451e-06],\n",
|
||
" [9.9950111e-01, 4.9885432e-04],\n",
|
||
" [1.1888127e-14, 1.0000000e+00],\n",
|
||
" [1.5869159e-14, 1.0000000e+00],\n",
|
||
" [9.4683814e-01, 5.3161871e-02],\n",
|
||
" [7.3645154e-08, 9.9999988e-01],\n",
|
||
" [1.2287432e-11, 1.0000000e+00],\n",
|
||
" [5.7253930e-15, 1.0000000e+00],\n",
|
||
" [7.9019060e-08, 9.9999988e-01],\n",
|
||
" [5.5769521e-01, 4.4230482e-01],\n",
|
||
" [1.8103112e-14, 1.0000000e+00],\n",
|
||
" [9.9812454e-01, 1.8754901e-03],\n",
|
||
" [2.5346470e-05, 9.9997461e-01],\n",
|
||
" [1.6169167e-17, 1.0000000e+00],\n",
|
||
" [9.3050295e-01, 6.9496997e-02],\n",
|
||
" [6.1799776e-02, 9.3820024e-01],\n",
|
||
" [9.7120519e-06, 9.9999034e-01],\n",
|
||
" [9.9844283e-01, 1.5571705e-03],\n",
|
||
" [8.0438519e-01, 1.9561480e-01],\n",
|
||
" [2.0653886e-16, 1.0000000e+00],\n",
|
||
" [7.0155847e-01, 2.9844159e-01],\n",
|
||
" [9.9505252e-01, 4.9475045e-03],\n",
|
||
" [9.3824464e-01, 6.1755374e-02]], dtype=float32)"
|
||
]
|
||
},
|
||
"execution_count": 272,
|
||
"metadata": {},
|
||
"output_type": "execute_result"
|
||
}
|
||
],
|
||
"source": [
 "# Detach from the autograd graph and convert to a NumPy array of\n",
 "# per-class probabilities (one row per test sample, two columns).\n",
 "pred = pred.detach().numpy()\n",
 "pred"
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 269,
|
||
"id": "5c18f80f",
|
||
"metadata": {},
|
||
"outputs": [
|
||
{
|
||
"name": "stdout",
|
||
"output_type": "stream",
|
||
"text": [
|
||
"The accuracy is 0.7391304347826086\n"
|
||
]
|
||
}
|
||
],
|
||
"source": [
 "# Hard class prediction = argmax over the two probability columns;\n",
 "# compare against the held-out labels.\n",
 "print(\"The accuracy is\", accuracy_score(Y_test, np.argmax(pred, axis=1)))"
]
|
||
},
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": null,
|
||
"id": "a4638b1d",
|
||
"metadata": {},
|
||
"outputs": [],
|
||
"source": []
|
||
}
|
||
],
|
||
"metadata": {
|
||
"kernelspec": {
|
||
"display_name": "Python 3 (ipykernel)",
|
||
"language": "python",
|
||
"name": "python3"
|
||
},
|
||
"language_info": {
|
||
"codemirror_mode": {
|
||
"name": "ipython",
|
||
"version": 3
|
||
},
|
||
"file_extension": ".py",
|
||
"mimetype": "text/x-python",
|
||
"name": "python",
|
||
"nbconvert_exporter": "python",
|
||
"pygments_lexer": "ipython3",
|
||
"version": "3.10.9"
|
||
}
|
||
},
|
||
"nbformat": 4,
|
||
"nbformat_minor": 5
|
||
}
|