{
"nbformat": 4,
"nbformat_minor": 0,
"metadata": {
"colab": {
"name": "age-gender-without-torch-snippets",
"provenance": [],
"collapsed_sections": [],
"authorship_tag": "ABX9TyN/KZkDxUUGhZ9VYe1r36El",
"include_colab_link": true
},
"kernelspec": {
"name": "python3",
"display_name": "Python 3"
},
"accelerator": "GPU",
"widgets": {
"application/vnd.jupyter.widget-state+json": {
"a247b4c6ac9d44ab8e4c82ccfa9d8b05": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HBoxModel",
"state": {
"_view_name": "HBoxView",
"_dom_classes": [],
"_model_name": "HBoxModel",
"_view_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"_view_count": null,
"_view_module_version": "1.5.0",
"box_style": "",
"layout": "IPY_MODEL_e0597664297e4074aafb1abc74462c54",
"_model_module": "@jupyter-widgets/controls",
"children": [
"IPY_MODEL_7a01294a511e42b394dd62f2af3f3014",
"IPY_MODEL_c029002323f34a74a873b4175906a3ea"
]
}
},
"e0597664297e4074aafb1abc74462c54": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"state": {
"_view_name": "LayoutView",
"grid_template_rows": null,
"right": null,
"justify_content": null,
"_view_module": "@jupyter-widgets/base",
"overflow": null,
"_model_module_version": "1.2.0",
"_view_count": null,
"flex_flow": null,
"width": null,
"min_width": null,
"border": null,
"align_items": null,
"bottom": null,
"_model_module": "@jupyter-widgets/base",
"top": null,
"grid_column": null,
"overflow_y": null,
"overflow_x": null,
"grid_auto_flow": null,
"grid_area": null,
"grid_template_columns": null,
"flex": null,
"_model_name": "LayoutModel",
"justify_items": null,
"grid_row": null,
"max_height": null,
"align_content": null,
"visibility": null,
"align_self": null,
"height": null,
"min_height": null,
"padding": null,
"grid_auto_rows": null,
"grid_gap": null,
"max_width": null,
"order": null,
"_view_module_version": "1.2.0",
"grid_template_areas": null,
"object_position": null,
"object_fit": null,
"grid_auto_columns": null,
"margin": null,
"display": null,
"left": null
}
},
"7a01294a511e42b394dd62f2af3f3014": {
"model_module": "@jupyter-widgets/controls",
"model_name": "FloatProgressModel",
"state": {
"_view_name": "ProgressView",
"style": "IPY_MODEL_e6a7f4bb399f42969d624bc55fcb81e4",
"_dom_classes": [],
"description": "100%",
"_model_name": "FloatProgressModel",
"bar_style": "success",
"max": 553433881,
"_view_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"value": 553433881,
"_view_count": null,
"_view_module_version": "1.5.0",
"orientation": "horizontal",
"min": 0,
"description_tooltip": null,
"_model_module": "@jupyter-widgets/controls",
"layout": "IPY_MODEL_169be448255e45e4b69f47740b1fc4e9"
}
},
"c029002323f34a74a873b4175906a3ea": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HTMLModel",
"state": {
"_view_name": "HTMLView",
"style": "IPY_MODEL_2e780ce258ee4277b0d7e3fc99fef48e",
"_dom_classes": [],
"description": "",
"_model_name": "HTMLModel",
"placeholder": "",
"_view_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
"value": " 528M/528M [00:17<00:00, 31.1MB/s]",
"_view_count": null,
"_view_module_version": "1.5.0",
"description_tooltip": null,
"_model_module": "@jupyter-widgets/controls",
"layout": "IPY_MODEL_da9aacf457e948a9a406b0e5f7028bb0"
}
},
"e6a7f4bb399f42969d624bc55fcb81e4": {
"model_module": "@jupyter-widgets/controls",
"model_name": "ProgressStyleModel",
"state": {
"_view_name": "StyleView",
"_model_name": "ProgressStyleModel",
"description_width": "initial",
"_view_module": "@jupyter-widgets/base",
"_model_module_version": "1.5.0",
"_view_count": null,
"_view_module_version": "1.2.0",
"bar_color": null,
"_model_module": "@jupyter-widgets/controls"
}
},
"169be448255e45e4b69f47740b1fc4e9": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"state": {
"_view_name": "LayoutView",
"grid_template_rows": null,
"right": null,
"justify_content": null,
"_view_module": "@jupyter-widgets/base",
"overflow": null,
"_model_module_version": "1.2.0",
"_view_count": null,
"flex_flow": null,
"width": null,
"min_width": null,
"border": null,
"align_items": null,
"bottom": null,
"_model_module": "@jupyter-widgets/base",
"top": null,
"grid_column": null,
"overflow_y": null,
"overflow_x": null,
"grid_auto_flow": null,
"grid_area": null,
"grid_template_columns": null,
"flex": null,
"_model_name": "LayoutModel",
"justify_items": null,
"grid_row": null,
"max_height": null,
"align_content": null,
"visibility": null,
"align_self": null,
"height": null,
"min_height": null,
"padding": null,
"grid_auto_rows": null,
"grid_gap": null,
"max_width": null,
"order": null,
"_view_module_version": "1.2.0",
"grid_template_areas": null,
"object_position": null,
"object_fit": null,
"grid_auto_columns": null,
"margin": null,
"display": null,
"left": null
}
},
"2e780ce258ee4277b0d7e3fc99fef48e": {
"model_module": "@jupyter-widgets/controls",
"model_name": "DescriptionStyleModel",
"state": {
"_view_name": "StyleView",
"_model_name": "DescriptionStyleModel",
"description_width": "",
"_view_module": "@jupyter-widgets/base",
"_model_module_version": "1.5.0",
"_view_count": null,
"_view_module_version": "1.2.0",
"_model_module": "@jupyter-widgets/controls"
}
},
"da9aacf457e948a9a406b0e5f7028bb0": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"state": {
"_view_name": "LayoutView",
"grid_template_rows": null,
"right": null,
"justify_content": null,
"_view_module": "@jupyter-widgets/base",
"overflow": null,
"_model_module_version": "1.2.0",
"_view_count": null,
"flex_flow": null,
"width": null,
"min_width": null,
"border": null,
"align_items": null,
"bottom": null,
"_model_module": "@jupyter-widgets/base",
"top": null,
"grid_column": null,
"overflow_y": null,
"overflow_x": null,
"grid_auto_flow": null,
"grid_area": null,
"grid_template_columns": null,
"flex": null,
"_model_name": "LayoutModel",
"justify_items": null,
"grid_row": null,
"max_height": null,
"align_content": null,
"visibility": null,
"align_self": null,
"height": null,
"min_height": null,
"padding": null,
"grid_auto_rows": null,
"grid_gap": null,
"max_width": null,
"order": null,
"_view_module_version": "1.2.0",
"grid_template_areas": null,
"object_position": null,
"object_fit": null,
"grid_auto_columns": null,
"margin": null,
"display": null,
"left": null
}
}
}
}
},
"cells": [
{
"cell_type": "markdown",
"metadata": {
"id": "view-in-github",
"colab_type": "text"
},
"source": [
"<a href=\"https://colab.research.google.com/github/PacktPublishing/Hands-On-Computer-Vision-with-PyTorch/blob/master/Chapter05/age_gender_prediction.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>"
]
},
{
"cell_type": "code",
"metadata": {
"id": "GO820-swkq7r"
},
"source": [
"import torch\n",
"import numpy as np, cv2, pandas as pd, glob, time\n",
"import matplotlib.pyplot as plt\n",
"%matplotlib inline\n",
"import torch\n",
"import torch.nn as nn\n",
"from torch import optim\n",
"import torch.nn.functional as F\n",
"from torch.utils.data import Dataset, DataLoader\n",
"import torchvision\n",
"from torchvision import transforms, models, datasets\n",
"device = 'cuda' if torch.cuda.is_available() else 'cpu'"
],
"execution_count": null,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "bZVyS3nCku1o"
},
"source": [
"from pydrive.auth import GoogleAuth\n",
"from pydrive.drive import GoogleDrive\n",
"from google.colab import auth\n",
"from oauth2client.client import GoogleCredentials\n",
"\n",
"auth.authenticate_user()\n",
"gauth = GoogleAuth()\n",
"gauth.credentials = GoogleCredentials.get_application_default()\n",
"drive = GoogleDrive(gauth)\n",
"\n",
"def getFile_from_drive( file_id, name ):\n",
" downloaded = drive.CreateFile({'id': file_id})\n",
" downloaded.GetContentFile(name)\n",
"\n",
"getFile_from_drive('1Z1RqRo0_JiavaZw2yzZG6WETdZQ8qX86', 'fairface-img-margin025-trainval.zip')\n",
"getFile_from_drive('1k5vvyREmHDW5TSM9QgB04Bvc8C8_7dl-', 'fairface-label-train.csv')\n",
"getFile_from_drive('1_rtz1M1zhvS0d5vVoXUamnohB6cJ02iJ', 'fairface-label-val.csv')\n",
"\n",
"!unzip -qq fairface-img-margin025-trainval.zip"
],
"execution_count": null,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "NK77ld3gkyvk",
"outputId": "fb5a9f83-6097-4773-e541-41c2a9c08492",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 198
}
},
"source": [
"trn_df = pd.read_csv('fairface-label-train.csv')\n",
"val_df = pd.read_csv('fairface-label-val.csv')\n",
"trn_df.head()"
],
"execution_count": null,
"outputs": [
{
"output_type": "execute_result",
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>file</th>\n",
" <th>age</th>\n",
" <th>gender</th>\n",
" <th>race</th>\n",
" <th>service_test</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>train/1.jpg</td>\n",
" <td>59</td>\n",
" <td>Male</td>\n",
" <td>East Asian</td>\n",
" <td>True</td>\n",
" </tr>\n",
" <tr>\n",
" <th>1</th>\n",
" <td>train/2.jpg</td>\n",
" <td>39</td>\n",
" <td>Female</td>\n",
" <td>Indian</td>\n",
" <td>False</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2</th>\n",
" <td>train/3.jpg</td>\n",
" <td>11</td>\n",
" <td>Female</td>\n",
" <td>Black</td>\n",
" <td>False</td>\n",
" </tr>\n",
" <tr>\n",
" <th>3</th>\n",
" <td>train/4.jpg</td>\n",
" <td>26</td>\n",
" <td>Female</td>\n",
" <td>Indian</td>\n",
" <td>True</td>\n",
" </tr>\n",
" <tr>\n",
" <th>4</th>\n",
" <td>train/5.jpg</td>\n",
" <td>26</td>\n",
" <td>Female</td>\n",
" <td>Indian</td>\n",
" <td>True</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"</div>"
],
"text/plain": [
" file age gender race service_test\n",
"0 train/1.jpg 59 Male East Asian True\n",
"1 train/2.jpg 39 Female Indian False\n",
"2 train/3.jpg 11 Female Black False\n",
"3 train/4.jpg 26 Female Indian True\n",
"4 train/5.jpg 26 Female Indian True"
]
},
"metadata": {
"tags": []
},
"execution_count": 3
}
]
},
{
"cell_type": "code",
"metadata": {
"id": "XVOL0emTljbq"
},
"source": [
"from torch.utils.data import Dataset, DataLoader\n",
"import cv2\n",
"IMAGE_SIZE = 224\n",
"class GenderAgeClass(Dataset):\n",
" def __init__(self, df, tfms=None):\n",
" self.df = df\n",
" self.normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], \n",
" std=[0.229, 0.224, 0.225])\n",
" def __len__(self): return len(self.df)\n",
" def __getitem__(self, ix):\n",
" f = self.df.iloc[ix].squeeze()\n",
" file = f.file\n",
" gen = f.gender == 'Female'\n",
" age = f.age\n",
" im = cv2.imread(file)\n",
" im = cv2.cvtColor(im, cv2.COLOR_BGR2RGB)\n",
" return im, age, gen\n",
"\n",
" def preprocess_image(self, im):\n",
" im = cv2.resize(im, (IMAGE_SIZE, IMAGE_SIZE))\n",
" im = torch.tensor(im).permute(2,0,1)\n",
" im = self.normalize(im/255.)\n",
" return im[None]\n",
"\n",
" def collate_fn(self, batch):\n",
" 'preprocess images, ages and genders'\n",
" ims, ages, genders = [], [], []\n",
" for im, age, gender in batch:\n",
" im = self.preprocess_image(im)\n",
" ims.append(im)\n",
"\n",
" ages.append(float(int(age)/80))\n",
" genders.append(float(gender))\n",
"\n",
" ages, genders = [torch.tensor(x).to(device).float() for x in [ages, genders]]\n",
" ims = torch.cat(ims).to(device)\n",
"\n",
" return ims, ages, genders"
],
"execution_count": null,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "cfQ7WlW1mO8c"
},
"source": [
"trn = GenderAgeClass(trn_df)\n",
"val = GenderAgeClass(val_df)"
],
"execution_count": null,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "eJEiuBhlnPd-",
"outputId": "fd1dd131-c563-4d02-af65-819886175782",
"colab": {
"base_uri": "https://localhost:8080/"
}
},
"source": [
"device='cuda'\n",
"train_loader = DataLoader(trn, batch_size=32, shuffle=True, drop_last=True, collate_fn=trn.collate_fn)\n",
"test_loader = DataLoader(val, batch_size=32, collate_fn=val.collate_fn)\n",
"a,b,c, = next(iter(train_loader))\n",
"print(a.shape, b.shape, c.shape)"
],
"execution_count": null,
"outputs": [
{
"output_type": "stream",
"text": [
"torch.Size([32, 3, 224, 224]) torch.Size([32]) torch.Size([32])\n"
],
"name": "stdout"
}
]
},
{
"cell_type": "code",
"metadata": {
"id": "nvj3Nb6_ngim",
"outputId": "e4eab945-a346-4b24-9251-24abeecb96f6",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 84,
"referenced_widgets": [
"a247b4c6ac9d44ab8e4c82ccfa9d8b05",
"e0597664297e4074aafb1abc74462c54",
"7a01294a511e42b394dd62f2af3f3014",
"c029002323f34a74a873b4175906a3ea",
"e6a7f4bb399f42969d624bc55fcb81e4",
"169be448255e45e4b69f47740b1fc4e9",
"2e780ce258ee4277b0d7e3fc99fef48e",
"da9aacf457e948a9a406b0e5f7028bb0"
]
}
},
"source": [
"def get_model():\n",
" model = models.vgg16(pretrained = True)\n",
" # Freeze parameters so we don't backprop through them\n",
" for param in model.parameters():\n",
" param.requires_grad = False\n",
" model.avgpool = nn.Sequential(\n",
" nn.Conv2d(512,512, kernel_size=3),\n",
" nn.MaxPool2d(2),\n",
" nn.ReLU(),\n",
" nn.Flatten()\n",
" )\n",
" class ageGenderClassifier(nn.Module):\n",
" def __init__(self):\n",
" super(ageGenderClassifier, self).__init__()\n",
" self.intermediate = nn.Sequential(\n",
" nn.Linear(2048,512),\n",
" nn.ReLU(),\n",
" nn.Dropout(0.4),\n",
" nn.Linear(512,128),\n",
" nn.ReLU(),\n",
" nn.Dropout(0.4),\n",
" nn.Linear(128,64),\n",
" nn.ReLU(),\n",
" )\n",
" self.age_classifier = nn.Sequential(\n",
" nn.Linear(64, 1),\n",
" nn.Sigmoid()\n",
" )\n",
" self.gender_classifier = nn.Sequential(\n",
" nn.Linear(64, 1),\n",
" nn.Sigmoid()\n",
" )\n",
" def forward(self, x):\n",
" x = self.intermediate(x)\n",
" age = self.age_classifier(x)\n",
" gender = self.gender_classifier(x)\n",
" return gender, age\n",
" \n",
" model.classifier = ageGenderClassifier()\n",
" \n",
" gender_criterion = nn.BCELoss()\n",
" age_criterion = nn.L1Loss()\n",
" loss_functions = gender_criterion, age_criterion\n",
" optimizer = torch.optim.Adam(model.parameters(), lr= 1e-4)\n",
" \n",
" return model.to(device), loss_functions, optimizer\n",
"\n",
"model, loss_functions, optimizer = get_model()"
],
"execution_count": null,
"outputs": [
{
"output_type": "stream",
"text": [
"Downloading: \"https://download.pytorch.org/models/vgg16-397923af.pth\" to /root/.cache/torch/hub/checkpoints/vgg16-397923af.pth\n"
],
"name": "stderr"
},
{
"output_type": "display_data",
"data": {
"application/vnd.jupyter.widget-view+json": {
"model_id": "a247b4c6ac9d44ab8e4c82ccfa9d8b05",
"version_minor": 0,
"version_major": 2
},
"text/plain": [
"HBox(children=(FloatProgress(value=0.0, max=553433881.0), HTML(value='')))"
]
},
"metadata": {
"tags": []
}
},
{
"output_type": "stream",
"text": [
"\n"
],
"name": "stdout"
}
]
},
{
"cell_type": "code",
"metadata": {
"id": "XceUr1tWoJEL",
"outputId": "777581fd-661e-4b5a-f024-3e35d5d29a22",
"colab": {
"base_uri": "https://localhost:8080/"
}
},
"source": [
"!pip install torchsummary\n",
"from torchsummary import summary\n",
"summary(model, input_size=(3,224,224), device=device)"
],
"execution_count": null,
"outputs": [
{
"output_type": "stream",
"text": [
"Requirement already satisfied: torchsummary in /usr/local/lib/python3.6/dist-packages (1.5.1)\n",
"----------------------------------------------------------------\n",
" Layer (type) Output Shape Param #\n",
"================================================================\n",
" Conv2d-1 [-1, 64, 224, 224] 1,792\n",
" ReLU-2 [-1, 64, 224, 224] 0\n",
" Conv2d-3 [-1, 64, 224, 224] 36,928\n",
" ReLU-4 [-1, 64, 224, 224] 0\n",
" MaxPool2d-5 [-1, 64, 112, 112] 0\n",
" Conv2d-6 [-1, 128, 112, 112] 73,856\n",
" ReLU-7 [-1, 128, 112, 112] 0\n",
" Conv2d-8 [-1, 128, 112, 112] 147,584\n",
" ReLU-9 [-1, 128, 112, 112] 0\n",
" MaxPool2d-10 [-1, 128, 56, 56] 0\n",
" Conv2d-11 [-1, 256, 56, 56] 295,168\n",
" ReLU-12 [-1, 256, 56, 56] 0\n",
" Conv2d-13 [-1, 256, 56, 56] 590,080\n",
" ReLU-14 [-1, 256, 56, 56] 0\n",
" Conv2d-15 [-1, 256, 56, 56] 590,080\n",
" ReLU-16 [-1, 256, 56, 56] 0\n",
" MaxPool2d-17 [-1, 256, 28, 28] 0\n",
" Conv2d-18 [-1, 512, 28, 28] 1,180,160\n",
" ReLU-19 [-1, 512, 28, 28] 0\n",
" Conv2d-20 [-1, 512, 28, 28] 2,359,808\n",
" ReLU-21 [-1, 512, 28, 28] 0\n",
" Conv2d-22 [-1, 512, 28, 28] 2,359,808\n",
" ReLU-23 [-1, 512, 28, 28] 0\n",
" MaxPool2d-24 [-1, 512, 14, 14] 0\n",
" Conv2d-25 [-1, 512, 14, 14] 2,359,808\n",
" ReLU-26 [-1, 512, 14, 14] 0\n",
" Conv2d-27 [-1, 512, 14, 14] 2,359,808\n",
" ReLU-28 [-1, 512, 14, 14] 0\n",
" Conv2d-29 [-1, 512, 14, 14] 2,359,808\n",
" ReLU-30 [-1, 512, 14, 14] 0\n",
" MaxPool2d-31 [-1, 512, 7, 7] 0\n",
" Conv2d-32 [-1, 512, 5, 5] 2,359,808\n",
" MaxPool2d-33 [-1, 512, 2, 2] 0\n",
" ReLU-34 [-1, 512, 2, 2] 0\n",
" Flatten-35 [-1, 2048] 0\n",
" Linear-36 [-1, 512] 1,049,088\n",
" ReLU-37 [-1, 512] 0\n",
" Dropout-38 [-1, 512] 0\n",
" Linear-39 [-1, 128] 65,664\n",
" ReLU-40 [-1, 128] 0\n",
" Dropout-41 [-1, 128] 0\n",
" Linear-42 [-1, 64] 8,256\n",
" ReLU-43 [-1, 64] 0\n",
" Linear-44 [-1, 1] 65\n",
" Sigmoid-45 [-1, 1] 0\n",
" Linear-46 [-1, 1] 65\n",
" Sigmoid-47 [-1, 1] 0\n",
"ageGenderClassifier-48 [[-1, 1], [-1, 1]] 0\n",
"================================================================\n",
"Total params: 18,197,634\n",
"Trainable params: 3,482,946\n",
"Non-trainable params: 14,714,688\n",
"----------------------------------------------------------------\n",
"Input size (MB): 0.57\n",
"Forward/backward pass size (MB): 218.55\n",
"Params size (MB): 69.42\n",
"Estimated Total Size (MB): 288.55\n",
"----------------------------------------------------------------\n"
],
"name": "stdout"
}
]
},
{
"cell_type": "code",
"metadata": {
"id": "aJoiV7T2oMRc"
},
"source": [
"def train_batch(data, model, optimizer, criteria):\n",
" model.train()\n",
" ims, age, gender = data\n",
" optimizer.zero_grad()\n",
" pred_gender, pred_age = model(ims) \n",
" gender_criterion, age_criterion = criteria\n",
" gender_loss = gender_criterion(pred_gender.squeeze(), gender)\n",
" age_loss = age_criterion(pred_age.squeeze(), age)\n",
" total_loss = gender_loss + age_loss\n",
" total_loss.backward()\n",
" optimizer.step()\n",
" return total_loss\n",
"\n",
"def validate_batch(data, model, criteria):\n",
" model.eval()\n",
" ims, age, gender = data\n",
" with torch.no_grad():\n",
" pred_gender, pred_age = model(ims)\n",
" gender_criterion, age_criterion = criteria\n",
" gender_loss = gender_criterion(pred_gender.squeeze(), gender)\n",
" age_loss = age_criterion(pred_age.squeeze(), age)\n",
" total_loss = gender_loss + age_loss\n",
" pred_gender = (pred_gender > 0.5).squeeze()\n",
" gender_acc = (pred_gender == gender).float().sum()\n",
"    age_mae = torch.abs(age - pred_age.squeeze()).float().sum()\n",
" return total_loss, gender_acc, age_mae"
],
"execution_count": null,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "uhbO3pA0oTuc",
"outputId": "e67a2fa1-5939-4ce6-8651-c0d83c65cda0",
"colab": {
"base_uri": "https://localhost:8080/"
}
},
"source": [
"model, criteria, optimizer = get_model()\n",
"val_gender_accuracies = []\n",
"val_age_maes = []\n",
"train_losses = []\n",
"val_losses = []\n",
"\n",
"n_epochs = 5\n",
"best_test_loss = 1000\n",
"start = time.time()\n",
"\n",
"for epoch in range(n_epochs):\n",
" epoch_train_loss, epoch_test_loss = 0, 0\n",
" val_age_mae, val_gender_acc, ctr = 0, 0, 0\n",
" _n = len(train_loader)\n",
" for ix, data in enumerate(train_loader):\n",
" # if ix == 100: break\n",
" loss = train_batch(data, model, optimizer, criteria)\n",
" epoch_train_loss += loss.item()\n",
"\n",
" for ix, data in enumerate(test_loader):\n",
" # if ix == 10: break\n",
" loss, gender_acc, age_mae = validate_batch(data, model, criteria)\n",
" epoch_test_loss += loss.item()\n",
" val_age_mae += age_mae\n",
" val_gender_acc += gender_acc\n",
" ctr += len(data[0])\n",
"\n",
" val_age_mae /= ctr\n",
" val_gender_acc /= ctr\n",
" epoch_train_loss /= len(train_loader)\n",
" epoch_test_loss /= len(test_loader)\n",
"\n",
" elapsed = time.time()-start\n",
" best_test_loss = min(best_test_loss, epoch_test_loss)\n",
" print('{}/{} ({:.2f}s - {:.2f}s remaining)'.format(epoch+1, n_epochs, time.time()-start, (n_epochs-epoch)*(elapsed/(epoch+1))))\n",
" info = f'''Epoch: {epoch+1:03d}\\tTrain Loss: {epoch_train_loss:.3f}\\tTest: {epoch_test_loss:.3f}\\tBest Test Loss: {best_test_loss:.4f}'''\n",
" info += f'\\nGender Accuracy: {val_gender_acc*100:.2f}%\\tAge MAE: {val_age_mae:.2f}\\n'\n",
" print(info)\n",
"\n",
" val_gender_accuracies.append(val_gender_acc)\n",
" val_age_maes.append(val_age_mae)"
],
"execution_count": null,
"outputs": [
{
"output_type": "stream",
"text": [
"1/5 (844.58s - 4222.92s remaining)\n",
"Epoch: 001\tTrain Loss: 0.548\tTest: 0.469\tBest Test Loss: 0.4695\n",
"Gender Accuracy: 83.54%\tAge MAE: 6.34\n",
"\n",
"2/5 (1682.39s - 3364.79s remaining)\n",
"Epoch: 002\tTrain Loss: 0.400\tTest: 0.440\tBest Test Loss: 0.4399\n",
"Gender Accuracy: 84.88%\tAge MAE: 6.23\n",
"\n",
"3/5 (2526.27s - 2526.27s remaining)\n",
"Epoch: 003\tTrain Loss: 0.286\tTest: 0.494\tBest Test Loss: 0.4399\n",
"Gender Accuracy: 84.67%\tAge MAE: 6.27\n",
"\n",
"4/5 (3370.63s - 1685.32s remaining)\n",
"Epoch: 004\tTrain Loss: 0.199\tTest: 0.613\tBest Test Loss: 0.4399\n",
"Gender Accuracy: 83.80%\tAge MAE: 6.41\n",
"\n",
"5/5 (4208.70s - 841.74s remaining)\n",
"Epoch: 005\tTrain Loss: 0.159\tTest: 0.710\tBest Test Loss: 0.4399\n",
"Gender Accuracy: 83.30%\tAge MAE: 6.29\n",
"\n"
],
"name": "stdout"
}
]
},
{
"cell_type": "code",
"metadata": {
"id": "akzO0Z3Br2GP",
"outputId": "557b3b50-82c3-42df-dacf-2ee4f54c34c9",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 350
}
},
"source": [
"epochs = np.arange(1,len(val_gender_accuracies)+1)\n",
"fig,ax = plt.subplots(1,2,figsize=(10,5))\n",
"ax = ax.flat\n",
"ax[0].plot(epochs, val_gender_accuracies, 'bo')\n",
"ax[1].plot(epochs, val_age_maes, 'r')\n",
"ax[0].set_xlabel('Epochs')\n",
"ax[1].set_xlabel('Epochs')\n",
"ax[0].set_ylabel('Accuracy')\n",
"ax[1].set_ylabel('MAE')\n",
"ax[0].set_title('Validation Gender Accuracy')\n",
"ax[1].set_title('Validation Age Mean-Absolute-Error')\n",
"plt.show()\n"
],
"execution_count": null,
"outputs": [
{
"output_type": "display_data",
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAm0AAAFNCAYAAABST1gVAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nOzde5yWdZ3/8dcbEBFFwMAKUNA8IGqijedj4gHwBHlgDFOnVrM2t1rbTbe2tYNt1m+z2krXbD3FqghqeoMiGR4yJVERRVBJPAAexgMpoALy+f3xvcZubwbmZrjvue6ZeT8fj3nc9/W9Tp9rbrzn4/eoiMDMzMzMaluXvAMwMzMzs5Y5aTMzMzNrB5y0mZmZmbUDTtrMzMzM2gEnbWZmZmbtgJM2MzMzs3bASVsbkxSSdsjeXybp38s5thX3GS/pztbGaa23MZ/beq55t6R/qOQ1zcysfXHStoEk3SHpe82UnyDpZUndyr1WRJwTEd+vQExDskThg3tHxISIOGpjr72ee24naY2kS6t4j6bnerSkvJ+klZKeq9a9W9IWz99aG5vgSTose7ZlJT/7VzJOMzPbME7aNtzVwGmSVFL+OWBCRKzOIaY8nA68CYyTtGmV79VT0m5F258FFlb5ni1py+fPw5KI2KLk54HSg5R0KSkr+39cWnO8mVln5aRtw90CfAQ4uKlAUl/gWOAaSftIekDSUkkvSfqlpO7NXUjSVZJ+ULT9L9k5SyR9vuTYYyQ9KuktSS9KurBo973Z69KmGhFJZ0r6U9H5B0h6SNLfstcDivbdLen7ku6X9LakOyX1W9cvIEtYTwe+DawCjivZf5Skp7J7/VrSPcU1P5I+L2mepDclTZM0eF33ylwLnFG0fTpwTck9B0iaLKlR0kJJ/1S0b72fSVabd46kZ7JjftVMUl7282dGS3pW0muSftKU2EjaIft9/C3bd0PRddf5GZXc/0JJvyva/qCmVdJFpH+bv8z+LfwyO2aopOmS3sg+m1PW9Xwtyf69XCTpfmAFsH12/3+U9AzwTHbcWZIWZPe8VdKAomusdbyZma2fk7YNFBHvABNJf7SbnALMj4jHgPeBrwP9gP2BEcCXW7qupJHAN4AjgR2BI0oOWZ7dsw9wDPAlSWOyfYdkr32aqxGRtBUwBfgFKeH8KTBF0keKDvss0ABsDXTPYlmXg4BBwPWk38UHCVWW7E0CLsju9RRQnCCeAPwb8BmgP3AfcN167gXwO6BeUldJw4AtgJlF1+wC3AY8Bgwk/c6/Juno7JByPpNjgb2BT5I+z6NZt3U+f5GxQB2wF3AC0JSEfx+4E+ibXeO/s2co5zNqUUR8i/Q7/Ur2b+ErkjYHpgP/R/p864FfZ7/L1voccDbQC3g+KxsD7AsMk3Q48J+k3+XHs2OuL7nGB8dvRBxmZp2Gk7bWuRo4SVKPbPv0rIyIeDgiHoyI1RHxHPA/wKFlXPMU4MqIeCIilgMXFu+MiLsj4vGIWBMRc0iJTjnXhZTkPRMR12ZxXQfM58M1RFdGxNNFSenw9VzvDOD2iHiTlAiMlLR1tm80MDcibsqain8BvFx07jnAf0bEvGz/D4HhLdS2LSIlf0eQftfXluzfG+gfEd+LiJUR8SzwG1JyUu5n8qOIWBoRLwAzNuL5m1wcEW9k1/sZcGpWvgoYDAyIiHcjoqk2tJzPqLWOBZ6LiCuzaz8KTAZOXs85A7Jax+KfzYv2XxURc7PrrcrK/jN75neA8cD/RsQjEfEeKYnfX9KQomsUH29mZi1w0tYK2R/a14Axkj4B7EP6442knSQVlAYlvEVKStbZ1FhkAPBi0fbzxTsl7StpRtb89zdS8lPOdZuu/XxJ2fOkWqkmxYnVClJt1lokbUb6Yz8BIKvVe4FUU7fWc0REkJKuJoOBnzclAsAbgEpiac41wJmk5Kc0aRtMSZJBqs37aBZzOZ9Js88vaa7+3h
H/4DKev0npZ9nUNPiv2fP+Jbt2Uw1cOZ9Raw0G9i35/YwHPiZp26LnW1Z0zpKI6FPys3wdz9dc2YeeJyKWAa+XPE9z1zAzs3Vw0tZ615BqfU4DpkXEK1n5paQakh0jYktS8rDO/lFFXgK2KdretmT//wG3AttERG/gsqLrRgvXXkL6w11sW2BxGXGVGgtsSWpee1nSy6Q/xE1NhC+Rmv2AD/p/DSo6/0XgiyXJwGYR8ecW7juZVBv1bFZ7VexFYGHJNXtFxOhsf2s/EyJi16KO+PeV8fxNSj/LJdn1Xo6IsyJiAPDF7Do7sGGf0XKgZ9H2x0rDLtl+Ebin5PezRUR8KSJeKB5ssL7fRQv3KC370PNktXQf4cPP09K/WzMzK+KkrfWuITXXnUXWNJrpBbwFLJM0FPhSmdebCJwpaZiknsB/lOzvBbwREe9K2ocP1+w0AmuA7ddx7anATpI+m3VWH0fqR1QoM7ZiZwD/C+xOakIcDhwI7CFpd1K/rN0ljVEaFfiPfDipuAy4QNKuAJJ6S1pfMx0AWS3P4UBzU1n8BXhb0jclbZb1fdtN0t7Z/tZ+Js1p6fmb/IukvpK2Ab4K3AAg6WRJTUnsm6TEZQ0b9hnNBg7Jasl6k5oei73Ch/8tFLJrf07SJtnP3pJ2afVvoWXXAQ2ShiuNrv0hMDNrnjYzs1Zw0tZK2R+fPwObk2rAmnyDlFC9TepXdcNaJzd/vdtJfZ/+CCzIXot9GfiepLeB75CSvKZzVwAXAfdnzV/7lVz7dVK/pvNITVT/ChwbEa+VE1sTSU2d/H+W1Rg1/TwM3AGckV3zZODH2b2GAbOA97JYbgYuBq7PmiqfAEaVc/+ImBURf22m/P3s+YaTpgJ5DbgC6J0d0qrPpFQ5z190+O+Bh0kJ1hTgt1n53sDMrCnyVuCrEfHshnxGETE9e4Y52T1KE7ufk/pcvinpFxHxNnAUqY/fElJT8MXA+qYqGaC152k7saxfVIrxD8C/k2pIXwI+kd3fzMxaSanLkVl1ZCM7FwHjI2JG3vGYmZm1V65ps4qTdLSkPlmzWFP/sQdzDsvMzKxdc9Jm1bA/8FdSM+VxwBhP62BmZrZx3DxqZmZm1g64ps3MzMysHXDSZmZmZtYOdMs7gLbQr1+/GDJkSN5hmFkbefjhh1+LiP55x1EJ/v4y63zW9R3WKZK2IUOGMGvWrLzDMLM2Iql0SbB2y99fZp3Pur7D3DxqZmZm1g44aTMzMzNrB5y0mZmZmbUDTtrMzMzM2gEnbWZmZmbtgJM2MzMzs3bASZuZmZlZO+CkrZOZMAGGDIEuXdLrhAl5R2RmZmbl6BST61oyYQKcfTasWJG2n38+bQOMH59fXGZmZtYy17R1It/61t8TtiYrVqRyMzOzNrVyJUybBhF5R9JuOGnrRF54YcPKzczMqubyy2HkSPjTn/KOpN1w0taJbLvthpWbmZlVzW23pdfrr883jnbESVsnctFF0LPnh8t69kzlZmZmbebtt+Huu0GCG2+E1avzjqhdcNLWiYwfn2qjBw9O/50MHpy2PQjBzMza1B/+kPq0nXsuNDbCjBl5R9QuOGnrZMaPh+eegzVr0qsTNjMza3OFAvTuDT/4AfTq5SbSMjlpMzMzs7azZg1MmZIGIfTqBWPHwk03pZo3Wy8nbWZmZtZ2Hn4YXnkFjj02bdfXw9KlcOed+cbVDjhpMzMzs7ZTKKRleUaOTNtHHAFbbeUm0jI4aTMzM7O2M2UK7L8/9OuXtjfZBE48EX7/e3jnnXxjq3FVTdokjZT0lKQFks5vZv+2kmZIelTSHEmjm9m/TNI3isq+LmmupCckXSepRzWfwcw6J0l9JE2SNF/SPEn7r+O4vSWtlnRSUdkZkp7Jfs4oKv+UpMez78RfSFJbPItZzViyJDWPNjWNNqmvh2XLYOrUfOJqJ6qWtEnqCvwKGAUMA06VNKzksG8DEyNiT6Ae+HXJ/p8CtxddcyDwT0BdROwGdM
3OMzOrtJ8Dd0TEUGAPYF7pAdn33MXAnUVlWwH/AewL7AP8h6S+2e5LgbOAHbOfkdV8ALOa05SUlSZthx4KH
"text/plain": [
"<Figure size 720x360 with 2 Axes>"
]
},
"metadata": {
"tags": [],
"needs_background": "light"
}
}
]
},
{
"cell_type": "code",
"metadata": {
"id": "U3MVHGy2tQgn",
"outputId": "322370b2-2b44-471c-800f-2637206d469d",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 652
}
},
"source": [
"!wget https://www.dropbox.com/s/6kzr8l68e9kpjkf/5_9.JPG\n",
"im = cv2.imread('/content/5_9.JPG')\n",
"im = trn.preprocess_image(im).to(device)\n",
"gender, age = model(im)\n",
"pred_gender = gender.to('cpu').detach().numpy()\n",
"pred_age = age.to('cpu').detach().numpy()\n",
"im = cv2.imread('/content/5_9.JPG')\n",
"im = cv2.cvtColor(im, cv2.COLOR_BGR2RGB)\n",
"plt.imshow(im)\n",
"print('predicted gender:',np.where(pred_gender[0][0]<0.5,'Male','Female'), '; Predicted age', int(pred_age[0][0]*80))"
],
"execution_count": null,
"outputs": [
{
"output_type": "stream",
"text": [
"--2020-11-09 14:08:08-- https://www.dropbox.com/s/6kzr8l68e9kpjkf/5_9.JPG\n",
"Resolving www.dropbox.com (www.dropbox.com)... 162.125.5.1, 2620:100:601f:1::a27d:901\n",
"Connecting to www.dropbox.com (www.dropbox.com)|162.125.5.1|:443... connected.\n",
"HTTP request sent, awaiting response... 301 Moved Permanently\n",
"Location: /s/raw/6kzr8l68e9kpjkf/5_9.JPG [following]\n",
"--2020-11-09 14:08:08-- https://www.dropbox.com/s/raw/6kzr8l68e9kpjkf/5_9.JPG\n",
"Reusing existing connection to www.dropbox.com:443.\n",
"HTTP request sent, awaiting response... 302 Found\n",
"Location: https://uca373aa04bc3c60dd027c22b5aa.dl.dropboxusercontent.com/cd/0/inline/BC3TH3a9c3lP0QNxxm8x3r7gmpJ4kF79o3OdKPnoPKKZckshL_T1F5dD_lg7QKQdBUIUXjUwZ_Ljau6bhBMpll1ZeIuk42O44KyGUGEJyV3VAzJzHvtn7gN00jGfqvtrTeU/file# [following]\n",
"--2020-11-09 14:08:09-- https://uca373aa04bc3c60dd027c22b5aa.dl.dropboxusercontent.com/cd/0/inline/BC3TH3a9c3lP0QNxxm8x3r7gmpJ4kF79o3OdKPnoPKKZckshL_T1F5dD_lg7QKQdBUIUXjUwZ_Ljau6bhBMpll1ZeIuk42O44KyGUGEJyV3VAzJzHvtn7gN00jGfqvtrTeU/file\n",
"Resolving uca373aa04bc3c60dd027c22b5aa.dl.dropboxusercontent.com (uca373aa04bc3c60dd027c22b5aa.dl.dropboxusercontent.com)... 162.125.9.15, 2620:100:601d:15::a27d:50f\n",
"Connecting to uca373aa04bc3c60dd027c22b5aa.dl.dropboxusercontent.com (uca373aa04bc3c60dd027c22b5aa.dl.dropboxusercontent.com)|162.125.9.15|:443... connected.\n",
"HTTP request sent, awaiting response... 200 OK\n",
"Length: 46983 (46K) [image/jpeg]\n",
"Saving to: 5_9.JPG.1\n",
"\n",
"5_9.JPG.1 100%[===================>] 45.88K --.-KB/s in 0.02s \n",
"\n",
"2020-11-09 14:08:09 (2.44 MB/s) - 5_9.JPG.1 saved [46983/46983]\n",
"\n",
"predicted gender: Female ; Predicted age 24\n"
],
"name": "stdout"
},
{
"output_type": "display_data",
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAATcAAAD8CAYAAAASeuPxAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nOy9yY9tWXbe91u7Oc1tonldVlVWsTMbEARtEBDsqT0w4Jlmhpu54IHn4sRzjzy3BgY8k/8BATZgwDAkUJCggUCTIlmsvpJZ7+VrIuI2p9nN8mCdcyOyyBJtQCkmCm9nXURFvHPP3eecvVfzfd9aV1SVj+Pj+Dg+jl+24f6uJ/BxfBwfx8fxVYyPxu3j+Dg+jl/K8dG4fRwfx8fxSzk+GreP4+P4OH4px0fj9nF8HB/HL+X4aNw+jo/j4/ilHF+ZcROR/0JE/lxE/lJE/vCr+pyP4+P4OD6Ov2nIV6FzExEP/AXwnwM/Bf4l8F+r6p/+O/+wj+Pj+Dg+jr9hfFWR238M/KWqfl9VZ+AfA3//K/qsj+Pj+Dg+jr82wld03k+Bnzz5/afAf/KLDt5uer293oMqCqgq4zgxp4RzHucEgKaJdF1HDAEEUAUEEfv3aZp4eHjAOc9msyGEgHNQtZJTZhhHcs445wCh1AooImbj7XhHSomcMwBOhKZp8N5TS6GUgoq9J4SIiKOUggCIXI7BpoZzjhiDvb9We7+CHWD/jtjlgOB9sHl4j3MO5xwiQimVWgshRrxzVFVqreRcGIeBkjMxBNq2wTtPVSXlRMmFqnadWhVQYow0XYtzjpIzOWfq0whe1+npehmIyGU+PgS8dzgRvBNkmU9KmZQyORdqLegyx/Ul2LFuOb1zQggeEUHk8TNEhForqhVEQLmcDwRVWyO63EMR7LzOjn36uaqKc4KIs+PQ5Wava4fL+gHQqjYPb/NSEerySVqWOV3ux+PaU9VlInJ5tnZdYnOuj++FZb7esR4tNrll/du8VSta67Iv1vk6BMGJ3afLpazPb32Ml8e5zElluWdPHrPq8uLJmpTL3L33uPWaVKlaKaV+6bin961qvXzueu1P78/6Wv/unbO9p0pVpdRK1XqZyzq35QTL8+PJTzvP51/cvVXVl/zc+KqM2986ROQfAP8A4PZ6zz/87/5bSinM08wwJ/7sL7/PX3zvh7Rdx27T2qb1wm//5m/we7/3u+x3W+Y0I3hibGmbhrfv3vJHf/RHnE5nPvnkFS9fvuLTb79is+l49+49f/zHf8K79+/p+y1dv6GUwvF04Hg8kkvlm9/8Fs+ev+Snn33Ov/nTf8M4jdxc7fjd3/kdfuNXf9WMVHDEJlJEefvuwGefv+Xh7p6+bdltd2it3N/fcf/hjlwSsYnc3Nyw2+2otTKOI845ai04L2asG49qxbvI7c1zXrz4hBcvP6Hdbdlut7Rty4cPH3h4eODVq1dst1vmXDkPE3cf3vHP/+k/40ff/z6vnj3n9373d3jx7BlVHIfhyMPdPdM44L0wDiemeeDZ7Q2f/uqv0LQNHz58sOvPhVorzgkOIc+ZYRjMcC9Gp2ka+r7n9tkzrm6u6BvH89s9tzc3jHPis8/f8tnPvuDNz97x+mevmecJ7x3zPHE+n/Des+l7gji8d9zcXLPd9Ygo4pT9bkfTNszzzMPDAYCrqyu6rmWaJk6nM6VALcqcZ8ZpoJKJXUO/6YgxIFVI08zpdOJ8PgPgvcd7j1CJMeOcbRAfhBgDTRtxDnLKzGOm32zY31yDD0ylMGoli5KmkVoSoIQQCMHhvQctTPNM8R4fAyGaI/Yh4F1AqyOnhJYZAdquZbPbEttIqZlxnqgCrmlxQUhlJueZPI/UNEPJlJrNwFWPFE9QTxsb2tAQnUNU8SpIEciQ5kopgjiPcy2qnlwKpVSKAgi5VOY5kXMlzWVxBI5aK957drsd2+2WEAKlFMZx5Hw+U0rBe3
8JBqiOYRgZl+BBngQEqCIKp9OJw+GAc44QAl3Xsdls6JoWUZhT5nQeOE0jWRVFyCWjpZhjqAUPBKc0wdF4R9M2NE3kf/if/vGP/iYb81UZt8+A7zz5/dvL3y5DVf8R8I8APv3GS80pmdOj4hzE4IjBoqWUEptND1Rev3nDt771Tfq+XTwnqGZwge2249vf+RZvXr8h5YmUBqDgnB3z8uUtV7sNtVa2/Y7dbksqiQ8fPvDF23d0wdN6z6vnzzh9+9vkkrnadey3PWme0Jy4fXbDN771Tbb7HT/57A2vX7/ldLjneK+UZ4ndYoz6vidlT6mFYRjsZodweejzPJPLzDzPONdQtaBLlFmKebAQAtvtlqZpOJ1OxBhtU8UITslFCSFalOGEKc0cTif6zYbd1RXPNy/w3nO4vyd4YbvtqTXRdx1BHGmYGA4n8jTbuduGEAIxRlSV83DmfDpTSr5ERinNDNNIN7c0LiBiBqIohGiLfZgn3t3fU0vmZr8jBEf0DhGFMjMrbMLG7oUL5qREqCUwjsrhlLi/H/EhEFvwMVIVUp7IOV0iNZxAFbzzeOdRVTNQ82wb0Nl9UdXlb4mYKk0TiE0gukCI9jycF0Jo2O0i2+2OtusYUuLhfGYoGYkeHzxNNEPpg7tEE1WFgJLKTK2FWgRRi/qrCBLAi4MaEAQ8VAq5WHRtRhoaoHENwTkkBEQbEEE0opqpJVMmi3KkFJSZqpXqPU0IePGIChWgCpqFoiCuULWQS6GqIIthQpRSlFoyMZqhtizBHF1KiZQSqkophZQytVr0uRqpECKifslkwuX4EMy05JQu7wEuGZFlMBatoUpd/lO1353zNDGCd1CVqgVqQbTYibx9XhvjLzRCX5Vx+5fAb4nIr2NG7b8C/ptfdLBqZZrOOLHNkwuA0raROeXL4uz7lpILh8OBabxhu9ssaYzinOK80nWR22d7vA90faRo4TSc6PqWX/2Vb1PmzHA+43BsNj0hOG72G57fXPP+7sBwOlCmxItbO/+zZ3tu9ldoqdx9+MAPf/A9hvHMt77zbZwoz2+vGI+3vH79mndvv2AaB5qmxceASkWKEKOl06unc84W0pyEWpdUVytOLAbPOXF/f0/sOrz3tG37aNhCIHjbaOo83dDz4tVLvIPgPQTPVBJXIfDy1Ut88ByPBxBlu9ngvKWSNWXOpxPnw5FSK04hOg+lkknENnD77Ia2azgej4BFQJYeZ3JONM2G3W7DZtsjPhGCo5TMw/HA4XQkeEeumegsBdGSSfMMzqEoc87o6DifBwRPLWfGxaAcDge8cxxOma57AAx2UE000eEczGkGr8S6bNJUScPEPM3kXBHxCHaPc66UXBEnNBLxscHHQFHFqRJDpG07+n7D1f4K7zzlcCClRNVC33dst7ZenBMzKlootZAzBI/NA4uyUlFcAU/F+wbxFv2qVqY0MZdxgRcqc04gDh8DVQ2OEASpoCpIdYiKvYCiiqjiarXXkupbBOfJokhValZSzlQtTDkzp4T3gabvcT7gnBk5EY935njXvXY+n0kpcT6fibGhVmWaZuY5LykrgKWVTiyqX6P7R+jHDPw8TgB0XXeJCu1zV6hDECf4YKl6yRlZomMnHmpFq0OLkLPBCqqWLnv/i03YV2LcVDWLyH8P/O+AB/4XVf2TX3S8d46SZ1SEnGYURxs9fddcMDiz+A3eO4sehoHdfrtgNuC9GYztdkOM4WJM+n5DiBbCb9qOsHiCaZwARVCmaWK/37PdbRnHTMqVcZpxTtjvOzZdiwM8ha6LBOcYjif21zf8vT/4D/md3/wNfvjDH/Pnf/FdhnGiFA8IMTbEqHjvL+F6rfUSHYlTUko4L9QKORfDv6qS5plhHDmdTrSt4WNrito0DeIjsRNqzfzWb/82w7e/RU6JkmaapqHd9LRdZ7harbjgCE2DUMk5MZzO3N/dMc8WNU3TdFl46oSmNuy8x3mPAiUXQAzHOyvOwdWuIQRP1zUoQt
s2jNPAu7dvmecJdZ7T8cRu01uENmdUoCIkPPfHkfP5jnfvPqDVIqwp5WWzQ99v8P4e7zwhRtqmIUZYIddaC
"text/plain": [
"<Figure size 432x288 with 1 Axes>"
]
},
"metadata": {
"tags": [],
"needs_background": "light"
}
}
]
},
{
"cell_type": "code",
"metadata": {
"id": "kst7Qp5lwlhJ"
},
"source": [
""
],
"execution_count": null,
"outputs": []
}
]
}