wko-projekt/yolo.ipynb

{
"cells": [
{
"cell_type": "markdown",
"id": "4b1d82bd",
"metadata": {},
"source": [
"# LICENSE PLATE DETECTION"
]
},
{
"cell_type": "markdown",
"id": "9b54ab67",
"metadata": {
"deletable": false,
"nbgrader": {
"cell_type": "code",
"checksum": "775443e2f8e6d780f7310e57d00701e7",
"grade": true,
"grade_id": "cell-53e7c09d33eaba20",
"locked": false,
"points": 6,
"schema_version": 3,
"solution": true,
"task": false
}
},
"source": [
"### YOLO V3"
]
},
{
"cell_type": "code",
"execution_count": 4,
"id": "61ac9526",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Cloning into 'keras-yolo3'...\n",
"remote: Enumerating objects: 169, done.\u001b[K\n",
"remote: Total 169 (delta 0), reused 0 (delta 0), pack-reused 169\u001b[K\n",
"Receiving objects: 100% (169/169), 172.74 KiB | 625.00 KiB/s, done.\n",
"Resolving deltas: 100% (80/80), done.\n"
]
}
],
"source": [
"!git clone https://github.com/roboflow-ai/keras-yolo3"
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "5d7010e7",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
" % Total % Received % Xferd Average Speed Time Time Time Current\n",
" Dload Upload Total Spent Left Speed\n",
"100 897 100 897 0 0 269 0 0:00:03 0:00:03 --:--:-- 269 0\n",
"100 2120k 100 2120k 0 0 515k 0 0:00:04 0:00:04 --:--:-- 23.5M\n",
"Archive: roboflow.zip\n",
" extracting: README.dataset.txt \n",
" extracting: README.roboflow.txt \n",
" creating: test/\n",
" extracting: test/2images1_png.rf.5de47b3b58bc776388f9547915f46edf.jpg \n",
" extracting: test/2images41_png.rf.2f711be90f9f9e796139a02cb45fe9ba.jpg \n",
" extracting: test/2images45_png.rf.cbcc994c49d1a2ca5e7bc52cb9b2a1a3.jpg \n",
" extracting: test/3images22_png.rf.b139cdb6065c658e0c7acc2124854383.jpg \n",
" extracting: test/3images34_png.rf.9a25c14870c5acae15ee0f159a9707b4.jpg \n",
" extracting: test/3images4_png.rf.a3d6d0b11320142ada8e8347c918dc30.jpg \n",
" extracting: test/6images3_png.rf.8b1268f1823ea224077f537939c2ccba.jpg \n",
" extracting: test/7images0_png.rf.fb9d6e1e739e19321bdc7050f4a95798.jpg \n",
" extracting: test/_annotations.txt \n",
" extracting: test/_classes.txt \n",
" extracting: test/images6_png.rf.56641c848717baa02774239ac0039bd6.jpg \n",
" extracting: test/img105_png.rf.d69f400c7410b1e265136d01b1a2cc5e.jpg \n",
" extracting: test/img149_png.rf.c487d9bc6be853e23cc7a12359178b40.jpg \n",
" extracting: test/img14_png.rf.1a47d3748ad1566280dc8199d96430de.jpg \n",
" extracting: test/img35_png.rf.16e367a1ce2db4dc0b0b1491814e8c95.jpg \n",
" extracting: test/img89_png.rf.f0f546c24ed5d6a16a2cbf9389065678.jpg \n",
" creating: train/\n",
" extracting: train/20img2_png.rf.015a51172ce51d61531b54af5a144183.jpg \n",
" extracting: train/21img3_png.rf.c1601abdfd96ebfc6f13205c638364bc.jpg \n",
" extracting: train/22img34_png.rf.02ddffee2d6e8dc6ef169f89f622a933.jpg \n",
" extracting: train/23img46_png.rf.fd5a109b78b90ed3582888880b743303.jpg \n",
" extracting: train/24img50_png.rf.1f28fdcb1632f237fb0bf7be7d877351.jpg \n",
" extracting: train/25img73_png.rf.25d9c97db5c2c466bbe2692f9f69c869.jpg \n",
" extracting: train/26img74_png.rf.861f6c881709f3bb65637c7ea3871dca.jpg \n",
" extracting: train/2images0_png.rf.b8c8f0d2594f6bfaf8be2dca50416bb6.jpg \n",
" extracting: train/2images18_png.rf.951b35372d913193f0899fda6877cbee.jpg \n",
" extracting: train/2images22_png.rf.bb299b6d237016c2714b68aead8266d7.jpg \n",
" extracting: train/2images23_png.rf.cea092359f78eb1c22db6b50627790d6.jpg \n",
" extracting: train/2images29_png.rf.91d8be50c5d0f82577d74268153ac5fc.jpg \n",
" extracting: train/2images2_png.rf.62684ca2757500eaeac877d48e04c92f.jpg \n",
" extracting: train/2images34_png.rf.ffff2284b01426e5cd22ca8053450348.jpg \n",
" extracting: train/2images3_png.rf.c7b635e1dc54f5bb10aa338d78969c22.jpg \n",
" extracting: train/2images46_png.rf.d4143a5946da0d1bc8e540c239a648b8.jpg \n",
" extracting: train/2images4_png.rf.64541674b6b6df83b15534c2d8bf0030.jpg \n",
" extracting: train/30img11_png.rf.1a236b6935fd926336da07248a867a36.jpg \n",
" extracting: train/3images0_png.rf.ff30aaf2256dde95d2dc4893b7074098.jpg \n",
" extracting: train/3images11_png.rf.8a11e1eae3b52a369681843c7d7116d1.jpg \n",
" extracting: train/3images18_png.rf.0673ed9396fa1ae5a43ff44f10422ff5.jpg \n",
" extracting: train/3images29_png.rf.a96af5fe85f477adc0fce370e788f76c.jpg \n",
" extracting: train/3images2_png.rf.d7de4c45de845226a8391e8f332352d9.jpg \n",
" extracting: train/3images30_png.rf.dd0080eda6b7d8ff2e188c7e5590e7c6.jpg \n",
" extracting: train/3images33_png.rf.3abc75a93214fc0a497dd54cabd690a0.jpg \n",
" extracting: train/3images3_png.rf.e9771234c266dba02be2fd6f204aa66b.jpg \n",
" extracting: train/3images42_png.rf.b3b45a46d57ac11c2d546831ad52cceb.jpg \n",
" extracting: train/3images43_png.rf.0603c0f1b7a15be7449b6d46c621e7af.jpg \n",
" extracting: train/3images5_png.rf.6a53d28cdfade27885d25f8208f3028a.jpg \n",
" extracting: train/6images0_png.rf.1e11dd3d7f4e5a79ce207c7770185b0c.jpg \n",
" extracting: train/6images12_png.rf.d0d6b3319c39fdb6a9356047f5ddb8ee.jpg \n",
" extracting: train/6images1_png.rf.8c65b6bfe8d5b01a2a1545337de6c390.jpg \n",
" extracting: train/6images4_png.rf.2c77da3c85f4cb57ebe5d90ab8ed5e0c.jpg \n",
" extracting: train/6images5_png.rf.7033ded0e4684504365b5b0345529c5c.jpg \n",
" extracting: train/7images12_png.rf.c46a44810aea7edafc53b6b561c6cf6a.jpg \n",
" extracting: train/7images17_png.rf.ff8fc5bb0f84483dd914f5f2de524933.jpg \n",
" extracting: train/7images1_png.rf.cf5406f149f35ab24eda2c621f9298ed.jpg \n",
" extracting: train/7images2_png.rf.f84de676f7fb3de9d7789e1dafab8fa3.jpg \n",
" extracting: train/7images3_png.rf.14c5f2588d07e7234659792e20bd7fd8.jpg \n",
" extracting: train/7images4_png.rf.5e455f9a5c94b0a3b56043ef05d06854.jpg \n",
" extracting: train/_annotations.txt \n",
" extracting: train/_classes.txt \n",
" extracting: train/images0_png.rf.d1f446cd89662b7ccf994dc77f63ff56.jpg \n",
" extracting: train/images10_png.rf.bc421baf20b7cbf6af4ea822f259fcab.jpg \n",
" extracting: train/images13_png.rf.dff8711d203b47a3f8709c4cee5d6927.jpg \n",
" extracting: train/images15_png.rf.e1b904b94d5539da79117c3613ae5765.jpg \n",
" extracting: train/images1_png.rf.9c2cb373d7f4613a2735410f1fdb3043.jpg \n",
" extracting: train/images3_png.rf.e7cf0078d44c2571ebc5d607ffaacbc8.jpg \n",
" extracting: train/images4_png.rf.97f8f01f67adf77de50c99fd6ed7f879.jpg \n",
" extracting: train/images5_png.rf.d16b8c87a8a593e5971124648ba63736.jpg \n",
" extracting: train/img0_png.rf.fa065b68c3d51d65399f883f8713ccf2.jpg \n",
" extracting: train/img102_png.rf.3da7ec4deedfb6f15834e9a42aee4e7c.jpg \n",
" extracting: train/img103_png.rf.67216b08a719a9a9dba68f83c5460a74.jpg \n",
" extracting: train/img104_png.rf.db759d639a6b1ace6dc8e7442c86ba9a.jpg \n",
" extracting: train/img106_png.rf.d882268d61ac720e54c35110fb8bc4b0.jpg \n",
" extracting: train/img107_png.rf.a62231fc47913091ec76468e536d6f28.jpg \n",
" extracting: train/img10_png.rf.e7bba8322d47d623f71903aa50f48730.jpg \n",
" extracting: train/img113_png.rf.cb3afcbea4e7177a2ed703b4b1d94887.jpg \n",
" extracting: train/img116_png.rf.1f7034a069e5a888b00da9496e0df5ae.jpg \n",
" extracting: train/img118_png.rf.9e21a52ffda3719b2cc6deb0309efd7d.jpg \n",
" extracting: train/img11_png.rf.a9584dc2d254fd84ca6a30cc9b821bd5.jpg \n",
" extracting: train/img125_png.rf.4a0a9a2f74bd5127343124c4fb4d0670.jpg \n",
" extracting: train/img126_png.rf.0bad29364a3846287498838f6791cae8.jpg \n",
" extracting: train/img133_png.rf.e66c88015d6fb51921b20ad8008fc981.jpg \n",
" extracting: train/img146_png.rf.9811cc9a676e18c4cf2bce86398feb9d.jpg \n",
" extracting: train/img170_png.rf.1d04d991430ba0d672fabff684817dc6.jpg \n",
" extracting: train/img174_png.rf.4d01b9ebbdc8c1b434c61c945794a79e.jpg \n",
" extracting: train/img178_png.rf.b0e5b6547069d86483e91fc99356e5d9.jpg \n",
" extracting: train/img181_png.rf.363074a89b0325055d28f3794083e479.jpg \n",
" extracting: train/img189_png.rf.07aedf508ccbfc3e0244bd54bd76cbf8.jpg \n",
" extracting: train/img197_png.rf.36119ab11e392cfeded10c61aa97eac6.jpg \n",
" extracting: train/img1_png.rf.bf5b1060d3cb9959dc94b75d4fc78334.jpg \n",
" extracting: train/img202_png.rf.f6520c22d6c95c8e5a105b6ee48b8da1.jpg \n",
" extracting: train/img205_png.rf.98d121af5e0548a1402eb3e93560465d.jpg \n",
" extracting: train/img215_png.rf.6d29cfcf38f6a4b2165ba5ba110454d2.jpg \n",
" extracting: train/img270_png.rf.52541958250f2b45297faa1440d55d56.jpg \n",
" extracting: train/img278_png.rf.82173849dfde92f2a2ab2761e5679891.jpg \n",
" extracting: train/img283_png.rf.809b4e6edbe803fbcab887a40e59f526.jpg \n",
" extracting: train/img2_png.rf.23b2d7fe287627739888976776de8437.jpg \n",
" extracting: train/img306_png.rf.642a9812ecebfd9784d9eb593b78dcf2.jpg \n",
" extracting: train/img34_png.rf.f98b7fa7325ddb9ca373121c5c120f55.jpg \n",
" extracting: train/img38_png.rf.cd97a110e34ad869a4b79d8237d92a36.jpg \n",
" extracting: train/img39_png.rf.e9b1634ca400418b29839bad544e8634.jpg \n",
" extracting: train/img3_png.rf.3f382680461124ba2e19c1df51d895e7.jpg \n",
" extracting: train/img45_png.rf.870b550082c3da2c42e40017442c115b.jpg \n",
" extracting: train/img46_png.rf.2c1d961d3e61d1389c825f2aba32ab39.jpg \n",
" extracting: train/img4_png.rf.4f0ce3c02167bf3f8ae2454471c9c4fd.jpg \n",
" extracting: train/img57_png.rf.dc254e143fec0667ac462e303290e301.jpg \n",
" extracting: train/img58_png.rf.1a6e09bda52588bb7f3890768f0db5f2.jpg \n",
" extracting: train/img5_png.rf.542fe1bdd2a910b20f27ce55cf8689ff.jpg \n",
" extracting: train/img66_png.rf.534ec186146ae4409f8c875cf28dcb84.jpg \n",
" extracting: train/img6_png.rf.7aceac81d4a22f02ab0460ee5bd2227f.jpg \n",
" extracting: train/img77_png.rf.8f8e23567322fd7de129380c6a54bd01.jpg \n",
" extracting: train/img78_png.rf.eb48e94d48c04b3077d049cb8cd920bb.jpg \n",
" extracting: train/img7_png.rf.2dd95d826f13ab03805de7f7b842eb40.jpg \n",
" extracting: train/img85_png.rf.f7a4ae3bb16a8c3fe7f164e35f11ea65.jpg \n",
" extracting: train/img86_png.rf.3addc2b6c62b8d5098feba035bd6014d.jpg \n",
" extracting: train/img92_png.rf.5b79211320122e08554541c15fc041dd.jpg \n",
" extracting: train/img93_png.rf.7fbe9b0dcab1f063b154796d00ae669b.jpg \n",
" extracting: train/img95_png.rf.c97bb901c22e4f1519bac037ffbdbbf7.jpg \n",
" extracting: train/img97_png.rf.2e3f7205a9d122aa07906ebe643f1c04.jpg \n",
" extracting: train/img98_png.rf.c6da81320ec0c22868d84c2291b416f5.jpg \n",
" creating: valid/\n",
" extracting: valid/27img121_png.rf.6b1bbeee06ff52963c7b12c7bfb2aacc.jpg \n",
" extracting: valid/2images12_png.rf.ba715b76693ae62d01e142ba9859ffc9.jpg \n",
" extracting: valid/2images35_png.rf.81e0cc483a896440e148a5df5550d243.jpg \n",
" extracting: valid/2images40_png.rf.45e16e4d96b21eeb7b0e06556ca12291.jpg \n",
" extracting: valid/3images19_png.rf.aec1de41eff03d6e343427691b2a3029.jpg \n",
" extracting: valid/3images1_png.rf.f293d93f952977825a07613f23a55f70.jpg \n",
" extracting: valid/6images11_png.rf.a467d473bfa546de8e2c5ef4ef894802.jpg \n",
" extracting: valid/6images2_png.rf.386c9a11cef823c522619aefd9c7ca9d.jpg \n",
" extracting: valid/_annotations.txt \n",
" extracting: valid/_classes.txt \n",
" extracting: valid/images14_png.rf.f0a78b8df38e6394e9cc3d56d7677c87.jpg \n",
" extracting: valid/images2_png.rf.1f566a50352095712ec385ffc17b14c5.jpg \n",
" extracting: valid/img101_png.rf.aca3e688b7798ee456467954274733de.jpg \n",
" extracting: valid/img111_png.rf.4bc2a8d175d8bbe2a289ba9e0ed4c717.jpg \n",
" extracting: valid/img112_png.rf.aaadc30802c92e3c1196a96b859c8ebb.jpg \n",
" extracting: valid/img117_png.rf.76d5b2f35f4974cca3750f258af86101.jpg \n",
" extracting: valid/img121_png.rf.a11051677709f708036ca072d0725099.jpg \n",
" extracting: valid/img122_png.rf.f6c62a3f0290eae81ffc5c457f546adf.jpg \n",
" extracting: valid/img141_png.rf.9d9ff6b78c2940546bf364e662b1c813.jpg \n",
" extracting: valid/img165_png.rf.6bb45f3455f0340e377ec61e662d7846.jpg \n",
" extracting: valid/img177_png.rf.fd279311108df43a7d9225cc26c2542f.jpg \n",
" extracting: valid/img262_png.rf.cd066cf49feb976bf8cd8eca32dcf729.jpg \n",
" extracting: valid/img27_png.rf.09745a24cc36301e1eca5c3a9bab3853.jpg \n",
" extracting: valid/img304_png.rf.f91aa4dfe963c390a521fd748f1ab9f5.jpg \n",
" extracting: valid/img313_png.rf.8ea5815425e82f42c06715e0b98342f2.jpg \n",
" extracting: valid/img31_png.rf.3b72bf618de466d70ab487fe5e20ff70.jpg \n",
" extracting: valid/img40_png.rf.8389bb867a237cad805b4819dc788a98.jpg \n",
" extracting: valid/img41_png.rf.4f6f5b9dcbe9eb80f9913e223f321f66.jpg \n",
" extracting: valid/img69_png.rf.52cb5ea0d37bc73a2fcc1ee19de2b124.jpg \n",
" extracting: valid/img84_png.rf.c9700ee5dee2697886b497a2e17f1573.jpg \n"
]
}
],
"source": [
"!curl -L \"https://app.roboflow.com/ds/hTj8Pr7g7U?key=q9kdROYojM\" > roboflow.zip; unzip roboflow.zip; rm roboflow.zip"
]
},
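{
"cell_type": "markdown",
"id": "0d1e2f3a",
"metadata": {},
"source": [
"A quick look at what the Roboflow export contains (a minimal sketch, not part of the original pipeline; it assumes the `keras-yolo3` annotation layout this export targets: `_classes.txt` lists one class name per line, and each line of `_annotations.txt` is an image path followed by space-separated boxes in `x_min,y_min,x_max,y_max,class_id` form):"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "4b5c6d7e",
"metadata": {},
"outputs": [],
"source": [
"# Illustrative sanity check of the unpacked dataset (added cell, not in the original run).\n",
"with open('train/_classes.txt') as f:\n",
"    print('classes:', [c.strip() for c in f if c.strip()])\n",
"with open('train/_annotations.txt') as f:\n",
"    annotations = f.read().splitlines()\n",
"print('annotated training images:', len(annotations))\n",
"print('first annotation line:', annotations[0])"
]
},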
{
"cell_type": "code",
"execution_count": 5,
"id": "6989cf92",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"--2023-01-18 12:01:19-- https://pjreddie.com/media/files/yolov3.weights\n",
"Translacja pjreddie.com (pjreddie.com)... 128.208.4.108\n",
"Łączenie się z pjreddie.com (pjreddie.com)|128.208.4.108|:443... połączono.\n",
"Żądanie HTTP wysłano, oczekiwanie na odpowiedź... 200 OK\n",
"Długość: 248007048 (237M) [application/octet-stream]\n",
"Zapis do: `yolov3.weights'\n",
"\n",
"yolov3.weights 100%[===================>] 236,52M 17,0MB/s w 15s \n",
"\n",
"2023-01-18 12:01:35 (15,4 MB/s) - zapisano `yolov3.weights' [248007048/248007048]\n",
"\n"
]
}
],
"source": [
"!wget https://pjreddie.com/media/files/yolov3.weights"
]
},
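{
"cell_type": "markdown",
"id": "8f9a0b1c",
"metadata": {},
"source": [
"Optionally verify the download before converting it; the `wget` log above reports 248007048 bytes (~237M), so a simple size check catches a truncated transfer (a minimal sketch, with the expected size taken from that log):"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "2d3e4f5a",
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"\n",
"# Expected byte count comes from the wget output above.\n",
"EXPECTED_SIZE = 248007048\n",
"actual_size = os.path.getsize('yolov3.weights')\n",
"assert actual_size == EXPECTED_SIZE, f'truncated download: {actual_size} bytes'\n",
"print('yolov3.weights OK:', actual_size, 'bytes')"
]
},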
{
"cell_type": "code",
"execution_count": 6,
"id": "e981c95d",
"metadata": {},
"outputs": [],
"source": [
"from keras.layers import ELU, PReLU, LeakyReLU"
]
},
{
"cell_type": "code",
"execution_count": 10,
"id": "d8924aad",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Loading weights.\n",
"Weights Header: 0 2 0 [32013312]\n",
"Parsing Darknet config.\n",
"Creating Keras model.\n",
"Parsing section net_0\n",
"Parsing section convolutional_0\n",
"conv2d bn leaky (3, 3, 3, 32)\n",
"Metal device set to: Apple M1\n",
"\n",
"systemMemory: 8.00 GB\n",
"maxCacheSize: 2.67 GB\n",
"\n",
"2023-01-18 12:03:25.001841: I tensorflow/core/common_runtime/pluggable_device/pluggable_device_factory.cc:306] Could not identify NUMA node of platform GPU ID 0, defaulting to 0. Your kernel may not have been built with NUMA support.\n",
"2023-01-18 12:03:25.002402: I tensorflow/core/common_runtime/pluggable_device/pluggable_device_factory.cc:272] Created TensorFlow device (/job:localhost/replica:0/task:0/device:GPU:0 with 0 MB memory) -> physical PluggableDevice (device: 0, name: METAL, pci bus id: <undefined>)\n",
"Parsing section convolutional_1\n",
"conv2d bn leaky (3, 3, 32, 64)\n",
"Parsing section convolutional_2\n",
"conv2d bn leaky (1, 1, 64, 32)\n",
"Parsing section convolutional_3\n",
"conv2d bn leaky (3, 3, 32, 64)\n",
"Parsing section shortcut_0\n",
"Parsing section convolutional_4\n",
"conv2d bn leaky (3, 3, 64, 128)\n",
"Parsing section convolutional_5\n",
"conv2d bn leaky (1, 1, 128, 64)\n",
"Parsing section convolutional_6\n",
"conv2d bn leaky (3, 3, 64, 128)\n",
"Parsing section shortcut_1\n",
"Parsing section convolutional_7\n",
"conv2d bn leaky (1, 1, 128, 64)\n",
"Parsing section convolutional_8\n",
"conv2d bn leaky (3, 3, 64, 128)\n",
"Parsing section shortcut_2\n",
"Parsing section convolutional_9\n",
"conv2d bn leaky (3, 3, 128, 256)\n",
"Parsing section convolutional_10\n",
"conv2d bn leaky (1, 1, 256, 128)\n",
"Parsing section convolutional_11\n",
"conv2d bn leaky (3, 3, 128, 256)\n",
"Parsing section shortcut_3\n",
"Parsing section convolutional_12\n",
"conv2d bn leaky (1, 1, 256, 128)\n",
"Parsing section convolutional_13\n",
"conv2d bn leaky (3, 3, 128, 256)\n",
"Parsing section shortcut_4\n",
"Parsing section convolutional_14\n",
"conv2d bn leaky (1, 1, 256, 128)\n",
"Parsing section convolutional_15\n",
"conv2d bn leaky (3, 3, 128, 256)\n",
"Parsing section shortcut_5\n",
"Parsing section convolutional_16\n",
"conv2d bn leaky (1, 1, 256, 128)\n",
"Parsing section convolutional_17\n",
"conv2d bn leaky (3, 3, 128, 256)\n",
"Parsing section shortcut_6\n",
"Parsing section convolutional_18\n",
"conv2d bn leaky (1, 1, 256, 128)\n",
"Parsing section convolutional_19\n",
"conv2d bn leaky (3, 3, 128, 256)\n",
"Parsing section shortcut_7\n",
"Parsing section convolutional_20\n",
"conv2d bn leaky (1, 1, 256, 128)\n",
"Parsing section convolutional_21\n",
"conv2d bn leaky (3, 3, 128, 256)\n",
"Parsing section shortcut_8\n",
"Parsing section convolutional_22\n",
"conv2d bn leaky (1, 1, 256, 128)\n",
"Parsing section convolutional_23\n",
"conv2d bn leaky (3, 3, 128, 256)\n",
"Parsing section shortcut_9\n",
"Parsing section convolutional_24\n",
"conv2d bn leaky (1, 1, 256, 128)\n",
"Parsing section convolutional_25\n",
"conv2d bn leaky (3, 3, 128, 256)\n",
"Parsing section shortcut_10\n",
"Parsing section convolutional_26\n",
"conv2d bn leaky (3, 3, 256, 512)\n",
"Parsing section convolutional_27\n",
"conv2d bn leaky (1, 1, 512, 256)\n",
"Parsing section convolutional_28\n",
"conv2d bn leaky (3, 3, 256, 512)\n",
"Parsing section shortcut_11\n",
"Parsing section convolutional_29\n",
"conv2d bn leaky (1, 1, 512, 256)\n",
"Parsing section convolutional_30\n",
"conv2d bn leaky (3, 3, 256, 512)\n",
"Parsing section shortcut_12\n",
"Parsing section convolutional_31\n",
"conv2d bn leaky (1, 1, 512, 256)\n",
"Parsing section convolutional_32\n",
"conv2d bn leaky (3, 3, 256, 512)\n",
"Parsing section shortcut_13\n",
"Parsing section convolutional_33\n",
"conv2d bn leaky (1, 1, 512, 256)\n",
"Parsing section convolutional_34\n",
"conv2d bn leaky (3, 3, 256, 512)\n",
"Parsing section shortcut_14\n",
"Parsing section convolutional_35\n",
"conv2d bn leaky (1, 1, 512, 256)\n",
"Parsing section convolutional_36\n",
"conv2d bn leaky (3, 3, 256, 512)\n",
"Parsing section shortcut_15\n",
"Parsing section convolutional_37\n",
"conv2d bn leaky (1, 1, 512, 256)\n",
"Parsing section convolutional_38\n",
"conv2d bn leaky (3, 3, 256, 512)\n",
"Parsing section shortcut_16\n",
"Parsing section convolutional_39\n",
"conv2d bn leaky (1, 1, 512, 256)\n",
"Parsing section convolutional_40\n",
"conv2d bn leaky (3, 3, 256, 512)\n",
"Parsing section shortcut_17\n",
"Parsing section convolutional_41\n",
"conv2d bn leaky (1, 1, 512, 256)\n",
"Parsing section convolutional_42\n",
"conv2d bn leaky (3, 3, 256, 512)\n",
"Parsing section shortcut_18\n",
"Parsing section convolutional_43\n",
"conv2d bn leaky (3, 3, 512, 1024)\n",
"Parsing section convolutional_44\n",
"conv2d bn leaky (1, 1, 1024, 512)\n",
"Parsing section convolutional_45\n",
"conv2d bn leaky (3, 3, 512, 1024)\n",
"Parsing section shortcut_19\n",
"Parsing section convolutional_46\n",
"conv2d bn leaky (1, 1, 1024, 512)\n",
"Parsing section convolutional_47\n",
"conv2d bn leaky (3, 3, 512, 1024)\n",
"Parsing section shortcut_20\n",
"Parsing section convolutional_48\n",
"conv2d bn leaky (1, 1, 1024, 512)\n",
"Parsing section convolutional_49\n",
"conv2d bn leaky (3, 3, 512, 1024)\n",
"Parsing section shortcut_21\n",
"Parsing section convolutional_50\n",
"conv2d bn leaky (1, 1, 1024, 512)\n",
"Parsing section convolutional_51\n",
"conv2d bn leaky (3, 3, 512, 1024)\n",
"Parsing section shortcut_22\n",
"Parsing section convolutional_52\n",
"conv2d bn leaky (1, 1, 1024, 512)\n",
"Parsing section convolutional_53\n",
"conv2d bn leaky (3, 3, 512, 1024)\n",
"Parsing section convolutional_54\n",
"conv2d bn leaky (1, 1, 1024, 512)\n",
"Parsing section convolutional_55\n",
"conv2d bn leaky (3, 3, 512, 1024)\n",
"Parsing section convolutional_56\n",
"conv2d bn leaky (1, 1, 1024, 512)\n",
"Parsing section convolutional_57\n",
"conv2d bn leaky (3, 3, 512, 1024)\n",
"Parsing section convolutional_58\n",
"conv2d linear (1, 1, 1024, 255)\n",
"Parsing section yolo_0\n",
"Parsing section route_0\n",
"Parsing section convolutional_59\n",
"conv2d bn leaky (1, 1, 512, 256)\n",
"Parsing section upsample_0\n",
"Parsing section route_1\n",
"Concatenating route layers: [<KerasTensor: shape=(None, None, None, 256) dtype=float32 (created by layer 'up_sampling2d')>, <KerasTensor: shape=(None, None, None, 512) dtype=float32 (created by layer 'add_18')>]\n",
"Parsing section convolutional_60\n",
"conv2d bn leaky (1, 1, 768, 256)\n",
"Parsing section convolutional_61\n",
"conv2d bn leaky (3, 3, 256, 512)\n",
"Parsing section convolutional_62\n",
"conv2d bn leaky (1, 1, 512, 256)\n",
"Parsing section convolutional_63\n",
"conv2d bn leaky (3, 3, 256, 512)\n",
"Parsing section convolutional_64\n",
"conv2d bn leaky (1, 1, 512, 256)\n",
"Parsing section convolutional_65\n",
"conv2d bn leaky (3, 3, 256, 512)\n",
"Parsing section convolutional_66\n",
"conv2d linear (1, 1, 512, 255)\n",
"Parsing section yolo_1\n",
"Parsing section route_2\n",
"Parsing section convolutional_67\n",
"conv2d bn leaky (1, 1, 256, 128)\n",
"Parsing section upsample_1\n",
"Parsing section route_3\n",
"Concatenating route layers: [<KerasTensor: shape=(None, None, None, 128) dtype=float32 (created by layer 'up_sampling2d_1')>, <KerasTensor: shape=(None, None, None, 256) dtype=float32 (created by layer 'add_10')>]\n",
"Parsing section convolutional_68\n",
"conv2d bn leaky (1, 1, 384, 128)\n",
"Parsing section convolutional_69\n",
"conv2d bn leaky (3, 3, 128, 256)\n",
"Parsing section convolutional_70\n",
"conv2d bn leaky (1, 1, 256, 128)\n",
"Parsing section convolutional_71\n",
"conv2d bn leaky (3, 3, 128, 256)\n",
"Parsing section convolutional_72\n",
"conv2d bn leaky (1, 1, 256, 128)\n",
"Parsing section convolutional_73\n",
"conv2d bn leaky (3, 3, 128, 256)\n",
"Parsing section convolutional_74\n",
"conv2d linear (1, 1, 256, 255)\n",
"Parsing section yolo_2\n",
"Model: \"model\"\n",
"__________________________________________________________________________________________________\n",
" Layer (type) Output Shape Param # Connected to \n",
"==================================================================================================\n",
" input_1 (InputLayer) [(None, None, None, 0 [] \n",
" 3)] \n",
" \n",
" conv2d (Conv2D) (None, None, None, 864 ['input_1[0][0]'] \n",
" 32) \n",
" \n",
" batch_normalization (BatchNorm (None, None, None, 128 ['conv2d[0][0]'] \n",
" alization) 32) \n",
" \n",
" leaky_re_lu (LeakyReLU) (None, None, None, 0 ['batch_normalization[0][0]'] \n",
" 32) \n",
" \n",
" zero_padding2d (ZeroPadding2D) (None, None, None, 0 ['leaky_re_lu[0][0]'] \n",
" 32) \n",
" \n",
" conv2d_1 (Conv2D) (None, None, None, 18432 ['zero_padding2d[0][0]'] \n",
" 64) \n",
" \n",
" batch_normalization_1 (BatchNo (None, None, None, 256 ['conv2d_1[0][0]'] \n",
" rmalization) 64) \n",
" \n",
" leaky_re_lu_1 (LeakyReLU) (None, None, None, 0 ['batch_normalization_1[0][0]'] \n",
" 64) \n",
" \n",
" conv2d_2 (Conv2D) (None, None, None, 2048 ['leaky_re_lu_1[0][0]'] \n",
" 32) \n",
" \n",
" batch_normalization_2 (BatchNo (None, None, None, 128 ['conv2d_2[0][0]'] \n",
" rmalization) 32) \n",
" \n",
" leaky_re_lu_2 (LeakyReLU) (None, None, None, 0 ['batch_normalization_2[0][0]'] \n",
" 32) \n",
" \n",
" conv2d_3 (Conv2D) (None, None, None, 18432 ['leaky_re_lu_2[0][0]'] \n",
" 64) \n",
" \n",
" batch_normalization_3 (BatchNo (None, None, None, 256 ['conv2d_3[0][0]'] \n",
" rmalization) 64) \n",
" \n",
" leaky_re_lu_3 (LeakyReLU) (None, None, None, 0 ['batch_normalization_3[0][0]'] \n",
" 64) \n",
" \n",
" add (Add) (None, None, None, 0 ['leaky_re_lu_1[0][0]', \n",
" 64) 'leaky_re_lu_3[0][0]'] \n",
" \n",
" zero_padding2d_1 (ZeroPadding2 (None, None, None, 0 ['add[0][0]'] \n",
" D) 64) \n",
" \n",
" conv2d_4 (Conv2D) (None, None, None, 73728 ['zero_padding2d_1[0][0]'] \n",
" 128) \n",
" \n",
" batch_normalization_4 (BatchNo (None, None, None, 512 ['conv2d_4[0][0]'] \n",
" rmalization) 128) \n",
" \n",
" leaky_re_lu_4 (LeakyReLU) (None, None, None, 0 ['batch_normalization_4[0][0]'] \n",
" 128) \n",
" \n",
" conv2d_5 (Conv2D) (None, None, None, 8192 ['leaky_re_lu_4[0][0]'] \n",
" 64) \n",
" \n",
" batch_normalization_5 (BatchNo (None, None, None, 256 ['conv2d_5[0][0]'] \n",
" rmalization) 64) \n",
" \n",
" leaky_re_lu_5 (LeakyReLU) (None, None, None, 0 ['batch_normalization_5[0][0]'] \n",
" 64) \n",
" \n",
" conv2d_6 (Conv2D) (None, None, None, 73728 ['leaky_re_lu_5[0][0]'] \n",
" 128) \n",
" \n",
" batch_normalization_6 (BatchNo (None, None, None, 512 ['conv2d_6[0][0]'] \n",
" rmalization) 128) \n",
" \n",
" leaky_re_lu_6 (LeakyReLU) (None, None, None, 0 ['batch_normalization_6[0][0]'] \n",
" 128) \n",
" \n",
" add_1 (Add) (None, None, None, 0 ['leaky_re_lu_4[0][0]', \n",
" 128) 'leaky_re_lu_6[0][0]'] \n",
" \n",
" conv2d_7 (Conv2D) (None, None, None, 8192 ['add_1[0][0]'] \n",
" 64) \n",
" \n",
" batch_normalization_7 (BatchNo (None, None, None, 256 ['conv2d_7[0][0]'] \n",
" rmalization) 64) \n",
" \n",
" leaky_re_lu_7 (LeakyReLU) (None, None, None, 0 ['batch_normalization_7[0][0]'] \n",
" 64) \n",
" \n",
" conv2d_8 (Conv2D) (None, None, None, 73728 ['leaky_re_lu_7[0][0]'] \n",
" 128) \n",
" \n",
" batch_normalization_8 (BatchNo (None, None, None, 512 ['conv2d_8[0][0]'] \n",
" rmalization) 128) \n",
" \n",
" leaky_re_lu_8 (LeakyReLU) (None, None, None, 0 ['batch_normalization_8[0][0]'] \n",
" 128) \n",
" \n",
" add_2 (Add) (None, None, None, 0 ['add_1[0][0]', \n",
" 128) 'leaky_re_lu_8[0][0]'] \n",
" \n",
" zero_padding2d_2 (ZeroPadding2 (None, None, None, 0 ['add_2[0][0]'] \n",
" D) 128) \n",
" \n",
" conv2d_9 (Conv2D) (None, None, None, 294912 ['zero_padding2d_2[0][0]'] \n",
" 256) \n",
" \n",
" batch_normalization_9 (BatchNo (None, None, None, 1024 ['conv2d_9[0][0]'] \n",
" rmalization) 256) \n",
" \n",
" leaky_re_lu_9 (LeakyReLU) (None, None, None, 0 ['batch_normalization_9[0][0]'] \n",
" 256) \n",
" \n",
" conv2d_10 (Conv2D) (None, None, None, 32768 ['leaky_re_lu_9[0][0]'] \n",
" 128) \n",
" \n",
" batch_normalization_10 (BatchN (None, None, None, 512 ['conv2d_10[0][0]'] \n",
" ormalization) 128) \n",
" \n",
" leaky_re_lu_10 (LeakyReLU) (None, None, None, 0 ['batch_normalization_10[0][0]'] \n",
" 128) \n",
" \n",
" conv2d_11 (Conv2D) (None, None, None, 294912 ['leaky_re_lu_10[0][0]'] \n",
" 256) \n",
" \n",
" batch_normalization_11 (BatchN (None, None, None, 1024 ['conv2d_11[0][0]'] \n",
" ormalization) 256) \n",
" \n",
" leaky_re_lu_11 (LeakyReLU) (None, None, None, 0 ['batch_normalization_11[0][0]'] \n",
" 256) \n",
" \n",
" add_3 (Add) (None, None, None, 0 ['leaky_re_lu_9[0][0]', \n",
" 256) 'leaky_re_lu_11[0][0]'] \n",
" \n",
" conv2d_12 (Conv2D) (None, None, None, 32768 ['add_3[0][0]'] \n",
" 128) \n",
" \n",
" batch_normalization_12 (BatchN (None, None, None, 512 ['conv2d_12[0][0]'] \n",
" ormalization) 128) \n",
" \n",
" leaky_re_lu_12 (LeakyReLU) (None, None, None, 0 ['batch_normalization_12[0][0]'] \n",
" 128) \n",
" \n",
" conv2d_13 (Conv2D) (None, None, None, 294912 ['leaky_re_lu_12[0][0]'] \n",
" 256) \n",
" \n",
" batch_normalization_13 (BatchN (None, None, None, 1024 ['conv2d_13[0][0]'] \n",
" ormalization) 256) \n",
" \n",
" leaky_re_lu_13 (LeakyReLU) (None, None, None, 0 ['batch_normalization_13[0][0]'] \n",
" 256) \n",
" \n",
" add_4 (Add) (None, None, None, 0 ['add_3[0][0]', \n",
" 256) 'leaky_re_lu_13[0][0]'] \n",
" \n",
" conv2d_14 (Conv2D) (None, None, None, 32768 ['add_4[0][0]'] \n",
" 128) \n",
" \n",
" batch_normalization_14 (BatchN (None, None, None, 512 ['conv2d_14[0][0]'] \n",
" ormalization) 128) \n",
" \n",
" leaky_re_lu_14 (LeakyReLU) (None, None, None, 0 ['batch_normalization_14[0][0]'] \n",
" 128) \n",
" \n",
" conv2d_15 (Conv2D) (None, None, None, 294912 ['leaky_re_lu_14[0][0]'] \n",
" 256) \n",
" \n",
" batch_normalization_15 (BatchN (None, None, None, 1024 ['conv2d_15[0][0]'] \n",
" ormalization) 256) \n",
" \n",
" leaky_re_lu_15 (LeakyReLU) (None, None, None, 0 ['batch_normalization_15[0][0]'] \n",
" 256) \n",
" \n",
" add_5 (Add) (None, None, None, 0 ['add_4[0][0]', \n",
" 256) 'leaky_re_lu_15[0][0]'] \n",
" \n",
" conv2d_16 (Conv2D) (None, None, None, 32768 ['add_5[0][0]'] \n",
" 128) \n",
" \n",
" batch_normalization_16 (BatchN (None, None, None, 512 ['conv2d_16[0][0]'] \n",
" ormalization) 128) \n",
" \n",
" leaky_re_lu_16 (LeakyReLU) (None, None, None, 0 ['batch_normalization_16[0][0]'] \n",
" 128) \n",
" \n",
" conv2d_17 (Conv2D) (None, None, None, 294912 ['leaky_re_lu_16[0][0]'] \n",
" 256) \n",
" \n",
" batch_normalization_17 (BatchN (None, None, None, 1024 ['conv2d_17[0][0]'] \n",
" ormalization) 256) \n",
" \n",
" leaky_re_lu_17 (LeakyReLU) (None, None, None, 0 ['batch_normalization_17[0][0]'] \n",
" 256) \n",
" \n",
" add_6 (Add) (None, None, None, 0 ['add_5[0][0]', \n",
" 256) 'leaky_re_lu_17[0][0]'] \n",
" \n",
" conv2d_18 (Conv2D) (None, None, None, 32768 ['add_6[0][0]'] \n",
" 128) \n",
" \n",
" batch_normalization_18 (BatchN (None, None, None, 512 ['conv2d_18[0][0]'] \n",
" ormalization) 128) \n",
" \n",
" leaky_re_lu_18 (LeakyReLU) (None, None, None, 0 ['batch_normalization_18[0][0]'] \n",
" 128) \n",
" \n",
" conv2d_19 (Conv2D) (None, None, None, 294912 ['leaky_re_lu_18[0][0]'] \n",
" 256) \n",
" \n",
" batch_normalization_19 (BatchN (None, None, None, 1024 ['conv2d_19[0][0]'] \n",
" ormalization) 256) \n",
" \n",
" leaky_re_lu_19 (LeakyReLU) (None, None, None, 0 ['batch_normalization_19[0][0]'] \n",
" 256) \n",
" \n",
" add_7 (Add) (None, None, None, 0 ['add_6[0][0]', \n",
" 256) 'leaky_re_lu_19[0][0]'] \n",
" \n",
" conv2d_20 (Conv2D) (None, None, None, 32768 ['add_7[0][0]'] \n",
" 128) \n",
" \n",
" batch_normalization_20 (BatchN (None, None, None, 512 ['conv2d_20[0][0]'] \n",
" ormalization) 128) \n",
" \n",
" leaky_re_lu_20 (LeakyReLU) (None, None, None, 0 ['batch_normalization_20[0][0]'] \n",
" 128) \n",
" \n",
" conv2d_21 (Conv2D) (None, None, None, 294912 ['leaky_re_lu_20[0][0]'] \n",
" 256) \n",
" \n",
" batch_normalization_21 (BatchN (None, None, None, 1024 ['conv2d_21[0][0]'] \n",
" ormalization) 256) \n",
" \n",
" leaky_re_lu_21 (LeakyReLU) (None, None, None, 0 ['batch_normalization_21[0][0]'] \n",
" 256) \n",
" \n",
" add_8 (Add) (None, None, None, 0 ['add_7[0][0]', \n",
" 256) 'leaky_re_lu_21[0][0]'] \n",
" \n",
" conv2d_22 (Conv2D) (None, None, None, 32768 ['add_8[0][0]'] \n",
" 128) \n",
" \n",
" batch_normalization_22 (BatchN (None, None, None, 512 ['conv2d_22[0][0]'] \n",
" ormalization) 128) \n",
" \n",
" leaky_re_lu_22 (LeakyReLU) (None, None, None, 0 ['batch_normalization_22[0][0]'] \n",
" 128) \n",
" \n",
" conv2d_23 (Conv2D) (None, None, None, 294912 ['leaky_re_lu_22[0][0]'] \n",
" 256) \n",
" \n",
" batch_normalization_23 (BatchN (None, None, None, 1024 ['conv2d_23[0][0]'] \n",
" ormalization) 256) \n",
" \n",
" leaky_re_lu_23 (LeakyReLU) (None, None, None, 0 ['batch_normalization_23[0][0]'] \n",
" 256) \n",
" \n",
" add_9 (Add) (None, None, None, 0 ['add_8[0][0]', \n",
" 256) 'leaky_re_lu_23[0][0]'] \n",
" \n",
" conv2d_24 (Conv2D) (None, None, None, 32768 ['add_9[0][0]'] \n",
" 128) \n",
" \n",
" batch_normalization_24 (BatchN (None, None, None, 512 ['conv2d_24[0][0]'] \n",
" ormalization) 128) \n",
" \n",
" leaky_re_lu_24 (LeakyReLU) (None, None, None, 0 ['batch_normalization_24[0][0]'] \n",
" 128) \n",
" \n",
" conv2d_25 (Conv2D) (None, None, None, 294912 ['leaky_re_lu_24[0][0]'] \n",
" 256) \n",
" \n",
" batch_normalization_25 (BatchN (None, None, None, 1024 ['conv2d_25[0][0]'] \n",
" ormalization) 256) \n",
" \n",
" leaky_re_lu_25 (LeakyReLU) (None, None, None, 0 ['batch_normalization_25[0][0]'] \n",
" 256) \n",
" \n",
" add_10 (Add) (None, None, None, 0 ['add_9[0][0]', \n",
" 256) 'leaky_re_lu_25[0][0]'] \n",
" \n",
" zero_padding2d_3 (ZeroPadding2 (None, None, None, 0 ['add_10[0][0]'] \n",
" D) 256) \n",
" \n",
" conv2d_26 (Conv2D) (None, None, None, 1179648 ['zero_padding2d_3[0][0]'] \n",
" 512) \n",
" \n",
" batch_normalization_26 (BatchN (None, None, None, 2048 ['conv2d_26[0][0]'] \n",
" ormalization) 512) \n",
" \n",
" leaky_re_lu_26 (LeakyReLU) (None, None, None, 0 ['batch_normalization_26[0][0]'] \n",
" 512) \n",
" \n",
" conv2d_27 (Conv2D) (None, None, None, 131072 ['leaky_re_lu_26[0][0]'] \n",
" 256) \n",
" \n",
" batch_normalization_27 (BatchN (None, None, None, 1024 ['conv2d_27[0][0]'] \n",
" ormalization) 256) \n",
" \n",
" leaky_re_lu_27 (LeakyReLU) (None, None, None, 0 ['batch_normalization_27[0][0]'] \n",
" 256) \n",
" \n",
" conv2d_28 (Conv2D) (None, None, None, 1179648 ['leaky_re_lu_27[0][0]'] \n",
" 512) \n",
" \n",
" batch_normalization_28 (BatchN (None, None, None, 2048 ['conv2d_28[0][0]'] \n",
" ormalization) 512) \n",
" \n",
" leaky_re_lu_28 (LeakyReLU) (None, None, None, 0 ['batch_normalization_28[0][0]'] \n",
" 512) \n",
" \n",
" add_11 (Add) (None, None, None, 0 ['leaky_re_lu_26[0][0]', \n",
" 512) 'leaky_re_lu_28[0][0]'] \n",
" \n",
" conv2d_29 (Conv2D) (None, None, None, 131072 ['add_11[0][0]'] \n",
" 256) \n",
" \n",
" batch_normalization_29 (BatchN (None, None, None, 1024 ['conv2d_29[0][0]'] \n",
" ormalization) 256) \n",
" \n",
" leaky_re_lu_29 (LeakyReLU) (None, None, None, 0 ['batch_normalization_29[0][0]'] \n",
" 256) \n",
" \n",
" conv2d_30 (Conv2D) (None, None, None, 1179648 ['leaky_re_lu_29[0][0]'] \n",
" 512) \n",
" \n",
" batch_normalization_30 (BatchN (None, None, None, 2048 ['conv2d_30[0][0]'] \n",
" ormalization) 512) \n",
" \n",
" leaky_re_lu_30 (LeakyReLU) (None, None, None, 0 ['batch_normalization_30[0][0]'] \n",
" 512) \n",
" \n",
" add_12 (Add) (None, None, None, 0 ['add_11[0][0]', \n",
" 512) 'leaky_re_lu_30[0][0]'] \n",
" \n",
" conv2d_31 (Conv2D) (None, None, None, 131072 ['add_12[0][0]'] \n",
" 256) \n",
" \n",
" batch_normalization_31 (BatchN (None, None, None, 1024 ['conv2d_31[0][0]'] \n",
" ormalization) 256) \n",
" \n",
" leaky_re_lu_31 (LeakyReLU) (None, None, None, 0 ['batch_normalization_31[0][0]'] \n",
" 256) \n",
" \n",
" conv2d_32 (Conv2D) (None, None, None, 1179648 ['leaky_re_lu_31[0][0]'] \n",
" 512) \n",
" \n",
" batch_normalization_32 (BatchN (None, None, None, 2048 ['conv2d_32[0][0]'] \n",
" ormalization) 512) \n",
" \n",
" leaky_re_lu_32 (LeakyReLU) (None, None, None, 0 ['batch_normalization_32[0][0]'] \n",
" 512) \n",
" \n",
" add_13 (Add) (None, None, None, 0 ['add_12[0][0]', \n",
" 512) 'leaky_re_lu_32[0][0]'] \n",
" \n",
" conv2d_33 (Conv2D) (None, None, None, 131072 ['add_13[0][0]'] \n",
" 256) \n",
" \n",
" batch_normalization_33 (BatchN (None, None, None, 1024 ['conv2d_33[0][0]'] \n",
" ormalization) 256) \n",
" \n",
" leaky_re_lu_33 (LeakyReLU) (None, None, None, 0 ['batch_normalization_33[0][0]'] \n",
" 256) \n",
" \n",
" conv2d_34 (Conv2D) (None, None, None, 1179648 ['leaky_re_lu_33[0][0]'] \n",
" 512) \n",
" \n",
" batch_normalization_34 (BatchN (None, None, None, 2048 ['conv2d_34[0][0]'] \n",
" ormalization) 512) \n",
" \n",
" leaky_re_lu_34 (LeakyReLU) (None, None, None, 0 ['batch_normalization_34[0][0]'] \n",
" 512) \n",
" \n",
" add_14 (Add) (None, None, None, 0 ['add_13[0][0]', \n",
" 512) 'leaky_re_lu_34[0][0]'] \n",
" \n",
" conv2d_35 (Conv2D) (None, None, None, 131072 ['add_14[0][0]'] \n",
" 256) \n",
" \n",
" batch_normalization_35 (BatchN (None, None, None, 1024 ['conv2d_35[0][0]'] \n",
" ormalization) 256) \n",
" \n",
" leaky_re_lu_35 (LeakyReLU) (None, None, None, 0 ['batch_normalization_35[0][0]'] \n",
" 256) \n",
" \n",
" conv2d_36 (Conv2D) (None, None, None, 1179648 ['leaky_re_lu_35[0][0]'] \n",
" 512) \n",
" \n",
" batch_normalization_36 (BatchN (None, None, None, 2048 ['conv2d_36[0][0]'] \n",
" ormalization) 512) \n",
" \n",
" leaky_re_lu_36 (LeakyReLU) (None, None, None, 0 ['batch_normalization_36[0][0]'] \n",
" 512) \n",
" \n",
" add_15 (Add) (None, None, None, 0 ['add_14[0][0]', \n",
" 512) 'leaky_re_lu_36[0][0]'] \n",
" \n",
" conv2d_37 (Conv2D) (None, None, None, 131072 ['add_15[0][0]'] \n",
" 256) \n",
" \n",
" batch_normalization_37 (BatchN (None, None, None, 1024 ['conv2d_37[0][0]'] \n",
" ormalization) 256) \n",
" \n",
" leaky_re_lu_37 (LeakyReLU) (None, None, None, 0 ['batch_normalization_37[0][0]'] \n",
" 256) \n",
" \n",
" conv2d_38 (Conv2D) (None, None, None, 1179648 ['leaky_re_lu_37[0][0]'] \n",
" 512) \n",
" \n",
" batch_normalization_38 (BatchN (None, None, None, 2048 ['conv2d_38[0][0]'] \n",
" ormalization) 512) \n",
" \n",
" leaky_re_lu_38 (LeakyReLU) (None, None, None, 0 ['batch_normalization_38[0][0]'] \n",
" 512) \n",
" \n",
" add_16 (Add) (None, None, None, 0 ['add_15[0][0]', \n",
" 512) 'leaky_re_lu_38[0][0]'] \n",
" \n",
" conv2d_39 (Conv2D) (None, None, None, 131072 ['add_16[0][0]'] \n",
" 256) \n",
" \n",
" batch_normalization_39 (BatchN (None, None, None, 1024 ['conv2d_39[0][0]'] \n",
" ormalization) 256) \n",
" \n",
" leaky_re_lu_39 (LeakyReLU) (None, None, None, 0 ['batch_normalization_39[0][0]'] \n",
" 256) \n",
" \n",
" conv2d_40 (Conv2D) (None, None, None, 1179648 ['leaky_re_lu_39[0][0]'] \n",
" 512) \n",
" \n",
" batch_normalization_40 (BatchN (None, None, None, 2048 ['conv2d_40[0][0]'] \n",
" ormalization) 512) \n",
" \n",
" leaky_re_lu_40 (LeakyReLU) (None, None, None, 0 ['batch_normalization_40[0][0]'] \n",
" 512) \n",
" \n",
" add_17 (Add) (None, None, None, 0 ['add_16[0][0]', \n",
" 512) 'leaky_re_lu_40[0][0]'] \n",
" \n",
" conv2d_41 (Conv2D) (None, None, None, 131072 ['add_17[0][0]'] \n",
" 256) \n",
" \n",
" batch_normalization_41 (BatchN (None, None, None, 1024 ['conv2d_41[0][0]'] \n",
" ormalization) 256) \n",
" \n",
" leaky_re_lu_41 (LeakyReLU) (None, None, None, 0 ['batch_normalization_41[0][0]'] \n",
" 256) \n",
" \n",
" conv2d_42 (Conv2D) (None, None, None, 1179648 ['leaky_re_lu_41[0][0]'] \n",
" 512) \n",
" \n",
" batch_normalization_42 (BatchN (None, None, None, 2048 ['conv2d_42[0][0]'] \n",
" ormalization) 512) \n",
" \n",
" leaky_re_lu_42 (LeakyReLU) (None, None, None, 0 ['batch_normalization_42[0][0]'] \n",
" 512) \n",
" \n",
" add_18 (Add) (None, None, None, 0 ['add_17[0][0]', \n",
" 512) 'leaky_re_lu_42[0][0]'] \n",
" \n",
" zero_padding2d_4 (ZeroPadding2 (None, None, None, 0 ['add_18[0][0]'] \n",
" D) 512) \n",
" \n",
" conv2d_43 (Conv2D) (None, None, None, 4718592 ['zero_padding2d_4[0][0]'] \n",
" 1024) \n",
" \n",
" batch_normalization_43 (BatchN (None, None, None, 4096 ['conv2d_43[0][0]'] \n",
" ormalization) 1024) \n",
" \n",
" leaky_re_lu_43 (LeakyReLU) (None, None, None, 0 ['batch_normalization_43[0][0]'] \n",
" 1024) \n",
" \n",
" conv2d_44 (Conv2D) (None, None, None, 524288 ['leaky_re_lu_43[0][0]'] \n",
" 512) \n",
" \n",
" batch_normalization_44 (BatchN (None, None, None, 2048 ['conv2d_44[0][0]'] \n",
" ormalization) 512) \n",
" \n",
" leaky_re_lu_44 (LeakyReLU) (None, None, None, 0 ['batch_normalization_44[0][0]'] \n",
" 512) \n",
" \n",
" conv2d_45 (Conv2D) (None, None, None, 4718592 ['leaky_re_lu_44[0][0]'] \n",
" 1024) \n",
" \n",
" batch_normalization_45 (BatchN (None, None, None, 4096 ['conv2d_45[0][0]'] \n",
" ormalization) 1024) \n",
" \n",
" leaky_re_lu_45 (LeakyReLU) (None, None, None, 0 ['batch_normalization_45[0][0]'] \n",
" 1024) \n",
" \n",
" add_19 (Add) (None, None, None, 0 ['leaky_re_lu_43[0][0]', \n",
" 1024) 'leaky_re_lu_45[0][0]'] \n",
" \n",
" conv2d_46 (Conv2D) (None, None, None, 524288 ['add_19[0][0]'] \n",
" 512) \n",
" \n",
" batch_normalization_46 (BatchN (None, None, None, 2048 ['conv2d_46[0][0]'] \n",
" ormalization) 512) \n",
" \n",
" leaky_re_lu_46 (LeakyReLU) (None, None, None, 0 ['batch_normalization_46[0][0]'] \n",
" 512) \n",
" \n",
" conv2d_47 (Conv2D) (None, None, None, 4718592 ['leaky_re_lu_46[0][0]'] \n",
" 1024) \n",
" \n",
" batch_normalization_47 (BatchN (None, None, None, 4096 ['conv2d_47[0][0]'] \n",
" ormalization) 1024) \n",
" \n",
" leaky_re_lu_47 (LeakyReLU) (None, None, None, 0 ['batch_normalization_47[0][0]'] \n",
" 1024) \n",
" \n",
" add_20 (Add) (None, None, None, 0 ['add_19[0][0]', \n",
" 1024) 'leaky_re_lu_47[0][0]'] \n",
" \n",
" conv2d_48 (Conv2D) (None, None, None, 524288 ['add_20[0][0]'] \n",
" 512) \n",
" \n",
" batch_normalization_48 (BatchN (None, None, None, 2048 ['conv2d_48[0][0]'] \n",
" ormalization) 512) \n",
" \n",
" leaky_re_lu_48 (LeakyReLU) (None, None, None, 0 ['batch_normalization_48[0][0]'] \n",
" 512) \n",
" \n",
" conv2d_49 (Conv2D) (None, None, None, 4718592 ['leaky_re_lu_48[0][0]'] \n",
" 1024) \n",
" \n",
" batch_normalization_49 (BatchN (None, None, None, 4096 ['conv2d_49[0][0]'] \n",
" ormalization) 1024) \n",
" \n",
" leaky_re_lu_49 (LeakyReLU) (None, None, None, 0 ['batch_normalization_49[0][0]'] \n",
" 1024) \n",
" \n",
" add_21 (Add) (None, None, None, 0 ['add_20[0][0]', \n",
" 1024) 'leaky_re_lu_49[0][0]'] \n",
" \n",
" conv2d_50 (Conv2D) (None, None, None, 524288 ['add_21[0][0]'] \n",
" 512) \n",
" \n",
" batch_normalization_50 (BatchN (None, None, None, 2048 ['conv2d_50[0][0]'] \n",
" ormalization) 512) \n",
" \n",
" leaky_re_lu_50 (LeakyReLU) (None, None, None, 0 ['batch_normalization_50[0][0]'] \n",
" 512) \n",
" \n",
" conv2d_51 (Conv2D) (None, None, None, 4718592 ['leaky_re_lu_50[0][0]'] \n",
" 1024) \n",
" \n",
" batch_normalization_51 (BatchN (None, None, None, 4096 ['conv2d_51[0][0]'] \n",
" ormalization) 1024) \n",
" \n",
" leaky_re_lu_51 (LeakyReLU) (None, None, None, 0 ['batch_normalization_51[0][0]'] \n",
" 1024) \n",
" \n",
" add_22 (Add) (None, None, None, 0 ['add_21[0][0]', \n",
" 1024) 'leaky_re_lu_51[0][0]'] \n",
" \n",
" conv2d_52 (Conv2D) (None, None, None, 524288 ['add_22[0][0]'] \n",
" 512) \n",
" \n",
" batch_normalization_52 (BatchN (None, None, None, 2048 ['conv2d_52[0][0]'] \n",
" ormalization) 512) \n",
" \n",
" leaky_re_lu_52 (LeakyReLU) (None, None, None, 0 ['batch_normalization_52[0][0]'] \n",
" 512) \n",
" \n",
" conv2d_53 (Conv2D) (None, None, None, 4718592 ['leaky_re_lu_52[0][0]'] \n",
" 1024) \n",
" \n",
" batch_normalization_53 (BatchN (None, None, None, 4096 ['conv2d_53[0][0]'] \n",
" ormalization) 1024) \n",
" \n",
" leaky_re_lu_53 (LeakyReLU) (None, None, None, 0 ['batch_normalization_53[0][0]'] \n",
" 1024) \n",
" \n",
" conv2d_54 (Conv2D) (None, None, None, 524288 ['leaky_re_lu_53[0][0]'] \n",
" 512) \n",
" \n",
" batch_normalization_54 (BatchN (None, None, None, 2048 ['conv2d_54[0][0]'] \n",
" ormalization) 512) \n",
" \n",
" leaky_re_lu_54 (LeakyReLU) (None, None, None, 0 ['batch_normalization_54[0][0]'] \n",
" 512) \n",
" \n",
" conv2d_55 (Conv2D) (None, None, None, 4718592 ['leaky_re_lu_54[0][0]'] \n",
" 1024) \n",
" \n",
" batch_normalization_55 (BatchN (None, None, None, 4096 ['conv2d_55[0][0]'] \n",
" ormalization) 1024) \n",
" \n",
" leaky_re_lu_55 (LeakyReLU) (None, None, None, 0 ['batch_normalization_55[0][0]'] \n",
" 1024) \n",
" \n",
" conv2d_56 (Conv2D) (None, None, None, 524288 ['leaky_re_lu_55[0][0]'] \n",
" 512) \n",
" \n",
" batch_normalization_56 (BatchN (None, None, None, 2048 ['conv2d_56[0][0]'] \n",
" ormalization) 512) \n",
" \n",
" leaky_re_lu_56 (LeakyReLU) (None, None, None, 0 ['batch_normalization_56[0][0]'] \n",
" 512) \n",
" \n",
" conv2d_59 (Conv2D) (None, None, None, 131072 ['leaky_re_lu_56[0][0]'] \n",
" 256) \n",
" \n",
" batch_normalization_58 (BatchN (None, None, None, 1024 ['conv2d_59[0][0]'] \n",
" ormalization) 256) \n",
" \n",
" leaky_re_lu_58 (LeakyReLU) (None, None, None, 0 ['batch_normalization_58[0][0]'] \n",
" 256) \n",
" \n",
" up_sampling2d (UpSampling2D) (None, None, None, 0 ['leaky_re_lu_58[0][0]'] \n",
" 256) \n",
" \n",
" concatenate (Concatenate) (None, None, None, 0 ['up_sampling2d[0][0]', \n",
" 768) 'add_18[0][0]'] \n",
" \n",
" conv2d_60 (Conv2D) (None, None, None, 196608 ['concatenate[0][0]'] \n",
" 256) \n",
" \n",
" batch_normalization_59 (BatchN (None, None, None, 1024 ['conv2d_60[0][0]'] \n",
" ormalization) 256) \n",
" \n",
" leaky_re_lu_59 (LeakyReLU) (None, None, None, 0 ['batch_normalization_59[0][0]'] \n",
" 256) \n",
" \n",
" conv2d_61 (Conv2D) (None, None, None, 1179648 ['leaky_re_lu_59[0][0]'] \n",
" 512) \n",
" \n",
" batch_normalization_60 (BatchN (None, None, None, 2048 ['conv2d_61[0][0]'] \n",
" ormalization) 512) \n",
" \n",
" leaky_re_lu_60 (LeakyReLU) (None, None, None, 0 ['batch_normalization_60[0][0]'] \n",
" 512) \n",
" \n",
" conv2d_62 (Conv2D) (None, None, None, 131072 ['leaky_re_lu_60[0][0]'] \n",
" 256) \n",
" \n",
" batch_normalization_61 (BatchN (None, None, None, 1024 ['conv2d_62[0][0]'] \n",
" ormalization) 256) \n",
" \n",
" leaky_re_lu_61 (LeakyReLU) (None, None, None, 0 ['batch_normalization_61[0][0]'] \n",
" 256) \n",
" \n",
" conv2d_63 (Conv2D) (None, None, None, 1179648 ['leaky_re_lu_61[0][0]'] \n",
" 512) \n",
" \n",
" batch_normalization_62 (BatchN (None, None, None, 2048 ['conv2d_63[0][0]'] \n",
" ormalization) 512) \n",
" \n",
" leaky_re_lu_62 (LeakyReLU) (None, None, None, 0 ['batch_normalization_62[0][0]'] \n",
" 512) \n",
" \n",
" conv2d_64 (Conv2D) (None, None, None, 131072 ['leaky_re_lu_62[0][0]'] \n",
" 256) \n",
" \n",
" batch_normalization_63 (BatchN (None, None, None, 1024 ['conv2d_64[0][0]'] \n",
" ormalization) 256) \n",
" \n",
" leaky_re_lu_63 (LeakyReLU) (None, None, None, 0 ['batch_normalization_63[0][0]'] \n",
" 256) \n",
" \n",
" conv2d_67 (Conv2D) (None, None, None, 32768 ['leaky_re_lu_63[0][0]'] \n",
" 128) \n",
" \n",
" batch_normalization_65 (BatchN (None, None, None, 512 ['conv2d_67[0][0]'] \n",
" ormalization) 128) \n",
" \n",
" leaky_re_lu_65 (LeakyReLU) (None, None, None, 0 ['batch_normalization_65[0][0]'] \n",
" 128) \n",
" \n",
" up_sampling2d_1 (UpSampling2D) (None, None, None, 0 ['leaky_re_lu_65[0][0]'] \n",
" 128) \n",
" \n",
" concatenate_1 (Concatenate) (None, None, None, 0 ['up_sampling2d_1[0][0]', \n",
" 384) 'add_10[0][0]'] \n",
" \n",
" conv2d_68 (Conv2D) (None, None, None, 49152 ['concatenate_1[0][0]'] \n",
" 128) \n",
" \n",
" batch_normalization_66 (BatchN (None, None, None, 512 ['conv2d_68[0][0]'] \n",
" ormalization) 128) \n",
" \n",
" leaky_re_lu_66 (LeakyReLU) (None, None, None, 0 ['batch_normalization_66[0][0]'] \n",
" 128) \n",
" \n",
" conv2d_69 (Conv2D) (None, None, None, 294912 ['leaky_re_lu_66[0][0]'] \n",
" 256) \n",
" \n",
" batch_normalization_67 (BatchN (None, None, None, 1024 ['conv2d_69[0][0]'] \n",
" ormalization) 256) \n",
" \n",
" leaky_re_lu_67 (LeakyReLU) (None, None, None, 0 ['batch_normalization_67[0][0]'] \n",
" 256) \n",
" \n",
" conv2d_70 (Conv2D) (None, None, None, 32768 ['leaky_re_lu_67[0][0]'] \n",
" 128) \n",
" \n",
" batch_normalization_68 (BatchN (None, None, None, 512 ['conv2d_70[0][0]'] \n",
" ormalization) 128) \n",
" \n",
" leaky_re_lu_68 (LeakyReLU) (None, None, None, 0 ['batch_normalization_68[0][0]'] \n",
" 128) \n",
" \n",
" conv2d_71 (Conv2D) (None, None, None, 294912 ['leaky_re_lu_68[0][0]'] \n",
" 256) \n",
" \n",
" batch_normalization_69 (BatchN (None, None, None, 1024 ['conv2d_71[0][0]'] \n",
" ormalization) 256) \n",
" \n",
" leaky_re_lu_69 (LeakyReLU) (None, None, None, 0 ['batch_normalization_69[0][0]'] \n",
" 256) \n",
" \n",
" conv2d_72 (Conv2D) (None, None, None, 32768 ['leaky_re_lu_69[0][0]'] \n",
" 128) \n",
" \n",
" batch_normalization_70 (BatchN (None, None, None, 512 ['conv2d_72[0][0]'] \n",
" ormalization) 128) \n",
" \n",
" leaky_re_lu_70 (LeakyReLU) (None, None, None, 0 ['batch_normalization_70[0][0]'] \n",
" 128) \n",
" \n",
" conv2d_57 (Conv2D) (None, None, None, 4718592 ['leaky_re_lu_56[0][0]'] \n",
" 1024) \n",
" \n",
" conv2d_65 (Conv2D) (None, None, None, 1179648 ['leaky_re_lu_63[0][0]'] \n",
" 512) \n",
" \n",
" conv2d_73 (Conv2D) (None, None, None, 294912 ['leaky_re_lu_70[0][0]'] \n",
" 256) \n",
" \n",
" batch_normalization_57 (BatchN (None, None, None, 4096 ['conv2d_57[0][0]'] \n",
" ormalization) 1024) \n",
" \n",
" batch_normalization_64 (BatchN (None, None, None, 2048 ['conv2d_65[0][0]'] \n",
" ormalization) 512) \n",
" \n",
" batch_normalization_71 (BatchN (None, None, None, 1024 ['conv2d_73[0][0]'] \n",
" ormalization) 256) \n",
" \n",
" leaky_re_lu_57 (LeakyReLU) (None, None, None, 0 ['batch_normalization_57[0][0]'] \n",
" 1024) \n",
" \n",
" leaky_re_lu_64 (LeakyReLU) (None, None, None, 0 ['batch_normalization_64[0][0]'] \n",
" 512) \n",
" \n",
" leaky_re_lu_71 (LeakyReLU) (None, None, None, 0 ['batch_normalization_71[0][0]'] \n",
" 256) \n",
" \n",
" conv2d_58 (Conv2D) (None, None, None, 261375 ['leaky_re_lu_57[0][0]'] \n",
" 255) \n",
" \n",
" conv2d_66 (Conv2D) (None, None, None, 130815 ['leaky_re_lu_64[0][0]'] \n",
" 255) \n",
" \n",
" conv2d_74 (Conv2D) (None, None, None, 65535 ['leaky_re_lu_71[0][0]'] \n",
" 255) \n",
" \n",
"==================================================================================================\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Total params: 62,001,757\n",
"Trainable params: 61,949,149\n",
"Non-trainable params: 52,608\n",
"__________________________________________________________________________________________________\n",
"None\n",
"WARNING:tensorflow:Compiled the loaded model, but the compiled metrics have yet to be built. `model.compile_metrics` will be empty until you train or evaluate the model.\n",
"Saved Keras model to model_data/yolo.h5\n",
"Read 62001757 of 62001757.0 from Darknet weights.\n"
]
}
],
"source": [
"!python keras-yolo3/convert.py keras-yolo3/yolov3.cfg yolov3.weights model_data/yolo.h5"
]
},
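{
"cell_type": "markdown",
"id": "6b7c8d9e",
"metadata": {},
"source": [
"The converted model still carries COCO's detection heads, so when the next cell builds a single-class model the three head convolutions (`conv2d_58`, `conv2d_66`, `conv2d_74`) are skipped with shape-mismatch warnings: each YOLOv3 head outputs 3 anchors x (num_classes + 5) filters per scale, i.e. 255 for COCO's 80 classes but only 18 for our one `licence` class. A small check of that arithmetic (added for illustration):"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "0a1b2c3d",
"metadata": {},
"outputs": [],
"source": [
"# Filters per detection scale: anchors_per_scale * (classes + 4 box coords + 1 objectness).\n",
"def head_filters(num_classes, anchors_per_scale=3):\n",
"    return anchors_per_scale * (num_classes + 5)\n",
"\n",
"print('COCO head:', head_filters(80))         # 255 - matches the converted weights\n",
"print('single-class head:', head_filters(1))  # 18 - matches the warnings below"
]
},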
{
"cell_type": "code",
"execution_count": 6,
"id": "4038756b",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"-------------------CLASS NAMES-------------------\n",
"['licence']\n",
"-------------------CLASS NAMES-------------------\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"2023-01-22 02:42:51.965089: I tensorflow/core/platform/cpu_feature_guard.cc:193] This TensorFlow binary is optimized with oneAPI Deep Neural Network Library (oneDNN) to use the following CPU instructions in performance-critical operations: SSE4.1 SSE4.2 AVX AVX2 FMA\n",
"To enable them in other operations, rebuild TensorFlow with the appropriate compiler flags.\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Create YOLOv3 model with 9 anchors and 1 classes.\n",
"WARNING:tensorflow:Skipping loading weights for layer #249 (named conv2d_58) due to mismatch in shape for weight conv2d_58/kernel:0. Weight expects shape (1, 1, 1024, 18). Received saved weight with shape (255, 1024, 1, 1)\n",
"WARNING:tensorflow:Skipping loading weights for layer #249 (named conv2d_58) due to mismatch in shape for weight conv2d_58/bias:0. Weight expects shape (18,). Received saved weight with shape (255,)\n",
"WARNING:tensorflow:Skipping loading weights for layer #250 (named conv2d_66) due to mismatch in shape for weight conv2d_66/kernel:0. Weight expects shape (1, 1, 512, 18). Received saved weight with shape (255, 512, 1, 1)\n",
"WARNING:tensorflow:Skipping loading weights for layer #250 (named conv2d_66) due to mismatch in shape for weight conv2d_66/bias:0. Weight expects shape (18,). Received saved weight with shape (255,)\n",
"WARNING:tensorflow:Skipping loading weights for layer #251 (named conv2d_74) due to mismatch in shape for weight conv2d_74/kernel:0. Weight expects shape (1, 1, 256, 18). Received saved weight with shape (255, 256, 1, 1)\n",
"WARNING:tensorflow:Skipping loading weights for layer #251 (named conv2d_74) due to mismatch in shape for weight conv2d_74/bias:0. Weight expects shape (18,). Received saved weight with shape (255,)\n",
"Load weights ./model_data/yolo.h5.\n",
"Freeze the first 249 layers of total 252 layers.\n",
"WARNING:tensorflow:`period` argument is deprecated. Please use `save_freq` to specify the frequency in number of batches seen.\n",
"Train on 488 samples, val on 121 samples, with batch size 16.\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"/Users/aczajka/miniconda3/envs/yolov3/lib/python3.9/site-packages/keras/optimizers/optimizer_v2/adam.py:117: UserWarning: The `lr` argument is deprecated, use `learning_rate` instead.\n",
" super().__init__(name, **kwargs)\n",
"/var/folders/j_/grk4ythd0392dcw5z3gkgw5w0000gn/T/ipykernel_39692/4035785499.py:62: UserWarning: `Model.fit_generator` is deprecated and will be removed in a future version. Please use `Model.fit`, which supports generators.\n",
" model.fit_generator(data_generator_wrapper(lines[:num_train], batch_size, input_shape, anchors, num_classes),\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Epoch 1/500\n",
"WARNING:tensorflow:From /Users/aczajka/miniconda3/envs/yolov3/lib/python3.9/site-packages/tensorflow/python/autograph/pyct/static_analysis/liveness.py:83: Analyzer.lamba_check (from tensorflow.python.autograph.pyct.static_analysis.liveness) is deprecated and will be removed after 2023-09-23.\n",
"Instructions for updating:\n",
"Lambda fuctions will be no more assumed to be used in the statement where they are used, or at least in the same block. https://github.com/tensorflow/tensorflow/issues/56089\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"2023-01-22 02:43:01.274999: E tensorflow/core/grappler/optimizers/meta_optimizer.cc:954] layout failed: INVALID_ARGUMENT: Subshape must have computed start >= end since stride is negative, but is 0 and 2 (computed from start 0 and end 9223372036854775807 over shape with rank 2 and stride-1)\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"30/30 [==============================] - ETA: 0s - loss: 1092.7228"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"2023-01-22 02:43:54.290859: E tensorflow/core/grappler/optimizers/meta_optimizer.cc:954] layout failed: INVALID_ARGUMENT: Subshape must have computed start >= end since stride is negative, but is 0 and 2 (computed from start 0 and end 9223372036854775807 over shape with rank 2 and stride-1)\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"30/30 [==============================] - 70s 2s/step - loss: 1092.7228 - val_loss: 216.0935\n",
"Epoch 2/500\n",
"30/30 [==============================] - 57s 2s/step - loss: 135.6953 - val_loss: 94.2604\n",
"Epoch 3/500\n",
"30/30 [==============================] - 52s 2s/step - loss: 79.2672 - val_loss: 68.8617\n",
"Epoch 4/500\n",
"30/30 [==============================] - 52s 2s/step - loss: 60.4469 - val_loss: 54.7572\n",
"Epoch 5/500\n",
"30/30 [==============================] - 50s 2s/step - loss: 50.0802 - val_loss: 47.2904\n",
"Epoch 6/500\n",
"30/30 [==============================] - 51s 2s/step - loss: 43.6335 - val_loss: 41.2742\n",
"Epoch 7/500\n",
"30/30 [==============================] - 51s 2s/step - loss: 39.3473 - val_loss: 38.5374\n",
"Epoch 8/500\n",
"30/30 [==============================] - 52s 2s/step - loss: 36.2422 - val_loss: 35.2012\n",
"Epoch 9/500\n",
"30/30 [==============================] - 51s 2s/step - loss: 33.6743 - val_loss: 33.0579\n",
"Epoch 10/500\n",
"30/30 [==============================] - 49s 2s/step - loss: 32.0283 - val_loss: 30.6336\n",
"Epoch 11/500\n",
"30/30 [==============================] - 49s 2s/step - loss: 30.3864 - val_loss: 29.2345\n",
"Epoch 12/500\n",
"30/30 [==============================] - 51s 2s/step - loss: 29.6261 - val_loss: 28.6320\n",
"Epoch 13/500\n",
"30/30 [==============================] - 51s 2s/step - loss: 28.1432 - val_loss: 27.8887\n",
"Epoch 14/500\n",
"30/30 [==============================] - 52s 2s/step - loss: 27.6032 - val_loss: 27.0226\n",
"Epoch 15/500\n",
"30/30 [==============================] - 52s 2s/step - loss: 26.9148 - val_loss: 26.3452\n",
"Epoch 16/500\n",
"30/30 [==============================] - 52s 2s/step - loss: 26.4210 - val_loss: 26.4830\n",
"Epoch 17/500\n",
"30/30 [==============================] - 53s 2s/step - loss: 25.6399 - val_loss: 25.2511\n",
"Epoch 18/500\n",
"30/30 [==============================] - 60s 2s/step - loss: 25.5443 - val_loss: 24.6174\n",
"Epoch 19/500\n",
"30/30 [==============================] - 52s 2s/step - loss: 25.2961 - val_loss: 24.7754\n",
"Epoch 20/500\n",
"30/30 [==============================] - 54s 2s/step - loss: 24.7307 - val_loss: 24.6782\n",
"Epoch 21/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 24.2857 - val_loss: 24.3096\n",
"Epoch 22/500\n",
"30/30 [==============================] - 52s 2s/step - loss: 24.2008 - val_loss: 24.3196\n",
"Epoch 23/500\n",
"30/30 [==============================] - 51s 2s/step - loss: 23.5739 - val_loss: 23.3351\n",
"Epoch 24/500\n",
"30/30 [==============================] - 52s 2s/step - loss: 23.6946 - val_loss: 24.0281\n",
"Epoch 25/500\n",
"30/30 [==============================] - 52s 2s/step - loss: 23.7198 - val_loss: 23.4021\n",
"Epoch 26/500\n",
"30/30 [==============================] - 53s 2s/step - loss: 23.2751 - val_loss: 23.3185\n",
"Epoch 27/500\n",
"30/30 [==============================] - 53s 2s/step - loss: 23.2101 - val_loss: 22.7601\n",
"Epoch 28/500\n",
"30/30 [==============================] - 53s 2s/step - loss: 22.9937 - val_loss: 22.6282\n",
"Epoch 29/500\n",
"30/30 [==============================] - 53s 2s/step - loss: 22.8363 - val_loss: 22.1787\n",
"Epoch 30/500\n",
"30/30 [==============================] - 55s 2s/step - loss: 22.6890 - val_loss: 22.1749\n",
"Epoch 31/500\n",
"30/30 [==============================] - 56s 2s/step - loss: 22.4564 - val_loss: 22.6868\n",
"Epoch 32/500\n",
"30/30 [==============================] - 52s 2s/step - loss: 22.3397 - val_loss: 22.1918\n",
"Epoch 33/500\n",
"30/30 [==============================] - 53s 2s/step - loss: 22.8438 - val_loss: 22.4380\n",
"Epoch 34/500\n",
"30/30 [==============================] - 53s 2s/step - loss: 22.0734 - val_loss: 22.9481\n",
"Epoch 35/500\n",
"30/30 [==============================] - 53s 2s/step - loss: 21.9711 - val_loss: 22.8436\n",
"Epoch 36/500\n",
"30/30 [==============================] - 53s 2s/step - loss: 22.0127 - val_loss: 22.7770\n",
"Epoch 37/500\n",
"30/30 [==============================] - 53s 2s/step - loss: 22.6367 - val_loss: 21.8047\n",
"Epoch 38/500\n",
"30/30 [==============================] - 53s 2s/step - loss: 21.8459 - val_loss: 22.3148\n",
"Epoch 39/500\n",
"30/30 [==============================] - 54s 2s/step - loss: 21.9811 - val_loss: 21.6083\n",
"Epoch 40/500\n",
"30/30 [==============================] - 52s 2s/step - loss: 21.8194 - val_loss: 21.5877\n",
"Epoch 41/500\n",
"30/30 [==============================] - 53s 2s/step - loss: 21.6587 - val_loss: 21.3777\n",
"Epoch 42/500\n",
"30/30 [==============================] - 56s 2s/step - loss: 21.4056 - val_loss: 21.0999\n",
"Epoch 43/500\n",
"30/30 [==============================] - 52s 2s/step - loss: 21.4517 - val_loss: 20.9185\n",
"Epoch 44/500\n",
"30/30 [==============================] - 55s 2s/step - loss: 21.4323 - val_loss: 21.4888\n",
"Epoch 45/500\n",
"30/30 [==============================] - 57s 2s/step - loss: 21.4581 - val_loss: 20.9886\n",
"Epoch 46/500\n",
"30/30 [==============================] - 56s 2s/step - loss: 21.3487 - val_loss: 20.3990\n",
"Epoch 47/500\n",
"30/30 [==============================] - 56s 2s/step - loss: 20.9203 - val_loss: 20.3689\n",
"Epoch 48/500\n",
"30/30 [==============================] - 57s 2s/step - loss: 21.0719 - val_loss: 21.1066\n",
"Epoch 49/500\n",
"30/30 [==============================] - 57s 2s/step - loss: 21.3894 - val_loss: 21.2877\n",
"Epoch 50/500\n",
"30/30 [==============================] - 56s 2s/step - loss: 21.2891 - val_loss: 21.2323\n",
"Epoch 51/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 21.0220 - val_loss: 20.7920\n",
"Epoch 52/500\n",
"30/30 [==============================] - 57s 2s/step - loss: 20.9018 - val_loss: 20.3990\n",
"Epoch 53/500\n",
"30/30 [==============================] - 57s 2s/step - loss: 21.2242 - val_loss: 20.2087\n",
"Epoch 54/500\n",
"30/30 [==============================] - 57s 2s/step - loss: 20.9219 - val_loss: 20.2367\n",
"Epoch 55/500\n",
"30/30 [==============================] - 57s 2s/step - loss: 20.8007 - val_loss: 20.1518\n",
"Epoch 56/500\n",
"30/30 [==============================] - 57s 2s/step - loss: 20.8917 - val_loss: 20.4730\n",
"Epoch 57/500\n",
"30/30 [==============================] - 57s 2s/step - loss: 20.8413 - val_loss: 20.3548\n",
"Epoch 58/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 20.5870 - val_loss: 20.3552\n",
"Epoch 59/500\n",
"30/30 [==============================] - 57s 2s/step - loss: 20.9533 - val_loss: 20.3583\n",
"Epoch 60/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 20.5604 - val_loss: 19.6875\n",
"Epoch 61/500\n",
"30/30 [==============================] - 62s 2s/step - loss: 20.8170 - val_loss: 20.4102\n",
"Epoch 62/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 20.7297 - val_loss: 20.4196\n",
"Epoch 63/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 20.4839 - val_loss: 20.1161\n",
"Epoch 64/500\n",
"30/30 [==============================] - 57s 2s/step - loss: 20.4190 - val_loss: 20.5080\n",
"Epoch 65/500\n",
"30/30 [==============================] - 57s 2s/step - loss: 20.6353 - val_loss: 20.1768\n",
"Epoch 66/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 20.6978 - val_loss: 20.5307\n",
"Epoch 67/500\n",
"30/30 [==============================] - 57s 2s/step - loss: 20.5475 - val_loss: 20.9204\n",
"Epoch 68/500\n",
"30/30 [==============================] - 57s 2s/step - loss: 20.4128 - val_loss: 20.2049\n",
"Epoch 69/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 20.3816 - val_loss: 19.6142\n",
"Epoch 70/500\n",
"30/30 [==============================] - 57s 2s/step - loss: 20.4737 - val_loss: 20.3626\n",
"Epoch 71/500\n",
"30/30 [==============================] - 57s 2s/step - loss: 20.0341 - val_loss: 19.9938\n",
"Epoch 72/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 20.4320 - val_loss: 21.0509\n",
"Epoch 73/500\n",
"30/30 [==============================] - 57s 2s/step - loss: 20.4354 - val_loss: 20.2190\n",
"Epoch 74/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 19.9763 - val_loss: 19.8038\n",
"Epoch 75/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 20.4443 - val_loss: 19.8551\n",
"Epoch 76/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 19.5941 - val_loss: 19.9696\n",
"Epoch 77/500\n",
"30/30 [==============================] - 57s 2s/step - loss: 20.1076 - val_loss: 20.5628\n",
"Epoch 78/500\n",
"30/30 [==============================] - 57s 2s/step - loss: 19.8378 - val_loss: 20.4607\n",
"Epoch 79/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 19.9174 - val_loss: 19.2342\n",
"Epoch 80/500\n",
"30/30 [==============================] - 60s 2s/step - loss: 19.9954 - val_loss: 19.9048\n",
"Epoch 81/500\n",
"30/30 [==============================] - 61s 2s/step - loss: 19.8898 - val_loss: 19.9757\n",
"Epoch 82/500\n",
"30/30 [==============================] - 57s 2s/step - loss: 19.8671 - val_loss: 20.3432\n",
"Epoch 83/500\n",
"30/30 [==============================] - 57s 2s/step - loss: 20.0536 - val_loss: 20.0036\n",
"Epoch 84/500\n",
"30/30 [==============================] - 57s 2s/step - loss: 19.8378 - val_loss: 19.8090\n",
"Epoch 85/500\n",
"30/30 [==============================] - 57s 2s/step - loss: 20.0678 - val_loss: 19.4705\n",
"Epoch 86/500\n",
"30/30 [==============================] - 57s 2s/step - loss: 20.0358 - val_loss: 19.7351\n",
"Epoch 87/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 19.7083 - val_loss: 19.1633\n",
"Epoch 88/500\n",
"30/30 [==============================] - 57s 2s/step - loss: 19.5802 - val_loss: 19.2210\n",
"Epoch 89/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 19.6578 - val_loss: 19.5279\n",
"Epoch 90/500\n",
"30/30 [==============================] - 57s 2s/step - loss: 19.3884 - val_loss: 19.6862\n",
"Epoch 91/500\n",
"30/30 [==============================] - 57s 2s/step - loss: 19.8888 - val_loss: 20.6697\n",
"Epoch 92/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 19.7249 - val_loss: 19.4848\n",
"Epoch 93/500\n",
"30/30 [==============================] - 57s 2s/step - loss: 19.9230 - val_loss: 19.8133\n",
"Epoch 94/500\n",
"30/30 [==============================] - 57s 2s/step - loss: 19.5174 - val_loss: 18.7876\n",
"Epoch 95/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 19.6750 - val_loss: 19.1217\n",
"Epoch 96/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 19.2918 - val_loss: 19.0066\n",
"Epoch 97/500\n",
"30/30 [==============================] - 57s 2s/step - loss: 19.5391 - val_loss: 19.2043\n",
"Epoch 98/500\n",
"30/30 [==============================] - 62s 2s/step - loss: 19.6986 - val_loss: 20.1391\n",
"Epoch 99/500\n",
"30/30 [==============================] - 61s 2s/step - loss: 19.5048 - val_loss: 19.4383\n",
"Epoch 100/500\n",
"30/30 [==============================] - 56s 2s/step - loss: 19.2941 - val_loss: 19.6998\n",
"Epoch 101/500\n",
"30/30 [==============================] - 57s 2s/step - loss: 19.4645 - val_loss: 18.7480\n",
"Epoch 102/500\n",
"30/30 [==============================] - 57s 2s/step - loss: 19.3468 - val_loss: 20.0104\n",
"Epoch 103/500\n",
"30/30 [==============================] - 57s 2s/step - loss: 19.5910 - val_loss: 19.0392\n",
"Epoch 104/500\n",
"30/30 [==============================] - 57s 2s/step - loss: 19.4385 - val_loss: 19.1266\n",
"Epoch 105/500\n",
"30/30 [==============================] - 57s 2s/step - loss: 19.3504 - val_loss: 19.7491\n",
"Epoch 106/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 18.9692 - val_loss: 19.1707\n",
"Epoch 107/500\n",
"30/30 [==============================] - 57s 2s/step - loss: 19.2553 - val_loss: 19.5704\n",
"Epoch 108/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 19.5590 - val_loss: 18.9097\n",
"Epoch 109/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 18.6710 - val_loss: 19.4302\n",
"Epoch 110/500\n",
"30/30 [==============================] - 57s 2s/step - loss: 19.0906 - val_loss: 19.0445\n",
"Epoch 111/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 19.0178 - val_loss: 18.9003\n",
"Epoch 112/500\n",
"30/30 [==============================] - 64s 2s/step - loss: 19.1675 - val_loss: 18.6330\n",
"Epoch 113/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 18.9757 - val_loss: 18.7002\n",
"Epoch 114/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 18.9385 - val_loss: 18.6894\n",
"Epoch 115/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 19.1563 - val_loss: 18.4366\n",
"Epoch 116/500\n",
"30/30 [==============================] - 57s 2s/step - loss: 18.7275 - val_loss: 19.4573\n",
"Epoch 117/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 18.9145 - val_loss: 18.3290\n",
"Epoch 118/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 18.9156 - val_loss: 19.1213\n",
"Epoch 119/500\n",
"30/30 [==============================] - 57s 2s/step - loss: 19.1048 - val_loss: 18.6663\n",
"Epoch 120/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 18.5928 - val_loss: 19.4735\n",
"Epoch 121/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 18.9346 - val_loss: 18.1666\n",
"Epoch 122/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 18.7425 - val_loss: 18.6575\n",
"Epoch 123/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 18.5844 - val_loss: 18.5724\n",
"Epoch 124/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 18.5992 - val_loss: 18.6960\n",
"Epoch 125/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 18.7258 - val_loss: 18.3909\n",
"Epoch 126/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 18.7052 - val_loss: 18.4346\n",
"Epoch 127/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 18.5290 - val_loss: 19.0881\n",
"Epoch 128/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 18.6084 - val_loss: 18.3423\n",
"Epoch 129/500\n",
"30/30 [==============================] - 67s 2s/step - loss: 18.2576 - val_loss: 17.7641\n",
"Epoch 130/500\n",
"30/30 [==============================] - 57s 2s/step - loss: 18.5741 - val_loss: 19.3456\n",
"Epoch 131/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 18.7191 - val_loss: 18.2478\n",
"Epoch 132/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 18.2529 - val_loss: 17.8907\n",
"Epoch 133/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 18.6660 - val_loss: 19.2050\n",
"Epoch 134/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 18.4503 - val_loss: 17.5212\n",
"Epoch 135/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 18.4364 - val_loss: 17.6540\n",
"Epoch 136/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 18.3926 - val_loss: 17.6553\n",
"Epoch 137/500\n",
"30/30 [==============================] - 61s 2s/step - loss: 18.2460 - val_loss: 18.6843\n",
"Epoch 138/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 18.2683 - val_loss: 18.1989\n",
"Epoch 139/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 18.4373 - val_loss: 18.2519\n",
"Epoch 140/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 18.0950 - val_loss: 18.2093\n",
"Epoch 141/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 18.6397 - val_loss: 17.5036\n",
"Epoch 142/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 18.4368 - val_loss: 18.0884\n",
"Epoch 143/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 18.2509 - val_loss: 18.0419\n",
"Epoch 144/500\n",
"30/30 [==============================] - 60s 2s/step - loss: 17.9318 - val_loss: 17.1161\n",
"Epoch 145/500\n",
"30/30 [==============================] - 67s 2s/step - loss: 18.1403 - val_loss: 17.9708\n",
"Epoch 146/500\n",
"30/30 [==============================] - 56s 2s/step - loss: 18.2065 - val_loss: 18.9385\n",
"Epoch 147/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 17.9481 - val_loss: 17.5626\n",
"Epoch 148/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 17.9567 - val_loss: 17.6918\n",
"Epoch 149/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 18.0001 - val_loss: 17.8759\n",
"Epoch 150/500\n",
"30/30 [==============================] - 60s 2s/step - loss: 18.2126 - val_loss: 18.0285\n",
"Epoch 151/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 17.8216 - val_loss: 18.1529\n",
"Epoch 152/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 17.8409 - val_loss: 18.0349\n",
"Epoch 153/500\n",
"30/30 [==============================] - 60s 2s/step - loss: 17.8870 - val_loss: 16.9735\n",
"Epoch 154/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 17.5961 - val_loss: 17.3506\n",
"Epoch 155/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 18.0078 - val_loss: 18.0054\n",
"Epoch 156/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 17.9904 - val_loss: 17.5965\n",
"Epoch 157/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 17.9485 - val_loss: 17.4312\n",
"Epoch 158/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 17.8291 - val_loss: 17.3607\n",
"Epoch 159/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 17.8277 - val_loss: 17.2476\n",
"Epoch 160/500\n",
"30/30 [==============================] - 60s 2s/step - loss: 17.2321 - val_loss: 17.4888\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Epoch 161/500\n",
"30/30 [==============================] - 66s 2s/step - loss: 17.8075 - val_loss: 17.9411\n",
"Epoch 162/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 17.6729 - val_loss: 16.4171\n",
"Epoch 163/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 17.7537 - val_loss: 17.1066\n",
"Epoch 164/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 17.7760 - val_loss: 17.9759\n",
"Epoch 165/500\n",
"30/30 [==============================] - 60s 2s/step - loss: 17.9173 - val_loss: 17.1527\n",
"Epoch 166/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 17.7308 - val_loss: 17.3219\n",
"Epoch 167/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 17.4189 - val_loss: 17.8249\n",
"Epoch 168/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 17.4101 - val_loss: 17.1193\n",
"Epoch 169/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 17.7719 - val_loss: 17.0561\n",
"Epoch 170/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 17.7480 - val_loss: 16.8337\n",
"Epoch 171/500\n",
"30/30 [==============================] - 60s 2s/step - loss: 17.3692 - val_loss: 16.0112\n",
"Epoch 172/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 17.7808 - val_loss: 16.9604\n",
"Epoch 173/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 17.5618 - val_loss: 16.9944\n",
"Epoch 174/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 17.6525 - val_loss: 17.3570\n",
"Epoch 175/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 17.5603 - val_loss: 16.9481\n",
"Epoch 176/500\n",
"30/30 [==============================] - 64s 2s/step - loss: 17.3968 - val_loss: 16.7614\n",
"Epoch 177/500\n",
"30/30 [==============================] - 61s 2s/step - loss: 17.7041 - val_loss: 17.0379\n",
"Epoch 178/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 17.4487 - val_loss: 17.8662\n",
"Epoch 179/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 17.5594 - val_loss: 16.9650\n",
"Epoch 180/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 17.4884 - val_loss: 16.6101\n",
"Epoch 181/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 17.3017 - val_loss: 17.3026\n",
"Epoch 182/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 17.4179 - val_loss: 17.0920\n",
"Epoch 183/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 17.1374 - val_loss: 17.0096\n",
"Epoch 184/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 17.2827 - val_loss: 17.5058\n",
"Epoch 185/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 17.3034 - val_loss: 17.1128\n",
"Epoch 186/500\n",
"30/30 [==============================] - 72s 2s/step - loss: 17.1985 - val_loss: 16.1411\n",
"Epoch 187/500\n",
"30/30 [==============================] - 102s 3s/step - loss: 17.2851 - val_loss: 17.6696\n",
"Epoch 188/500\n",
"30/30 [==============================] - 117s 4s/step - loss: 17.1215 - val_loss: 17.2290\n",
"Epoch 189/500\n",
"30/30 [==============================] - 123s 4s/step - loss: 17.4202 - val_loss: 16.9745\n",
"Epoch 190/500\n",
"30/30 [==============================] - 126s 4s/step - loss: 17.0531 - val_loss: 16.7439\n",
"Epoch 191/500\n",
"30/30 [==============================] - 125s 4s/step - loss: 17.2199 - val_loss: 16.9525\n",
"Epoch 192/500\n",
"30/30 [==============================] - 130s 4s/step - loss: 17.2730 - val_loss: 16.7329\n",
"Epoch 193/500\n",
"30/30 [==============================] - 131s 4s/step - loss: 17.0992 - val_loss: 16.7782\n",
"Epoch 194/500\n",
"30/30 [==============================] - 123s 4s/step - loss: 17.2608 - val_loss: 16.7102\n",
"Epoch 195/500\n",
"30/30 [==============================] - 123s 4s/step - loss: 17.2919 - val_loss: 16.7896\n",
"Epoch 196/500\n",
"30/30 [==============================] - 125s 4s/step - loss: 17.2738 - val_loss: 16.4602\n",
"Epoch 197/500\n",
"30/30 [==============================] - 125s 4s/step - loss: 16.8739 - val_loss: 16.5762\n",
"Epoch 198/500\n",
"30/30 [==============================] - 128s 4s/step - loss: 17.1413 - val_loss: 16.7527\n",
"Epoch 199/500\n",
"30/30 [==============================] - 111s 4s/step - loss: 16.9642 - val_loss: 16.8084\n",
"Epoch 200/500\n",
"30/30 [==============================] - 52s 2s/step - loss: 17.0036 - val_loss: 16.4942\n",
"Epoch 201/500\n",
"30/30 [==============================] - 67s 2s/step - loss: 16.9632 - val_loss: 16.9797\n",
"Epoch 202/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 17.0620 - val_loss: 17.0211\n",
"Epoch 203/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 17.4539 - val_loss: 16.8192\n",
"Epoch 204/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 16.9974 - val_loss: 16.3421\n",
"Epoch 205/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 16.8898 - val_loss: 17.2752\n",
"Epoch 206/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 16.9180 - val_loss: 15.9694\n",
"Epoch 207/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 17.2808 - val_loss: 16.7605\n",
"Epoch 208/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 17.0356 - val_loss: 16.7981\n",
"Epoch 209/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 17.0600 - val_loss: 16.9557\n",
"Epoch 210/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 17.1601 - val_loss: 17.0695\n",
"Epoch 211/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 17.0459 - val_loss: 16.6873\n",
"Epoch 212/500\n",
"30/30 [==============================] - 89s 3s/step - loss: 17.1121 - val_loss: 16.7286\n",
"Epoch 213/500\n",
"30/30 [==============================] - 112s 4s/step - loss: 16.7431 - val_loss: 16.7320\n",
"Epoch 214/500\n",
"30/30 [==============================] - 118s 4s/step - loss: 16.8781 - val_loss: 16.9751\n",
"Epoch 215/500\n",
"30/30 [==============================] - 123s 4s/step - loss: 17.1820 - val_loss: 16.3007\n",
"Epoch 216/500\n",
"30/30 [==============================] - 125s 4s/step - loss: 16.7895 - val_loss: 16.8100\n",
"Epoch 217/500\n",
"30/30 [==============================] - 125s 4s/step - loss: 17.0252 - val_loss: 16.4287\n",
"Epoch 218/500\n",
"30/30 [==============================] - 125s 4s/step - loss: 16.7351 - val_loss: 16.7850\n",
"Epoch 219/500\n",
"30/30 [==============================] - 124s 4s/step - loss: 16.8772 - val_loss: 16.6999\n",
"Epoch 220/500\n",
"30/30 [==============================] - 127s 4s/step - loss: 16.8597 - val_loss: 16.5488\n",
"Epoch 221/500\n",
"30/30 [==============================] - 124s 4s/step - loss: 17.1371 - val_loss: 16.2532\n",
"Epoch 222/500\n",
"30/30 [==============================] - 125s 4s/step - loss: 16.8663 - val_loss: 16.5585\n",
"Epoch 223/500\n",
"30/30 [==============================] - 125s 4s/step - loss: 16.9581 - val_loss: 16.2982\n",
"Epoch 224/500\n",
"30/30 [==============================] - 125s 4s/step - loss: 16.9060 - val_loss: 16.3674\n",
"Epoch 225/500\n",
"30/30 [==============================] - 123s 4s/step - loss: 16.9360 - val_loss: 16.6738\n",
"Epoch 226/500\n",
"30/30 [==============================] - 110s 4s/step - loss: 16.6695 - val_loss: 16.8557\n",
"Epoch 227/500\n",
"30/30 [==============================] - 54s 2s/step - loss: 16.9524 - val_loss: 16.5686\n",
"Epoch 228/500\n",
"30/30 [==============================] - 68s 2s/step - loss: 16.8033 - val_loss: 16.5986\n",
"Epoch 229/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 16.7346 - val_loss: 16.0810\n",
"Epoch 230/500\n",
"30/30 [==============================] - 61s 2s/step - loss: 16.6734 - val_loss: 16.3377\n",
"Epoch 231/500\n",
"30/30 [==============================] - 61s 2s/step - loss: 16.8451 - val_loss: 16.1956\n",
"Epoch 232/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 16.8533 - val_loss: 16.4178\n",
"Epoch 233/500\n",
"30/30 [==============================] - 67s 2s/step - loss: 16.6677 - val_loss: 16.1700\n",
"Epoch 234/500\n",
"30/30 [==============================] - 62s 2s/step - loss: 16.7513 - val_loss: 16.6826\n",
"Epoch 235/500\n",
"30/30 [==============================] - 75s 3s/step - loss: 16.5991 - val_loss: 16.3288\n",
"Epoch 236/500\n",
"30/30 [==============================] - 60s 2s/step - loss: 17.0266 - val_loss: 16.5422\n",
"Epoch 237/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 16.7941 - val_loss: 16.2773\n",
"Epoch 238/500\n",
"30/30 [==============================] - 75s 3s/step - loss: 16.5905 - val_loss: 15.8983\n",
"Epoch 239/500\n",
"30/30 [==============================] - 79s 3s/step - loss: 16.7549 - val_loss: 16.9012\n",
"Epoch 240/500\n",
"30/30 [==============================] - 83s 3s/step - loss: 16.7844 - val_loss: 16.3224\n",
"Epoch 241/500\n",
"30/30 [==============================] - 86s 3s/step - loss: 16.6991 - val_loss: 16.3654\n",
"Epoch 242/500\n",
"30/30 [==============================] - 87s 3s/step - loss: 16.5758 - val_loss: 15.8727\n",
"Epoch 243/500\n",
"30/30 [==============================] - 87s 3s/step - loss: 16.3914 - val_loss: 16.4542\n",
"Epoch 244/500\n",
"30/30 [==============================] - 89s 3s/step - loss: 16.7021 - val_loss: 16.4562\n",
"Epoch 245/500\n",
"30/30 [==============================] - 89s 3s/step - loss: 16.4270 - val_loss: 16.3817\n",
"Epoch 246/500\n",
"30/30 [==============================] - 89s 3s/step - loss: 16.7722 - val_loss: 16.2395\n",
"Epoch 247/500\n",
"30/30 [==============================] - 89s 3s/step - loss: 16.6468 - val_loss: 16.6332\n",
"Epoch 248/500\n",
"30/30 [==============================] - 89s 3s/step - loss: 17.0123 - val_loss: 16.2401\n",
"Epoch 249/500\n",
"30/30 [==============================] - 88s 3s/step - loss: 16.4098 - val_loss: 16.2627\n",
"Epoch 250/500\n",
"30/30 [==============================] - 87s 3s/step - loss: 16.6750 - val_loss: 16.3639\n",
"Epoch 251/500\n",
"30/30 [==============================] - 88s 3s/step - loss: 16.4957 - val_loss: 17.0374\n",
"Epoch 252/500\n",
"30/30 [==============================] - 88s 3s/step - loss: 16.5535 - val_loss: 16.6554\n",
"Epoch 253/500\n",
"30/30 [==============================] - 119s 4s/step - loss: 16.5255 - val_loss: 16.8328\n",
"Epoch 254/500\n",
"30/30 [==============================] - 119s 4s/step - loss: 16.6808 - val_loss: 16.1435\n",
"Epoch 255/500\n",
"30/30 [==============================] - 54s 2s/step - loss: 16.5841 - val_loss: 16.3919\n",
"Epoch 256/500\n",
"30/30 [==============================] - 78s 3s/step - loss: 16.5055 - val_loss: 16.5761\n",
"Epoch 257/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 16.6117 - val_loss: 16.4381\n",
"Epoch 258/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 16.6162 - val_loss: 16.2132\n",
"Epoch 259/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 16.4880 - val_loss: 16.6501\n",
"Epoch 260/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 16.5138 - val_loss: 15.8520\n",
"Epoch 261/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 16.4561 - val_loss: 15.8716\n",
"Epoch 262/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 16.6328 - val_loss: 16.2283\n",
"Epoch 263/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 16.8187 - val_loss: 16.8967\n",
"Epoch 264/500\n",
"30/30 [==============================] - 90s 3s/step - loss: 16.5232 - val_loss: 15.7357\n",
"Epoch 265/500\n",
"30/30 [==============================] - 108s 4s/step - loss: 16.3057 - val_loss: 16.0941\n",
"Epoch 266/500\n",
"30/30 [==============================] - 117s 4s/step - loss: 16.6120 - val_loss: 16.4122\n",
"Epoch 267/500\n",
"30/30 [==============================] - 125s 4s/step - loss: 16.6497 - val_loss: 15.5423\n",
"Epoch 268/500\n",
"30/30 [==============================] - 122s 4s/step - loss: 16.4017 - val_loss: 16.8959\n",
"Epoch 269/500\n",
"30/30 [==============================] - 127s 4s/step - loss: 16.5587 - val_loss: 16.1176\n",
"Epoch 270/500\n",
"30/30 [==============================] - 124s 4s/step - loss: 16.3952 - val_loss: 16.4328\n",
"Epoch 271/500\n",
"30/30 [==============================] - 125s 4s/step - loss: 16.5917 - val_loss: 16.1204\n",
"Epoch 272/500\n",
"30/30 [==============================] - 123s 4s/step - loss: 16.3392 - val_loss: 16.1431\n",
"Epoch 273/500\n",
"30/30 [==============================] - 128s 4s/step - loss: 16.5220 - val_loss: 16.2746\n",
"Epoch 274/500\n",
"30/30 [==============================] - 128s 4s/step - loss: 16.6498 - val_loss: 16.3835\n",
"Epoch 275/500\n",
"30/30 [==============================] - 123s 4s/step - loss: 16.2066 - val_loss: 16.0384\n",
"Epoch 276/500\n",
"30/30 [==============================] - 81s 3s/step - loss: 16.2591 - val_loss: 16.4378\n",
"Epoch 277/500\n",
"30/30 [==============================] - 65s 2s/step - loss: 16.6943 - val_loss: 16.1523\n",
"Epoch 278/500\n",
"30/30 [==============================] - 61s 2s/step - loss: 16.3948 - val_loss: 16.1507\n",
"Epoch 279/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 16.6854 - val_loss: 16.2779\n",
"Epoch 280/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 16.4208 - val_loss: 16.0576\n",
"Epoch 281/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 16.3797 - val_loss: 16.6038\n",
"Epoch 282/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 16.8321 - val_loss: 16.0848\n",
"Epoch 283/500\n",
"30/30 [==============================] - 67s 2s/step - loss: 16.2373 - val_loss: 16.3140\n",
"Epoch 284/500\n",
"30/30 [==============================] - 56s 2s/step - loss: 16.3162 - val_loss: 15.8853\n",
"Epoch 285/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 16.3769 - val_loss: 16.3856\n",
"Epoch 286/500\n",
"30/30 [==============================] - 61s 2s/step - loss: 16.4671 - val_loss: 16.0674\n",
"Epoch 287/500\n",
"30/30 [==============================] - 85s 3s/step - loss: 16.5860 - val_loss: 16.3418\n",
"Epoch 288/500\n",
"30/30 [==============================] - 106s 4s/step - loss: 16.4896 - val_loss: 16.5205\n",
"Epoch 289/500\n",
"30/30 [==============================] - 118s 4s/step - loss: 16.4469 - val_loss: 15.8535\n",
"Epoch 290/500\n",
"30/30 [==============================] - 123s 4s/step - loss: 16.6378 - val_loss: 15.7410\n",
"Epoch 291/500\n",
"30/30 [==============================] - 122s 4s/step - loss: 16.6027 - val_loss: 16.5198\n",
"Epoch 292/500\n",
"30/30 [==============================] - 125s 4s/step - loss: 15.9454 - val_loss: 16.8931\n",
"Epoch 293/500\n",
"30/30 [==============================] - 124s 4s/step - loss: 16.3042 - val_loss: 15.6124\n",
"Epoch 294/500\n",
"30/30 [==============================] - 124s 4s/step - loss: 16.4451 - val_loss: 15.6456\n",
"Epoch 295/500\n",
"30/30 [==============================] - 126s 4s/step - loss: 16.3229 - val_loss: 16.1610\n",
"Epoch 296/500\n",
"30/30 [==============================] - 122s 4s/step - loss: 16.3041 - val_loss: 16.1309\n",
"Epoch 297/500\n",
"30/30 [==============================] - 123s 4s/step - loss: 16.5280 - val_loss: 15.8774\n",
"Epoch 298/500\n",
"30/30 [==============================] - 122s 4s/step - loss: 16.3325 - val_loss: 16.5331\n",
"Epoch 299/500\n",
"30/30 [==============================] - 110s 4s/step - loss: 16.5183 - val_loss: 15.7422\n",
"Epoch 300/500\n",
"30/30 [==============================] - 69s 2s/step - loss: 16.5641 - val_loss: 16.7612\n",
"Epoch 301/500\n",
"30/30 [==============================] - 63s 2s/step - loss: 16.2330 - val_loss: 15.8244\n",
"Epoch 302/500\n",
"30/30 [==============================] - 64s 2s/step - loss: 16.4699 - val_loss: 15.6958\n",
"Epoch 303/500\n",
"30/30 [==============================] - 63s 2s/step - loss: 16.4143 - val_loss: 16.6897\n",
"Epoch 304/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 16.2447 - val_loss: 16.1471\n",
"Epoch 305/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 16.5204 - val_loss: 15.7905\n",
"Epoch 306/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 16.1380 - val_loss: 16.5672\n",
"Epoch 307/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 16.5557 - val_loss: 15.9381\n",
"Epoch 308/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 16.4380 - val_loss: 16.5429\n",
"Epoch 309/500\n",
"30/30 [==============================] - 60s 2s/step - loss: 16.3664 - val_loss: 15.8925\n",
"Epoch 310/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 16.3254 - val_loss: 15.8290\n",
"Epoch 311/500\n",
"30/30 [==============================] - 73s 2s/step - loss: 16.4264 - val_loss: 16.0228\n",
"Epoch 312/500\n",
"30/30 [==============================] - 97s 3s/step - loss: 16.2977 - val_loss: 16.1006\n",
"Epoch 313/500\n",
"30/30 [==============================] - 114s 4s/step - loss: 16.4107 - val_loss: 16.0559\n",
"Epoch 314/500\n",
"30/30 [==============================] - 118s 4s/step - loss: 16.1044 - val_loss: 15.9039\n",
"Epoch 315/500\n",
"30/30 [==============================] - 129s 4s/step - loss: 16.3085 - val_loss: 16.3312\n",
"Epoch 316/500\n",
"30/30 [==============================] - 127s 4s/step - loss: 16.1068 - val_loss: 16.0503\n",
"Epoch 317/500\n",
"30/30 [==============================] - 126s 4s/step - loss: 16.6584 - val_loss: 16.2829\n",
"Epoch 318/500\n",
"30/30 [==============================] - 125s 4s/step - loss: 16.2703 - val_loss: 15.6388\n",
"Epoch 319/500\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"30/30 [==============================] - 129s 4s/step - loss: 16.2571 - val_loss: 15.7867\n",
"Epoch 320/500\n",
"30/30 [==============================] - 125s 4s/step - loss: 16.5441 - val_loss: 15.8499\n",
"Epoch 321/500\n",
"30/30 [==============================] - 123s 4s/step - loss: 16.3501 - val_loss: 16.1323\n",
"Epoch 322/500\n",
"30/30 [==============================] - 124s 4s/step - loss: 16.2824 - val_loss: 15.9564\n",
"Epoch 323/500\n",
"30/30 [==============================] - 125s 4s/step - loss: 16.3759 - val_loss: 16.3467\n",
"Epoch 324/500\n",
"30/30 [==============================] - 60s 2s/step - loss: 16.3403 - val_loss: 15.6820\n",
"Epoch 325/500\n",
"30/30 [==============================] - 69s 2s/step - loss: 16.2955 - val_loss: 16.1720\n",
"Epoch 326/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 16.4078 - val_loss: 16.3941\n",
"Epoch 327/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 16.0622 - val_loss: 16.0237\n",
"Epoch 328/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 16.3376 - val_loss: 15.5706\n",
"Epoch 329/500\n",
"30/30 [==============================] - 62s 2s/step - loss: 16.1294 - val_loss: 16.5142\n",
"Epoch 330/500\n",
"30/30 [==============================] - 63s 2s/step - loss: 16.0853 - val_loss: 16.1133\n",
"Epoch 331/500\n",
"30/30 [==============================] - 57s 2s/step - loss: 16.1868 - val_loss: 15.9329\n",
"Epoch 332/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 16.1243 - val_loss: 15.7737\n",
"Epoch 333/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 16.0936 - val_loss: 15.8534\n",
"Epoch 334/500\n",
"30/30 [==============================] - 70s 2s/step - loss: 16.3387 - val_loss: 16.0363\n",
"Epoch 335/500\n",
"30/30 [==============================] - 96s 3s/step - loss: 16.1497 - val_loss: 16.3894\n",
"Epoch 336/500\n",
"30/30 [==============================] - 114s 4s/step - loss: 15.7429 - val_loss: 16.1402\n",
"Epoch 337/500\n",
"30/30 [==============================] - 119s 4s/step - loss: 16.3378 - val_loss: 16.3067\n",
"Epoch 338/500\n",
"30/30 [==============================] - 122s 4s/step - loss: 16.1981 - val_loss: 16.1319\n",
"Epoch 339/500\n",
"30/30 [==============================] - 124s 4s/step - loss: 16.1361 - val_loss: 15.7421\n",
"Epoch 340/500\n",
"30/30 [==============================] - 125s 4s/step - loss: 16.2517 - val_loss: 15.5112\n",
"Epoch 341/500\n",
"30/30 [==============================] - 124s 4s/step - loss: 16.1154 - val_loss: 15.5062\n",
"Epoch 342/500\n",
"30/30 [==============================] - 125s 4s/step - loss: 16.1898 - val_loss: 15.5263\n",
"Epoch 343/500\n",
"30/30 [==============================] - 122s 4s/step - loss: 16.0264 - val_loss: 16.6698\n",
"Epoch 344/500\n",
"30/30 [==============================] - 125s 4s/step - loss: 16.1943 - val_loss: 15.7087\n",
"Epoch 345/500\n",
"30/30 [==============================] - 125s 4s/step - loss: 16.2535 - val_loss: 16.1479\n",
"Epoch 346/500\n",
"30/30 [==============================] - 124s 4s/step - loss: 16.5307 - val_loss: 15.6747\n",
"Epoch 347/500\n",
"30/30 [==============================] - 64s 2s/step - loss: 16.2075 - val_loss: 15.6584\n",
"Epoch 348/500\n",
"30/30 [==============================] - 69s 2s/step - loss: 16.2071 - val_loss: 15.3423\n",
"Epoch 349/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 16.0504 - val_loss: 16.2236\n",
"Epoch 350/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 16.0833 - val_loss: 16.2664\n",
"Epoch 351/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 16.2250 - val_loss: 15.8436\n",
"Epoch 352/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 16.1694 - val_loss: 15.7174\n",
"Epoch 353/500\n",
"30/30 [==============================] - 61s 2s/step - loss: 16.3608 - val_loss: 16.8256\n",
"Epoch 354/500\n",
"30/30 [==============================] - 63s 2s/step - loss: 16.0936 - val_loss: 15.2995\n",
"Epoch 355/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 16.0449 - val_loss: 16.5662\n",
"Epoch 356/500\n",
"30/30 [==============================] - 70s 2s/step - loss: 16.1806 - val_loss: 16.0976\n",
"Epoch 357/500\n",
"30/30 [==============================] - 96s 3s/step - loss: 16.2721 - val_loss: 15.5171\n",
"Epoch 358/500\n",
"30/30 [==============================] - 114s 4s/step - loss: 16.2750 - val_loss: 16.0328\n",
"Epoch 359/500\n",
"30/30 [==============================] - 121s 4s/step - loss: 16.4254 - val_loss: 16.0317\n",
"Epoch 360/500\n",
"30/30 [==============================] - 123s 4s/step - loss: 16.2188 - val_loss: 15.7162\n",
"Epoch 361/500\n",
"30/30 [==============================] - 130s 4s/step - loss: 16.0624 - val_loss: 16.2708\n",
"Epoch 362/500\n",
"30/30 [==============================] - 123s 4s/step - loss: 16.1229 - val_loss: 16.3186\n",
"Epoch 363/500\n",
"30/30 [==============================] - 122s 4s/step - loss: 16.1250 - val_loss: 15.5198\n",
"Epoch 364/500\n",
"30/30 [==============================] - 126s 4s/step - loss: 16.1816 - val_loss: 16.0486\n",
"Epoch 365/500\n",
"30/30 [==============================] - 89s 3s/step - loss: 16.2343 - val_loss: 16.1744\n",
"Epoch 366/500\n",
"30/30 [==============================] - 88s 3s/step - loss: 16.1624 - val_loss: 15.6001\n",
"Epoch 367/500\n",
"30/30 [==============================] - 88s 3s/step - loss: 16.1360 - val_loss: 16.4407\n",
"Epoch 368/500\n",
"30/30 [==============================] - 88s 3s/step - loss: 16.0462 - val_loss: 16.1154\n",
"Epoch 369/500\n",
"30/30 [==============================] - 67s 2s/step - loss: 16.1973 - val_loss: 15.5669\n",
"Epoch 370/500\n",
"30/30 [==============================] - 57s 2s/step - loss: 16.1021 - val_loss: 15.6763\n",
"Epoch 371/500\n",
"30/30 [==============================] - 57s 2s/step - loss: 16.1306 - val_loss: 15.5349\n",
"Epoch 372/500\n",
"30/30 [==============================] - 56s 2s/step - loss: 16.2231 - val_loss: 16.4343\n",
"Epoch 373/500\n",
"30/30 [==============================] - 55s 2s/step - loss: 15.9661 - val_loss: 15.7303\n",
"Epoch 374/500\n",
"30/30 [==============================] - 54s 2s/step - loss: 16.1949 - val_loss: 15.5661\n",
"Epoch 375/500\n",
"30/30 [==============================] - 54s 2s/step - loss: 15.9551 - val_loss: 16.5234\n",
"Epoch 376/500\n",
"30/30 [==============================] - 54s 2s/step - loss: 16.0258 - val_loss: 15.4668\n",
"Epoch 377/500\n",
"30/30 [==============================] - 54s 2s/step - loss: 16.1134 - val_loss: 16.1877\n",
"Epoch 378/500\n",
"30/30 [==============================] - 54s 2s/step - loss: 15.9459 - val_loss: 16.0216\n",
"Epoch 379/500\n",
"30/30 [==============================] - 60s 2s/step - loss: 16.4525 - val_loss: 15.6702\n",
"Epoch 380/500\n",
"30/30 [==============================] - 72s 2s/step - loss: 16.0660 - val_loss: 15.3305\n",
"Epoch 381/500\n",
"30/30 [==============================] - 81s 3s/step - loss: 16.0083 - val_loss: 16.1274\n",
"Epoch 382/500\n",
"30/30 [==============================] - 87s 3s/step - loss: 16.0092 - val_loss: 16.1366\n",
"Epoch 383/500\n",
"30/30 [==============================] - 121s 4s/step - loss: 16.1354 - val_loss: 15.7858\n",
"Epoch 384/500\n",
"30/30 [==============================] - 123s 4s/step - loss: 16.1129 - val_loss: 15.7107\n",
"Epoch 385/500\n",
"30/30 [==============================] - 123s 4s/step - loss: 16.0284 - val_loss: 16.0496\n",
"Epoch 386/500\n",
"30/30 [==============================] - 126s 4s/step - loss: 15.8368 - val_loss: 16.5170\n",
"Epoch 387/500\n",
"30/30 [==============================] - 124s 4s/step - loss: 16.3342 - val_loss: 15.4547\n",
"Epoch 388/500\n",
"30/30 [==============================] - 123s 4s/step - loss: 16.2401 - val_loss: 15.5744\n",
"Epoch 389/500\n",
"30/30 [==============================] - 126s 4s/step - loss: 16.2276 - val_loss: 15.5983\n",
"Epoch 390/500\n",
"30/30 [==============================] - 125s 4s/step - loss: 15.9413 - val_loss: 15.6545\n",
"Epoch 391/500\n",
"30/30 [==============================] - 123s 4s/step - loss: 16.3595 - val_loss: 15.3371\n",
"Epoch 392/500\n",
"30/30 [==============================] - 63s 2s/step - loss: 15.8981 - val_loss: 16.1008\n",
"Epoch 393/500\n",
"30/30 [==============================] - 68s 2s/step - loss: 16.1883 - val_loss: 15.8930\n",
"Epoch 394/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 15.9516 - val_loss: 15.9927\n",
"Epoch 395/500\n",
"30/30 [==============================] - 61s 2s/step - loss: 15.9433 - val_loss: 14.9362\n",
"Epoch 396/500\n",
"30/30 [==============================] - 60s 2s/step - loss: 16.1004 - val_loss: 16.4069\n",
"Epoch 397/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 15.9822 - val_loss: 15.7637\n",
"Epoch 398/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 16.1382 - val_loss: 15.9379\n",
"Epoch 399/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 16.1960 - val_loss: 16.0348\n",
"Epoch 400/500\n",
"30/30 [==============================] - 57s 2s/step - loss: 16.0175 - val_loss: 16.3064\n",
"Epoch 401/500\n",
"30/30 [==============================] - 62s 2s/step - loss: 15.8997 - val_loss: 15.6500\n",
"Epoch 402/500\n",
"30/30 [==============================] - 83s 3s/step - loss: 16.2010 - val_loss: 15.8539\n",
"Epoch 403/500\n",
"30/30 [==============================] - 104s 3s/step - loss: 15.7408 - val_loss: 16.1076\n",
"Epoch 404/500\n",
"30/30 [==============================] - 115s 4s/step - loss: 16.4274 - val_loss: 15.3456\n",
"Epoch 405/500\n",
"30/30 [==============================] - 120s 4s/step - loss: 15.7428 - val_loss: 15.5534\n",
"Epoch 406/500\n",
"30/30 [==============================] - 122s 4s/step - loss: 16.2359 - val_loss: 16.0074\n",
"Epoch 407/500\n",
"30/30 [==============================] - 124s 4s/step - loss: 16.0718 - val_loss: 16.4514\n",
"Epoch 408/500\n",
"30/30 [==============================] - 125s 4s/step - loss: 16.3895 - val_loss: 15.2282\n",
"Epoch 409/500\n",
"30/30 [==============================] - 122s 4s/step - loss: 15.9361 - val_loss: 15.5478\n",
"Epoch 410/500\n",
"30/30 [==============================] - 125s 4s/step - loss: 16.1194 - val_loss: 15.6116\n",
"Epoch 411/500\n",
"30/30 [==============================] - 126s 4s/step - loss: 16.0214 - val_loss: 16.1073\n",
"Epoch 412/500\n",
"30/30 [==============================] - 123s 4s/step - loss: 16.0557 - val_loss: 15.5114\n",
"Epoch 413/500\n",
"30/30 [==============================] - 127s 4s/step - loss: 16.1405 - val_loss: 16.4630\n",
"Epoch 414/500\n",
"30/30 [==============================] - 125s 4s/step - loss: 16.1881 - val_loss: 15.7337\n",
"Epoch 415/500\n",
"30/30 [==============================] - 70s 2s/step - loss: 15.9524 - val_loss: 15.4768\n",
"Epoch 416/500\n",
"30/30 [==============================] - 65s 2s/step - loss: 15.8019 - val_loss: 15.7502\n",
"Epoch 417/500\n",
"30/30 [==============================] - 60s 2s/step - loss: 16.3821 - val_loss: 15.6619\n",
"Epoch 418/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 15.8915 - val_loss: 15.7251\n",
"Epoch 419/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 15.8573 - val_loss: 16.5752\n",
"Epoch 420/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 16.0249 - val_loss: 16.2398\n",
"Epoch 421/500\n",
"30/30 [==============================] - 57s 2s/step - loss: 15.9861 - val_loss: 16.3022\n",
"Epoch 422/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 15.8775 - val_loss: 15.7504\n",
"Epoch 423/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 16.0352 - val_loss: 15.9333\n",
"Epoch 424/500\n",
"30/30 [==============================] - 61s 2s/step - loss: 15.9813 - val_loss: 16.0950\n",
"Epoch 425/500\n",
"30/30 [==============================] - 60s 2s/step - loss: 16.0516 - val_loss: 15.4165\n",
"Epoch 426/500\n",
"30/30 [==============================] - 71s 2s/step - loss: 16.1241 - val_loss: 15.4657\n",
"Epoch 427/500\n",
"30/30 [==============================] - 98s 3s/step - loss: 16.0654 - val_loss: 16.1920\n",
"Epoch 428/500\n",
"30/30 [==============================] - 114s 4s/step - loss: 15.9455 - val_loss: 15.2535\n",
"Epoch 429/500\n",
"30/30 [==============================] - 121s 4s/step - loss: 16.0065 - val_loss: 15.8941\n",
"Epoch 430/500\n",
"30/30 [==============================] - 124s 4s/step - loss: 15.7573 - val_loss: 15.4150\n",
"Epoch 431/500\n",
"30/30 [==============================] - 124s 4s/step - loss: 16.0947 - val_loss: 15.7753\n",
"Epoch 432/500\n",
"30/30 [==============================] - 127s 4s/step - loss: 15.8444 - val_loss: 15.5911\n",
"Epoch 433/500\n",
"30/30 [==============================] - 125s 4s/step - loss: 16.1289 - val_loss: 15.9490\n",
"Epoch 434/500\n",
"30/30 [==============================] - 125s 4s/step - loss: 15.9296 - val_loss: 15.6148\n",
"Epoch 435/500\n",
"30/30 [==============================] - 126s 4s/step - loss: 15.9802 - val_loss: 15.4892\n",
"Epoch 436/500\n",
"30/30 [==============================] - 123s 4s/step - loss: 16.0529 - val_loss: 15.2430\n",
"Epoch 437/500\n",
"30/30 [==============================] - 124s 4s/step - loss: 15.7882 - val_loss: 15.6371\n",
"Epoch 438/500\n",
"30/30 [==============================] - 119s 4s/step - loss: 16.0208 - val_loss: 15.5694\n",
"Epoch 439/500\n",
"30/30 [==============================] - 53s 2s/step - loss: 16.2243 - val_loss: 16.4516\n",
"Epoch 440/500\n",
"30/30 [==============================] - 69s 2s/step - loss: 15.8460 - val_loss: 15.2869\n",
"Epoch 441/500\n",
"30/30 [==============================] - 65s 2s/step - loss: 15.9455 - val_loss: 15.9559\n",
"Epoch 442/500\n",
"30/30 [==============================] - 57s 2s/step - loss: 15.9085 - val_loss: 15.4212\n",
"Epoch 443/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 16.0805 - val_loss: 15.5691\n",
"Epoch 444/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 15.8312 - val_loss: 15.5900\n",
"Epoch 445/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 16.1131 - val_loss: 14.9550\n",
"Epoch 446/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 16.1825 - val_loss: 16.5839\n",
"Epoch 447/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 15.8725 - val_loss: 15.4740\n",
"Epoch 448/500\n",
"30/30 [==============================] - 83s 3s/step - loss: 15.9381 - val_loss: 15.3606\n",
"Epoch 449/500\n",
"30/30 [==============================] - 107s 4s/step - loss: 15.7734 - val_loss: 15.8835\n",
"Epoch 450/500\n",
"30/30 [==============================] - 118s 4s/step - loss: 16.2426 - val_loss: 16.0760\n",
"Epoch 451/500\n",
"30/30 [==============================] - 122s 4s/step - loss: 15.7717 - val_loss: 16.1588\n",
"Epoch 452/500\n",
"30/30 [==============================] - 126s 4s/step - loss: 15.8032 - val_loss: 15.5423\n",
"Epoch 453/500\n",
"30/30 [==============================] - 125s 4s/step - loss: 16.0863 - val_loss: 16.2087\n",
"Epoch 454/500\n",
"30/30 [==============================] - 123s 4s/step - loss: 15.7231 - val_loss: 15.4152\n",
"Epoch 455/500\n",
"30/30 [==============================] - 123s 4s/step - loss: 15.9819 - val_loss: 15.6086\n",
"Epoch 456/500\n",
"30/30 [==============================] - 124s 4s/step - loss: 16.2392 - val_loss: 15.6546\n",
"Epoch 457/500\n",
"30/30 [==============================] - 125s 4s/step - loss: 15.9337 - val_loss: 15.5734\n",
"Epoch 458/500\n",
"30/30 [==============================] - 123s 4s/step - loss: 15.7483 - val_loss: 16.0871\n",
"Epoch 459/500\n",
"30/30 [==============================] - 125s 4s/step - loss: 15.9154 - val_loss: 15.7753\n",
"Epoch 460/500\n",
"30/30 [==============================] - 116s 4s/step - loss: 16.1634 - val_loss: 16.0291\n",
"Epoch 461/500\n",
"30/30 [==============================] - 52s 2s/step - loss: 16.0713 - val_loss: 15.6570\n",
"Epoch 462/500\n",
"30/30 [==============================] - 68s 2s/step - loss: 15.7077 - val_loss: 15.3641\n",
"Epoch 463/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 16.0866 - val_loss: 15.8481\n",
"Epoch 464/500\n",
"30/30 [==============================] - 57s 2s/step - loss: 15.9679 - val_loss: 15.6844\n",
"Epoch 465/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 15.9050 - val_loss: 15.2170\n",
"Epoch 466/500\n",
"30/30 [==============================] - 57s 2s/step - loss: 15.7928 - val_loss: 16.0792\n",
"Epoch 467/500\n",
"30/30 [==============================] - 57s 2s/step - loss: 15.9432 - val_loss: 15.4652\n",
"Epoch 468/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 16.0087 - val_loss: 15.8910\n",
"Epoch 469/500\n",
"30/30 [==============================] - 64s 2s/step - loss: 15.9682 - val_loss: 15.9137\n",
"Epoch 470/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 15.6714 - val_loss: 15.9395\n",
"Epoch 471/500\n",
"30/30 [==============================] - 88s 3s/step - loss: 16.0309 - val_loss: 15.9491\n",
"Epoch 472/500\n",
"30/30 [==============================] - 109s 4s/step - loss: 15.8227 - val_loss: 15.7770\n",
"Epoch 473/500\n",
"30/30 [==============================] - 117s 4s/step - loss: 16.0340 - val_loss: 15.3767\n",
"Epoch 474/500\n",
"30/30 [==============================] - 122s 4s/step - loss: 15.8100 - val_loss: 16.0189\n",
"Epoch 475/500\n",
"30/30 [==============================] - 129s 4s/step - loss: 15.8677 - val_loss: 15.8241\n",
"Epoch 476/500\n",
"30/30 [==============================] - 129s 4s/step - loss: 15.8201 - val_loss: 15.2546\n",
"Epoch 477/500\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"30/30 [==============================] - 123s 4s/step - loss: 16.1264 - val_loss: 16.2662\n",
"Epoch 478/500\n",
"30/30 [==============================] - 124s 4s/step - loss: 16.1311 - val_loss: 15.2587\n",
"Epoch 479/500\n",
"30/30 [==============================] - 125s 4s/step - loss: 16.2160 - val_loss: 15.7506\n",
"Epoch 480/500\n",
"30/30 [==============================] - 123s 4s/step - loss: 15.8996 - val_loss: 16.0202\n",
"Epoch 481/500\n",
"30/30 [==============================] - 127s 4s/step - loss: 15.9867 - val_loss: 15.5650\n",
"Epoch 482/500\n",
"30/30 [==============================] - 170s 6s/step - loss: 15.7489 - val_loss: 15.4263\n",
"Epoch 483/500\n",
"30/30 [==============================] - 63s 2s/step - loss: 16.0861 - val_loss: 15.6782\n",
"Epoch 484/500\n",
"30/30 [==============================] - 77s 3s/step - loss: 15.8524 - val_loss: 15.6728\n",
"Epoch 485/500\n",
"30/30 [==============================] - 68s 2s/step - loss: 15.9259 - val_loss: 15.5141\n",
"Epoch 486/500\n",
"30/30 [==============================] - 67s 2s/step - loss: 15.7106 - val_loss: 15.6335\n",
"Epoch 487/500\n",
"30/30 [==============================] - 63s 2s/step - loss: 15.9842 - val_loss: 15.1482\n",
"Epoch 488/500\n",
"30/30 [==============================] - 61s 2s/step - loss: 15.8998 - val_loss: 16.0844\n",
"Epoch 489/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 15.8302 - val_loss: 16.6305\n",
"Epoch 490/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 15.8365 - val_loss: 15.8551\n",
"Epoch 491/500\n",
"30/30 [==============================] - 57s 2s/step - loss: 16.0139 - val_loss: 15.3942\n",
"Epoch 492/500\n",
"30/30 [==============================] - 58s 2s/step - loss: 15.9906 - val_loss: 16.0351\n",
"Epoch 493/500\n",
"30/30 [==============================] - 59s 2s/step - loss: 15.7704 - val_loss: 15.5585\n",
"Epoch 494/500\n",
"30/30 [==============================] - 64s 2s/step - loss: 15.8734 - val_loss: 15.5017\n",
"Epoch 495/500\n",
"30/30 [==============================] - 86s 3s/step - loss: 15.8414 - val_loss: 16.0038\n",
"Epoch 496/500\n",
"30/30 [==============================] - 109s 4s/step - loss: 16.0293 - val_loss: 15.9147\n",
"Epoch 497/500\n",
"30/30 [==============================] - 119s 4s/step - loss: 15.7651 - val_loss: 15.6716\n",
"Epoch 498/500\n",
"30/30 [==============================] - 123s 4s/step - loss: 15.8485 - val_loss: 16.0082\n",
"Epoch 499/500\n",
"30/30 [==============================] - 126s 4s/step - loss: 15.8425 - val_loss: 14.8089\n",
"Epoch 500/500\n",
"30/30 [==============================] - 101s 3s/step - loss: 16.0761 - val_loss: 15.9947\n",
"Unfreeze all of the layers.\n",
"Train on 488 samples, val on 121 samples, with batch size 16.\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"/var/folders/j_/grk4ythd0392dcw5z3gkgw5w0000gn/T/ipykernel_39692/4035785499.py:81: UserWarning: `Model.fit_generator` is deprecated and will be removed in a future version. Please use `Model.fit`, which supports generators.\n",
" model.fit_generator(data_generator_wrapper(lines[:num_train], batch_size, input_shape, anchors, num_classes),\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Epoch 51/100\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"2023-01-22 13:37:37.110606: E tensorflow/core/grappler/optimizers/meta_optimizer.cc:954] layout failed: INVALID_ARGUMENT: Subshape must have computed start >= end since stride is negative, but is 0 and 2 (computed from start 0 and end 9223372036854775807 over shape with rank 2 and stride-1)\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"30/30 [==============================] - ETA: 0s - loss: 16.2221 "
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"2023-01-22 13:43:00.326593: E tensorflow/core/grappler/optimizers/meta_optimizer.cc:954] layout failed: INVALID_ARGUMENT: Subshape must have computed start >= end since stride is negative, but is 0 and 2 (computed from start 0 and end 9223372036854775807 over shape with rank 2 and stride-1)\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"30/30 [==============================] - 350s 11s/step - loss: 16.2221 - val_loss: 15.0134 - lr: 1.0000e-04\n",
"Epoch 52/100\n",
"30/30 [==============================] - 346s 12s/step - loss: 14.9261 - val_loss: 14.5771 - lr: 1.0000e-04\n",
"Epoch 53/100\n",
"30/30 [==============================] - 243s 8s/step - loss: 14.5103 - val_loss: 14.5714 - lr: 1.0000e-04\n",
"Epoch 54/100\n",
"30/30 [==============================] - 231s 8s/step - loss: 14.2489 - val_loss: 13.8991 - lr: 1.0000e-04\n",
"Epoch 55/100\n",
"30/30 [==============================] - 276s 9s/step - loss: 14.1362 - val_loss: 14.2145 - lr: 1.0000e-04\n",
"Epoch 56/100\n",
"30/30 [==============================] - 334s 11s/step - loss: 13.6959 - val_loss: 13.6794 - lr: 1.0000e-04\n",
"Epoch 57/100\n",
"30/30 [==============================] - 340s 11s/step - loss: 13.5898 - val_loss: 13.1452 - lr: 1.0000e-04\n",
"Epoch 58/100\n",
"30/30 [==============================] - 336s 11s/step - loss: 13.4866 - val_loss: 13.5824 - lr: 1.0000e-04\n",
"Epoch 59/100\n",
"30/30 [==============================] - 259s 9s/step - loss: 13.4531 - val_loss: 13.1278 - lr: 1.0000e-04\n",
"Epoch 60/100\n",
"30/30 [==============================] - 219s 7s/step - loss: 13.3503 - val_loss: 13.0499 - lr: 1.0000e-04\n",
"Epoch 61/100\n",
"30/30 [==============================] - 254s 8s/step - loss: 13.2267 - val_loss: 13.0210 - lr: 1.0000e-04\n",
"Epoch 62/100\n",
"30/30 [==============================] - 414s 14s/step - loss: 13.2120 - val_loss: 14.0383 - lr: 1.0000e-04\n",
"Epoch 63/100\n",
"30/30 [==============================] - 472s 16s/step - loss: 12.9336 - val_loss: 13.2708 - lr: 1.0000e-04\n",
"Epoch 64/100\n",
"30/30 [==============================] - ETA: 0s - loss: 13.1477 \n",
"Epoch 64: ReduceLROnPlateau reducing learning rate to 9.999999747378752e-06.\n",
"30/30 [==============================] - 470s 16s/step - loss: 13.1477 - val_loss: 13.6016 - lr: 1.0000e-04\n",
"Epoch 65/100\n",
"30/30 [==============================] - 272s 9s/step - loss: 13.2001 - val_loss: 12.9789 - lr: 1.0000e-05\n",
"Epoch 66/100\n",
"30/30 [==============================] - 256s 9s/step - loss: 12.8699 - val_loss: 12.7537 - lr: 1.0000e-05\n",
"Epoch 67/100\n",
"30/30 [==============================] - 460s 15s/step - loss: 12.8529 - val_loss: 12.5797 - lr: 1.0000e-05\n",
"Epoch 68/100\n",
"30/30 [==============================] - 520s 17s/step - loss: 12.8881 - val_loss: 12.8464 - lr: 1.0000e-05\n",
"Epoch 69/100\n",
"30/30 [==============================] - 316s 10s/step - loss: 12.8289 - val_loss: 13.0487 - lr: 1.0000e-05\n",
"Epoch 70/100\n",
"30/30 [==============================] - ETA: 0s - loss: 12.7765\n",
"Epoch 70: ReduceLROnPlateau reducing learning rate to 9.999999747378752e-07.\n",
"30/30 [==============================] - 236s 8s/step - loss: 12.7765 - val_loss: 12.7764 - lr: 1.0000e-05\n",
"Epoch 71/100\n",
"30/30 [==============================] - 357s 12s/step - loss: 12.7222 - val_loss: 12.6030 - lr: 1.0000e-06\n",
"Epoch 72/100\n",
"30/30 [==============================] - 471s 16s/step - loss: 12.9312 - val_loss: 12.7407 - lr: 1.0000e-06\n",
"Epoch 73/100\n",
"30/30 [==============================] - ETA: 0s - loss: 12.7563 \n",
"Epoch 73: ReduceLROnPlateau reducing learning rate to 9.999999974752428e-08.\n",
"30/30 [==============================] - 474s 16s/step - loss: 12.7563 - val_loss: 12.8981 - lr: 1.0000e-06\n",
"Epoch 74/100\n",
"30/30 [==============================] - 337s 11s/step - loss: 12.6372 - val_loss: 13.0085 - lr: 1.0000e-07\n",
"Epoch 75/100\n",
"30/30 [==============================] - 238s 8s/step - loss: 12.6892 - val_loss: 12.6015 - lr: 1.0000e-07\n",
"Epoch 76/100\n",
"30/30 [==============================] - ETA: 0s - loss: 12.7828\n",
"Epoch 76: ReduceLROnPlateau reducing learning rate to 1.0000000116860975e-08.\n",
"30/30 [==============================] - 308s 10s/step - loss: 12.7828 - val_loss: 13.2228 - lr: 1.0000e-07\n",
"Epoch 77/100\n",
"30/30 [==============================] - 336s 11s/step - loss: 12.7876 - val_loss: 12.4209 - lr: 1.0000e-08\n",
"Epoch 78/100\n",
"30/30 [==============================] - 337s 11s/step - loss: 12.5455 - val_loss: 12.7752 - lr: 1.0000e-08\n",
"Epoch 79/100\n",
"30/30 [==============================] - 258s 8s/step - loss: 12.7785 - val_loss: 12.7235 - lr: 1.0000e-08\n",
"Epoch 80/100\n",
"30/30 [==============================] - ETA: 0s - loss: 12.7194\n",
"Epoch 80: ReduceLROnPlateau reducing learning rate to 9.999999939225292e-10.\n",
"30/30 [==============================] - 222s 7s/step - loss: 12.7194 - val_loss: 12.8656 - lr: 1.0000e-08\n",
"Epoch 81/100\n",
"30/30 [==============================] - 255s 9s/step - loss: 13.0056 - val_loss: 12.7722 - lr: 1.0000e-09\n",
"Epoch 82/100\n",
"30/30 [==============================] - 275s 9s/step - loss: 12.6045 - val_loss: 12.7747 - lr: 1.0000e-09\n",
"Epoch 83/100\n",
"30/30 [==============================] - ETA: 0s - loss: 12.8213 \n",
"Epoch 83: ReduceLROnPlateau reducing learning rate to 9.999999717180686e-11.\n",
"30/30 [==============================] - 371s 12s/step - loss: 12.8213 - val_loss: 12.7278 - lr: 1.0000e-09\n",
"Epoch 84/100\n",
"30/30 [==============================] - 476s 16s/step - loss: 12.8018 - val_loss: 12.7182 - lr: 1.0000e-10\n",
"Epoch 85/100\n",
"30/30 [==============================] - 395s 13s/step - loss: 12.6051 - val_loss: 12.9475 - lr: 1.0000e-10\n",
"Epoch 86/100\n",
"30/30 [==============================] - ETA: 0s - loss: 12.7930\n",
"Epoch 86: ReduceLROnPlateau reducing learning rate to 9.99999943962493e-12.\n",
"30/30 [==============================] - 221s 7s/step - loss: 12.7930 - val_loss: 12.5336 - lr: 1.0000e-10\n",
"Epoch 87/100\n",
"30/30 [==============================] - 239s 8s/step - loss: 12.6282 - val_loss: 12.7567 - lr: 1.0000e-11\n",
"Epoch 87: early stopping\n"
]
}
],
"source": [
"\"\"\"\n",
"Self-contained Python script to train YOLOv3 on your own dataset\n",
"\"\"\"\n",
"\n",
"import numpy as np\n",
"import keras.backend as K\n",
"from keras.layers import Input, Lambda\n",
"from keras.models import Model\n",
"from keras.optimizers import Adam\n",
"from keras.callbacks import TensorBoard, ModelCheckpoint, ReduceLROnPlateau, EarlyStopping\n",
"\n",
"from yolo3.model import preprocess_true_boxes, yolo_body, tiny_yolo_body, yolo_loss\n",
"from yolo3.utils import get_random_data\n",
"\n",
"\n",
"def _main():\n",
" annotation_path = './train/_annotations.txt' # path to Roboflow data annotations\n",
" log_dir = './logs/000/' # where we're storing our logs\n",
" classes_path = './train/_classes.txt' # path to Roboflow class names\n",
" anchors_path = './model_data/yolo_anchors.txt'\n",
" class_names = get_classes(classes_path)\n",
" print(\"-------------------CLASS NAMES-------------------\")\n",
" print(class_names)\n",
" print(\"-------------------CLASS NAMES-------------------\")\n",
" num_classes = len(class_names)\n",
" anchors = get_anchors(anchors_path)\n",
"\n",
" input_shape = (256,256) # multiple of 32, hw default = (416,416)\n",
"\n",
" is_tiny_version = len(anchors)==6 # default setting\n",
" if is_tiny_version:\n",
" model = create_tiny_model(input_shape, anchors, num_classes,\n",
" freeze_body=2, weights_path='./model_data/tiny_yolo_weights.h5')\n",
" else:\n",
" model = create_model(input_shape, anchors, num_classes,\n",
" freeze_body=2, weights_path='./model_data/yolo.h5') # make sure you know what you freeze\n",
"\n",
" logging = TensorBoard(log_dir=log_dir)\n",
" checkpoint = ModelCheckpoint(log_dir + 'ep{epoch:03d}-loss{loss:.3f}-val_loss{val_loss:.3f}.h5',\n",
" monitor='val_loss', save_weights_only=True, save_best_only=True, period=3)\n",
" reduce_lr = ReduceLROnPlateau(monitor='val_loss', factor=0.1, patience=3, verbose=1)\n",
" early_stopping = EarlyStopping(monitor='val_loss', min_delta=0, patience=10, verbose=1)\n",
"\n",
" val_split = 0.2 # set the size of the validation set\n",
" with open(annotation_path) as f:\n",
" lines = f.readlines()\n",
" np.random.seed(10101)\n",
" np.random.shuffle(lines)\n",
" np.random.seed(None)\n",
" num_val = int(len(lines)*val_split)\n",
" num_train = len(lines) - num_val\n",
"\n",
" # Train with frozen layers first, to get a stable loss.\n",
" # Adjust num epochs to your dataset. This step is enough to obtain a not bad model.\n",
" if True:\n",
" model.compile(optimizer=Adam(lr=1e-3), loss={\n",
" # use custom yolo_loss Lambda layer.\n",
" 'yolo_loss': lambda y_true, y_pred: y_pred})\n",
"\n",
" batch_size = 16\n",
" print('Train on {} samples, val on {} samples, with batch size {}.'.format(num_train, num_val, batch_size))\n",
" model.fit_generator(data_generator_wrapper(lines[:num_train], batch_size, input_shape, anchors, num_classes),\n",
" steps_per_epoch=max(1, num_train//batch_size),\n",
" validation_data=data_generator_wrapper(lines[num_train:], batch_size, input_shape, anchors, num_classes),\n",
" validation_steps=max(1, num_val//batch_size),\n",
" epochs=500,\n",
" initial_epoch=0,\n",
" callbacks=[logging, checkpoint])\n",
" model.save_weights(log_dir + 'trained_weights_stage_1.h5')\n",
"\n",
" # Unfreeze and continue training, to fine-tune.\n",
" # Train longer if the result is not good.\n",
" if True:\n",
" for i in range(len(model.layers)):\n",
" model.layers[i].trainable = True\n",
" model.compile(optimizer=Adam(lr=1e-4), loss={'yolo_loss': lambda y_true, y_pred: y_pred}) # recompile to apply the change\n",
" print('Unfreeze all of the layers.')\n",
"\n",
" batch_size = 16 # note that more GPU memory is required after unfreezing the body\n",
" print('Train on {} samples, val on {} samples, with batch size {}.'.format(num_train, num_val, batch_size))\n",
" model.fit_generator(data_generator_wrapper(lines[:num_train], batch_size, input_shape, anchors, num_classes),\n",
" steps_per_epoch=max(1, num_train//batch_size),\n",
" validation_data=data_generator_wrapper(lines[num_train:], batch_size, input_shape, anchors, num_classes),\n",
" validation_steps=max(1, num_val//batch_size),\n",
" epochs=100,\n",
" initial_epoch=50,\n",
" callbacks=[logging, checkpoint, reduce_lr, early_stopping])\n",
" model.save_weights(log_dir + 'trained_weights_final.h5')\n",
"\n",
" # Further training if needed.\n",
"\n",
"\n",
"def get_classes(classes_path):\n",
" '''loads the classes'''\n",
" with open(classes_path) as f:\n",
" class_names = f.readlines()\n",
" class_names = [c.strip() for c in class_names]\n",
" return class_names\n",
"\n",
"def get_anchors(anchors_path):\n",
" '''loads the anchors from a file'''\n",
" with open(anchors_path) as f:\n",
" anchors = f.readline()\n",
" anchors = [float(x) for x in anchors.split(',')]\n",
" return np.array(anchors).reshape(-1, 2)\n",
"\n",
"\n",
"def create_model(input_shape, anchors, num_classes, load_pretrained=True, freeze_body=2,\n",
" weights_path='./model_data/yolo.h5'):\n",
" '''create the training model'''\n",
" K.clear_session() # get a new session\n",
" image_input = Input(shape=(None, None, 3))\n",
" h, w = input_shape\n",
" num_anchors = len(anchors)\n",
"\n",
" y_true = [Input(shape=(h//{0:32, 1:16, 2:8}[l], w//{0:32, 1:16, 2:8}[l], \\\n",
" num_anchors//3, num_classes+5)) for l in range(3)]\n",
"\n",
" model_body = yolo_body(image_input, num_anchors//3, num_classes)\n",
" print('Create YOLOv3 model with {} anchors and {} classes.'.format(num_anchors, num_classes))\n",
"\n",
" if load_pretrained:\n",
" model_body.load_weights(weights_path, by_name=True, skip_mismatch=True)\n",
" print('Load weights {}.'.format(weights_path))\n",
" if freeze_body in [1, 2]:\n",
" # Freeze darknet53 body or freeze all but 3 output layers.\n",
" num = (185, len(model_body.layers)-3)[freeze_body-1]\n",
" for i in range(num): model_body.layers[i].trainable = False\n",
" print('Freeze the first {} layers of total {} layers.'.format(num, len(model_body.layers)))\n",
"\n",
" model_loss = Lambda(yolo_loss, output_shape=(1,), name='yolo_loss',\n",
" arguments={'anchors': anchors, 'num_classes': num_classes, 'ignore_thresh': 0.5})(\n",
" [*model_body.output, *y_true])\n",
" model = Model([model_body.input, *y_true], model_loss)\n",
"\n",
" return model\n",
"\n",
"def create_tiny_model(input_shape, anchors, num_classes, load_pretrained=True, freeze_body=2,\n",
" weights_path='./model_data/tiny_yolo_weights.h5'):\n",
" '''create the training model, for Tiny YOLOv3'''\n",
" K.clear_session() # get a new session\n",
" image_input = Input(shape=(None, None, 3))\n",
" h, w = input_shape\n",
" num_anchors = len(anchors)\n",
"\n",
" y_true = [Input(shape=(h//{0:32, 1:16}[l], w//{0:32, 1:16}[l], \\\n",
" num_anchors//2, num_classes+5)) for l in range(2)]\n",
"\n",
" model_body = tiny_yolo_body(image_input, num_anchors//2, num_classes)\n",
" print('Create Tiny YOLOv3 model with {} anchors and {} classes.'.format(num_anchors, num_classes))\n",
"\n",
" if load_pretrained:\n",
" model_body.load_weights(weights_path, by_name=True, skip_mismatch=True)\n",
" print('Load weights {}.'.format(weights_path))\n",
" if freeze_body in [1, 2]:\n",
" # Freeze the darknet body or freeze all but 2 output layers.\n",
" num = (20, len(model_body.layers)-2)[freeze_body-1]\n",
" for i in range(num): model_body.layers[i].trainable = False\n",
" print('Freeze the first {} layers of total {} layers.'.format(num, len(model_body.layers)))\n",
"\n",
" model_loss = Lambda(yolo_loss, output_shape=(1,), name='yolo_loss',\n",
" arguments={'anchors': anchors, 'num_classes': num_classes, 'ignore_thresh': 0.7})(\n",
" [*model_body.output, *y_true])\n",
" model = Model([model_body.input, *y_true], model_loss)\n",
"\n",
" return model\n",
"\n",
"def data_generator(annotation_lines, batch_size, input_shape, anchors, num_classes):\n",
" '''data generator for fit_generator'''\n",
" n = len(annotation_lines)\n",
" i = 0\n",
" while True:\n",
" image_data = []\n",
" box_data = []\n",
" for b in range(batch_size):\n",
" if i==0:\n",
" np.random.shuffle(annotation_lines)\n",
" image, box = get_random_data(annotation_lines[i], input_shape, random=True)\n",
" image_data.append(image)\n",
" box_data.append(box)\n",
" i = (i+1) % n\n",
" image_data = np.array(image_data)\n",
" box_data = np.array(box_data)\n",
" y_true = preprocess_true_boxes(box_data, input_shape, anchors, num_classes)\n",
" yield [image_data, *y_true], np.zeros(batch_size)\n",
"\n",
"def data_generator_wrapper(annotation_lines, batch_size, input_shape, anchors, num_classes):\n",
" n = len(annotation_lines)\n",
" if n==0 or batch_size<=0: return None\n",
" return data_generator(annotation_lines, batch_size, input_shape, anchors, num_classes)\n",
"\n",
"if __name__ == '__main__':\n",
" _main()"
]
},
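  {
   "cell_type": "markdown",
   "id": "inference-note",
   "metadata": {},
   "source": [
    "With training done, the weights saved to `./logs/000/trained_weights_final.h5` can be loaded back for detection. The cell below is a minimal sketch, assuming the cloned repo's `yolo.py` helper (its `YOLO` class wraps model loading and box decoding); the image path and score threshold are illustrative placeholders, not values used elsewhere in this notebook."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "inference-sketch",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Minimal inference sketch, assuming the repo's yolo.py helper class.\n",
    "# The image path below is a hypothetical placeholder.\n",
    "from PIL import Image\n",
    "from yolo import YOLO\n",
    "\n",
    "detector = YOLO(model_path='./logs/000/trained_weights_final.h5',\n",
    "                anchors_path='./model_data/yolo_anchors.txt',\n",
    "                classes_path='./train/_classes.txt',\n",
    "                score=0.3)  # confidence threshold (illustrative)\n",
    "\n",
    "image = Image.open('./test/some_plate.jpg')\n",
    "result = detector.detect_image(image)  # returns the image with boxes drawn\n",
    "result.show()"
   ]
  },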
{
"cell_type": "markdown",
"id": "214e7684",
"metadata": {},
"source": [
"# Prepare image to ocr"
]
},
{
"cell_type": "code",
"execution_count": 4,
"id": "abe450c6",
"metadata": {},
"outputs": [],
"source": [
"import cv2 as cv\n",
"from matplotlib import pyplot as plt"
]
},
{
"cell_type": "code",
"execution_count": 218,
"id": "f650aacf",
"metadata": {},
"outputs": [],
"source": [
"def grayscale(image):\n",
" return cv.cvtColor(image, cv.COLOR_BGR2GRAY)\n",
"\n",
"def noise_removal(image):\n",
" import numpy as np\n",
" kernel = np.ones((1, 1), np.uint8)\n",
" image = cv.dilate(image, kernel, iterations=1)\n",
" kernel = np.ones((1, 1), np.uint8)\n",
" image = cv.erode(image, kernel, iterations=1)\n",
" image = cv.morphologyEx(image, cv.MORPH_CLOSE, kernel)\n",
" image = cv.medianBlur(image, 3)\n",
" return (image)\n",
"\n",
"def thin_font(image):\n",
" import numpy as np\n",
" image = cv.bitwise_not(image)\n",
" kernel = np.ones((2,2),np.uint8)\n",
" image = cv.erode(image, kernel, iterations=1)\n",
" image = cv.bitwise_not(image)\n",
" return (image)\n",
"\n",
"def thick_font(image):\n",
" import numpy as np\n",
" image = cv.bitwise_not(image)\n",
" kernel = np.ones((2,2),np.uint8)\n",
" image = cv.dilate(image, kernel, iterations=1)\n",
" image = cv.bitwise_not(image)\n",
" return (image)\n",
"\n",
"def remove_borders(image):\n",
" contours, heiarchy = cv.findContours(image, cv.RETR_EXTERNAL, cv.CHAIN_APPROX_SIMPLE)\n",
" cntsSorted = sorted(contours, key=lambda x:cv.contourArea(x))\n",
" cnt = cntsSorted[-1]\n",
" x, y, w, h = cv.boundingRect(cnt)\n",
" crop = image[y:y+h, x:x+w]\n",
" return (crop)"
]
},
{
"cell_type": "code",
"execution_count": 231,
"id": "9df02404",
"metadata": {},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAApcAAAEzCAYAAABkCSj0AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjYuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8o6BhiAAAACXBIWXMAAA9hAAAPYQGoP6dpAAEAAElEQVR4nOx9d5hU1f3+e6fP7tI7SG8qoIig6FesRLArKGhsqEFEDQFLokZNbCFqjIoNFYMaDWJssaOigooFQUWkCahILyJsmZ16f3/s7z37uWfu7M4sOzszy32fh4edmVvOPfecz3k/9RimaZpw4MCBAwcOHDhw4KAe4Mp1Axw4cODAgQMHDhw0Hjjk0oEDBw4cOHDgwEG9wSGXDhw4cODAgQMHDuoNDrl04MCBAwcOHDhwUG9wyKUDBw4cOHDgwIGDeoNDLh04cODAgQMHDhzUGxxy6cCBAwcOHDhw4KDe4JBLBw4cOHDgwIEDB/UGh1w6cODAgQMHDhw4qDd40j3QMIxstsPBXg7DMGAYBhKJhPoMANxAyjAM6JtJ2X0HAB5P1bCOx+PweDyIRqOW391uN+LxeL0/gwMHDhw42Hvgcrng8XgQiUQAWNctfQ0DAK/Xi1gspr5zuVyWNY9roMvlgmmatR5nGIY6Vv8s7yuvy7/5+yWXXIILLrgApmnC5XLB6/UiGo0iFovB4/FgwIABcLvdFg5YUlJSa9+kTS4dOGhI6MTRjkSmIpuchPoEs7uuAwcOHDhwkClItmioIOnj+kKyxu8kAeTvqdY43aiir1mmaSojifydf8vPHo8H1157LeLxOFwuF9xuNwAgFovB5XLhyCOPxMCBAy3tBaDIaJ37J929xR3LpYNsojbLpd3xpmmiV69eaNq0KcLhMJYvX67Ol8fxOvo9HDhw4MCBg7qARIzk0u12J5E7Ejlp9HC73RbLYSpIzqWTTcMw4PF4EI/HkUgk0LFjR7Rv3972Oj6fD3PmzFHE1u12w+VyIRKJKC+fXHdJKtlOaTUl0rFcOuTSQc5RkzWRv1GDkhqZ1+vFSy+9hKOPPhpr167F4MGD1YTgpLO7rmO9dODAgQMHewJpqXS5XJZQK5JKadV0u92IxWJJ1+DxXK/043gPkkJpMAkGgwiHw7jhhhtwzTXXwDAMRKNR1TYJWjoleYzFYpY2sL0kyC6XC9FoFB6PJ2O3uEMuHeQUcnK6XC41qagFUtvSh6nP58OqVavQpEkTdS4ARKNRxONx/Otf/8Kf//xni8blWC4dOHDgwMGeQl+TarJGejweRdJkLCZQs2eOv0tiOn/+fPTr10+tjbFYDF6vFz6fDx6PR62f0p3Otnk8HmV4cbvdijTK43hP3pcxojoccumgIJDKBV6Ta9zlcmH9+vVo0qSJ5XtOos2bN2P+/Pm4+OKLVYByTclBDhw4cODAQbrQ3eLSZe31ehGJRCwet9q8c4ZhoF+/fpg2bZpa+/gbLYz9+/dHMBi0ED+SSrbD6/UmEV0mttJiaZom4vG4ZS2km53X5HXlukk4CT0OCgJyEnAy1uTSlpNRIpFIKEtnhw4dcPTRR+Pqq6/Gww8/rH5LJ9bFgQMHDhw4qAl2STa6d0ySSq/Xi8mTJ1s8dQAUyQOAzp07Y/DgwSmrpUgyGIvFlPWSx8iYSa53dNnL+/F4eW0ew2ukIpbpwrFcOsg5evbsiZKSEmX+B6Cy4MLhMFasWKGOlYN/8+bNSosDqsmlz+eDy+VCKBTC119/jZNPPhmRSERNIAcOHDhw4KC+IMkf16P+/ftbCFwgEMDbb7+tygVxLZMWTv1akpgyu5sGEr38kF2WN69Dj148HreQSEli4/E4/H6/isuUCUA6HLe4g7yHx+PByy+/jKOOOsrWLb5mzRoMGTJETSygWpvbvHkzioqKksz6QJVWt2rVKvzf//2fbRC1Y7104MCBAwd1gdfrVX/rcfyGYSAYDOKHH36A1+u1cCcaP1hHMhAI2Bo86Jpmkk1lZSX8fj8Mw1AWy2AwCACqjjNd3ry21+tVpJTt4xqq13qW+Q5M3onH47bJPIBDLh3kOQzDwPfff4927dqpwa3X1UokEgiFQujduzdKS0st7oQffvgBTZs2TcqMSyQSeOKJJ3DddddZits6MZcOHDhwUP/Qk1RIXvQi3+mca/e7XhBc3kN+ZyfbdauiLLtTU1KOTDaVSS/xeBzz5s3D/vvvX2N/+Hy+JN4kw7nsQrvsjrM7T/ZbbZ9TfWd3T7vz7M5xYi4d5DVM08TZZ5+Ne+65BwcddJAlXgSodnMXFxcDsMaGsM6XtGbG43GceeaZKCsrw5YtW5RGpwsRh1g6cODAQf1BEhOfz6dcvTIUyW7nGHqb+J2+E428tn4+s55JCPWKIzLL2o7g6oRVWvL69++P+++/P6kUHuMQ+/bti0AgkHE/SaJWE9HTf6vpvNo+13avPT0vFRxy6SCnWLx4MR5//HH06NEDrVu3xoUXXqi0JQoLCh8ZU6LHgezcuRMzZszAggULEA6Ha8zOsysK68CBAwcO9hy6m1eWxZEJmfI7wH7HGv14u/PlPeQOOHaWR4lx48ahVatWAGCJR3S73ejatSsOOeQQSzuAPd+1Zm+C4xZ3kFOQ6LlcLvTp0wdffPEFgGqt0+Vyoby8HN26dUNFRQVMs6p4ev/+/fH6668r8/zq1atx8MEHJ1k+eS1dW7ar3eXAgQMHDjJHKne0Hq5Ulx1rpDFAbqeYysLJ+xYXF6N79+5JmdM8ZubMmejRo4fygtEqqRcq16+dqvbj3gTHLe4g7yGJIGt5UdAwEFnGypimic6dO+P999+3uDqY4eZ2u5MCrHkfXdt14MCBAwd7DhJEXaGn1U/unMbf9fqM/F6/npTXclMNJrjIzGlpZRw0aBBef/11lRwjrxEMBlFeXq7aQeIps7HdbjfC4bBKnInFYqq9XGccpIZDLh3kFNQAXS4XfD6f+puB06tWrcKRRx6JUChkETI+nw8VFRXwer2YOXMmrrvuOotF0uVyWbRRea5TjsiBAwcO6g/SkkdiyL+Z4UzIHWsA+8QTebxdfcYmTZpgxYoVymjA9YP7ZUsZz/I6EmVlZSrjW26JKOshk3hWVlbC7XZbtl90iGXtcMilg5xCxq+wFqUMrjYMA+Xl5erY0047DVOmTFFChMcyKxyA0lLj8bgSNFLDdeDAgQMH9Qt9xxo9OUf3NOkuZ0LGSh5wwAH4+9//jng8jkAgoLb3LS4uVrWRCdM0EQgE1H2A6tAo6QGTa44ey8l28xyuMfSsBQIBhEIhi+vcgT0ccukgp5CC5ddff8W9996LK6+8Eq+++irWrVuHHTt2WOJ5Nm3ahK+++goDBgzAY489hnA4jM8//9xyLTsSaZeN6MCBAwcO6gd6co1dwo38zufz4YorrrC4zqV72+12o2fPnjjssMNUfUg9dtKuVI5MAtV3p9HXAUk25bX0OE22SZJiBzXDSehxkDeg9jh37lxce+21+PLLLwFYk34Yczlr1iyceOKJ2L17t6W+mV7PErAKPSkoHDhw4
MBB/ULGt7dv3x6tWrWyrVVZXFyMd999V8XKyx1rTNNU7nNpDKCbPRqNKouitDomEglL4XCfz4dwOAyPx2ObRCTjLGVdTl6LJFcnxnu75dIpou6gIKCXitBJIb+Tn2WRXn0Iy+vZ/e7AgQMHDuoHMrlFl7VTp07FxIkTU5bwSSQS8Pv9iEQilu0HWa+SrmnWq5QubP7G+MxAIICKigpFGBnX6fP5VLKorIMpLZZ6JjsAC2HNdfkhOwut3TEMSagt4Sid69UEh1w6aFSw035r+uzAgQMHuYCuMJME6XGJuqfFTp7J2o4AFNHifXg9fUs/2RbAPlxIFhq3u69+vL7zTnFxMVauXJmSfPl8PmU1tOMQep1Lu+Qe/Tv9eLv+S3XNdK5bUxuzDd3IwvvG43FFktkePXFp0aJFOOGEEwAA77zzDgYOHAjAWpJPXs9uz/B04ZBLB40GUnDss88+ePLJJxGNRnH99dfj66+/zm3jHDhw4EADrUe0jEkZxlhAoPadaAidDHm9XktcoLyetLrp0Mk
"text/plain": [
"<Figure size 643.75x287.5 with 1 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"image_file = './img/img00.png'\n",
"img = cv.imread(image_file)\n",
"gray_image = grayscale(img)\n",
"thresh, im_bw = cv.threshold(gray_image, 100, 150, cv.THRESH_BINARY)\n",
"no_noise = noise_removal(im_bw)\n",
"# eroded_image = thin_font(no_noise)\n",
"# dilated_image = thick_font(eroded_image)\n",
"no_borders = remove_borders(no_noise)\n",
"cv.imwrite(\"temp/no_borders.jpg\", no_borders)\n",
"display('temp/no_borders.jpg')"
]
},
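  {
   "cell_type": "markdown",
   "id": "ocr-note",
   "metadata": {},
   "source": [
    "The cleaned crop in `temp/no_borders.jpg` is what goes to the OCR engine. The sketch below assumes Tesseract and the `pytesseract` wrapper are installed (neither is used elsewhere in this notebook); `--psm 7` tells Tesseract to treat the image as a single line of text, which fits a license plate."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "ocr-sketch",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Sketch only: requires the Tesseract binary plus the pytesseract package,\n",
    "# which this notebook does not install elsewhere.\n",
    "import pytesseract\n",
    "\n",
    "plate = cv.imread('temp/no_borders.jpg')\n",
    "# psm 7 = treat the image as a single text line.\n",
    "text = pytesseract.image_to_string(plate, config='--psm 7')\n",
    "print(text.strip())"
   ]
  },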
{
"cell_type": "code",
"execution_count": 169,
"id": "68bb5c6b",
"metadata": {},
"outputs": [],
"source": [
"def display(im_path):\n",
" dpi = 80\n",
" im_data = plt.imread(im_path)\n",
"\n",
" height, width = im_data.shape[:2]\n",
" \n",
" # What size does the figure need to be in inches to fit the image?\n",
" figsize = width / float(dpi), height / float(dpi)\n",
"\n",
" # Create a figure of the right size with one axes that takes up the full figure\n",
" fig = plt.figure(figsize=figsize)\n",
" ax = fig.add_axes([0, 0, 1, 1])\n",
"\n",
" # Hide spines, ticks, etc.\n",
" ax.axis('off')\n",
"\n",
" # Display the image.\n",
" ax.imshow(im_data, cmap='gray')\n",
"\n",
" plt.show()"
]
},
{
"cell_type": "code",
"execution_count": 170,
"id": "c57a2af5",
"metadata": {},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAArUAAAGLCAYAAADZOGoBAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjYuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8o6BhiAAAACXBIWXMAAA9hAAAPYQGoP6dpAAEAAElEQVR4nOz9aYxtS3YeiH0rIvbe5+Rw5/fum4cayRpZA6uKgyiKmqDJ/3ryj25Ihq02YPQP2zBgtH/YMNAG/MOAYcCNdsvdULfQ3Wq00fAgWTMpUU1SRYpFscgaXs1Vb7xvuPfmzcxzzt4RsfxjrRURe+fJV7ckGwaB3FX5Mu8Z9hDDWt/61kTMzLg6ro6r4+q4Oq6Oq+PquDqujj/Ch/v/9w1cHVfH1XF1XB1Xx9VxdVwdV8e/6nEFaq+Oq+PquDqujqvj6rg6ro4/8scVqL06ro6r4+q4Oq6Oq+PquDr+yB9XoPbquDqujqvj6rg6ro6r4+r4I39cgdqr4+q4Oq6Oq+PquDqujqvjj/xxBWqvjqvj6rg6ro6r4+q4Oq6OP/LHFai9Oq6Oq+PquDqujqvj6rg6/sgfV6D26rg6ro6r4+q4Oq6Oq+Pq+CN/XIHaq+PquDqujqvj6rg6ro6r44/8ER73g7df+gxyzuXfzIyzszNYQzJmlp+cgZQABgACSH5cCPChg3cBzgmWJiIQkf6bwIzy7xACuq6Dcw5EhGXjM+/luyACE8rniKicu71XAICT13LOYGbknJFyRtd15bsEoOs6hBD0swkpRcQY0YeufMbtuSfnHLx30IcvBxEhpVSvQYQQAnLOszF1zoGZEWNsntMDjuCDjJv92Dg5IhDLM6aUyj05yDW897NxgT4fkUyPzZuNC+l34RwIADjN7sWuDQZY7z9znI3/cuyZGUSEHCc4p3MOhg9ATx45R8Q4IaUI5gwGYFdt538cR4zTJD9xwjRFuX5ipMwAAV03wHsPZiBnubYnB+QMwnyt6h2W1+2+mTOYs44jynpt58se0ca9nevlT3vu9vr2kxgIIZTnzDmX+W3XyzAMe+/Fzmm/t7tNnafF3DEDOWWklC7sLfvdPo+tU9uL9to4jgADKadyLlu3dm8pJUzThL7vMQwDDg4OMAwDNpsNpmlCjHG21u0ebH+2P+0Y1nlimzlgse4uO9oxXV6jXmv/9dv9vryWzdFy/bfPZT/tvLWfb/eh7Rm7zyonMTtPXSthdi2TNzZv9h1HASHIj3MO0zRhtxnLvbSyub1W+z6Rm80TIPLQkROZQhnkAefowvgt5Y3Js3Zsqy7R79l4ydQgIQEJSKzyTt82OT6fOxtXhnE4Y4x67xkpx/p8RbbJd+OUyzMmbufMwTsP7ztQ8Hh0doopTnKfdq8xAkwAOcB7IAT0fV/Gvu97rFYrDMOAYVih7wf0fQ/XyTz64ND1PYZ+hWHVlfmq60P2qPe+nJOKuqXyHAAwbrdgla0pyX6NMSKnVGQBM2O73SKnBGQun0splXWWUwIDGFOcyR/nHPq+h/ce3nlQM78AQJjLlrqeZKi24xYAl3/bum3loR3tmrNrX9w/cx3f6qBWrjrnyvMZlkgLmTpNE7quE/lJDsEFnVInz6vj73Xc231pa1LWOMGkVSuDljq9XbvTNJXxWD6jvW7jKD8O3odLP59TnddpmjBNU6PP5Lu2zQwb2Pi092HYzOa8lS/tQUSii4lVRM/H/jLMdlmTWyLCf/M3/vre99rjsUHtZrMFc1agADgHpMRlEznnBbBgrtDlxj2c93Dky+cA0kXcKgCnwNAjBI8QukaQ27VFGHVd0PPoAnYXB8eUc1n4RGVwyyJ3roIFrouuKgZUcKhjTagTZtdrf3KOF15bTqZ91xZNAX45F0BdFGXwFxaBXZ+AAmplnBg6uvsXjgN88BAwB13BAgxN+Ij4z/AgMAnkk3PqZ3JCzoycMmKa7CSzsbDD7st7Bd9QoChTgcwJmRMYGQImMgAHTwRyBCIHOEIaJ+SckHMCpwSOCTAByQwHBlAFqnNUDA/PQCIUJdneG4HLEpwrXa5jrOdp12L7bPuMrvZ8+8alfS3GKlhbQNIq/RbALIWhXaus98lduI4djgika8/WYPt9Zp4ZQsvrydg6NRwYmXP57jiOF8ahFYBmLLbn9N6X67Zj5b2fAd5WMc2eqwBbXHh/eewzNJbgTcZwPletkmr3WTsf9rklQFuO/777ac/XAlabn8uA/XJu3m8Nlmdr1pK95pxX0UhwIGSSPZqZC5CUcWawA+BQZS8Ddf+zKsMETorvTJ4o4G0Vv81/u+7be8s5i8xIDDLDpTgXCQ5ezhlEl6goKO/rzTSgLGJSA42ZMEVGnCJSzvDBw3kHcvLdnBhnmy1yMpmkY8GtASV7KXEWyUgB1AX0Q4e+rPkew2pAPwy4cf1GAUihq4aFM90EAhEjpwjndJ85gieGrwMKygzOSfaTIzhieJWfyLqXWQgmzhknDx4iKZDPakjGmJBTQsoZnBJy0a88GzNO8zVZ5hlqyEAJK/0bOSPz3NBfntOOsv9jAijPZEyM8eJe33O0cqHe44+XAb7IQKdEi1MdJE9HjtD5DgfrAX3XCW5Rw40cweu9F9zjCJx5to8LWLb16uZG4z453gLby/Z0Ox5yHgPs+8emzh8XssEMmypXMlhJRaaL52iva88WdU0tiQI7TG+1oNb0Szs+lxGX/7LHY4Na7zsI6BBwQA4gEmDpnUwuOZl051z5mxzB+U7+LpsX4IU1JYPgy0J33oOcg0oZuS4SMgPBCyizSfDls7aBZFH2oZONhGZiiOBDe05Gp8KlHWjOGeAMoip0WvBIVIETEVWhSwSicAEI2NEqc/m7glrZnBnOVavSew9Sa6i1dhw5OG9MrQI7cgVwEenGa4wIQOeNsygoBogZjAzOjJQmBX4k8pMcsglztaQdeTAyklr4MY7IBSzKd+VeddPZvXhZJx4eLjiQ83CqAAEFoETIWcaAQCB28E7mkoJHz14WTsqIcRJFrGsAaqB4R3qfVBQv1BjKgtoFMNp8wBQil2e0d5wTQG08D6tib4WVGR77GKl2zpdrYLmJW1C7DxQtmddWKNo17bet5SUwLd8BIQQFBOQArqyKfb+9x/Zc7fXba5r137I3Bmjtx9b8vjG57LkuG8+Z4XTJeZbX2WdYtJ+pz7owBPYwie1vO0fLIs2vK54o+Q5goHlusM+ZJaCC5Pa6l4/bxXtb3pN4YhJyJsQkaz/nDHIeMK+akQ12Hl39CtuQOQuoAgkIIFeunSFgC0gFUxoRIJ/Va6gdmVOWv+WCAm4gsoyzyR25R+SWea3XpQI0hIkqkkiNZhEXjBgTYkwYpwnbzQ4ps3jhcgYcoacOTARiQmbGNCVkeLDT66ie86ErDJ1zoqf6vgd5B+eFve0HAbRmyPVqzK0PVgUEma4z2WOkTU4RfQiwVU1gIAvolNmQscmcEZERJ5mrzAzmqMOnjJwC2N1mKwZDysrOzo0HAb/VuIfqRZhRY2tavWjCyBp1ol/JLMaMyQw7vy76vbJR5
9T55f5o5fFS7ogntJ4Ps/O2otP05xxoyXrxSoKRU6MqM6Y4InQdQi/yauh6Bbeh6FEik422bmXtZk6IkxhAjuSa7Mxrq3tKn2MfW/njjn3kyBzYtr+hY5SbPWOe6VSIiHbcRJ5H1XlV3i2Z8fZ7McYLTPPyHk1ntkerI5b3sDxmcv4xQe9jg9r16gBENkAAiMFZ3CQF1DYKqf1Nys7Yws05Y5oSOEaxhAvgEhAsM6PLgZq9BdlsznkYY1uZ3VCVAhjeeQzDIMo2RURlg9xyMem1zKqogwkRcqhb15GJmno3jhZMKAEg3wi+uVJqx0Y+X8G0MHRzNpYEPc9BiRoOXoGhuc+9UwPBHosInfOzhUHElVHTH0ZGisK6sjLx5IDsfN0ARch4AOpe1u+0LiVAQQBJ+AI5EiveOQE37OGoQ9eEMZAjMByYVWDlrGPPgAJ3Hzyc60WwMOReOUE+xaI89Vo2V143eSbAZ1IlruAc+8CQMhAqsM2QKGBWf+xZl3PZCoClELowp83hUgWU+8Boy+w
"text/plain": [
"<Figure size 673.75x375 with 1 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"display(image_file)"
]
},
{
"cell_type": "code",
"execution_count": 171,
"id": "d52ff256",
"metadata": {},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAArUAAAGLCAYAAADZOGoBAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjYuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8o6BhiAAAACXBIWXMAAA9hAAAPYQGoP6dpAAEAAElEQVR4nOz9WawuSZIeiH3m7hHx/2e5ee/NPbO2rL2K7H2pZldvQ5DDRcJohKE0EjkiMRAxxAygF0mApHnUAx8EQuCTpNkIjKSHIQYDaCQOG+R0c+lmL1XVVV3dXUtXd1XlnpV5M+96zvn/PyLc3fRgZu4ecU5W3RYfhAZOJE6ec/8lFl/MPjP7zIyYmXF9XB/Xx/VxfVwf18f1cX1cH3+KD/f/7xu4Pq6P6+P6uD6uj+vj+rg+ro9/3eMa1F4f18f1cX1cH9fH9XF9XB9/6o9rUHt9XB/Xx/VxfVwf18f1cX38qT+uQe31cX1cH9fH9XF9XB/Xx/Xxp/64BrXXx/VxfVwf18f1cX1cH9fHn/rjGtReH9fH9XF9XB/Xx/VxfVwff+qPa1B7fVwf18f1cX1cH9fH9XF9/Kk/rkHt9XF9XB/Xx/VxfVwf18f18af+uAa118f1cX1cH9fH9XF9XB/Xx5/6IzzuB9979ctwzoGIymsnR8fl30QE5xxAJK/p32AGckaMETklpJTAnEGOkVJEzhnMXM7DzMg5IaWIeZ4RY5TzQk4FyOdiTmDm8t0ca2M0ZgZnBpVPA+Dmbzg45+C9h3MO4zjKewzknJFSwjhP+r4HyIEcIQOIMZbnzeDm3hg5Z+SYQERgZoAbq6HzyDmXz6YYAQa89+XZ7VntNSICRxmzlJKcP2dkzkiYkRPKGJTv+VDupXwvydg458DMGMexPEO5V0cgnV/7PkAyh/pailyewb7vvQfgkMHlXmSiZJwcyRjOKaLrBuSUkRnIIOScMc0J3nfoug6+65BShu+CjG/OSDEBcBh8gPceXRfQdwF938M5h957nScCQ9aZrDGGc/LMCby4Z9axsLmoR10xMlZ1TcrSI1kP7fccwetrhDof7Vy3v23Mm22EPgTM84yUkiyVrkPOsZyLmTHPM8ZxRGrmUsYe5d82ZkdHR2VfASj3Ms8zACCEgBDCYi0755BzLue016ZpgnMO8zyX86SUEHxXPhNCQM4Zfd8jc4ZzhBBEtAzDgGmacDgcsNvtsNvtcHp6KvPtfflce01bg/K3A4HKs8nBujRtEDNySvIyEcgRLrVJVNmRU67rtBm39sfuxd5Hea3e2+LUej7Za6l8v/2czXm5hnfIulecnj/nDALgyF1an8wZOac6396BnINzJA/X7FmCrA1mXuwHH2zPzYhxRuaEruuw2Qz6vCKT9Y5he8HGIhMW6wqw/e10XcgYEHkwE7KOi/ceLpOcsegJgJiXc6q/yzpwjKzfz5A96D2p3OnKuJu8jDHqGPFiHbc6C5D175wrcy+/O3m2nEGc4ZxD13VwjkBu5f3hDM6myxL6vscwDLKWSfSdfEnnMGfwPBd9Ns8zpmnCPM/Y70eM41j+PcUZYMI8y+fkM3tg5uW4OweAkGJCihExRpU7UJld930/bOGbZw59BxdU3ntZK957+BDKnvTeA/pecB7e1TXZ6fumTzgmjDEhqo4C8lK3YLm3bS855xFjxPHx8UJmAsA0zmXf23Wvmk+RmVz2Vjs+pp+cc+B27Wa5pxhzeV57jvZatk7mbLpXcElKCYlR9KvtsfbeTK6ZXDc8ABgkyhUzZCDnWK5r57F7l+9UPd/+u/1bZJD+jXhp3ZvOED3aoet8kd3rfW2HJwfnASJf8FGMsazX8mxXzI/dT0oJxFU/2nfsmu+nI9vD3vt3/sbfvHSP6+OxQe12u5WF0n5ZB8RutNwsMzjnItTYwFgzeClPsFluN4ANWkq2UTOYaTHRADBHUbIi6Dw4LSeXiBCoKqScMziZ0s4APIgYKRGmaVKFUoVBCxIYCZQrcIECoxjjAsBCx8F7r+fjIqrTGJEzy/NkRsoJnLlsnFYALBZrrIC2XQgJSc9XgQszgTiW7+ackROXcYos4x85A1mUqV2XyINy1sVEopTABey3KMEEjXMCjsi7xdjLDcQCJAAAzlYOgTkjqQArQgdAzqKMRFA5/b5bnJfIwflQxg0KYOQZc7NRVFhA1qMoycbQWhwL8+eS0GTmAu4N2Bg4BoCUEzy5BZBqQVE7nyK4+dIttEJfBJfTz8vgE3k4FwBUQSBKPJfr2LUMHLeHCV1bKyaI7bDvr4Wbfaad3xgjvFuKDgO3oApKcs5FgU/TVBSv3QMRIaW0AOj2fr3fDCJR4LZWGQIU7LqCEX1jVMk9cTsj8qVmLTGYaaGMTL60e7G87x3KRiAqALqeX/YTr4Axc9a/sbgOyyDoXkZRNg71M7nsR1EqMl4ZRO16MiM+l1ujBoG1oCB4j0QRwdaggt+cGUR1z8iYL0EhEYGYQbB13z5LC14M7BBIZaJ3HolTMy+5OU8VLqRGo+BBAjkPxwzfdcicyrxlMKo5B6QUdVtnHWenc89wzsNB1oeB1HmeQQQE79D3vQDiKYGQBZQSoe8DhqGHD74BOgpWOQOcwCkDzAX05ihKPs4z4iRG6DRPmOeIw26HR2dnmKdJwF9iHA6HspYFtDdGc66OiRyTSOLWKCJCyozMqmchsg7eF+eEB9D3PbbHx+j6AUHf88Gj6zuQF2dNAfj6u+gjJ9cKzq90BS3lDMt9tHPZnseOIiOatdsalbIlRIZ3obsS7KwP55avrcGz3acPuq8zl2sAVUbpYyCYjMkZ8zgixhmj4gN7tpRkzFOuutnkmD1LK1tNjnO+DEJFFqdybnsGM0pbh187Fuvf67/Bq/WCKmPqTwCRh/cOROnyeLMCYecgYpXL+Vrn2xp3rUGu977IgtZ4MX21dmz+6x6PDWpTjMhUQS0R4ZAPZYG06HsNYIuibEGPy80i0JMWIJaQY0aOWQRV4wG2ARErTgRMnKMumDqBYk3FhZJwRIgpiTVO4hVNKZeF2QJZEy5EUBAaAQKYnDwHVBixCFNbk3Z/4jmqG4FRPRfteDHX6zpHYK4aSYC4AloFsFmBaeIWwMmze8+qfHUjpSyCL2eI07GCCDj5DpwoH6fgUkCbOBuyLe4G9IlSJXgf4INHCB1cc037TYjgTNXIATDPEUBj7KB6TjOLhyN0HdTZovcMxDnCAQIYUoRPodyHygmov2qx7hgMTpDdCRSPmIyZjfLSG6XLUN+vG9DAfvnWajObh6l9vz1s3uvrjYUKLASxKFE1jBRstEKwFRqtd6O91voz7eda67r1APR9Xz6/Pkd7fQDInOEb4BO6gK7vipKxz86Nh6qu+aUB1Cqgq+5fPoRidBqgrYJ0+dGcs3xOPmTDfem8AoTsNXflfdg8eycAW2SMAKey6PT+nKMCBu3SzLj8LAB4FWlyKs+cI91vl8cFqB7sYjDaZ/Ja8TuYQWCYjMiBnAe5DA+BhxJl4GIgczNeZIEaEBwcWN0trG8Wo1+NZoelUZBzn
QNfjAK7F663qWNavatAsaZJQGNo1hocyVSUNSCxGG/gq0y7RrDICahVeTX0PZwzxS6K1eUkgF6FTu8JnjIoA5xjkdlxnpGzRh1jVBkbG4+prPVpnDEeRszzVLyz+/1hsfemaVoays4hhK46WHQRd76TaBNR8V4SCOQYCKF4wGVNLgFq3/cYNhuELojBQA7Oe/ig650a2UOrdWfrujFsbMwXckcdJHB6D6gAqv3ewqgTqw6Av7Q/nHNwarZcJYvqwYvzL95ZATSL5Mr5BEgSGV7h4plvvejTNMmcW0Q5i07J5rBDu+cIXVc15awGvO3VtTxuZXk1fC+D1/Zodc5Vz3lJ1hRnoKsRIP2xe7bf3tueNUOlGuK2P9vrmYHSYr7WYbGW7bZGDWetI75rMPyvczw2qN3v9ovFaTe2BrXrhzQvLdisOHmAbgiywYoQ4mL5KlIEQbx35GwjV+AiigDITMgpllCCWdXmjSIWT0EIQSz+aZbQvz6HhWDlYLUecgGXxTMEkdAG88wjlDkXL4kBFWY
"text/plain": [
"<Figure size 673.75x375 with 1 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"inverted_image = cv.bitwise_not(img)\n",
"cv.imwrite(\"temp/inverted.jpg\", inverted_image)\n",
"display(\"temp/inverted.jpg\")"
]
},
{
"cell_type": "code",
"execution_count": 172,
"id": "e653440c",
"metadata": {},
"outputs": [],
"source": [
"def grayscale(image):\n",
" return cv.cvtColor(image, cv.COLOR_BGR2GRAY)"
]
},
{
"cell_type": "code",
"execution_count": 203,
"id": "0985148f",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"True"
]
},
"execution_count": 203,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"gray_image = grayscale(img)\n",
"cv.imwrite(\"temp/gray.jpg\", gray_image)"
]
},
{
"cell_type": "code",
"execution_count": 204,
"id": "b54b6db2",
"metadata": {},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAArUAAAGLCAYAAADZOGoBAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjYuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8o6BhiAAAACXBIWXMAAA9hAAAPYQGoP6dpAAEAAElEQVR4nOz9V4ys2XXeD6+qzt1VnfvEOWlmOJxwZsiZISdwKJImRVGQLcO2LFmGYcNBlgGHGwO+sA1f21e6tA3YMBwE/0VLgA3bIpQsakgxU+TkmXMm9Il9QudUHavqu+jv2f17n95vdR/6/+GDgN5Ao7vfsMPaKzxr7bX3W2m32+04LsfluByX43JcjstxOS7H5U9xqf7/uwPH5bgcl+NyXI7LcTkux+W4/N+WY1B7XI7LcTkux+W4HJfjclz+1JdjUHtcjstxOS7H5bgcl+NyXP7Ul2NQe1yOy3E5LsfluByX43Jc/tSXY1B7XI7LcTkux+W4HJfjclz+1JdjUHtcjstxOS7H5bgcl+NyXP7Ul2NQe1yOy3E5LsfluByX43Jc/tSXY1B7XI7LcTkux+W4HJfjclz+1JdjUHtcjstxOS7H5bgcl+NyXP7Ul+6jPnjhwoVoNpvBD5Ctr69HRES73Y52ux2tViv9rZ+IiGq1Gj09PdHV1RXVajWq1Wq0Wq30f6VSSfVUKpXo6uqKrq6u6O7ujp6entjd3U316Fk9p/e6u4tDqVbL8Xq73Y5msxmtVitarVb09fWlOiMiuru7o6+vL3Z3d6PZbB7om/6vVqupb11dXVGpVKK7uztarVZUKpVUX0TE7u5uof/d3d1RqVRid3c31d3d3R27u7upX6q3u7s70Y0/uq/+6N1KpRLVajW9S/pGRPT19UWlUknzpfnQWNU/vqO2SFfOudrkmPlMV1dX7OzsJDqJ3qLzzs5O7O7upudIk0qlEjs7O9FqtWJ7ezt2d3fT72azmWgYEdHT05P6Ljqqz6Ip//Z5Up9VH+9pLF78mupUuxwv51Zld3c3uru7U79FJ9bV3d0d/f396Trr4e9qtRqNRiM9F7EvCz09Pal+tam517viZ423p6cnWq1W9PT0pHq6u7tjZ2cn9V11bW5uRldXV5KtiIjNzc3o7e2N/v7+GBgYiFqtFisrK7GzsxPNZjPpFNLG/1dfOCanIeWAc87nxZ/kVbWv+Wa91Gv6n3Pspbu7u9BHPef1iG65Z9UHPctx8G/SKtcf6Zuenp403mazGZVKJXp7e9Pc7+zsxObmZqI5+dbpEREF+ZUepfxL/6m/onGOppwP3cvxgcbCcWssvEb6U2+zUE+qDepBtiOdk/vopvRrtVqNra2t2N7eTnrLZaBarSaay6b19vYmuejv7096S7ZIz/X09MTg4OABWmn8tJW0rZqrdrsdW1tbBVnb3t5Of4uWlEfpVN2jrRRvUU9oXOIN9UHzR57kPDSbzejt7U04Qs+KXpxbvseSk0PyGO2a+latVhNm0Fg1DralsWtcpLX+F927u7tT3zS2iEh6kzLFcYqPfBzqN2WF+pn8QPvC+kQH8kyr1Urzt7W1lfSQYxv2I4dHxL8uM15EH46dfOJjKJtnvftf/+t/PXD9QJuHPvH/LY1G40CnJViccDcaZCT9kIARRWKIaPqhwssxC/vhgk8hIlOwfQJutSMDIAVFBtEzqsPbZL16L2IPSKq/ZLyurq4D4ycYksCwHdJZP26Uc7R2R0LKx4GbCusuEzoCyk6OBIEBBUGKQ+1JibDQKMsoCdBK4Cn4nHONge27EOUMH+dD/acRZaEhYb05IZec5IRWz7sSdkNGPtB4pfxctnwc4mnyB4GtK1iCa/bf511zIpnTfNCA9/b2FnQC5460KtMP5G0HL/pdxoM5QKvxEHB6P/Qj2lMHUC7KwKsrfD3jDqADLDqbPo4cuHZQr6JnJasCXKqLPOV9yo212WweGFPOaaPMyekhnXKFIJlgmYac7ZEmBCtep2gqkNrT05Pa2N7eTs4babe9vZ2MvtOT8kf6SDYERvv6+qK3tzd6enqSQ9fX15ee6+/vT/2njHNMucAPAWNOR6g/cv4bjUYCstKZ29vbBeDkv10m2KZ404vzszsEBJDSMwRutGs7OztZwCoalJWcE0qeV53iR9lX/c2AlDCIwBv5nEGsSqUSPT09BUdGTr+e51hcF7m+Zh9Ia/6dA7SqL0cL0d+dFl2X7JTZedcLAse0Ibrn+sCvuf7/f7scGdS6F9NutxNQI0hzMJUDmyoeSYvYN5oEfWJITbYmQO/KS3Rj0tfXd4DJGZ3Se2JMguOy6Jv+1m+PPOpvB1i87wCfhoz9d+DiNNY1vkNDQ6Ejbd3ARux7cTSonfiAwkGF4TRSuzs7OylSSH4gMFIUwYGV+EtzJseE71G43DirkH8YPfLCd9lODqTqXq54FI1G0OnkBoBOCp0hlyP/2xWb3qMyJH0JULa3twvtOE1cGdPoCdCS/whq6YSyX6zD+5mjp483Ny+UiRwAYn2kbQ5Asm/in1y0UfVxNYvAzovzFWWP4J1F7blR9z74OxwvjRSdCeetXD3UVzk6ORilTDLamhsP++L1u9PjRe14ZItjlK4S0CPAIx85OJQOFW/KZjBKp78FaBnJ6uvrS1HXgYGBAo/39vYWHIdWay86TDoJ1OecI8md/if/qP9bW1uxublZ0O8El3wnp3/UJm1FDqRF7K8O0D7k6lQfPAChQt1cZkdZF0vOKfW/NV8MsIj2iqj39fWlYJcHldzOO4iO2F8Vc53lwYQcLqJ98rGWORO5elRoS8rm2QODTk8fh5wmtSuc4g47Zdzr7QRsy/T/UcqRQa28ShUxpQbkUdocEOOAtFTDwjp8cLnohIRRjKhJI9hVe1wmzS3Nqn33SniPbdL45oSTipD9p3dGz5fGxn88Mksnwq87qHUvsFqtJi+SBtTTHjgWXhONqBx9qS0HSClY6h89ZPVH9aoOLss7eODf3qYrHUVSGOFgcV6kMswpfbalHwclrIcRfwepuWUfL7lVgVzJAR5XGowWk29EU0ZyObd0RKhsBYi1hBkRaflQBp3LkzlQzPrLxpIrLn+H0Yd1E8R4cT524ONOhwCUOzI5+uf6zT7x/5zBywFVgl32jXOmezJIbKOTs+R95N9ymh1k5QAsx6O/aTTd0eQ4qCObzWYCJTTEBF8aD0GtIrPkU4EW9Zl0IICVc9bf319YSWRKgf6WMydQ5LqEesKdJJ8n51PqPepgpRSpPl9qJs+UOQ9eXK78Xhn4UT87OSOcU9JH9/Seghk5x02OzGF917OUSfGP5kj97e3tjcHBwRgYGEiYh6DN5USpG1wVVmFAjuP26DvHRNqUFZfTMntGeeBPztkgpsnZoxwo9fY8Zc4LZZ6AlnrqKLr7sHJkUDsyMlJQMru7u8lwdTJGDnJ1jaCWSsijQRGRIq5STKyPoIIAQaBJE0sj7f2iwuCyAr1T9bNMIVCJqj0qOY5TxZc0vXQSzMMMPpU7rzWbzZTDxOcETt0bpzDouvpOpetATtep1
PW/5jOi6NGKyQW69R55TcajUqnE5uZmdpnKlSBpeZjCULvukDgvlL3HtniNdTq/Vir7udUuO+JFPROxD0rZPxluBw9SGDQS6iPp5MadQJ18zXZoTJnfzDZ6e3sLIFnXtRSsujwfLyIOGAQHgu6g5JwwLzS+Pp9uWHJg2Q2pG1tGoBzAuWOrdgjqc6kjuTG4TmE9HEcuLYPAI+Jg/rzLjebHaSYecMed4xJNpAddn5JWLgfUteQ9RVtJS+l26jDyvgNGLi0riqqoKnMme3p6Uj54X19fIY1MdTkftlqtAtjnSqN+BDS1v4FAlI44nU3KGQMLXE7mXDAS/aBgQXxdJiPsp7+n3/6ueNtBtvOf85Wvwno7HogpG6/4hIEtX62t1WpprmWbGIlX/8mjapNzwuCaFw8AUZ/knBrSO+d85sZKHmGfWJ9jmbL+6l5Oz+ZwEOfG72lslM2cTv+/KUcGteyUjJUmnR1VyS0hMFKj3CIRnobUGU1Mz5w9bgzLKeN2u53yhtQH3/wlgejv70+TISZ
"text/plain": [
"<Figure size 673.75x375 with 1 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"display(\"temp/gray.jpg\")"
]
},
{
"cell_type": "code",
"execution_count": 254,
"id": "90df13c4",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"True"
]
},
"execution_count": 254,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"thresh, im_bw = cv.threshold(gray_image, 170, 210, cv.THRESH_BINARY)\n",
"cv.imwrite(\"temp/bw_image.jpg\", im_bw)"
]
},
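  {
   "cell_type": "markdown",
   "id": "otsu-note",
   "metadata": {},
   "source": [
    "The cutoff of 170 above was tuned by hand for this image. A common alternative, sketched below, is to let Otsu's method pick the threshold from the histogram automatically; passing `cv.THRESH_OTSU` makes OpenCV ignore the supplied threshold value."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "otsu-sketch",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Otsu picks the cutoff automatically; the 0 passed as threshold is ignored.\n",
    "thresh_otsu, im_bw_otsu = cv.threshold(gray_image, 0, 255,\n",
    "                                       cv.THRESH_BINARY + cv.THRESH_OTSU)\n",
    "print('Otsu chose threshold:', thresh_otsu)\n",
    "cv.imwrite('temp/bw_otsu.jpg', im_bw_otsu)"
   ]
  },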
{
"cell_type": "code",
"execution_count": 255,
"id": "d6f7feb2",
"metadata": {},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAArUAAAGLCAYAAADZOGoBAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjYuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8o6BhiAAAACXBIWXMAAA9hAAAPYQGoP6dpAAEAAElEQVR4nOxdd3gU1fp+Z2tCEQglgCCIAiqQUCyIYgUuXkSwgYKgt9iu1y4WLFdFRbH36/V3xV6woCI21AtiV0IHKQrSpIQuJFvn9wfPd/LNt2c2uyFhd5PzPk+e7M7OnDlz5pT3fNWybduGgYGBgYGBgYGBQQ7Dk+kKGBgYGBgYGBgYGOwtDKk1MDAwMDAwMDDIeRhSa2BgYGBgYGBgkPMwpNbAwMDAwMDAwCDnYUitgYGBgYGBgYFBzsOQWgMDAwMDAwMDg5yHIbUGBgYGBgYGBgY5D0NqDQwMDAwMDAwMch6G1BoYGBgYGBgYGOQ8fKmeaFlWTdbDwMDAwMDAwMDAQItUEuAaSa2BgYGBgYGBgUHOw5BaAwMDAwMDAwODnIchtQYGBgYGBgYGBjkPQ2oNDAwMDAwMDAxyHobUGhgYGBgYGBgY5DwMqTUwMDAwMDAwMMh5GFJrYGBgYGBgYGCQ8zCk1sDAwMDAwMDAIOdhSK2BgYGBgYGBgUHOw5BaAwMDAwMDAwODnIchtQYGBgYGBlkOy7Lg8SQu2ZZlwev1ZqBGBgbZB0NqDQwMDAwMshiWZQEAbNt2HOPHdYTXwKCuwYwCAwMDAwODHIAbqY3H4+qzgUFdhmXzUZLsRDNgDAwMDAwMMgKdtNbAoC4hlb5vJLUGBgYGBgZZDtu21aLu8/nUcWNTa2BQASOpNTAwMDAwyGJYlqUILf9sYFCXYCS1BgYGBgYGtQwkZOJ2tUbwZGBgSK2BgYGBgUFWgyRUjRo1QnFxsZbAGumtgQHgq/wUAwMDAwMDg30Bv9+fQFrp+7HHHov//ve/aN++PeLxOADA4/HA6/UiEokAAKLRqPrNwKCuwdjUGhgYGBjsU5BdaGUe/Tr7Ua/Xi1gs5jjHsiwHkePlyzBYyb4nq5vbuXQOJUfgddPdJ9l5ADBnzhy0b99ePSuwJ2QXxaL1+/2KuNIxTmqvvfZa/Pe//9XWgT+PgUGuIZW+a0itgYGBgcE+gUwWwD363c5zOya/e71eRfTc7m3btuM8j8ejyLAb6bMsCz6fT5FGXZkejweWZSEWiyWUw+9BCRJ4XNnrrrsOQ4cOVWS9W7duCAaDqoxYLKbILSfq9CwAEA6HlYR35cqV2LhxI8rKyvCnP/3JtS4GBrkGQ2oNDAwMDLIKblJUt/PkMUC/uBFJJshziNBJIusmlaXvUrI6ePBg9OjRQ5XFy4nFYorgfvDBBygpKXFIkmW5//znPzFixAj06NHDcZw+c8JMRJzKc9sM0PFwOIwHH3wQTzzxBLZs2eLapgYGuYJU+q6xqTUwMDAw2GeghSmZhJZ+lySMkz6JVLNqcUmlLLtx48Y4+OCDAbhLNUePHo1TTz1VW99oNAqv1wuPx4O1a9eipKTEkcJW3uOqq67C/vvv7yCtvE7RaBSLFi1CJBLBAQccgGbNmgGokEoT0SaTBI5AIICbbroJc+fOxTfffINNmzYZQmtQ62FIrYGBgYFBjUInIXQzP6DEAqTu10lW6TMhGVEm6Mrg97QsCyeccALeeOMNhMNhJSGla7jaX2fGQLat3OaV6k8SXI/Hg759++LNN99UJJyXF4lE1P9YLIadO3eid+/eAICHH34Yf//731V72LbtaCs357LXX38d11xzDZ599llEo1EjrTWo1TAhvQwMDAwMahRupgDyuG3biEQiiEQiCaSXzAD4uVxyy3/jhJTDjdCVlJRg8+bNeOWVVxCPx5Gfnw+fz6eIKABlL+vz+eD1ehEKhdRxnukrHA6runLzhng8jgceeAAvv/yyIrT0jPF4HPF4HIFAALFYDGPGjEGrVq3QoUMHVcfrr78eI0aMUGRaEmJeV07yLcvCgw8+iLfeekv7LgwMahOMpNbAwMDAYJ8iHWclTlxJ4smvJ9vWN954A61bt1Zkj45zG9V4PI5hw4bh999/d9zD7/c7zg2FQoo8AhVhs4jAWpYFv98P27YRDAYRi8UQjUbh8Xjg8/m0kuPJkyfjyCOPRDAYVGSWiDLVmcqIRqMoKytz1DESiWDGjBno06cPLMvC66+/jjZt2iQ8n9frVU5rkUhEOZBx8m+cxQxqKwypNTAwMDCoUaQSSksiPz8f1157bcJx3XVerxfHHnssGjdurL0XvzYQCCRcz8moDBfGJcJSGhyPx/HYY49h69atCREOZs2a5bj/YYcdhoKCAlUetwEm0hmNRvH000/jp59+0rbVjh07UFJSAq/Xi/Lycm2bcEJNZhXxeBwdOnTADTfcgPvvvz/h+Q0MagtM9AMDAwMDgxqFLsxV165dtQST0LhxY0ydOhUAEuxTueSRh+aS0QWk/a1lWejYsSNWrVrluNfbb7+NPn36oGnTpgiFQvD5fJg/fz5CoRAaN26MDh06KAkoj0pg2zY6d+6M1atXO+rJEQgEUFRUhDfffBOFhYXKXIBCdXHCHIlE0K1bN/z666+ubQfsIfEvv/wyjjnmGDRv3tzxvDxsmN/vd5hH7Ny5U9XBwCDXYEJ6GRgYGBhkJZYtW4a2bdsCqDAhIHAHLC4pJRJIBJZHDeCxXGV5BNu2ccghhySQWgB4/PHHMXr0aLXWde3aFatWrcKgQYPw5ptvOogx3cPv96NTp05YuXKlKofX2ePxoFmzZvjll18QCAQcYbmoPpygx+NxdO3a1UFqJThZf/zxx3HhhRcmEGQqm8okQr5161a0a9dO2QMbGOQSUqGrxlHMwMDAIAVw8pHsWFXK4t+JvBG8Xm/Cd34u/1/ZvSTRc3OmSuc55Ge6n/xOIIcmTvB8Pp/D+YlsVomQcWctTnbJVIBMB3h58vnJzlT3fB6PB1dffTWaNWuGpk2bomnTpgnEV3edLisYl9YOGDAAv/zyC7xeL6LRqJZ0chBZ1t2X2k3nXKczuaB7crLfuHFjbNy40eGAxtvACK8Mch3GptbAwMAgBUgyURU7UV6WLqg+J2+APkoA/aYLacWP65yVJGFyI1ap4t1330VBQYFK3xqLxVQ4LI/Hgx07duCUU05xXKPL4EWENhwOIxAIKALK24o+EzGVTmBU5ldffYXrr78+4X0QIZROYlQnXUKGG264ASNHjnQ4jdH/Xbt2YeDAgVi3bl3Cu+CSZZ/Ph2g0ivz8fBXqi8ojcwr67Pf7te9DmlLQsfvvvx+LFi3CQw89pH7j/SkYDDrItMfjQV5eniMyg3wvBga5DENqDQwMDFKATsKWTOqWDDL0EkFHkiWZ4edK21J+3K3+uvtaloXDDjsMZ5xxhuv1OvTp0wf169cHUCFBpsgBHo8HmzZt0taDmwsQmSKJNH9m+Z0Tf7c2b9GiBU466SQ88MADCeYLbnB7j+3atcPBBx+sjaYQj8cxe/bsBLtdAifr/
Dn4cf4OiZDq6qP7DQBWr16tTBW4RJffh39P1k/T2ZQZGGQrDKk1MDAwSAE6JyROsNIhBCShS0Zqdd8luCq+sussy0JRUZFDNc/rc+KJJ+KWW25xVdFXVo9oNAqfz6c87ivDggULEAgEUFBQgHg8jjlz5igiZtt7ohT06NHDQcikhJE7ntG1HTt2xPXXX4/p06dj4cKFCIVCCcQ4mZRdJ+FOZSOjI8QdO3Z0RDbgfUeSWsApxeZSV36+3LzIzQrXAMRiMWXGwduxa9eu2LFjBzZu3FjpezIwyCUYUmtgYGCQAmTIJimJ09lXuoFSmpLkUXetlABKAscdgQjJyovH43jrrbfQqlUrx7mk9qdYrJR1KhVIswkyH+AmE7Jt6PiQIUPwzDPPYMSIEdi0aRP69OnjeN6CggKsWrVKkXDe3tQO1JaS7DZp0gRfffUVunTpghUrVqhydeRY2rHScb/f75AmU7sTYSSyzK/jdsTXX389/vKXvzjKp4QORDjpXrxv0e9kjhAOhxPIKrUrmU3QpoL
"text/plain": [
"<Figure size 673.75x375 with 1 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"display(\"temp/bw_image.jpg\")"
]
},
{
"cell_type": "code",
"execution_count": 207,
"id": "fe31b009",
"metadata": {},
"outputs": [],
"source": [
"def noise_removal(image):\n",
" import numpy as np\n",
" kernel = np.ones((1, 1), np.uint8)\n",
" image = cv.dilate(image, kernel, iterations=1)\n",
" kernel = np.ones((1, 1), np.uint8)\n",
" image = cv.erode(image, kernel, iterations=1)\n",
" image = cv.morphologyEx(image, cv.MORPH_CLOSE, kernel)\n",
" image = cv.medianBlur(image, 3)\n",
" return (image)"
]
},
{
"cell_type": "code",
"execution_count": 208,
"id": "4e873cc8",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"True"
]
},
"execution_count": 208,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"no_noise = noise_removal(im_bw)\n",
"cv.imwrite(\"temp/no_noise.jpg\", no_noise)"
]
},
{
"cell_type": "code",
"execution_count": 209,
"id": "a993d061",
"metadata": {},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAArUAAAGLCAYAAADZOGoBAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjYuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8o6BhiAAAACXBIWXMAAA9hAAAPYQGoP6dpAAEAAElEQVR4nOx9d5hUVbb9upU7kKNIFBVEiY35GVBRMfIUMIxhnlkxIYiiP8fwRiUOYxgdx1FHB3NCHJ+RMaAzI0qmkSwgOUnq7sr390d/6/S+p241VQ1NVXef9X18dFXdfE9YZ++197Zs27ZhYGBgYGBgYGBgUIfhyfUFGBgYGBgYGBgYGOwrDKk1MDAwMDAwMDCo8zCk1sDAwMDAwMDAoM7DkFoDAwMDAwMDA4M6D0NqDQwMDAwMDAwM6jwMqTUwMDAwMDAwMKjzMKTWwMDAwMDAwMCgzsOQWgMDAwMDAwMDgzoPQ2oNDAwMDAwMDAzqPHyZbmhZVm1eh0EDhmVZ0AvbeTwe2LYN27bh9XqRSCRydHUGBgYGuYdlWbAsC8lk0vH9wIED8d5778Hn88G2bSQSCdi2jbKyMrRr106Nr6Z4qEFdRyZt2FhqDXIONlTLsuDxVDZJDtwej8cQWgMDgwYP27bVuOj1etX3Ho8HPp8P0WhUjaU//PADzjrrLMc+NEwZA5VBfUbGlloDg9oArbC0QkiC62bBNTAwMGjI0MfF5cuX45FHHkEikVBkd+XKlZg/f77axrZtQ2YNGgQsO0PWYDqEQW3A5/MhmUyqQZduMmmxNeTWwMDAoBIejydFgkBwrHSbr6XBwIynBnURRn5gkPeIx+MOrRgtDclk0hBaAwMDA1QSWRJVOU5algWv1wuv1+sgtNyeMQn8B2RGDAwM6iqM/MAgr5BIJFTAAy24JlDMwMCgIUNaZi3LQiKRUMG0+tgoyS89YTJGIZ2V18CgPsDIDwxyCq/Xq8grITMfGBgYGBhUzcGUZ+lEV3q10o2dxkBgUJdh5AcGeQ9JXuk200muWVAZGBgYVIEyA0l0ZSwC4JQsEMZKa1DfYeQHBjkFB9k2bdqgQ4cO8Hq9+OGHH1JIrbHaGhgYNFToYyAJrP6d3F4SXrdtDAzqI4yl1iDn8Hg8uOiii/DPf/4TX3zxBQoLCwFUWR+SySQCgQACgYAKiOBnY8U1MDCo73DzXOkeLQCq8AL/TiaTDouttOQaGNRHmBZukFMwKAyo1Htx0JWDtdfrxbJly7Bhwwb87//+L7p27Ypt27Zhw4YNOOWUU3Jy3QYGBga1AblQ53hY3eK9uvRdgJP8GvmBQX2HkR8Y5BTxeBwA8N577+GHH35AMplEeXm5Gqg7dOiAKVOmoFmzZvB6vbj88ssxcOBAJJNJhEIhR5Uc41ozMDCoS5AyAT3wi9/pYxtjDxjw5RY4ZmDQUGFIrUFOwQF78+bN2LJlC2zbxujRo+H3+wEALVq0QN++fVXUbosWLdCqVSs1oMt8jAYGBgZ1CW4VFGX2F1lxUd8HqBr/KDMwxNagocOQWoOcQlYRY0Tvaaedhp49e6JRo0aucgTuN3v2bOzevTsXl21gYGCwX6BnLpBFFCSplZlidDLMvw0MGjpMnlqDnENaWvn3+++/j5NPPlkN7rFYDKFQCF6vV3130EEHoayszFhpDQwM6hVogU2XU1YfM30+H2Kx2IG8RAODA45M5npDag1yDikhYAlIj8ej/sXjcYRCIUSjUdxwww147LHHkEwm0b59e2WpdUtfY2BgYJDP0KUGMqiLllpWDdOJLquFmUI1Bg0FhtQa5D3c9LAMhACqIne5XatWrXDwwQfD5/Nhzpw5yjUnAycMDAwM6gLciKxejIbj2t4W7n6/31hrDeo1DKk1qBOQulmZ3ouWWwmpH6PVgn+bIAkDA4O6BLdyt256WWDvRRRMCVyD+o5M6KoJFDPIC+hWiHQEVaa68fl8KQEUBgYGBnUF1S3EmzRpgkMPPRRerxcA0hLWHTt2YPny5YbQGhjAkFqDPIAc2KU7ju43+Tu/Bypz3KbLjmBgYGCQ79Ats0xlCAAnnngi3njjDQBV+Wo59kmr7D//+U9ccsklapyMx+PGa2XQYJGV/ECK2qvLDVobeUOlSN7kJW2YcJMpsCKZtFLI9hEIBBCNRl1/MzAwODDQ3egyVRXnlX0lYn6/X5WG5Tn1ogUydZZbLlhdxpRO2iTTbrmdQ+6re5Lc9rEsC40bN8by5cvh9/tVURqWBo/FYipwVi7q6a2KRqNqnLv33nvxt7/9zXWuNOOfQV1GrWhqGZlOQbpOdKVlzW2AqElAD8/Bv9MRGIP6C/09Z6ofk0FnbKcm/Y2BQW7g8Xjg8/kQjUYdcwd/y5bYylyuAFKOpWdWkWSU27t9t7fxRd4HAIwePRrnn3++Os6wYcOwYcMGdY1u8x69UnfddRfOP/98WJaF/v37q+Pbto14PK4suLFYzPXaAacc6+eff8bmzZsBAMFgEEOHDsX69etT7tfAoK5hv5NaSRCYYoQnchuYdJ1kTQN6qiO1Bg0TctJy09TKtudWP920IQODAw9J8OS4XlOy5UYYddLnNg/tTYevV/IKhUIYOXKkY/6i1jUWi+Gcc85Bnz591P7PPfccpkyZgrlz56ptZZaDgoICjBw5EgBw1llnoU+fPo6ML16vF5ZlIR6Pq4W4tGa7kW5de0ui++yzz+KNN97ArFmzDJk1qNPY74FiupuIHVyeSCe0bqvgbKAPdEYrZACkaswI2dakhQaobL905RkYGBw4cBEqpUJ6X63J2C6PVx15lVIAt7nIsix06tQJLVq0cM3006RJE4wZM0ZJ4LivlArI+7zllltQWlqKefPmOazEPFZJSQnuvvtuh6RAekBJ+kle5ed0z1fm+SYSiQRuvPFGVFRUYNeuXVi2bJkhtgb1GlmR2ury5+kWXHZUdlImis5WE+tmgTPuk4YJDtYy4bhuoZFtQ5/cksmkGvSNpdbA4MBB96johJOWzGyPKQ0r+rxg2zaCwaDjXNFoFIlEAl6vFz6fT7n3CwsLcd999+Gyyy5zWDx5TdTrAk6LaDgcRjAYdIwvPp/PcQ3y/D6fDyeffDLeeusth4QvmUyirKwMgUBA7RuLxRCLxeDz+RSR9nq9KCgoQDgcVtZbwGns4XEZT2BZFu68804ceeSRuOiii8z8aVCvkTGprU4sL8kFCYNcEevWsWw7FF0xHDTk93J1a1B/IQd2PQJYVh4DnNZ9t0AyQ2gNDA4sZN+UEjXpZs8Gcj7SLasejweBQADhcBgzZ85E586dYVkWtm7dikMPPTRlPLBtG//+97/RoUMHdcxAIIBYLKYWyolEQpFguW8oFHJoXRm0pS+uSdrHjRuHq666CuFwGAUFBYhEIiqLSyAQcBDV6dOnq6wGHONOP/10TJkyBcFg0GGd5XXzGkngQ6EQysrK4Pf74ff7DaE1qPfISlNLa6seXUpyads2PvroIzRp0gSxWAwLFy7ELbfckqJ5qomlTI+elRoj00kbBqrLvtGxY0e8+uqrA
IARI0Zg1qxZafXXZmA3MDjw0DWfUlPLz9nMC2PGjME555yj5iCZ4o9E8KijjlKWU9u28eOPP+Lyyy/Hli1bHLKAhQsX4pBDDlEENh6PIxAIKPIcj8ddda5S2sR/4XAYl156KWbNmoVt27Yp7+Trr7+OE044Aa1bt1bn4D68Zl7nH/7wB0yZMgXLly93jFdNmjRB3759MW3aNMecKOUdXDjIgDwA2LlzJ2bNmoXzzz8/+5dnYJAH2O+aWp1ASnJBt25JSQmKi4uVDkhqGoGaEwo9StVoaxsWRo8e7XDN6W2gefPm6NOnD7xeL2655Ra89dZb+Oyzzxzb67IEAwODAwc36YH+3e23345GjRpldLzzzjsPvXr1Ui5/WlYJmf6KZLl///4IhUKOczOY1G1OoWSO85g8prQ2f/TRR5g7d64ivf/617+wZ88eWJaFYDCIu+66C6eccgqaNGniIPWSyDN7QSwWw+rVq7F8+XLH9dm2jZ07d+L777/HY489hhtvvBEtW7Z0BM3yupLJJKLRqJq
"text/plain": [
"<Figure size 673.75x375 with 1 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"display(\"temp/no_noise.jpg\")"
]
},
{
"cell_type": "code",
"execution_count": 210,
"id": "e175dc1d",
"metadata": {},
"outputs": [],
"source": [
"def thin_font(image):\n",
" import numpy as np\n",
" image = cv.bitwise_not(image)\n",
" kernel = np.ones((2,2),np.uint8)\n",
" image = cv.erode(image, kernel, iterations=1)\n",
" image = cv.bitwise_not(image)\n",
" return (image)"
]
},
{
"cell_type": "code",
"execution_count": 211,
"id": "c77d5076",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"True"
]
},
"execution_count": 211,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"eroded_image = thin_font(no_noise)\n",
"cv.imwrite(\"temp/eroded_image.jpg\", eroded_image)"
]
},
{
"cell_type": "code",
"execution_count": 212,
"id": "0517c085",
"metadata": {},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAArUAAAGLCAYAAADZOGoBAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjYuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8o6BhiAAAACXBIWXMAAA9hAAAPYQGoP6dpAAEAAElEQVR4nOx9d5xU5fX+c6fvLlVAmgh2isKC2CMKdowFRbG3mG+ssdE0mmJioqCixm6KCWpUxG6wBLHFREVcBMQCUgQhKNVld/r8/tjf8+65Z+7szsIuM7O8z+fDh52Z2+9bnvec55zjZDKZDCwsLCwsLCwsLCxKGL5CX4CFhYWFhYWFhYXF1sKSWgsLCwsLCwsLi5KHJbUWFhYWFhYWFhYlD0tqLSwsLCwsLCwsSh6W1FpYWFhYWFhYWJQ8LKm1sLCwsLCwsLAoeVhSa2FhYWFhYWFhUfKwpNbCwsLCwsLCwqLkYUmthYWFhYWFhYVFySOQ74aO47TkdVhsx3AcB7Kwnd/vRyqVyvm7hYWFxfYGn88Hx3HM2BgIBJBKpXDUUUfhueeeQzweh9/vh+M4SKfTSKVSCAaDqKysxOLFiwHYsdSitJFP27WWWouCI5PJwHEc+Hx1zTGdTsNxHPPPDsIWFhYWbiSTSTNm1tTUoKyszBiffD4fwuEwUqmUa/y0Y6lFa4cltRYFhfYAaBJrB2ELCwuLurGQBgC54HccB6FQCIlEAul02mwfjUbx+9//Hps2bTLkF4DrbwuL1oa85QcWFi0BDsxysJaf5TYWFhYW2ys4LkpSmslksHHjRsyfPx81NTXw+Xzw+/0AgB9++AGTJ09GIBBwGQ+slNCiNcPJ5MkWbEewaEmQ0EpLg9d3FhYWFtsrGlvg+3w+Q36B+vgEbTCwsChFWE2tRdEjFAoZywLJaygUMgOwJbQWFhYWdZCTOq2ytNx6kVaOn/J7a6CyaM2wllqLgkK3K7rXOBhba62FhcX2Dq9xMtd2krxyPLUWWovWgHzasNXUWhQUmsDSOstB3A7EFhYW2zvyGQdl4Jgkt3YMtdieYEmtRUHh9/uRTqex4447olevXkin0/D5fFi0aBE2btwIwA7KFhYWFhpewV+5SC2/I+yYatFaYUmtRUERj8fhOA5OOeUU3HbbbUilUohEIhg5ciRmzZpltguFQjmPkUqlXMUaLCwsLLYHSKkBDQI6FoFFGnTwmIVFa4QltRYFh8/nM4NxIBBAMpnMCmpYvHgxysvLAdQPyqlUCn6/H3/6058wceJEa32wsLAoeUgrq5Rn8Te/3+8aI4F6cusVe5BMJl2fLaG1aM2w2Q8sCo5UKmWsDLQocJDeeeed8f7776O8vBx+v9/kXAyFQohEIgiFQoYQ22BGCwuLUgIrKeqCCDJAltsw0wFTdLFsLmDlBBYWhLXUWhQFPvroI9xyyy0AgHHjxuHcc8/FoYceik6dOmHvvfc2pBeos0bIQdwO7BYWFqUI6ZHyylagpQJaQmAlBRYWbtiUXhYFh3S3BQIBrFixAhUVFeb3ZDKJQCAAv9+PTCbjGrwdx8FDDz2EsWPH2spjFhYWJQuSWQbPAnVkNZFIuLZzHMfoZGnJpSTBwqI1wxZfsCh6BAJuZ4HjOKitrUU8HkcikTCEFqgLKovH40ZqkMlkUFNTg9raWgAwRRwsLCwsSg30QJGcOo5jCK2WGiQSCddC3xJaC4s6WEutRcGhLayhUAjPP/88Dj30UGQyGQSDQcTjcQSDQRMglk6nUV5ejlGjRuGNN96wg7qFhUXJQUsNpLSKFlh6p0hsveRXgUAgy6JrYdHaYIsvWBQ9GN0rAyMSiQR+/vOfo02bNgBgMiL4/X6ccsopuOKKKxAOh5FIJBCNRpFMJq30wMLCouQgxy0GgElIuZUcJxkwRqJrF/UWFnWwpNaioJBZC2Sy8MWLF7t+lwnEN23aZDRlS5Ysce1rYWFhUSporHytDhyTn+X+dvyzsKiDlR9YFBxe1gkZDaxrmfPvhn63sLCwKDXIMaxDhw7YfffdPTMbaPlBMplEVVXVtrxUC4ttDis/sCh60OJKksrBuzGyyt8DgYBx0VlCa2FhUUrQC/VgMGh+O+SQQ/DEE08YiZZMaajL3m7YsAF77rmnS6pgJQkW2yOaZKmVbpBcVjG5qtTbSN3kFl+wxzEtmdk+wEFdEl5Jagk9Udj2YWFRWEgCJvOvynlla+YFACaQlMfRsqaG5g79t9zPa18Naltz5Yrl/ert5HNp164dli5d6kphmE6nTdEFBojJkrgS8XjcpACbOXMmzjrrLFfp3MbGxIYqmVlYFAPymcubLD9gB5OpRjTRJdGQnVB2qC2N1JTaSktsty/od5wr2bi0avh8PlcQWXMsqiwsLLYcrIyVSCSyyNaWEClmCJAensaOFQgEzNzEgCuv4+ZT4IXZCa655hr8+Mc/NscGgMmTJ+Pll192zYsycwH/Hzt2LE488UQEAgHss88+nudNp9MmtWEsFkMwGMwqk8t51e/3o7q6Gl999ZUpJT5lyhQ899xznvcnn5Xf77eBZxZFi2aXH8gOBiBrpcgT8nfZeeUAtjWkQkbL68HBovXCS34AIGtiZFuT5FW3E7sIsrAoDLzGf62N35JjbqkFVv7NhbDMD8uFcDgcxrXXXmsWygBc6bZGjhyJwYMHG4OP3+/Heeedh/Lycjz99NNZwV6RSATXXXcdAODYY49FZWWlOba2XFOSQEJMcivvh9Zfzo8VFRUYPHiw2e+ss85COBzG008/nXXfvHdJuK2l1qJU0SRLLTuxXNV55czLJUtoDhcT0zvZ0qgWWhID1Ls2AeR0BdoB28Ji28Krz2ld6Nb2SS+5gVzcSk2q1qdmMhnstttu6NKlCwAYUkvLZXl5OV544QVjDZUSilQqhWAwaOZDfk6n0/jnP/+JMWPGuObK9u3bo7KyEi+//HLWM4rH4wiFQkin0ya9YXl5OTKZ+gILwWDQpDiUz4xWWcYXBAIBBAIBxGIxAMDs2bMxYcIEzJ071xWDwHFUelZthTKLYkSzW2p1J5C6IA4S8js5oEj9VFMHLxJirxW2tbptPyBZ5eDrpaXVmm4J7UmwsLDYNsil65Qkqqn9Uhcj0HOBJHYSJHRyIRwOhzFu3DicccYZiMfjKCsrc+3D42tLqt/vNyQzHo/D5/MhHA67rklqaIPBIIYNG4bHHnvMdT36fmg15rxK66+U/snKY3yGgLuyYjweN98dcMABmD59OnbZZZcsLbPU+wJ2jLQoXeRNar20iHJg0sFhWme7pZZVKbCXrp+tdVlZlBaYJUG2Ja+2IS0vtKbowAzbZiwsti3kuN0cGk45H+nFq8/nQygUQjQaxezZs9GrVy8A9cR00KBB+Oabb8y+mUwG//3vf7HbbrshEAgYK6u26IZCIUNceX5abmURBGlF5fgE1JHXyZMn4/zzzze/kajy9/LyckSjUcRiMfh8PkQiEVRXV7u0umVlZdi8ebMh7ZIIS0JMK3JZWZkZG/1+v6cVVhqhrMHIopSRN6mVQnd2S
B08lslk8OqrryIUCrkI55FHHrnFKZd4Hi1dyGU1tmidoIUEcGvIgLoJZ6eddsLUqVPNb88//zzuuece2y4sLIoEWhYkyW1Tx/FMJoPrr78exx13nCvgS5Izv9+PHj16GE0qF7nPPPMMbr75ZrzyyitmHolEIgBgCKoMPKOmltZSaaVNJBJIpVKGXHJOjMfjJiia9/z444/j4IMPRiAQMGMZxzFeRzQaRSaTMde8ceNGjBw50pzzgAMOwG233WaO4WVRlZ4syvV4rvLycsyaNQtnnHEGVq9e7VpQyIW/lR9YlCqaJD/Q+llpiWUnr6ysRDgcNr+TjGxNB5FuHC+LsV1Ntn6MGzcOoVDIfNbpczp16oQhQ4YAgFl4RaNRPPjgg1meAmuBsLDY9vCSHujvrrzySrRr1y6v4x1//PHYZ599AMAQTM4R7OdehpB+/fphhx12MN+TqBLpdNpICuR3JK+8XoJ/M7MD/54xYwamT5+OUCiEq6++GsOGDUP79u0bNNDQepvJZPDVV1/hsccewye
"text/plain": [
"<Figure size 673.75x375 with 1 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"display(\"temp/eroded_image.jpg\")"
]
},
{
"cell_type": "code",
"execution_count": 213,
"id": "9b077539",
"metadata": {},
"outputs": [],
"source": [
"def thick_font(image):\n",
" import numpy as np\n",
" image = cv.bitwise_not(image)\n",
" kernel = np.ones((2,2),np.uint8)\n",
" image = cv.dilate(image, kernel, iterations=1)\n",
" image = cv.bitwise_not(image)\n",
" return (image)"
]
},
{
"cell_type": "code",
"execution_count": 214,
"id": "25bbdacd",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"True"
]
},
"execution_count": 214,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"dilated_image = thick_font(no_noise)\n",
"cv.imwrite(\"temp/dilated_image.jpg\", dilated_image)"
]
},
{
"cell_type": "code",
"execution_count": 215,
"id": "cff53158",
"metadata": {},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAArUAAAGLCAYAAADZOGoBAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjYuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8o6BhiAAAACXBIWXMAAA9hAAAPYQGoP6dpAAEAAElEQVR4nOy9d5xU5dk+fp2pu8tSpBcRIihNYClGjaIoSFQ0IiL2mPiqiQUFRcGS5DU/I0UMxvbGFlvQqChgiV1IDNFEelFUpHcQKctOn/P7Y7/Xs/d55syys+wwM7vP9fnwYWfm9POU67nv675vy7ZtGwYGBgYGBgYGBgYFDE+uL8DAwMDAwMDAwMDgUGFIrYGBgYGBgYGBQcHDkFoDAwMDAwMDA4OChyG1BgYGBgYGBgYGBQ9Dag0MDAwMDAwMDAoehtQaGBgYGBgYGBgUPAypNTAwMDAwMDAwKHgYUmtgYGBgYGBgYFDwMKTWwMDAwMDAwMCg4OGr6YaWZWXzOgwMDAwMDAxcIOdf27bh8/kQj8fVd82bN8fGjRthWRYSiQRs28aBAwfQvn17WJYF27ZhiocaFDpq0oaNpdbAwMDAwCCPQVJqWRYCgQASiQQ8Hg+8Xi88Hg8SiQSi0aia9L/44gv89Kc/hW3bSCaTAKqIsTFQGdRn1NhSa2BgYGBgYJA72LaNRCKh/iai0SgmT54Mj6fSTrVmzRosW7bMsZ8hswYNAZZdQ5+E6RAGBgYGBga5A6UEHo/HYYH1eDzKmpuOwHKq5zEMDAoNNWm3htQaGBgYGBjkMSQR5d/pyKkkuclkEj5fpUNWWnkNDAoRRlNrYGBgYGBQ4JCTucfjcRBar9eLYDCofqcBioQ2mUwiHo8rHa6BQX2GsdQaGBgYGBjkMXSrrJQfyN85T6eb1r1er7HWGhQsjKXWwMDAwMCgwKFP5rqRSUoTCFp0JSQRNjCojzDZDwwMDAwMDAoIkuRK4iottm5WWxMgZlDfYSy1BnkD5mA0MDAwMKiCm8XVsiyVp5YZEVh4AQASiQSSyaTDYms0tQb1HaaFG+QUcrDu0qULNm/ejNLS0hTrAwA1gB/sOAYGBgb1CW4WVmYzoEZWamXl9slkUn028gOD+g5Dag1yCjn4btq0Ceeeey4qKiqUC41WCMuyMG3aNIwZM0a51j744AMMHDhQHccQWwMDAwMDg4YLo6k1yDlIXMPhMBYsWKCsCZLwjh8/HkOHDsX333+vLLn9+/dH06ZNHYnHDQwMDAwMDBomDKk1yDlkQIN0obVu3RodO3aEZVkYP348SktL0blzZ/Tv3x9erxdffPEF9u7de9A0NgYGBgYGBgb1HyZPrUFOoedN9Hq9SgP2q1/9CpMmTVLfWZalNGSNGzdGmzZtsG/fPrWvKf9oYGBgYGBQP2HK5BrkPUhqqZOV9cy9Xq8itPzOsix06dIFn332GTp06IDy8vKUYxkYGBgYGBjULxhSa1AQkJpYy7Lg8/kQj8cBQBFdWUGnqKgIvXr1wpIlS9S+MpWNgYGBgYGBQf2CIbUGBQFdEyuts/xeklpp1fV6vbBt26SqMTAwMDAwqMcwpNYg7yF1sOn+lp9lOzSWWQMDAwMDg4aBmsz5JvuBQU5BK6xMyUXrq05wZelH3TJrMiAYGBjURxysymIymVRyLQODho46t9Sms7ZJ4iI1kwYGNYVbdoODyQ90fa4MRjMwMMhP6H2dpV4ZCFpTr47b99WdJ92+1cmfZHBqOk+S23ZuU28gEEA0GlWFZ5o0aYJvv/1WVVLk94lEAh6PB9FoFB999BGuuuoqxGKxlOszMKhPyJn8QO+wwWDQ0eFMhLrBoYKLJKBqEHcjtzK7ApCqzzUwMMhfyH4uPzPFHwBXwwkhUwQSPp9PfZeOGKeD23Y1Lf4iryXdGGRZFl555RW0a9dOHW/AgAEA4ChK4/f7EY1G4fP5sH//fnzzzTeKZD/44IOYPXu245jyOZnUhwaFirwhtX6/32GZNR3KoDbQsyTo1hP5mxzA5UQotzUwMMg/0CpJ4qr3e/k3UFUim9ZJpgOMx+Mp5FGOG9WNAbqG300mJbe1LAtdu3bFyJEjEQgE8PDDD2PPnj2OY+kEXY5RXbp0wcUXXwzLsnD99dejWbNm6hr9fj+SyaS6j2QyiUAggFgspu5Vjm8ffvghFi9eDNu24fV68cgjj2D37t2H8koMDPICOSG1bu4XQyIM6gJy8OZgLgd7QrdMBAIBxONxY501MCgAeL1eAFAelnT9XEISTo/HA7/fj0gkkrIN4NTj64QZcHpz5Hderxe9evVyaFzlAvvEE0/E73//ewSDQYwePRqfffYZdu3aVa0l97jjjkNRURFOOukkTJkyRY1vlFvwWnSjkM/nc3g8PR6P2k7u6/P5cNFFF+Gzzz7Dzp07a/wODAzyETnNfsBOxvyhXM3yOEaCYFBbWJaFQCCQMmm5uRoJTmg1cRMaGBjkB4LBYK36OYmrJH+6zlQSQX7WC75wP+67cOFCdOnSBZZlIR6PK7LLcwQCAYRCIfh8PowZMwbPPfec4zjymouKirB48WJ06NABlmWhpKQE5eXl6pgAEI1GkUgk4PP5HMfhZ5/PB9u2EQ6HUVRUpK6LCwI+jzvuuANPP/20kgEaGBQicpL9IBgMIhqNpmiWDJkwOFRwAE8kEmqi44SjT0783i0g0VQeMzDIX3i9XtV3JaHVrZEECaff70/R1cttdfkRrb88XyKRwJAhQ/Dyyy87SnNzzJGufhLLaDQKoHLeAypJqNfrVWOQBAl2MplE8+bNsXr1avj9fjUe7dmzB6WlpWps+uCDD3DRRRelkPimTZti06ZNjnHQ7/c77ldau5PJJCZPnowzzzwTI0eOrKO3ZGCQn/AcfJPMwE4OuLNqk+/WoLaQlhEAavLgb0QymUQikVARwlJTywnKwMAgP0EiJvs5/2Y/l8SVf8fjcYfMiGT19ttvx9y5c/HWW28BgMNjSDIbj8cxceJEPPDAA/D5fIrAkiD6fD74/X4EAgEkk0lluKE3MhKJKILp8/kwevRovP322475juPXiSeeiDlz5iAYDKqFOlBpueXfU6dOxW233YZ4PI5wOKwstslkEnv27MFPfvITbNiwAYFAAEVFReq5UTbBc3Gsk0SXBNjAoD6izi21tm3jmmuuQdu2bbF582Y8++yzKb8bGNQWeqCFHhR26623Yvbs2Vi7dq0JDDMwKFC4BVS5bdOtWzeMGjXKoT8FKgkkSe3ZZ5+N3r17IxwO45577sGjjz6KvXv3phy7U6dO6NKli4McErJ8t5TWkbS+++67WLJkiRqT/v3vf6tAMampHT58OEaNGoW+ffs69ido6d20aRPWrFnjuCdebzKZxKJFizB9+nRceeWVGDBggLJUk8TSuksi6/F4cPTRR2PcuHF4+OGHa/VODAwKAVkpvnDVVVehbdu2+Oijj/Dss8+axPgGdYJjjz0WTZo0UZ9lcAdQOXDffvvtqKiowLvvvov169c7CK/cz8DAID/RuXNntGzZUn3W+7n8f
tCgQZg4cSIAOAKkJLkjioqKMGHCBDz//PPYu3dvymJ3/fr1WL16NX70ox+p8+mZGEgyKY+wLAsrVqzASy+9hHfeecchgSJ4rN69e+PKK6/E8OHD1TFJoAEoi/HKlSuxa9eulAwM9DTx85NPPomePXuirKzMEVBGMq/ff+fOnTFu3Dg89NBDmb0QA4MCQlayH3z88cd4//33MWXKFFdRf20sZ9IFJSUOBg0DlmXh9ddfxxlnnKGij8PhsHIBckD3+XyIxWJ48skncffdd6u2drAiDQYGBrmBx+NRgU8ejwePPPIILrnkEoculGD/lsFcLEIQDAYVqdTTSJI4RqNR9OnTBxs3bkxJCWbbNs4++2y8/PLLymIaCAQcOl5pweX1de/eHRs2bHC9L5mX/euvv0b79u2VVZXEWG7n8/nQo0cPrFu3rtpiEdThTps2Dddcc43K8MIAWqb9krltLctCJBJBx44dEQ6HzeLeoOBQ59kP3PIFuoHJreuSQNxxxx246667sH//fhx77LEIhUJ1dmy
"text/plain": [
"<Figure size 673.75x375 with 1 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"display(\"temp/dilated_image.jpg\")"
]
},
{
"cell_type": "code",
"execution_count": 216,
"id": "18cd2910",
"metadata": {},
"outputs": [],
"source": [
"def remove_borders(image):\n",
" contours, heiarchy = cv.findContours(image, cv.RETR_EXTERNAL, cv.CHAIN_APPROX_SIMPLE)\n",
" cntsSorted = sorted(contours, key=lambda x:cv.contourArea(x))\n",
" cnt = cntsSorted[-1]\n",
" x, y, w, h = cv.boundingRect(cnt)\n",
" crop = image[y:y+h, x:x+w]\n",
" return (crop)"
]
},
{
"cell_type": "code",
"execution_count": 217,
"id": "40842784",
"metadata": {},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAApcAAAEzCAYAAABkCSj0AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjYuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8o6BhiAAAACXBIWXMAAA9hAAAPYQGoP6dpAAEAAElEQVR4nOx9d5hU1f3+e6fP7tI7SG8qoIig6FesRLArKGhsqEFEDQFLokZNbCFqjIoNFYMaDWJssaOigooFQUWkCahILyJsmZ16f3/s7z37uWfu7M4sOzszy32fh4edmVvOPfecz3k/9RimaZpw4MCBAwcOHDhw4KAe4Mp1Axw4cODAgQMHDhw0Hjjk0oEDBw4cOHDgwEG9wSGXDhw4cODAgQMHDuoNDrl04MCBAwcOHDhwUG9wyKUDBw4cOHDgwIGDeoNDLh04cODAgQMHDhzUGxxy6cCBAwcOHDhw4KDe4JBLBw4cOHDgwIEDB/UGh1w6cODAgQMHDhw4qDd40j3QMIxstsPBXg7DMGAYBhKJhPoMANxAyjAM6JtJ2X0HAB5P1bCOx+PweDyIRqOW391uN+LxeL0/gwMHDhw42Hvgcrng8XgQiUQAWNctfQ0DAK/Xi1gspr5zuVyWNY9roMvlgmmatR5nGIY6Vv8s7yuvy7/5+yWXXIILLrgApmnC5XLB6/UiGo0iFovB4/FgwIABcLvdFg5YUlJSa9+kTS4dOGhI6MTRjkSmIpuchPoEs7uuAwcOHDhwkClItmioIOnj+kKyxu8kAeTvqdY43aiir1mmaSojifydf8vPHo8H1157LeLxOFwuF9xuNwAgFovB5XLhyCOPxMCBAy3tBaDIaJ37J929xR3LpYNsojbLpd3xpmmiV69eaNq0KcLhMJYvX67Ol8fxOvo9HDhw4MCBg7qARIzk0u12J5E7Ejlp9HC73RbLYSpIzqWTTcMw4PF4EI/HkUgk0LFjR7Rv3972Oj6fD3PmzFHE1u12w+VyIRKJKC+fXHdJKtlOaTUl0rFcOuTSQc5RkzWRv1GDkhqZ1+vFSy+9hKOPPhpr167F4MGD1YTgpLO7rmO9dODAgQMHewJpqXS5XJZQK5JKadV0u92IxWJJ1+DxXK/043gPkkJpMAkGgwiHw7jhhhtwzTXXwDAMRKNR1TYJWjoleYzFYpY2sL0kyC6XC9FoFB6PJ2O3uEMuHeQUcnK6XC41qagFUtvSh6nP58OqVavQpEkTdS4ARKNRxONx/Otf/8Kf//xni8blWC4dOHDgwMGeQl+TarJGejweRdJkLCZQs2eOv0tiOn/+fPTr10+tjbFYDF6vFz6fDx6PR62f0p3Otnk8HmV4cbvdijTK43hP3pcxojoccumgIJDKBV6Ta9zlcmH9+vVo0qSJ5XtOos2bN2P+/Pm4+OKLVYByTclBDhw4cODAQbrQ3eLSZe31ehGJRCwet9q8c4ZhoF+/fpg2bZpa+/gbLYz9+/dHMBi0ED+SSrbD6/UmEV0mttJiaZom4vG4ZS2km53X5HXlukk4CT0OCgJyEnAy1uTSlpNRIpFIKEtnhw4dcPTRR+Pqq6/Gww8/rH5LJ9bFgQMHDhw4qAl2STa6d0ySSq/Xi8mTJ1s8dQAUyQOAzp07Y/DgwSmrpUgyGIvFlPWSx8iYSa53dNnL+/F4eW0ew2ukIpbpwrFcOsg5evbsiZKSEmX+B6Cy4MLhMFasWKGOlYN/8+bNSosDqsmlz+eDy+VCKBTC119/jZNPPhmRSERNIAcOHDhw4KC+IMkf16P+/ftbCFwgEMDbb7+tygVxLZMWTv1akpgyu5sGEr38kF2WN69Dj148HreQSEli4/E4/H6/isuUCUA6HLe4g7yHx+PByy+/jKOOOsrWLb5mzRoMGTJETSygWpvbvHkzioqKksz6QJVWt2rVKvzf//2fbRC1Y7104MCBAwd1gdfrVX/rcfyGYSAYDOKHH36A1+u1cCcaP1hHMhAI2Bo86Jpmkk1lZSX8fj8Mw1AWy2AwCACqjjNd3ry21+tVpJTt4xqq13qW+Q5M3onH47bJPIBDLh3kOQzDwPfff4927dqpwa3X1UokEgiFQujduzdKS0st7oQffvgBTZs2TcqMSyQSeOKJJ3DddddZits6MZcOHDhwUP/Qk1RIXvQi3+mca/e7XhBc3kN+ZyfbdauiLLtTU1KOTDaVSS/xeBzz5s3D/vvvX2N/+Hy+JN4kw7nsQrvsjrM7T/ZbbZ9TfWd3T7vz7M5xYi4d5DVM08TZZ5+Ne+65BwcddJAlXgSodnMXFxcDsMaGsM6XtGbG43GceeaZKCsrw5YtW5RGpwsRh1g6cODAQf1BEhOfz6dcvTIUyW7nGHqb+J2+E428tn4+s55JCPWKIzLL2o7g6oRVWvL69++P+++/P6kUHuMQ+/bti0AgkHE/SaJWE9HTf6vpvNo+13avPT0vFRxy6SCnWLx4MR5//HH06NEDrVu3xoUXXqi0JQoLCh8ZU6LHgezcuRMzZszAggULEA6Ha8zOsysK68CBAwcO9hy6m1eWxZEJmfI7wH7HGv14u/PlPeQOOHaWR4lx48ahVatWAGCJR3S73ejatSsOOeQQSzuAPd+1Zm+C4xZ3kFOQ6LlcLvTp0wdffPEFgGqt0+Vyoby8HN26dUNFRQVMs6p4ev/+/fH6668r8/zq1atx8MEHJ1k+eS1dW7ar3eXAgQMHDjJHKne0Hq5Ulx1rpDFAbqeYysLJ+xYXF6N79+5JmdM8ZubMmejRo4fygtEqqRcq16+dqvbj3gTHLe4g7yGJIGt5UdAwEFnGypimic6dO+P999+3uDqY4eZ2u5MCrHkfXdt14MCBAwd7DhJEXaGn1U/unMbf9fqM/F6/npTXclMNJrjIzGlpZRw0aBBef/11lRwjrxEMBlFeXq7aQeIps7HdbjfC4bBKnInFYqq9XGccpIZDLh3kFNQAXS4XfD6f+puB06tWrcKRRx6JUChkETI+nw8VFRXwer2YOXMmrrvuOotF0uVyWbRRea5TjsiBAwcO6g/SkkdiyL+Z4UzIHWsA+8QTebxdfcYmTZpgxYoVymjA9YP7ZUsZz/I6EmVlZSrjW26JKOshk3hWVlbC7XZbtl90iGXtcMilg5xCxq+wFqUMrjYMA+Xl5erY0047DVOmTFFChMcyKxyA0lLj8bgSNFLDdeDAgQMH9Qt9xxo9OUf3NOkuZ0LGSh5wwAH4+9//jng8jkAgoLb3LS4uVrWRCdM0EQgE1H2A6tAo6QGTa44ey8l28xyuMfSsBQIBhEIhi+vcgT0ccukgp5CC5ddff8W9996LK6+8Eq+++irWrVuHHTt2WOJ5Nm3ahK+++goDBgzAY489hnA4jM8//9xyLTsSaZeN6MCBAwcO6gd6co1dwo38zufz4YorrrC4zqV72+12o2fPnjjssMNUfUg9dtKuVI5MAtV3p9HXAUk25bX0OE22SZJiBzXDSehxkDeg9jh37lxce+21+PLLLwFYk34Yczlr1iyceOKJ2L17t6W+mV7PErAKPSkoHDhw4
MBB/ULGt7dv3x6tWrWyrVVZXFyMd999V8XKyx1rTNNU7nNpDKCbPRqNKouitDomEglL4XCfz4dwOAyPx2ObRCTjLGVdTl6LJFcnxnu75dIpou6gIKCXitBJIb+Tn2WRXn0Iy+vZ/e7AgQMHDuoHMrlFl7VTp07FxIkTU5bwSSQS8Pv9iEQilu0HWa+SrmnWq5QubP7G+MxAIICKigpFGBnX6fP5VLKorIMpLZZ6JjsAC2HNdfkhOwut3TEMSagt4Sid69UEh1w6aFSw035r+uzAgQMHuYCuMJME6XGJuqfFTp7J2o4AFNHifXg9fUs/2RbAPlxIFhq3u69+vL7zTnFxMVauXJmSfPl8PmU1tOMQep1Lu+Qe/Tv9eLv+S3XNdK5bUxuzDd3IwvvG43FFktkePXFp0aJFOOGEEwAA77zzDgYOHAjAWpJPXs9uz/B04ZBLB40GUnDss88+ePLJJxGNRnH99dfj66+/zm3jHDhw4EADrUe0jEkZxlhAoPadaAidDHm9XktcoLyetLrp0Mk
"text/plain": [
"<Figure size 643.75x287.5 with 1 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"no_borders = remove_borders(no_noise)\n",
"cv.imwrite(\"temp/no_borders.jpg\", no_borders)\n",
"display('temp/no_borders.jpg')"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.15"
},
"latex_envs": {
"LaTeX_envs_menu_present": true,
"autoclose": false,
"autocomplete": true,
"bibliofile": "biblio.bib",
"cite_by": "apalike",
"current_citInitial": 1,
"eqLabelWithNumbers": true,
"eqNumInitial": 1,
"hotkeys": {
"equation": "Ctrl-E",
"itemize": "Ctrl-I"
},
"labels_anchors": false,
"latex_user_defs": false,
"report_style_numbering": false,
"user_envs_cfg": false
},
"toc": {
"base_numbering": 1,
"nav_menu": {},
"number_sections": false,
"sideBar": true,
"skip_h1_title": false,
"title_cell": "Table of Contents",
"title_sidebar": "Contents",
"toc_cell": false,
"toc_position": {},
"toc_section_display": true,
"toc_window_display": false
}
},
"nbformat": 4,
"nbformat_minor": 5
}