2.0 MiB
2.0 MiB
LICENSE PLATE DETECTION
YOLO V3
# Clone Roboflow's keras-yolo3 repository, which provides the convert.py script
# (used below) and YOLOv3 training utilities for Keras.
!git clone https://github.com/roboflow-ai/keras-yolo3
Cloning into 'keras-yolo3'... remote: Enumerating objects: 169, done.[K remote: Total 169 (delta 0), reused 0 (delta 0), pack-reused 169[K Receiving objects: 100% (169/169), 172.74 KiB | 625.00 KiB/s, done. Resolving deltas: 100% (80/80), done.
# Download the Roboflow dataset export and unpack it (creates test/, train/ and
# valid/ folders with images plus _annotations.txt and _classes.txt per split),
# then remove the archive.
# NOTE(review): the export URL embeds a private Roboflow API key ("key=...") —
# this notebook contains a secret; rotate/revoke the key before sharing.
!curl -L "https://app.roboflow.com/ds/hTj8Pr7g7U?key=q9kdROYojM" > roboflow.zip; unzip roboflow.zip; rm roboflow.zip
% Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 100 897 100 897 0 0 269 0 0:00:03 0:00:03 --:--:-- 269 0 100 2120k 100 2120k 0 0 515k 0 0:00:04 0:00:04 --:--:-- 23.5M Archive: roboflow.zip extracting: README.dataset.txt extracting: README.roboflow.txt creating: test/ extracting: test/2images1_png.rf.5de47b3b58bc776388f9547915f46edf.jpg extracting: test/2images41_png.rf.2f711be90f9f9e796139a02cb45fe9ba.jpg extracting: test/2images45_png.rf.cbcc994c49d1a2ca5e7bc52cb9b2a1a3.jpg extracting: test/3images22_png.rf.b139cdb6065c658e0c7acc2124854383.jpg extracting: test/3images34_png.rf.9a25c14870c5acae15ee0f159a9707b4.jpg extracting: test/3images4_png.rf.a3d6d0b11320142ada8e8347c918dc30.jpg extracting: test/6images3_png.rf.8b1268f1823ea224077f537939c2ccba.jpg extracting: test/7images0_png.rf.fb9d6e1e739e19321bdc7050f4a95798.jpg extracting: test/_annotations.txt extracting: test/_classes.txt extracting: test/images6_png.rf.56641c848717baa02774239ac0039bd6.jpg extracting: test/img105_png.rf.d69f400c7410b1e265136d01b1a2cc5e.jpg extracting: test/img149_png.rf.c487d9bc6be853e23cc7a12359178b40.jpg extracting: test/img14_png.rf.1a47d3748ad1566280dc8199d96430de.jpg extracting: test/img35_png.rf.16e367a1ce2db4dc0b0b1491814e8c95.jpg extracting: test/img89_png.rf.f0f546c24ed5d6a16a2cbf9389065678.jpg creating: train/ extracting: train/20img2_png.rf.015a51172ce51d61531b54af5a144183.jpg extracting: train/21img3_png.rf.c1601abdfd96ebfc6f13205c638364bc.jpg extracting: train/22img34_png.rf.02ddffee2d6e8dc6ef169f89f622a933.jpg extracting: train/23img46_png.rf.fd5a109b78b90ed3582888880b743303.jpg extracting: train/24img50_png.rf.1f28fdcb1632f237fb0bf7be7d877351.jpg extracting: train/25img73_png.rf.25d9c97db5c2c466bbe2692f9f69c869.jpg extracting: train/26img74_png.rf.861f6c881709f3bb65637c7ea3871dca.jpg extracting: train/2images0_png.rf.b8c8f0d2594f6bfaf8be2dca50416bb6.jpg extracting: 
train/2images18_png.rf.951b35372d913193f0899fda6877cbee.jpg extracting: train/2images22_png.rf.bb299b6d237016c2714b68aead8266d7.jpg extracting: train/2images23_png.rf.cea092359f78eb1c22db6b50627790d6.jpg extracting: train/2images29_png.rf.91d8be50c5d0f82577d74268153ac5fc.jpg extracting: train/2images2_png.rf.62684ca2757500eaeac877d48e04c92f.jpg extracting: train/2images34_png.rf.ffff2284b01426e5cd22ca8053450348.jpg extracting: train/2images3_png.rf.c7b635e1dc54f5bb10aa338d78969c22.jpg extracting: train/2images46_png.rf.d4143a5946da0d1bc8e540c239a648b8.jpg extracting: train/2images4_png.rf.64541674b6b6df83b15534c2d8bf0030.jpg extracting: train/30img11_png.rf.1a236b6935fd926336da07248a867a36.jpg extracting: train/3images0_png.rf.ff30aaf2256dde95d2dc4893b7074098.jpg extracting: train/3images11_png.rf.8a11e1eae3b52a369681843c7d7116d1.jpg extracting: train/3images18_png.rf.0673ed9396fa1ae5a43ff44f10422ff5.jpg extracting: train/3images29_png.rf.a96af5fe85f477adc0fce370e788f76c.jpg extracting: train/3images2_png.rf.d7de4c45de845226a8391e8f332352d9.jpg extracting: train/3images30_png.rf.dd0080eda6b7d8ff2e188c7e5590e7c6.jpg extracting: train/3images33_png.rf.3abc75a93214fc0a497dd54cabd690a0.jpg extracting: train/3images3_png.rf.e9771234c266dba02be2fd6f204aa66b.jpg extracting: train/3images42_png.rf.b3b45a46d57ac11c2d546831ad52cceb.jpg extracting: train/3images43_png.rf.0603c0f1b7a15be7449b6d46c621e7af.jpg extracting: train/3images5_png.rf.6a53d28cdfade27885d25f8208f3028a.jpg extracting: train/6images0_png.rf.1e11dd3d7f4e5a79ce207c7770185b0c.jpg extracting: train/6images12_png.rf.d0d6b3319c39fdb6a9356047f5ddb8ee.jpg extracting: train/6images1_png.rf.8c65b6bfe8d5b01a2a1545337de6c390.jpg extracting: train/6images4_png.rf.2c77da3c85f4cb57ebe5d90ab8ed5e0c.jpg extracting: train/6images5_png.rf.7033ded0e4684504365b5b0345529c5c.jpg extracting: train/7images12_png.rf.c46a44810aea7edafc53b6b561c6cf6a.jpg extracting: train/7images17_png.rf.ff8fc5bb0f84483dd914f5f2de524933.jpg 
extracting: train/7images1_png.rf.cf5406f149f35ab24eda2c621f9298ed.jpg extracting: train/7images2_png.rf.f84de676f7fb3de9d7789e1dafab8fa3.jpg extracting: train/7images3_png.rf.14c5f2588d07e7234659792e20bd7fd8.jpg extracting: train/7images4_png.rf.5e455f9a5c94b0a3b56043ef05d06854.jpg extracting: train/_annotations.txt extracting: train/_classes.txt extracting: train/images0_png.rf.d1f446cd89662b7ccf994dc77f63ff56.jpg extracting: train/images10_png.rf.bc421baf20b7cbf6af4ea822f259fcab.jpg extracting: train/images13_png.rf.dff8711d203b47a3f8709c4cee5d6927.jpg extracting: train/images15_png.rf.e1b904b94d5539da79117c3613ae5765.jpg extracting: train/images1_png.rf.9c2cb373d7f4613a2735410f1fdb3043.jpg extracting: train/images3_png.rf.e7cf0078d44c2571ebc5d607ffaacbc8.jpg extracting: train/images4_png.rf.97f8f01f67adf77de50c99fd6ed7f879.jpg extracting: train/images5_png.rf.d16b8c87a8a593e5971124648ba63736.jpg extracting: train/img0_png.rf.fa065b68c3d51d65399f883f8713ccf2.jpg extracting: train/img102_png.rf.3da7ec4deedfb6f15834e9a42aee4e7c.jpg extracting: train/img103_png.rf.67216b08a719a9a9dba68f83c5460a74.jpg extracting: train/img104_png.rf.db759d639a6b1ace6dc8e7442c86ba9a.jpg extracting: train/img106_png.rf.d882268d61ac720e54c35110fb8bc4b0.jpg extracting: train/img107_png.rf.a62231fc47913091ec76468e536d6f28.jpg extracting: train/img10_png.rf.e7bba8322d47d623f71903aa50f48730.jpg extracting: train/img113_png.rf.cb3afcbea4e7177a2ed703b4b1d94887.jpg extracting: train/img116_png.rf.1f7034a069e5a888b00da9496e0df5ae.jpg extracting: train/img118_png.rf.9e21a52ffda3719b2cc6deb0309efd7d.jpg extracting: train/img11_png.rf.a9584dc2d254fd84ca6a30cc9b821bd5.jpg extracting: train/img125_png.rf.4a0a9a2f74bd5127343124c4fb4d0670.jpg extracting: train/img126_png.rf.0bad29364a3846287498838f6791cae8.jpg extracting: train/img133_png.rf.e66c88015d6fb51921b20ad8008fc981.jpg extracting: train/img146_png.rf.9811cc9a676e18c4cf2bce86398feb9d.jpg extracting: 
train/img170_png.rf.1d04d991430ba0d672fabff684817dc6.jpg extracting: train/img174_png.rf.4d01b9ebbdc8c1b434c61c945794a79e.jpg extracting: train/img178_png.rf.b0e5b6547069d86483e91fc99356e5d9.jpg extracting: train/img181_png.rf.363074a89b0325055d28f3794083e479.jpg extracting: train/img189_png.rf.07aedf508ccbfc3e0244bd54bd76cbf8.jpg extracting: train/img197_png.rf.36119ab11e392cfeded10c61aa97eac6.jpg extracting: train/img1_png.rf.bf5b1060d3cb9959dc94b75d4fc78334.jpg extracting: train/img202_png.rf.f6520c22d6c95c8e5a105b6ee48b8da1.jpg extracting: train/img205_png.rf.98d121af5e0548a1402eb3e93560465d.jpg extracting: train/img215_png.rf.6d29cfcf38f6a4b2165ba5ba110454d2.jpg extracting: train/img270_png.rf.52541958250f2b45297faa1440d55d56.jpg extracting: train/img278_png.rf.82173849dfde92f2a2ab2761e5679891.jpg extracting: train/img283_png.rf.809b4e6edbe803fbcab887a40e59f526.jpg extracting: train/img2_png.rf.23b2d7fe287627739888976776de8437.jpg extracting: train/img306_png.rf.642a9812ecebfd9784d9eb593b78dcf2.jpg extracting: train/img34_png.rf.f98b7fa7325ddb9ca373121c5c120f55.jpg extracting: train/img38_png.rf.cd97a110e34ad869a4b79d8237d92a36.jpg extracting: train/img39_png.rf.e9b1634ca400418b29839bad544e8634.jpg extracting: train/img3_png.rf.3f382680461124ba2e19c1df51d895e7.jpg extracting: train/img45_png.rf.870b550082c3da2c42e40017442c115b.jpg extracting: train/img46_png.rf.2c1d961d3e61d1389c825f2aba32ab39.jpg extracting: train/img4_png.rf.4f0ce3c02167bf3f8ae2454471c9c4fd.jpg extracting: train/img57_png.rf.dc254e143fec0667ac462e303290e301.jpg extracting: train/img58_png.rf.1a6e09bda52588bb7f3890768f0db5f2.jpg extracting: train/img5_png.rf.542fe1bdd2a910b20f27ce55cf8689ff.jpg extracting: train/img66_png.rf.534ec186146ae4409f8c875cf28dcb84.jpg extracting: train/img6_png.rf.7aceac81d4a22f02ab0460ee5bd2227f.jpg extracting: train/img77_png.rf.8f8e23567322fd7de129380c6a54bd01.jpg extracting: train/img78_png.rf.eb48e94d48c04b3077d049cb8cd920bb.jpg extracting: 
train/img7_png.rf.2dd95d826f13ab03805de7f7b842eb40.jpg extracting: train/img85_png.rf.f7a4ae3bb16a8c3fe7f164e35f11ea65.jpg extracting: train/img86_png.rf.3addc2b6c62b8d5098feba035bd6014d.jpg extracting: train/img92_png.rf.5b79211320122e08554541c15fc041dd.jpg extracting: train/img93_png.rf.7fbe9b0dcab1f063b154796d00ae669b.jpg extracting: train/img95_png.rf.c97bb901c22e4f1519bac037ffbdbbf7.jpg extracting: train/img97_png.rf.2e3f7205a9d122aa07906ebe643f1c04.jpg extracting: train/img98_png.rf.c6da81320ec0c22868d84c2291b416f5.jpg creating: valid/ extracting: valid/27img121_png.rf.6b1bbeee06ff52963c7b12c7bfb2aacc.jpg extracting: valid/2images12_png.rf.ba715b76693ae62d01e142ba9859ffc9.jpg extracting: valid/2images35_png.rf.81e0cc483a896440e148a5df5550d243.jpg extracting: valid/2images40_png.rf.45e16e4d96b21eeb7b0e06556ca12291.jpg extracting: valid/3images19_png.rf.aec1de41eff03d6e343427691b2a3029.jpg extracting: valid/3images1_png.rf.f293d93f952977825a07613f23a55f70.jpg extracting: valid/6images11_png.rf.a467d473bfa546de8e2c5ef4ef894802.jpg extracting: valid/6images2_png.rf.386c9a11cef823c522619aefd9c7ca9d.jpg extracting: valid/_annotations.txt extracting: valid/_classes.txt extracting: valid/images14_png.rf.f0a78b8df38e6394e9cc3d56d7677c87.jpg extracting: valid/images2_png.rf.1f566a50352095712ec385ffc17b14c5.jpg extracting: valid/img101_png.rf.aca3e688b7798ee456467954274733de.jpg extracting: valid/img111_png.rf.4bc2a8d175d8bbe2a289ba9e0ed4c717.jpg extracting: valid/img112_png.rf.aaadc30802c92e3c1196a96b859c8ebb.jpg extracting: valid/img117_png.rf.76d5b2f35f4974cca3750f258af86101.jpg extracting: valid/img121_png.rf.a11051677709f708036ca072d0725099.jpg extracting: valid/img122_png.rf.f6c62a3f0290eae81ffc5c457f546adf.jpg extracting: valid/img141_png.rf.9d9ff6b78c2940546bf364e662b1c813.jpg extracting: valid/img165_png.rf.6bb45f3455f0340e377ec61e662d7846.jpg extracting: valid/img177_png.rf.fd279311108df43a7d9225cc26c2542f.jpg extracting: 
valid/img262_png.rf.cd066cf49feb976bf8cd8eca32dcf729.jpg extracting: valid/img27_png.rf.09745a24cc36301e1eca5c3a9bab3853.jpg extracting: valid/img304_png.rf.f91aa4dfe963c390a521fd748f1ab9f5.jpg extracting: valid/img313_png.rf.8ea5815425e82f42c06715e0b98342f2.jpg extracting: valid/img31_png.rf.3b72bf618de466d70ab487fe5e20ff70.jpg extracting: valid/img40_png.rf.8389bb867a237cad805b4819dc788a98.jpg extracting: valid/img41_png.rf.4f6f5b9dcbe9eb80f9913e223f321f66.jpg extracting: valid/img69_png.rf.52cb5ea0d37bc73a2fcc1ee19de2b124.jpg extracting: valid/img84_png.rf.c9700ee5dee2697886b497a2e17f1573.jpg
# Download the pretrained YOLOv3 Darknet weights (~237 MB) from the YOLO
# author's site; consumed by convert.py below.
!wget https://pjreddie.com/media/files/yolov3.weights
--2023-01-18 12:01:19-- https://pjreddie.com/media/files/yolov3.weights Translacja pjreddie.com (pjreddie.com)... 128.208.4.108 Łączenie się z pjreddie.com (pjreddie.com)|128.208.4.108|:443... połączono. Żądanie HTTP wysłano, oczekiwanie na odpowiedź... 200 OK Długość: 248007048 (237M) [application/octet-stream] Zapis do: `yolov3.weights' yolov3.weights 100%[===================>] 236,52M 17,0MB/s w 15s 2023-01-18 12:01:35 (15,4 MB/s) - zapisano `yolov3.weights' [248007048/248007048]
# Advanced activation layers from Keras; presumably imported so the activation
# classes are registered/available for the converted model (ELU and PReLU do not
# appear to be used in the visible cells — TODO confirm against later cells).
from keras.layers import ELU, PReLU, LeakyReLU
# Convert the Darknet config + pretrained weights into a Keras HDF5 model at
# model_data/yolo.h5 (the conversion log below shows all 75 sections parsed).
!python keras-yolo3/convert.py keras-yolo3/yolov3.cfg yolov3.weights model_data/yolo.h5
Loading weights. Weights Header: 0 2 0 [32013312] Parsing Darknet config. Creating Keras model. Parsing section net_0 Parsing section convolutional_0 conv2d bn leaky (3, 3, 3, 32) Metal device set to: Apple M1 systemMemory: 8.00 GB maxCacheSize: 2.67 GB 2023-01-18 12:03:25.001841: I tensorflow/core/common_runtime/pluggable_device/pluggable_device_factory.cc:306] Could not identify NUMA node of platform GPU ID 0, defaulting to 0. Your kernel may not have been built with NUMA support. 2023-01-18 12:03:25.002402: I tensorflow/core/common_runtime/pluggable_device/pluggable_device_factory.cc:272] Created TensorFlow device (/job:localhost/replica:0/task:0/device:GPU:0 with 0 MB memory) -> physical PluggableDevice (device: 0, name: METAL, pci bus id: <undefined>) Parsing section convolutional_1 conv2d bn leaky (3, 3, 32, 64) Parsing section convolutional_2 conv2d bn leaky (1, 1, 64, 32) Parsing section convolutional_3 conv2d bn leaky (3, 3, 32, 64) Parsing section shortcut_0 Parsing section convolutional_4 conv2d bn leaky (3, 3, 64, 128) Parsing section convolutional_5 conv2d bn leaky (1, 1, 128, 64) Parsing section convolutional_6 conv2d bn leaky (3, 3, 64, 128) Parsing section shortcut_1 Parsing section convolutional_7 conv2d bn leaky (1, 1, 128, 64) Parsing section convolutional_8 conv2d bn leaky (3, 3, 64, 128) Parsing section shortcut_2 Parsing section convolutional_9 conv2d bn leaky (3, 3, 128, 256) Parsing section convolutional_10 conv2d bn leaky (1, 1, 256, 128) Parsing section convolutional_11 conv2d bn leaky (3, 3, 128, 256) Parsing section shortcut_3 Parsing section convolutional_12 conv2d bn leaky (1, 1, 256, 128) Parsing section convolutional_13 conv2d bn leaky (3, 3, 128, 256) Parsing section shortcut_4 Parsing section convolutional_14 conv2d bn leaky (1, 1, 256, 128) Parsing section convolutional_15 conv2d bn leaky (3, 3, 128, 256) Parsing section shortcut_5 Parsing section convolutional_16 conv2d bn leaky (1, 1, 256, 128) Parsing section convolutional_17 
conv2d bn leaky (3, 3, 128, 256) Parsing section shortcut_6 Parsing section convolutional_18 conv2d bn leaky (1, 1, 256, 128) Parsing section convolutional_19 conv2d bn leaky (3, 3, 128, 256) Parsing section shortcut_7 Parsing section convolutional_20 conv2d bn leaky (1, 1, 256, 128) Parsing section convolutional_21 conv2d bn leaky (3, 3, 128, 256) Parsing section shortcut_8 Parsing section convolutional_22 conv2d bn leaky (1, 1, 256, 128) Parsing section convolutional_23 conv2d bn leaky (3, 3, 128, 256) Parsing section shortcut_9 Parsing section convolutional_24 conv2d bn leaky (1, 1, 256, 128) Parsing section convolutional_25 conv2d bn leaky (3, 3, 128, 256) Parsing section shortcut_10 Parsing section convolutional_26 conv2d bn leaky (3, 3, 256, 512) Parsing section convolutional_27 conv2d bn leaky (1, 1, 512, 256) Parsing section convolutional_28 conv2d bn leaky (3, 3, 256, 512) Parsing section shortcut_11 Parsing section convolutional_29 conv2d bn leaky (1, 1, 512, 256) Parsing section convolutional_30 conv2d bn leaky (3, 3, 256, 512) Parsing section shortcut_12 Parsing section convolutional_31 conv2d bn leaky (1, 1, 512, 256) Parsing section convolutional_32 conv2d bn leaky (3, 3, 256, 512) Parsing section shortcut_13 Parsing section convolutional_33 conv2d bn leaky (1, 1, 512, 256) Parsing section convolutional_34 conv2d bn leaky (3, 3, 256, 512) Parsing section shortcut_14 Parsing section convolutional_35 conv2d bn leaky (1, 1, 512, 256) Parsing section convolutional_36 conv2d bn leaky (3, 3, 256, 512) Parsing section shortcut_15 Parsing section convolutional_37 conv2d bn leaky (1, 1, 512, 256) Parsing section convolutional_38 conv2d bn leaky (3, 3, 256, 512) Parsing section shortcut_16 Parsing section convolutional_39 conv2d bn leaky (1, 1, 512, 256) Parsing section convolutional_40 conv2d bn leaky (3, 3, 256, 512) Parsing section shortcut_17 Parsing section convolutional_41 conv2d bn leaky (1, 1, 512, 256) Parsing section convolutional_42 conv2d bn leaky 
(3, 3, 256, 512) Parsing section shortcut_18 Parsing section convolutional_43 conv2d bn leaky (3, 3, 512, 1024) Parsing section convolutional_44 conv2d bn leaky (1, 1, 1024, 512) Parsing section convolutional_45 conv2d bn leaky (3, 3, 512, 1024) Parsing section shortcut_19 Parsing section convolutional_46 conv2d bn leaky (1, 1, 1024, 512) Parsing section convolutional_47 conv2d bn leaky (3, 3, 512, 1024) Parsing section shortcut_20 Parsing section convolutional_48 conv2d bn leaky (1, 1, 1024, 512) Parsing section convolutional_49 conv2d bn leaky (3, 3, 512, 1024) Parsing section shortcut_21 Parsing section convolutional_50 conv2d bn leaky (1, 1, 1024, 512) Parsing section convolutional_51 conv2d bn leaky (3, 3, 512, 1024) Parsing section shortcut_22 Parsing section convolutional_52 conv2d bn leaky (1, 1, 1024, 512) Parsing section convolutional_53 conv2d bn leaky (3, 3, 512, 1024) Parsing section convolutional_54 conv2d bn leaky (1, 1, 1024, 512) Parsing section convolutional_55 conv2d bn leaky (3, 3, 512, 1024) Parsing section convolutional_56 conv2d bn leaky (1, 1, 1024, 512) Parsing section convolutional_57 conv2d bn leaky (3, 3, 512, 1024) Parsing section convolutional_58 conv2d linear (1, 1, 1024, 255) Parsing section yolo_0 Parsing section route_0 Parsing section convolutional_59 conv2d bn leaky (1, 1, 512, 256) Parsing section upsample_0 Parsing section route_1 Concatenating route layers: [<KerasTensor: shape=(None, None, None, 256) dtype=float32 (created by layer 'up_sampling2d')>, <KerasTensor: shape=(None, None, None, 512) dtype=float32 (created by layer 'add_18')>] Parsing section convolutional_60 conv2d bn leaky (1, 1, 768, 256) Parsing section convolutional_61 conv2d bn leaky (3, 3, 256, 512) Parsing section convolutional_62 conv2d bn leaky (1, 1, 512, 256) Parsing section convolutional_63 conv2d bn leaky (3, 3, 256, 512) Parsing section convolutional_64 conv2d bn leaky (1, 1, 512, 256) Parsing section convolutional_65 conv2d bn leaky (3, 3, 256, 512) 
Parsing section convolutional_66 conv2d linear (1, 1, 512, 255) Parsing section yolo_1 Parsing section route_2 Parsing section convolutional_67 conv2d bn leaky (1, 1, 256, 128) Parsing section upsample_1 Parsing section route_3 Concatenating route layers: [<KerasTensor: shape=(None, None, None, 128) dtype=float32 (created by layer 'up_sampling2d_1')>, <KerasTensor: shape=(None, None, None, 256) dtype=float32 (created by layer 'add_10')>] Parsing section convolutional_68 conv2d bn leaky (1, 1, 384, 128) Parsing section convolutional_69 conv2d bn leaky (3, 3, 128, 256) Parsing section convolutional_70 conv2d bn leaky (1, 1, 256, 128) Parsing section convolutional_71 conv2d bn leaky (3, 3, 128, 256) Parsing section convolutional_72 conv2d bn leaky (1, 1, 256, 128) Parsing section convolutional_73 conv2d bn leaky (3, 3, 128, 256) Parsing section convolutional_74 conv2d linear (1, 1, 256, 255) Parsing section yolo_2 Model: "model" __________________________________________________________________________________________________ Layer (type) Output Shape Param # Connected to ================================================================================================== input_1 (InputLayer) [(None, None, None, 0 [] 3)] conv2d (Conv2D) (None, None, None, 864 ['input_1[0][0]'] 32) batch_normalization (BatchNorm (None, None, None, 128 ['conv2d[0][0]'] alization) 32) leaky_re_lu (LeakyReLU) (None, None, None, 0 ['batch_normalization[0][0]'] 32) zero_padding2d (ZeroPadding2D) (None, None, None, 0 ['leaky_re_lu[0][0]'] 32) conv2d_1 (Conv2D) (None, None, None, 18432 ['zero_padding2d[0][0]'] 64) batch_normalization_1 (BatchNo (None, None, None, 256 ['conv2d_1[0][0]'] rmalization) 64) leaky_re_lu_1 (LeakyReLU) (None, None, None, 0 ['batch_normalization_1[0][0]'] 64) conv2d_2 (Conv2D) (None, None, None, 2048 ['leaky_re_lu_1[0][0]'] 32) batch_normalization_2 (BatchNo (None, None, None, 128 ['conv2d_2[0][0]'] rmalization) 32) leaky_re_lu_2 (LeakyReLU) (None, None, None, 0 
['batch_normalization_2[0][0]'] 32) conv2d_3 (Conv2D) (None, None, None, 18432 ['leaky_re_lu_2[0][0]'] 64) batch_normalization_3 (BatchNo (None, None, None, 256 ['conv2d_3[0][0]'] rmalization) 64) leaky_re_lu_3 (LeakyReLU) (None, None, None, 0 ['batch_normalization_3[0][0]'] 64) add (Add) (None, None, None, 0 ['leaky_re_lu_1[0][0]', 64) 'leaky_re_lu_3[0][0]'] zero_padding2d_1 (ZeroPadding2 (None, None, None, 0 ['add[0][0]'] D) 64) conv2d_4 (Conv2D) (None, None, None, 73728 ['zero_padding2d_1[0][0]'] 128) batch_normalization_4 (BatchNo (None, None, None, 512 ['conv2d_4[0][0]'] rmalization) 128) leaky_re_lu_4 (LeakyReLU) (None, None, None, 0 ['batch_normalization_4[0][0]'] 128) conv2d_5 (Conv2D) (None, None, None, 8192 ['leaky_re_lu_4[0][0]'] 64) batch_normalization_5 (BatchNo (None, None, None, 256 ['conv2d_5[0][0]'] rmalization) 64) leaky_re_lu_5 (LeakyReLU) (None, None, None, 0 ['batch_normalization_5[0][0]'] 64) conv2d_6 (Conv2D) (None, None, None, 73728 ['leaky_re_lu_5[0][0]'] 128) batch_normalization_6 (BatchNo (None, None, None, 512 ['conv2d_6[0][0]'] rmalization) 128) leaky_re_lu_6 (LeakyReLU) (None, None, None, 0 ['batch_normalization_6[0][0]'] 128) add_1 (Add) (None, None, None, 0 ['leaky_re_lu_4[0][0]', 128) 'leaky_re_lu_6[0][0]'] conv2d_7 (Conv2D) (None, None, None, 8192 ['add_1[0][0]'] 64) batch_normalization_7 (BatchNo (None, None, None, 256 ['conv2d_7[0][0]'] rmalization) 64) leaky_re_lu_7 (LeakyReLU) (None, None, None, 0 ['batch_normalization_7[0][0]'] 64) conv2d_8 (Conv2D) (None, None, None, 73728 ['leaky_re_lu_7[0][0]'] 128) batch_normalization_8 (BatchNo (None, None, None, 512 ['conv2d_8[0][0]'] rmalization) 128) leaky_re_lu_8 (LeakyReLU) (None, None, None, 0 ['batch_normalization_8[0][0]'] 128) add_2 (Add) (None, None, None, 0 ['add_1[0][0]', 128) 'leaky_re_lu_8[0][0]'] zero_padding2d_2 (ZeroPadding2 (None, None, None, 0 ['add_2[0][0]'] D) 128) conv2d_9 (Conv2D) (None, None, None, 294912 ['zero_padding2d_2[0][0]'] 256) batch_normalization_9 
(BatchNo (None, None, None, 1024 ['conv2d_9[0][0]'] rmalization) 256) leaky_re_lu_9 (LeakyReLU) (None, None, None, 0 ['batch_normalization_9[0][0]'] 256) conv2d_10 (Conv2D) (None, None, None, 32768 ['leaky_re_lu_9[0][0]'] 128) batch_normalization_10 (BatchN (None, None, None, 512 ['conv2d_10[0][0]'] ormalization) 128) leaky_re_lu_10 (LeakyReLU) (None, None, None, 0 ['batch_normalization_10[0][0]'] 128) conv2d_11 (Conv2D) (None, None, None, 294912 ['leaky_re_lu_10[0][0]'] 256) batch_normalization_11 (BatchN (None, None, None, 1024 ['conv2d_11[0][0]'] ormalization) 256) leaky_re_lu_11 (LeakyReLU) (None, None, None, 0 ['batch_normalization_11[0][0]'] 256) add_3 (Add) (None, None, None, 0 ['leaky_re_lu_9[0][0]', 256) 'leaky_re_lu_11[0][0]'] conv2d_12 (Conv2D) (None, None, None, 32768 ['add_3[0][0]'] 128) batch_normalization_12 (BatchN (None, None, None, 512 ['conv2d_12[0][0]'] ormalization) 128) leaky_re_lu_12 (LeakyReLU) (None, None, None, 0 ['batch_normalization_12[0][0]'] 128) conv2d_13 (Conv2D) (None, None, None, 294912 ['leaky_re_lu_12[0][0]'] 256) batch_normalization_13 (BatchN (None, None, None, 1024 ['conv2d_13[0][0]'] ormalization) 256) leaky_re_lu_13 (LeakyReLU) (None, None, None, 0 ['batch_normalization_13[0][0]'] 256) add_4 (Add) (None, None, None, 0 ['add_3[0][0]', 256) 'leaky_re_lu_13[0][0]'] conv2d_14 (Conv2D) (None, None, None, 32768 ['add_4[0][0]'] 128) batch_normalization_14 (BatchN (None, None, None, 512 ['conv2d_14[0][0]'] ormalization) 128) leaky_re_lu_14 (LeakyReLU) (None, None, None, 0 ['batch_normalization_14[0][0]'] 128) conv2d_15 (Conv2D) (None, None, None, 294912 ['leaky_re_lu_14[0][0]'] 256) batch_normalization_15 (BatchN (None, None, None, 1024 ['conv2d_15[0][0]'] ormalization) 256) leaky_re_lu_15 (LeakyReLU) (None, None, None, 0 ['batch_normalization_15[0][0]'] 256) add_5 (Add) (None, None, None, 0 ['add_4[0][0]', 256) 'leaky_re_lu_15[0][0]'] conv2d_16 (Conv2D) (None, None, None, 32768 ['add_5[0][0]'] 128) batch_normalization_16 (BatchN 
(None, None, None, 512 ['conv2d_16[0][0]'] ormalization) 128) leaky_re_lu_16 (LeakyReLU) (None, None, None, 0 ['batch_normalization_16[0][0]'] 128) conv2d_17 (Conv2D) (None, None, None, 294912 ['leaky_re_lu_16[0][0]'] 256) batch_normalization_17 (BatchN (None, None, None, 1024 ['conv2d_17[0][0]'] ormalization) 256) leaky_re_lu_17 (LeakyReLU) (None, None, None, 0 ['batch_normalization_17[0][0]'] 256) add_6 (Add) (None, None, None, 0 ['add_5[0][0]', 256) 'leaky_re_lu_17[0][0]'] conv2d_18 (Conv2D) (None, None, None, 32768 ['add_6[0][0]'] 128) batch_normalization_18 (BatchN (None, None, None, 512 ['conv2d_18[0][0]'] ormalization) 128) leaky_re_lu_18 (LeakyReLU) (None, None, None, 0 ['batch_normalization_18[0][0]'] 128) conv2d_19 (Conv2D) (None, None, None, 294912 ['leaky_re_lu_18[0][0]'] 256) batch_normalization_19 (BatchN (None, None, None, 1024 ['conv2d_19[0][0]'] ormalization) 256) leaky_re_lu_19 (LeakyReLU) (None, None, None, 0 ['batch_normalization_19[0][0]'] 256) add_7 (Add) (None, None, None, 0 ['add_6[0][0]', 256) 'leaky_re_lu_19[0][0]'] conv2d_20 (Conv2D) (None, None, None, 32768 ['add_7[0][0]'] 128) batch_normalization_20 (BatchN (None, None, None, 512 ['conv2d_20[0][0]'] ormalization) 128) leaky_re_lu_20 (LeakyReLU) (None, None, None, 0 ['batch_normalization_20[0][0]'] 128) conv2d_21 (Conv2D) (None, None, None, 294912 ['leaky_re_lu_20[0][0]'] 256) batch_normalization_21 (BatchN (None, None, None, 1024 ['conv2d_21[0][0]'] ormalization) 256) leaky_re_lu_21 (LeakyReLU) (None, None, None, 0 ['batch_normalization_21[0][0]'] 256) add_8 (Add) (None, None, None, 0 ['add_7[0][0]', 256) 'leaky_re_lu_21[0][0]'] conv2d_22 (Conv2D) (None, None, None, 32768 ['add_8[0][0]'] 128) batch_normalization_22 (BatchN (None, None, None, 512 ['conv2d_22[0][0]'] ormalization) 128) leaky_re_lu_22 (LeakyReLU) (None, None, None, 0 ['batch_normalization_22[0][0]'] 128) conv2d_23 (Conv2D) (None, None, None, 294912 ['leaky_re_lu_22[0][0]'] 256) batch_normalization_23 (BatchN (None, None, 
None, 1024 ['conv2d_23[0][0]'] ormalization) 256) leaky_re_lu_23 (LeakyReLU) (None, None, None, 0 ['batch_normalization_23[0][0]'] 256) add_9 (Add) (None, None, None, 0 ['add_8[0][0]', 256) 'leaky_re_lu_23[0][0]'] conv2d_24 (Conv2D) (None, None, None, 32768 ['add_9[0][0]'] 128) batch_normalization_24 (BatchN (None, None, None, 512 ['conv2d_24[0][0]'] ormalization) 128) leaky_re_lu_24 (LeakyReLU) (None, None, None, 0 ['batch_normalization_24[0][0]'] 128) conv2d_25 (Conv2D) (None, None, None, 294912 ['leaky_re_lu_24[0][0]'] 256) batch_normalization_25 (BatchN (None, None, None, 1024 ['conv2d_25[0][0]'] ormalization) 256) leaky_re_lu_25 (LeakyReLU) (None, None, None, 0 ['batch_normalization_25[0][0]'] 256) add_10 (Add) (None, None, None, 0 ['add_9[0][0]', 256) 'leaky_re_lu_25[0][0]'] zero_padding2d_3 (ZeroPadding2 (None, None, None, 0 ['add_10[0][0]'] D) 256) conv2d_26 (Conv2D) (None, None, None, 1179648 ['zero_padding2d_3[0][0]'] 512) batch_normalization_26 (BatchN (None, None, None, 2048 ['conv2d_26[0][0]'] ormalization) 512) leaky_re_lu_26 (LeakyReLU) (None, None, None, 0 ['batch_normalization_26[0][0]'] 512) conv2d_27 (Conv2D) (None, None, None, 131072 ['leaky_re_lu_26[0][0]'] 256) batch_normalization_27 (BatchN (None, None, None, 1024 ['conv2d_27[0][0]'] ormalization) 256) leaky_re_lu_27 (LeakyReLU) (None, None, None, 0 ['batch_normalization_27[0][0]'] 256) conv2d_28 (Conv2D) (None, None, None, 1179648 ['leaky_re_lu_27[0][0]'] 512) batch_normalization_28 (BatchN (None, None, None, 2048 ['conv2d_28[0][0]'] ormalization) 512) leaky_re_lu_28 (LeakyReLU) (None, None, None, 0 ['batch_normalization_28[0][0]'] 512) add_11 (Add) (None, None, None, 0 ['leaky_re_lu_26[0][0]', 512) 'leaky_re_lu_28[0][0]'] conv2d_29 (Conv2D) (None, None, None, 131072 ['add_11[0][0]'] 256) batch_normalization_29 (BatchN (None, None, None, 1024 ['conv2d_29[0][0]'] ormalization) 256) leaky_re_lu_29 (LeakyReLU) (None, None, None, 0 ['batch_normalization_29[0][0]'] 256) conv2d_30 (Conv2D) (None, 
None, None, 1179648 ['leaky_re_lu_29[0][0]'] 512) batch_normalization_30 (BatchN (None, None, None, 2048 ['conv2d_30[0][0]'] ormalization) 512) leaky_re_lu_30 (LeakyReLU) (None, None, None, 0 ['batch_normalization_30[0][0]'] 512) add_12 (Add) (None, None, None, 0 ['add_11[0][0]', 512) 'leaky_re_lu_30[0][0]'] conv2d_31 (Conv2D) (None, None, None, 131072 ['add_12[0][0]'] 256) batch_normalization_31 (BatchN (None, None, None, 1024 ['conv2d_31[0][0]'] ormalization) 256) leaky_re_lu_31 (LeakyReLU) (None, None, None, 0 ['batch_normalization_31[0][0]'] 256) conv2d_32 (Conv2D) (None, None, None, 1179648 ['leaky_re_lu_31[0][0]'] 512) batch_normalization_32 (BatchN (None, None, None, 2048 ['conv2d_32[0][0]'] ormalization) 512) leaky_re_lu_32 (LeakyReLU) (None, None, None, 0 ['batch_normalization_32[0][0]'] 512) add_13 (Add) (None, None, None, 0 ['add_12[0][0]', 512) 'leaky_re_lu_32[0][0]'] conv2d_33 (Conv2D) (None, None, None, 131072 ['add_13[0][0]'] 256) batch_normalization_33 (BatchN (None, None, None, 1024 ['conv2d_33[0][0]'] ormalization) 256) leaky_re_lu_33 (LeakyReLU) (None, None, None, 0 ['batch_normalization_33[0][0]'] 256) conv2d_34 (Conv2D) (None, None, None, 1179648 ['leaky_re_lu_33[0][0]'] 512) batch_normalization_34 (BatchN (None, None, None, 2048 ['conv2d_34[0][0]'] ormalization) 512) leaky_re_lu_34 (LeakyReLU) (None, None, None, 0 ['batch_normalization_34[0][0]'] 512) add_14 (Add) (None, None, None, 0 ['add_13[0][0]', 512) 'leaky_re_lu_34[0][0]'] conv2d_35 (Conv2D) (None, None, None, 131072 ['add_14[0][0]'] 256) batch_normalization_35 (BatchN (None, None, None, 1024 ['conv2d_35[0][0]'] ormalization) 256) leaky_re_lu_35 (LeakyReLU) (None, None, None, 0 ['batch_normalization_35[0][0]'] 256) conv2d_36 (Conv2D) (None, None, None, 1179648 ['leaky_re_lu_35[0][0]'] 512) batch_normalization_36 (BatchN (None, None, None, 2048 ['conv2d_36[0][0]'] ormalization) 512) leaky_re_lu_36 (LeakyReLU) (None, None, None, 0 ['batch_normalization_36[0][0]'] 512) add_15 (Add) (None, 
None, None, 0 ['add_14[0][0]', 512) 'leaky_re_lu_36[0][0]'] conv2d_37 (Conv2D) (None, None, None, 131072 ['add_15[0][0]'] 256) batch_normalization_37 (BatchN (None, None, None, 1024 ['conv2d_37[0][0]'] ormalization) 256) leaky_re_lu_37 (LeakyReLU) (None, None, None, 0 ['batch_normalization_37[0][0]'] 256) conv2d_38 (Conv2D) (None, None, None, 1179648 ['leaky_re_lu_37[0][0]'] 512) batch_normalization_38 (BatchN (None, None, None, 2048 ['conv2d_38[0][0]'] ormalization) 512) leaky_re_lu_38 (LeakyReLU) (None, None, None, 0 ['batch_normalization_38[0][0]'] 512) add_16 (Add) (None, None, None, 0 ['add_15[0][0]', 512) 'leaky_re_lu_38[0][0]'] conv2d_39 (Conv2D) (None, None, None, 131072 ['add_16[0][0]'] 256) batch_normalization_39 (BatchN (None, None, None, 1024 ['conv2d_39[0][0]'] ormalization) 256) leaky_re_lu_39 (LeakyReLU) (None, None, None, 0 ['batch_normalization_39[0][0]'] 256) conv2d_40 (Conv2D) (None, None, None, 1179648 ['leaky_re_lu_39[0][0]'] 512) batch_normalization_40 (BatchN (None, None, None, 2048 ['conv2d_40[0][0]'] ormalization) 512) leaky_re_lu_40 (LeakyReLU) (None, None, None, 0 ['batch_normalization_40[0][0]'] 512) add_17 (Add) (None, None, None, 0 ['add_16[0][0]', 512) 'leaky_re_lu_40[0][0]'] conv2d_41 (Conv2D) (None, None, None, 131072 ['add_17[0][0]'] 256) batch_normalization_41 (BatchN (None, None, None, 1024 ['conv2d_41[0][0]'] ormalization) 256) leaky_re_lu_41 (LeakyReLU) (None, None, None, 0 ['batch_normalization_41[0][0]'] 256) conv2d_42 (Conv2D) (None, None, None, 1179648 ['leaky_re_lu_41[0][0]'] 512) batch_normalization_42 (BatchN (None, None, None, 2048 ['conv2d_42[0][0]'] ormalization) 512) leaky_re_lu_42 (LeakyReLU) (None, None, None, 0 ['batch_normalization_42[0][0]'] 512) add_18 (Add) (None, None, None, 0 ['add_17[0][0]', 512) 'leaky_re_lu_42[0][0]'] zero_padding2d_4 (ZeroPadding2 (None, None, None, 0 ['add_18[0][0]'] D) 512) conv2d_43 (Conv2D) (None, None, None, 4718592 ['zero_padding2d_4[0][0]'] 1024) batch_normalization_43 (BatchN 
(None, None, None, 4096 ['conv2d_43[0][0]'] ormalization) 1024) leaky_re_lu_43 (LeakyReLU) (None, None, None, 0 ['batch_normalization_43[0][0]'] 1024) conv2d_44 (Conv2D) (None, None, None, 524288 ['leaky_re_lu_43[0][0]'] 512) batch_normalization_44 (BatchN (None, None, None, 2048 ['conv2d_44[0][0]'] ormalization) 512) leaky_re_lu_44 (LeakyReLU) (None, None, None, 0 ['batch_normalization_44[0][0]'] 512) conv2d_45 (Conv2D) (None, None, None, 4718592 ['leaky_re_lu_44[0][0]'] 1024) batch_normalization_45 (BatchN (None, None, None, 4096 ['conv2d_45[0][0]'] ormalization) 1024) leaky_re_lu_45 (LeakyReLU) (None, None, None, 0 ['batch_normalization_45[0][0]'] 1024) add_19 (Add) (None, None, None, 0 ['leaky_re_lu_43[0][0]', 1024) 'leaky_re_lu_45[0][0]'] conv2d_46 (Conv2D) (None, None, None, 524288 ['add_19[0][0]'] 512) batch_normalization_46 (BatchN (None, None, None, 2048 ['conv2d_46[0][0]'] ormalization) 512) leaky_re_lu_46 (LeakyReLU) (None, None, None, 0 ['batch_normalization_46[0][0]'] 512) conv2d_47 (Conv2D) (None, None, None, 4718592 ['leaky_re_lu_46[0][0]'] 1024) batch_normalization_47 (BatchN (None, None, None, 4096 ['conv2d_47[0][0]'] ormalization) 1024) leaky_re_lu_47 (LeakyReLU) (None, None, None, 0 ['batch_normalization_47[0][0]'] 1024) add_20 (Add) (None, None, None, 0 ['add_19[0][0]', 1024) 'leaky_re_lu_47[0][0]'] conv2d_48 (Conv2D) (None, None, None, 524288 ['add_20[0][0]'] 512) batch_normalization_48 (BatchN (None, None, None, 2048 ['conv2d_48[0][0]'] ormalization) 512) leaky_re_lu_48 (LeakyReLU) (None, None, None, 0 ['batch_normalization_48[0][0]'] 512) conv2d_49 (Conv2D) (None, None, None, 4718592 ['leaky_re_lu_48[0][0]'] 1024) batch_normalization_49 (BatchN (None, None, None, 4096 ['conv2d_49[0][0]'] ormalization) 1024) leaky_re_lu_49 (LeakyReLU) (None, None, None, 0 ['batch_normalization_49[0][0]'] 1024) add_21 (Add) (None, None, None, 0 ['add_20[0][0]', 1024) 'leaky_re_lu_49[0][0]'] conv2d_50 (Conv2D) (None, None, None, 524288 ['add_21[0][0]'] 512) 
batch_normalization_50 (BatchN (None, None, None, 2048 ['conv2d_50[0][0]'] ormalization) 512) leaky_re_lu_50 (LeakyReLU) (None, None, None, 0 ['batch_normalization_50[0][0]'] 512) conv2d_51 (Conv2D) (None, None, None, 4718592 ['leaky_re_lu_50[0][0]'] 1024) batch_normalization_51 (BatchN (None, None, None, 4096 ['conv2d_51[0][0]'] ormalization) 1024) leaky_re_lu_51 (LeakyReLU) (None, None, None, 0 ['batch_normalization_51[0][0]'] 1024) add_22 (Add) (None, None, None, 0 ['add_21[0][0]', 1024) 'leaky_re_lu_51[0][0]'] conv2d_52 (Conv2D) (None, None, None, 524288 ['add_22[0][0]'] 512) batch_normalization_52 (BatchN (None, None, None, 2048 ['conv2d_52[0][0]'] ormalization) 512) leaky_re_lu_52 (LeakyReLU) (None, None, None, 0 ['batch_normalization_52[0][0]'] 512) conv2d_53 (Conv2D) (None, None, None, 4718592 ['leaky_re_lu_52[0][0]'] 1024) batch_normalization_53 (BatchN (None, None, None, 4096 ['conv2d_53[0][0]'] ormalization) 1024) leaky_re_lu_53 (LeakyReLU) (None, None, None, 0 ['batch_normalization_53[0][0]'] 1024) conv2d_54 (Conv2D) (None, None, None, 524288 ['leaky_re_lu_53[0][0]'] 512) batch_normalization_54 (BatchN (None, None, None, 2048 ['conv2d_54[0][0]'] ormalization) 512) leaky_re_lu_54 (LeakyReLU) (None, None, None, 0 ['batch_normalization_54[0][0]'] 512) conv2d_55 (Conv2D) (None, None, None, 4718592 ['leaky_re_lu_54[0][0]'] 1024) batch_normalization_55 (BatchN (None, None, None, 4096 ['conv2d_55[0][0]'] ormalization) 1024) leaky_re_lu_55 (LeakyReLU) (None, None, None, 0 ['batch_normalization_55[0][0]'] 1024) conv2d_56 (Conv2D) (None, None, None, 524288 ['leaky_re_lu_55[0][0]'] 512) batch_normalization_56 (BatchN (None, None, None, 2048 ['conv2d_56[0][0]'] ormalization) 512) leaky_re_lu_56 (LeakyReLU) (None, None, None, 0 ['batch_normalization_56[0][0]'] 512) conv2d_59 (Conv2D) (None, None, None, 131072 ['leaky_re_lu_56[0][0]'] 256) batch_normalization_58 (BatchN (None, None, None, 1024 ['conv2d_59[0][0]'] ormalization) 256) leaky_re_lu_58 (LeakyReLU) (None, 
None, None, 0 ['batch_normalization_58[0][0]'] 256) up_sampling2d (UpSampling2D) (None, None, None, 0 ['leaky_re_lu_58[0][0]'] 256) concatenate (Concatenate) (None, None, None, 0 ['up_sampling2d[0][0]', 768) 'add_18[0][0]'] conv2d_60 (Conv2D) (None, None, None, 196608 ['concatenate[0][0]'] 256) batch_normalization_59 (BatchN (None, None, None, 1024 ['conv2d_60[0][0]'] ormalization) 256) leaky_re_lu_59 (LeakyReLU) (None, None, None, 0 ['batch_normalization_59[0][0]'] 256) conv2d_61 (Conv2D) (None, None, None, 1179648 ['leaky_re_lu_59[0][0]'] 512) batch_normalization_60 (BatchN (None, None, None, 2048 ['conv2d_61[0][0]'] ormalization) 512) leaky_re_lu_60 (LeakyReLU) (None, None, None, 0 ['batch_normalization_60[0][0]'] 512) conv2d_62 (Conv2D) (None, None, None, 131072 ['leaky_re_lu_60[0][0]'] 256) batch_normalization_61 (BatchN (None, None, None, 1024 ['conv2d_62[0][0]'] ormalization) 256) leaky_re_lu_61 (LeakyReLU) (None, None, None, 0 ['batch_normalization_61[0][0]'] 256) conv2d_63 (Conv2D) (None, None, None, 1179648 ['leaky_re_lu_61[0][0]'] 512) batch_normalization_62 (BatchN (None, None, None, 2048 ['conv2d_63[0][0]'] ormalization) 512) leaky_re_lu_62 (LeakyReLU) (None, None, None, 0 ['batch_normalization_62[0][0]'] 512) conv2d_64 (Conv2D) (None, None, None, 131072 ['leaky_re_lu_62[0][0]'] 256) batch_normalization_63 (BatchN (None, None, None, 1024 ['conv2d_64[0][0]'] ormalization) 256) leaky_re_lu_63 (LeakyReLU) (None, None, None, 0 ['batch_normalization_63[0][0]'] 256) conv2d_67 (Conv2D) (None, None, None, 32768 ['leaky_re_lu_63[0][0]'] 128) batch_normalization_65 (BatchN (None, None, None, 512 ['conv2d_67[0][0]'] ormalization) 128) leaky_re_lu_65 (LeakyReLU) (None, None, None, 0 ['batch_normalization_65[0][0]'] 128) up_sampling2d_1 (UpSampling2D) (None, None, None, 0 ['leaky_re_lu_65[0][0]'] 128) concatenate_1 (Concatenate) (None, None, None, 0 ['up_sampling2d_1[0][0]', 384) 'add_10[0][0]'] conv2d_68 (Conv2D) (None, None, None, 49152 ['concatenate_1[0][0]'] 
128) batch_normalization_66 (BatchN (None, None, None, 512 ['conv2d_68[0][0]'] ormalization) 128) leaky_re_lu_66 (LeakyReLU) (None, None, None, 0 ['batch_normalization_66[0][0]'] 128) conv2d_69 (Conv2D) (None, None, None, 294912 ['leaky_re_lu_66[0][0]'] 256) batch_normalization_67 (BatchN (None, None, None, 1024 ['conv2d_69[0][0]'] ormalization) 256) leaky_re_lu_67 (LeakyReLU) (None, None, None, 0 ['batch_normalization_67[0][0]'] 256) conv2d_70 (Conv2D) (None, None, None, 32768 ['leaky_re_lu_67[0][0]'] 128) batch_normalization_68 (BatchN (None, None, None, 512 ['conv2d_70[0][0]'] ormalization) 128) leaky_re_lu_68 (LeakyReLU) (None, None, None, 0 ['batch_normalization_68[0][0]'] 128) conv2d_71 (Conv2D) (None, None, None, 294912 ['leaky_re_lu_68[0][0]'] 256) batch_normalization_69 (BatchN (None, None, None, 1024 ['conv2d_71[0][0]'] ormalization) 256) leaky_re_lu_69 (LeakyReLU) (None, None, None, 0 ['batch_normalization_69[0][0]'] 256) conv2d_72 (Conv2D) (None, None, None, 32768 ['leaky_re_lu_69[0][0]'] 128) batch_normalization_70 (BatchN (None, None, None, 512 ['conv2d_72[0][0]'] ormalization) 128) leaky_re_lu_70 (LeakyReLU) (None, None, None, 0 ['batch_normalization_70[0][0]'] 128) conv2d_57 (Conv2D) (None, None, None, 4718592 ['leaky_re_lu_56[0][0]'] 1024) conv2d_65 (Conv2D) (None, None, None, 1179648 ['leaky_re_lu_63[0][0]'] 512) conv2d_73 (Conv2D) (None, None, None, 294912 ['leaky_re_lu_70[0][0]'] 256) batch_normalization_57 (BatchN (None, None, None, 4096 ['conv2d_57[0][0]'] ormalization) 1024) batch_normalization_64 (BatchN (None, None, None, 2048 ['conv2d_65[0][0]'] ormalization) 512) batch_normalization_71 (BatchN (None, None, None, 1024 ['conv2d_73[0][0]'] ormalization) 256) leaky_re_lu_57 (LeakyReLU) (None, None, None, 0 ['batch_normalization_57[0][0]'] 1024) leaky_re_lu_64 (LeakyReLU) (None, None, None, 0 ['batch_normalization_64[0][0]'] 512) leaky_re_lu_71 (LeakyReLU) (None, None, None, 0 ['batch_normalization_71[0][0]'] 256) conv2d_58 (Conv2D) (None, 
None, None, 261375 ['leaky_re_lu_57[0][0]'] 255) conv2d_66 (Conv2D) (None, None, None, 130815 ['leaky_re_lu_64[0][0]'] 255) conv2d_74 (Conv2D) (None, None, None, 65535 ['leaky_re_lu_71[0][0]'] 255) ================================================================================================== Total params: 62,001,757 Trainable params: 61,949,149 Non-trainable params: 52,608 __________________________________________________________________________________________________ None WARNING:tensorflow:Compiled the loaded model, but the compiled metrics have yet to be built. `model.compile_metrics` will be empty until you train or evaluate the model. Saved Keras model to model_data/yolo.h5 Read 62001757 of 62001757.0 from Darknet weights.
"""
Self-contained Python script to train YOLOv3 on your own dataset
"""
import numpy as np
import keras.backend as K
from keras.layers import Input, Lambda
from keras.models import Model
from keras.optimizers import Adam
from keras.callbacks import TensorBoard, ModelCheckpoint, ReduceLROnPlateau, EarlyStopping
from yolo3.model import preprocess_true_boxes, yolo_body, tiny_yolo_body, yolo_loss
from yolo3.utils import get_random_data
def _main():
    """Train YOLOv3 (or Tiny YOLOv3) on a Roboflow-exported dataset.

    Two-stage schedule: first train with most of the body frozen to get a
    stable loss, then unfreeze everything and fine-tune at a lower
    learning rate. Weights are written under ``log_dir``.
    """
    annotation_path = './train/_annotations.txt'  # path to Roboflow data annotations
    log_dir = './logs/000/'                       # where we're storing our logs
    classes_path = './train/_classes.txt'         # path to Roboflow class names
    anchors_path = './model_data/yolo_anchors.txt'

    class_names = get_classes(classes_path)
    print("-------------------CLASS NAMES-------------------")
    print(class_names)
    print("-------------------CLASS NAMES-------------------")
    num_classes = len(class_names)
    anchors = get_anchors(anchors_path)

    input_shape = (256, 256)  # (h, w), must be a multiple of 32; default = (416, 416)

    # 6 anchors means the Tiny YOLOv3 layout; 9 anchors means full YOLOv3.
    is_tiny_version = len(anchors) == 6
    if is_tiny_version:
        model = create_tiny_model(input_shape, anchors, num_classes,
            freeze_body=2, weights_path='./model_data/tiny_yolo_weights.h5')
    else:
        model = create_model(input_shape, anchors, num_classes,
            freeze_body=2, weights_path='./model_data/yolo.h5')  # make sure you know what you freeze

    logging = TensorBoard(log_dir=log_dir)
    # NOTE(review): `period` is deprecated in newer Keras in favour of
    # `save_freq`, but `save_freq` counts batches rather than epochs, so it is
    # not a drop-in replacement; kept as-is to preserve the every-3-epochs
    # checkpoint cadence.
    checkpoint = ModelCheckpoint(log_dir + 'ep{epoch:03d}-loss{loss:.3f}-val_loss{val_loss:.3f}.h5',
        monitor='val_loss', save_weights_only=True, save_best_only=True, period=3)
    reduce_lr = ReduceLROnPlateau(monitor='val_loss', factor=0.1, patience=3, verbose=1)
    early_stopping = EarlyStopping(monitor='val_loss', min_delta=0, patience=10, verbose=1)

    val_split = 0.2  # set the size of the validation set
    with open(annotation_path) as f:
        lines = f.readlines()
    # Fixed seed makes the train/val split reproducible across runs; the seed
    # is reset afterwards so later augmentation stays random.
    np.random.seed(10101)
    np.random.shuffle(lines)
    np.random.seed(None)
    num_val = int(len(lines) * val_split)
    num_train = len(lines) - num_val

    # Stage 1: train with frozen layers first, to get a stable loss.
    # Adjust num epochs to your dataset. This step is enough to obtain a not bad model.
    if True:
        # The loss is computed inside the model's `yolo_loss` Lambda layer, so
        # the Keras loss simply passes that layer's output through.
        model.compile(optimizer=Adam(learning_rate=1e-3),  # `lr` is deprecated
                      loss={'yolo_loss': lambda y_true, y_pred: y_pred})
        batch_size = 16
        print('Train on {} samples, val on {} samples, with batch size {}.'.format(num_train, num_val, batch_size))
        # `Model.fit` accepts generators; `fit_generator` is deprecated.
        model.fit(data_generator_wrapper(lines[:num_train], batch_size, input_shape, anchors, num_classes),
                steps_per_epoch=max(1, num_train // batch_size),
                validation_data=data_generator_wrapper(lines[num_train:], batch_size, input_shape, anchors, num_classes),
                validation_steps=max(1, num_val // batch_size),
                epochs=500,
                initial_epoch=0,
                callbacks=[logging, checkpoint])
        model.save_weights(log_dir + 'trained_weights_stage_1.h5')

    # Stage 2: unfreeze and continue training, to fine-tune.
    # Train longer if the result is not good.
    if True:
        for layer in model.layers:
            layer.trainable = True
        model.compile(optimizer=Adam(learning_rate=1e-4),
                      loss={'yolo_loss': lambda y_true, y_pred: y_pred})  # recompile to apply the change
        print('Unfreeze all of the layers.')
        batch_size = 16  # note that more GPU memory is required after unfreezing the body
        print('Train on {} samples, val on {} samples, with batch size {}.'.format(num_train, num_val, batch_size))
        model.fit(data_generator_wrapper(lines[:num_train], batch_size, input_shape, anchors, num_classes),
                steps_per_epoch=max(1, num_train // batch_size),
                validation_data=data_generator_wrapper(lines[num_train:], batch_size, input_shape, anchors, num_classes),
                validation_steps=max(1, num_val // batch_size),
                epochs=100,
                initial_epoch=50,
                callbacks=[logging, checkpoint, reduce_lr, early_stopping])
        model.save_weights(log_dir + 'trained_weights_final.h5')
    # Further training if needed.
def get_classes(classes_path):
    """Return the list of class names read from *classes_path*, one per line."""
    with open(classes_path) as handle:
        return [line.strip() for line in handle]
def get_anchors(anchors_path):
    """Load comma-separated anchor values and return them as an (N, 2) array."""
    with open(anchors_path) as handle:
        raw = handle.readline()
    values = [float(token) for token in raw.split(',')]
    return np.array(values).reshape(-1, 2)
def create_model(input_shape, anchors, num_classes, load_pretrained=True, freeze_body=2,
    weights_path='./model_data/yolo.h5'):
    """Build the trainable YOLOv3 graph: darknet body plus a `yolo_loss` head.

    The returned Model maps [image, *y_true] to the scalar YOLO loss, so it
    can be trained with a pass-through Keras loss.
    """
    K.clear_session()  # start from a fresh graph/session
    image_input = Input(shape=(None, None, 3))
    h, w = input_shape
    num_anchors = len(anchors)
    # One ground-truth input per output scale; strides are 32, 16 and 8.
    y_true = [Input(shape=(h // stride, w // stride, num_anchors // 3, num_classes + 5))
              for stride in (32, 16, 8)]
    model_body = yolo_body(image_input, num_anchors // 3, num_classes)
    print('Create YOLOv3 model with {} anchors and {} classes.'.format(num_anchors, num_classes))
    if load_pretrained:
        model_body.load_weights(weights_path, by_name=True, skip_mismatch=True)
        print('Load weights {}.'.format(weights_path))
        if freeze_body in [1, 2]:
            # freeze_body=1 freezes the darknet53 body (185 layers);
            # freeze_body=2 freezes everything except the 3 output layers.
            frozen = (185, len(model_body.layers) - 3)[freeze_body - 1]
            for layer in model_body.layers[:frozen]:
                layer.trainable = False
            print('Freeze the first {} layers of total {} layers.'.format(frozen, len(model_body.layers)))
    model_loss = Lambda(yolo_loss, output_shape=(1,), name='yolo_loss',
        arguments={'anchors': anchors, 'num_classes': num_classes, 'ignore_thresh': 0.5})(
        [*model_body.output, *y_true])
    return Model([model_body.input, *y_true], model_loss)
def create_tiny_model(input_shape, anchors, num_classes, load_pretrained=True, freeze_body=2,
    weights_path='./model_data/tiny_yolo_weights.h5'):
    """Build the trainable Tiny YOLOv3 graph: tiny body plus a `yolo_loss` head.

    The returned Model maps [image, *y_true] to the scalar YOLO loss, so it
    can be trained with a pass-through Keras loss.
    """
    K.clear_session()  # start from a fresh graph/session
    image_input = Input(shape=(None, None, 3))
    h, w = input_shape
    num_anchors = len(anchors)
    # Tiny YOLOv3 has two output scales; strides are 32 and 16.
    y_true = [Input(shape=(h // stride, w // stride, num_anchors // 2, num_classes + 5))
              for stride in (32, 16)]
    model_body = tiny_yolo_body(image_input, num_anchors // 2, num_classes)
    print('Create Tiny YOLOv3 model with {} anchors and {} classes.'.format(num_anchors, num_classes))
    if load_pretrained:
        model_body.load_weights(weights_path, by_name=True, skip_mismatch=True)
        print('Load weights {}.'.format(weights_path))
        if freeze_body in [1, 2]:
            # freeze_body=1 freezes the darknet body (20 layers);
            # freeze_body=2 freezes everything except the 2 output layers.
            frozen = (20, len(model_body.layers) - 2)[freeze_body - 1]
            for layer in model_body.layers[:frozen]:
                layer.trainable = False
            print('Freeze the first {} layers of total {} layers.'.format(frozen, len(model_body.layers)))
    model_loss = Lambda(yolo_loss, output_shape=(1,), name='yolo_loss',
        arguments={'anchors': anchors, 'num_classes': num_classes, 'ignore_thresh': 0.7})(
        [*model_body.output, *y_true])
    return Model([model_body.input, *y_true], model_loss)
def data_generator(annotation_lines, batch_size, input_shape, anchors, num_classes):
    """Yield endless training batches of ([images, *y_true], dummy_targets).

    Each batch applies random augmentation via `get_random_data`; the dataset
    is reshuffled in place every time a full pass completes.
    """
    n = len(annotation_lines)
    i = 0
    while True:
        images, boxes = [], []
        for _ in range(batch_size):
            if i == 0:
                # Start of a new pass over the data: reshuffle the order.
                np.random.shuffle(annotation_lines)
            image, box = get_random_data(annotation_lines[i], input_shape, random=True)
            images.append(image)
            boxes.append(box)
            i = (i + 1) % n
        image_batch = np.array(images)
        box_batch = np.array(boxes)
        y_true = preprocess_true_boxes(box_batch, input_shape, anchors, num_classes)
        # The dummy zeros target satisfies Keras; the real loss is the
        # model's `yolo_loss` output itself.
        yield [image_batch, *y_true], np.zeros(batch_size)
def data_generator_wrapper(annotation_lines, batch_size, input_shape, anchors, num_classes):
    """Guarded front-end for `data_generator`.

    Returns None when there is no data or the batch size is non-positive;
    otherwise returns the generator itself.
    """
    if not annotation_lines or batch_size <= 0:
        return None
    return data_generator(annotation_lines, batch_size, input_shape, anchors, num_classes)
# Script entry point: run the two-stage training procedure when executed
# directly (not when imported as a module).
if __name__ == '__main__':
    _main()
-------------------CLASS NAMES------------------- ['licence'] -------------------CLASS NAMES-------------------
2023-01-22 02:42:51.965089: I tensorflow/core/platform/cpu_feature_guard.cc:193] This TensorFlow binary is optimized with oneAPI Deep Neural Network Library (oneDNN) to use the following CPU instructions in performance-critical operations: SSE4.1 SSE4.2 AVX AVX2 FMA To enable them in other operations, rebuild TensorFlow with the appropriate compiler flags.
Create YOLOv3 model with 9 anchors and 1 classes. WARNING:tensorflow:Skipping loading weights for layer #249 (named conv2d_58) due to mismatch in shape for weight conv2d_58/kernel:0. Weight expects shape (1, 1, 1024, 18). Received saved weight with shape (255, 1024, 1, 1) WARNING:tensorflow:Skipping loading weights for layer #249 (named conv2d_58) due to mismatch in shape for weight conv2d_58/bias:0. Weight expects shape (18,). Received saved weight with shape (255,) WARNING:tensorflow:Skipping loading weights for layer #250 (named conv2d_66) due to mismatch in shape for weight conv2d_66/kernel:0. Weight expects shape (1, 1, 512, 18). Received saved weight with shape (255, 512, 1, 1) WARNING:tensorflow:Skipping loading weights for layer #250 (named conv2d_66) due to mismatch in shape for weight conv2d_66/bias:0. Weight expects shape (18,). Received saved weight with shape (255,) WARNING:tensorflow:Skipping loading weights for layer #251 (named conv2d_74) due to mismatch in shape for weight conv2d_74/kernel:0. Weight expects shape (1, 1, 256, 18). Received saved weight with shape (255, 256, 1, 1) WARNING:tensorflow:Skipping loading weights for layer #251 (named conv2d_74) due to mismatch in shape for weight conv2d_74/bias:0. Weight expects shape (18,). Received saved weight with shape (255,) Load weights ./model_data/yolo.h5. Freeze the first 249 layers of total 252 layers. WARNING:tensorflow:`period` argument is deprecated. Please use `save_freq` to specify the frequency in number of batches seen. Train on 488 samples, val on 121 samples, with batch size 16.
/Users/aczajka/miniconda3/envs/yolov3/lib/python3.9/site-packages/keras/optimizers/optimizer_v2/adam.py:117: UserWarning: The `lr` argument is deprecated, use `learning_rate` instead. super().__init__(name, **kwargs) /var/folders/j_/grk4ythd0392dcw5z3gkgw5w0000gn/T/ipykernel_39692/4035785499.py:62: UserWarning: `Model.fit_generator` is deprecated and will be removed in a future version. Please use `Model.fit`, which supports generators. model.fit_generator(data_generator_wrapper(lines[:num_train], batch_size, input_shape, anchors, num_classes),
Epoch 1/500 WARNING:tensorflow:From /Users/aczajka/miniconda3/envs/yolov3/lib/python3.9/site-packages/tensorflow/python/autograph/pyct/static_analysis/liveness.py:83: Analyzer.lamba_check (from tensorflow.python.autograph.pyct.static_analysis.liveness) is deprecated and will be removed after 2023-09-23. Instructions for updating: Lambda fuctions will be no more assumed to be used in the statement where they are used, or at least in the same block. https://github.com/tensorflow/tensorflow/issues/56089
2023-01-22 02:43:01.274999: E tensorflow/core/grappler/optimizers/meta_optimizer.cc:954] layout failed: INVALID_ARGUMENT: Subshape must have computed start >= end since stride is negative, but is 0 and 2 (computed from start 0 and end 9223372036854775807 over shape with rank 2 and stride-1)
30/30 [==============================] - ETA: 0s - loss: 1092.7228
2023-01-22 02:43:54.290859: E tensorflow/core/grappler/optimizers/meta_optimizer.cc:954] layout failed: INVALID_ARGUMENT: Subshape must have computed start >= end since stride is negative, but is 0 and 2 (computed from start 0 and end 9223372036854775807 over shape with rank 2 and stride-1)
30/30 [==============================] - 70s 2s/step - loss: 1092.7228 - val_loss: 216.0935 Epoch 2/500 30/30 [==============================] - 57s 2s/step - loss: 135.6953 - val_loss: 94.2604 Epoch 3/500 30/30 [==============================] - 52s 2s/step - loss: 79.2672 - val_loss: 68.8617 Epoch 4/500 30/30 [==============================] - 52s 2s/step - loss: 60.4469 - val_loss: 54.7572 Epoch 5/500 30/30 [==============================] - 50s 2s/step - loss: 50.0802 - val_loss: 47.2904 Epoch 6/500 30/30 [==============================] - 51s 2s/step - loss: 43.6335 - val_loss: 41.2742 Epoch 7/500 30/30 [==============================] - 51s 2s/step - loss: 39.3473 - val_loss: 38.5374 Epoch 8/500 30/30 [==============================] - 52s 2s/step - loss: 36.2422 - val_loss: 35.2012 Epoch 9/500 30/30 [==============================] - 51s 2s/step - loss: 33.6743 - val_loss: 33.0579 Epoch 10/500 30/30 [==============================] - 49s 2s/step - loss: 32.0283 - val_loss: 30.6336 Epoch 11/500 30/30 [==============================] - 49s 2s/step - loss: 30.3864 - val_loss: 29.2345 Epoch 12/500 30/30 [==============================] - 51s 2s/step - loss: 29.6261 - val_loss: 28.6320 Epoch 13/500 30/30 [==============================] - 51s 2s/step - loss: 28.1432 - val_loss: 27.8887 Epoch 14/500 30/30 [==============================] - 52s 2s/step - loss: 27.6032 - val_loss: 27.0226 Epoch 15/500 30/30 [==============================] - 52s 2s/step - loss: 26.9148 - val_loss: 26.3452 Epoch 16/500 30/30 [==============================] - 52s 2s/step - loss: 26.4210 - val_loss: 26.4830 Epoch 17/500 30/30 [==============================] - 53s 2s/step - loss: 25.6399 - val_loss: 25.2511 Epoch 18/500 30/30 [==============================] - 60s 2s/step - loss: 25.5443 - val_loss: 24.6174 Epoch 19/500 30/30 [==============================] - 52s 2s/step - loss: 25.2961 - val_loss: 24.7754 Epoch 20/500 30/30 [==============================] - 54s 2s/step - loss: 
24.7307 - val_loss: 24.6782 Epoch 21/500 30/30 [==============================] - 58s 2s/step - loss: 24.2857 - val_loss: 24.3096 Epoch 22/500 30/30 [==============================] - 52s 2s/step - loss: 24.2008 - val_loss: 24.3196 Epoch 23/500 30/30 [==============================] - 51s 2s/step - loss: 23.5739 - val_loss: 23.3351 Epoch 24/500 30/30 [==============================] - 52s 2s/step - loss: 23.6946 - val_loss: 24.0281 Epoch 25/500 30/30 [==============================] - 52s 2s/step - loss: 23.7198 - val_loss: 23.4021 Epoch 26/500 30/30 [==============================] - 53s 2s/step - loss: 23.2751 - val_loss: 23.3185 Epoch 27/500 30/30 [==============================] - 53s 2s/step - loss: 23.2101 - val_loss: 22.7601 Epoch 28/500 30/30 [==============================] - 53s 2s/step - loss: 22.9937 - val_loss: 22.6282 Epoch 29/500 30/30 [==============================] - 53s 2s/step - loss: 22.8363 - val_loss: 22.1787 Epoch 30/500 30/30 [==============================] - 55s 2s/step - loss: 22.6890 - val_loss: 22.1749 Epoch 31/500 30/30 [==============================] - 56s 2s/step - loss: 22.4564 - val_loss: 22.6868 Epoch 32/500 30/30 [==============================] - 52s 2s/step - loss: 22.3397 - val_loss: 22.1918 Epoch 33/500 30/30 [==============================] - 53s 2s/step - loss: 22.8438 - val_loss: 22.4380 Epoch 34/500 30/30 [==============================] - 53s 2s/step - loss: 22.0734 - val_loss: 22.9481 Epoch 35/500 30/30 [==============================] - 53s 2s/step - loss: 21.9711 - val_loss: 22.8436 Epoch 36/500 30/30 [==============================] - 53s 2s/step - loss: 22.0127 - val_loss: 22.7770 Epoch 37/500 30/30 [==============================] - 53s 2s/step - loss: 22.6367 - val_loss: 21.8047 Epoch 38/500 30/30 [==============================] - 53s 2s/step - loss: 21.8459 - val_loss: 22.3148 Epoch 39/500 30/30 [==============================] - 54s 2s/step - loss: 21.9811 - val_loss: 21.6083 Epoch 40/500 30/30 
[==============================] - 52s 2s/step - loss: 21.8194 - val_loss: 21.5877 Epoch 41/500 30/30 [==============================] - 53s 2s/step - loss: 21.6587 - val_loss: 21.3777 Epoch 42/500 30/30 [==============================] - 56s 2s/step - loss: 21.4056 - val_loss: 21.0999 Epoch 43/500 30/30 [==============================] - 52s 2s/step - loss: 21.4517 - val_loss: 20.9185 Epoch 44/500 30/30 [==============================] - 55s 2s/step - loss: 21.4323 - val_loss: 21.4888 Epoch 45/500 30/30 [==============================] - 57s 2s/step - loss: 21.4581 - val_loss: 20.9886 Epoch 46/500 30/30 [==============================] - 56s 2s/step - loss: 21.3487 - val_loss: 20.3990 Epoch 47/500 30/30 [==============================] - 56s 2s/step - loss: 20.9203 - val_loss: 20.3689 Epoch 48/500 30/30 [==============================] - 57s 2s/step - loss: 21.0719 - val_loss: 21.1066 Epoch 49/500 30/30 [==============================] - 57s 2s/step - loss: 21.3894 - val_loss: 21.2877 Epoch 50/500 30/30 [==============================] - 56s 2s/step - loss: 21.2891 - val_loss: 21.2323 Epoch 51/500 30/30 [==============================] - 58s 2s/step - loss: 21.0220 - val_loss: 20.7920 Epoch 52/500 30/30 [==============================] - 57s 2s/step - loss: 20.9018 - val_loss: 20.3990 Epoch 53/500 30/30 [==============================] - 57s 2s/step - loss: 21.2242 - val_loss: 20.2087 Epoch 54/500 30/30 [==============================] - 57s 2s/step - loss: 20.9219 - val_loss: 20.2367 Epoch 55/500 30/30 [==============================] - 57s 2s/step - loss: 20.8007 - val_loss: 20.1518 Epoch 56/500 30/30 [==============================] - 57s 2s/step - loss: 20.8917 - val_loss: 20.4730 Epoch 57/500 30/30 [==============================] - 57s 2s/step - loss: 20.8413 - val_loss: 20.3548 Epoch 58/500 30/30 [==============================] - 58s 2s/step - loss: 20.5870 - val_loss: 20.3552 Epoch 59/500 30/30 [==============================] - 57s 2s/step - loss: 
20.9533 - val_loss: 20.3583 Epoch 60/500 30/30 [==============================] - 58s 2s/step - loss: 20.5604 - val_loss: 19.6875 Epoch 61/500 30/30 [==============================] - 62s 2s/step - loss: 20.8170 - val_loss: 20.4102 Epoch 62/500 30/30 [==============================] - 59s 2s/step - loss: 20.7297 - val_loss: 20.4196 Epoch 63/500 30/30 [==============================] - 58s 2s/step - loss: 20.4839 - val_loss: 20.1161 Epoch 64/500 30/30 [==============================] - 57s 2s/step - loss: 20.4190 - val_loss: 20.5080 Epoch 65/500 30/30 [==============================] - 57s 2s/step - loss: 20.6353 - val_loss: 20.1768 Epoch 66/500 30/30 [==============================] - 58s 2s/step - loss: 20.6978 - val_loss: 20.5307 Epoch 67/500 30/30 [==============================] - 57s 2s/step - loss: 20.5475 - val_loss: 20.9204 Epoch 68/500 30/30 [==============================] - 57s 2s/step - loss: 20.4128 - val_loss: 20.2049 Epoch 69/500 30/30 [==============================] - 58s 2s/step - loss: 20.3816 - val_loss: 19.6142 Epoch 70/500 30/30 [==============================] - 57s 2s/step - loss: 20.4737 - val_loss: 20.3626 Epoch 71/500 30/30 [==============================] - 57s 2s/step - loss: 20.0341 - val_loss: 19.9938 Epoch 72/500 30/30 [==============================] - 58s 2s/step - loss: 20.4320 - val_loss: 21.0509 Epoch 73/500 30/30 [==============================] - 57s 2s/step - loss: 20.4354 - val_loss: 20.2190 Epoch 74/500 30/30 [==============================] - 58s 2s/step - loss: 19.9763 - val_loss: 19.8038 Epoch 75/500 30/30 [==============================] - 59s 2s/step - loss: 20.4443 - val_loss: 19.8551 Epoch 76/500 30/30 [==============================] - 58s 2s/step - loss: 19.5941 - val_loss: 19.9696 Epoch 77/500 30/30 [==============================] - 57s 2s/step - loss: 20.1076 - val_loss: 20.5628 Epoch 78/500 30/30 [==============================] - 57s 2s/step - loss: 19.8378 - val_loss: 20.4607 Epoch 79/500 30/30 
[==============================] - 58s 2s/step - loss: 19.9174 - val_loss: 19.2342 Epoch 80/500 30/30 [==============================] - 60s 2s/step - loss: 19.9954 - val_loss: 19.9048 Epoch 81/500 30/30 [==============================] - 61s 2s/step - loss: 19.8898 - val_loss: 19.9757 Epoch 82/500 30/30 [==============================] - 57s 2s/step - loss: 19.8671 - val_loss: 20.3432 Epoch 83/500 30/30 [==============================] - 57s 2s/step - loss: 20.0536 - val_loss: 20.0036 Epoch 84/500 30/30 [==============================] - 57s 2s/step - loss: 19.8378 - val_loss: 19.8090 Epoch 85/500 30/30 [==============================] - 57s 2s/step - loss: 20.0678 - val_loss: 19.4705 Epoch 86/500 30/30 [==============================] - 57s 2s/step - loss: 20.0358 - val_loss: 19.7351 Epoch 87/500 30/30 [==============================] - 58s 2s/step - loss: 19.7083 - val_loss: 19.1633 Epoch 88/500 30/30 [==============================] - 57s 2s/step - loss: 19.5802 - val_loss: 19.2210 Epoch 89/500 30/30 [==============================] - 58s 2s/step - loss: 19.6578 - val_loss: 19.5279 Epoch 90/500 30/30 [==============================] - 57s 2s/step - loss: 19.3884 - val_loss: 19.6862 Epoch 91/500 30/30 [==============================] - 57s 2s/step - loss: 19.8888 - val_loss: 20.6697 Epoch 92/500 30/30 [==============================] - 58s 2s/step - loss: 19.7249 - val_loss: 19.4848 Epoch 93/500 30/30 [==============================] - 57s 2s/step - loss: 19.9230 - val_loss: 19.8133 Epoch 94/500 30/30 [==============================] - 57s 2s/step - loss: 19.5174 - val_loss: 18.7876 Epoch 95/500 30/30 [==============================] - 58s 2s/step - loss: 19.6750 - val_loss: 19.1217 Epoch 96/500 30/30 [==============================] - 59s 2s/step - loss: 19.2918 - val_loss: 19.0066 Epoch 97/500 30/30 [==============================] - 57s 2s/step - loss: 19.5391 - val_loss: 19.2043 Epoch 98/500 30/30 [==============================] - 62s 2s/step - loss: 
19.6986 - val_loss: 20.1391 Epoch 99/500 30/30 [==============================] - 61s 2s/step - loss: 19.5048 - val_loss: 19.4383 Epoch 100/500 30/30 [==============================] - 56s 2s/step - loss: 19.2941 - val_loss: 19.6998 Epoch 101/500 30/30 [==============================] - 57s 2s/step - loss: 19.4645 - val_loss: 18.7480 Epoch 102/500 30/30 [==============================] - 57s 2s/step - loss: 19.3468 - val_loss: 20.0104 Epoch 103/500 30/30 [==============================] - 57s 2s/step - loss: 19.5910 - val_loss: 19.0392 Epoch 104/500 30/30 [==============================] - 57s 2s/step - loss: 19.4385 - val_loss: 19.1266 Epoch 105/500 30/30 [==============================] - 57s 2s/step - loss: 19.3504 - val_loss: 19.7491 Epoch 106/500 30/30 [==============================] - 58s 2s/step - loss: 18.9692 - val_loss: 19.1707 Epoch 107/500 30/30 [==============================] - 57s 2s/step - loss: 19.2553 - val_loss: 19.5704 Epoch 108/500 30/30 [==============================] - 58s 2s/step - loss: 19.5590 - val_loss: 18.9097 Epoch 109/500 30/30 [==============================] - 58s 2s/step - loss: 18.6710 - val_loss: 19.4302 Epoch 110/500 30/30 [==============================] - 57s 2s/step - loss: 19.0906 - val_loss: 19.0445 Epoch 111/500 30/30 [==============================] - 58s 2s/step - loss: 19.0178 - val_loss: 18.9003 Epoch 112/500 30/30 [==============================] - 64s 2s/step - loss: 19.1675 - val_loss: 18.6330 Epoch 113/500 30/30 [==============================] - 59s 2s/step - loss: 18.9757 - val_loss: 18.7002 Epoch 114/500 30/30 [==============================] - 58s 2s/step - loss: 18.9385 - val_loss: 18.6894 Epoch 115/500 30/30 [==============================] - 58s 2s/step - loss: 19.1563 - val_loss: 18.4366 Epoch 116/500 30/30 [==============================] - 57s 2s/step - loss: 18.7275 - val_loss: 19.4573 Epoch 117/500 30/30 [==============================] - 58s 2s/step - loss: 18.9145 - val_loss: 18.3290 Epoch 118/500 
30/30 [==============================] - 58s 2s/step - loss: 18.9156 - val_loss: 19.1213 Epoch 119/500 30/30 [==============================] - 57s 2s/step - loss: 19.1048 - val_loss: 18.6663 Epoch 120/500 30/30 [==============================] - 58s 2s/step - loss: 18.5928 - val_loss: 19.4735 Epoch 121/500 30/30 [==============================] - 58s 2s/step - loss: 18.9346 - val_loss: 18.1666 Epoch 122/500 30/30 [==============================] - 58s 2s/step - loss: 18.7425 - val_loss: 18.6575 Epoch 123/500 30/30 [==============================] - 58s 2s/step - loss: 18.5844 - val_loss: 18.5724 Epoch 124/500 30/30 [==============================] - 58s 2s/step - loss: 18.5992 - val_loss: 18.6960 Epoch 125/500 30/30 [==============================] - 58s 2s/step - loss: 18.7258 - val_loss: 18.3909 Epoch 126/500 30/30 [==============================] - 58s 2s/step - loss: 18.7052 - val_loss: 18.4346 Epoch 127/500 30/30 [==============================] - 58s 2s/step - loss: 18.5290 - val_loss: 19.0881 Epoch 128/500 30/30 [==============================] - 58s 2s/step - loss: 18.6084 - val_loss: 18.3423 Epoch 129/500 30/30 [==============================] - 67s 2s/step - loss: 18.2576 - val_loss: 17.7641 Epoch 130/500 30/30 [==============================] - 57s 2s/step - loss: 18.5741 - val_loss: 19.3456 Epoch 131/500 30/30 [==============================] - 58s 2s/step - loss: 18.7191 - val_loss: 18.2478 Epoch 132/500 30/30 [==============================] - 58s 2s/step - loss: 18.2529 - val_loss: 17.8907 Epoch 133/500 30/30 [==============================] - 58s 2s/step - loss: 18.6660 - val_loss: 19.2050 Epoch 134/500 30/30 [==============================] - 59s 2s/step - loss: 18.4503 - val_loss: 17.5212 Epoch 135/500 30/30 [==============================] - 59s 2s/step - loss: 18.4364 - val_loss: 17.6540 Epoch 136/500 30/30 [==============================] - 58s 2s/step - loss: 18.3926 - val_loss: 17.6553 Epoch 137/500 30/30 [==============================] - 
61s 2s/step - loss: 18.2460 - val_loss: 18.6843 Epoch 138/500 30/30 [==============================] - 58s 2s/step - loss: 18.2683 - val_loss: 18.1989 Epoch 139/500 30/30 [==============================] - 59s 2s/step - loss: 18.4373 - val_loss: 18.2519 Epoch 140/500 30/30 [==============================] - 59s 2s/step - loss: 18.0950 - val_loss: 18.2093 Epoch 141/500 30/30 [==============================] - 59s 2s/step - loss: 18.6397 - val_loss: 17.5036 Epoch 142/500 30/30 [==============================] - 58s 2s/step - loss: 18.4368 - val_loss: 18.0884 Epoch 143/500 30/30 [==============================] - 58s 2s/step - loss: 18.2509 - val_loss: 18.0419 Epoch 144/500 30/30 [==============================] - 60s 2s/step - loss: 17.9318 - val_loss: 17.1161 Epoch 145/500 30/30 [==============================] - 67s 2s/step - loss: 18.1403 - val_loss: 17.9708 Epoch 146/500 30/30 [==============================] - 56s 2s/step - loss: 18.2065 - val_loss: 18.9385 Epoch 147/500 30/30 [==============================] - 59s 2s/step - loss: 17.9481 - val_loss: 17.5626 Epoch 148/500 30/30 [==============================] - 58s 2s/step - loss: 17.9567 - val_loss: 17.6918 Epoch 149/500 30/30 [==============================] - 58s 2s/step - loss: 18.0001 - val_loss: 17.8759 Epoch 150/500 30/30 [==============================] - 60s 2s/step - loss: 18.2126 - val_loss: 18.0285 Epoch 151/500 30/30 [==============================] - 59s 2s/step - loss: 17.8216 - val_loss: 18.1529 Epoch 152/500 30/30 [==============================] - 58s 2s/step - loss: 17.8409 - val_loss: 18.0349 Epoch 153/500 30/30 [==============================] - 60s 2s/step - loss: 17.8870 - val_loss: 16.9735 Epoch 154/500 30/30 [==============================] - 58s 2s/step - loss: 17.5961 - val_loss: 17.3506 Epoch 155/500 30/30 [==============================] - 58s 2s/step - loss: 18.0078 - val_loss: 18.0054 Epoch 156/500 30/30 [==============================] - 59s 2s/step - loss: 17.9904 - val_loss: 
17.5965 Epoch 157/500 30/30 [==============================] - 58s 2s/step - loss: 17.9485 - val_loss: 17.4312 Epoch 158/500 30/30 [==============================] - 58s 2s/step - loss: 17.8291 - val_loss: 17.3607 Epoch 159/500 30/30 [==============================] - 59s 2s/step - loss: 17.8277 - val_loss: 17.2476 Epoch 160/500 30/30 [==============================] - 60s 2s/step - loss: 17.2321 - val_loss: 17.4888 Epoch 161/500 30/30 [==============================] - 66s 2s/step - loss: 17.8075 - val_loss: 17.9411 Epoch 162/500 30/30 [==============================] - 59s 2s/step - loss: 17.6729 - val_loss: 16.4171 Epoch 163/500 30/30 [==============================] - 58s 2s/step - loss: 17.7537 - val_loss: 17.1066 Epoch 164/500 30/30 [==============================] - 58s 2s/step - loss: 17.7760 - val_loss: 17.9759 Epoch 165/500 30/30 [==============================] - 60s 2s/step - loss: 17.9173 - val_loss: 17.1527 Epoch 166/500 30/30 [==============================] - 58s 2s/step - loss: 17.7308 - val_loss: 17.3219 Epoch 167/500 30/30 [==============================] - 58s 2s/step - loss: 17.4189 - val_loss: 17.8249 Epoch 168/500 30/30 [==============================] - 59s 2s/step - loss: 17.4101 - val_loss: 17.1193 Epoch 169/500 30/30 [==============================] - 59s 2s/step - loss: 17.7719 - val_loss: 17.0561 Epoch 170/500 30/30 [==============================] - 58s 2s/step - loss: 17.7480 - val_loss: 16.8337 Epoch 171/500 30/30 [==============================] - 60s 2s/step - loss: 17.3692 - val_loss: 16.0112 Epoch 172/500 30/30 [==============================] - 58s 2s/step - loss: 17.7808 - val_loss: 16.9604 Epoch 173/500 30/30 [==============================] - 59s 2s/step - loss: 17.5618 - val_loss: 16.9944 Epoch 174/500 30/30 [==============================] - 58s 2s/step - loss: 17.6525 - val_loss: 17.3570 Epoch 175/500 30/30 [==============================] - 59s 2s/step - loss: 17.5603 - val_loss: 16.9481 Epoch 176/500 30/30 
[==============================] - 64s 2s/step - loss: 17.3968 - val_loss: 16.7614 Epoch 177/500 30/30 [==============================] - 61s 2s/step - loss: 17.7041 - val_loss: 17.0379 Epoch 178/500 30/30 [==============================] - 59s 2s/step - loss: 17.4487 - val_loss: 17.8662 Epoch 179/500 30/30 [==============================] - 58s 2s/step - loss: 17.5594 - val_loss: 16.9650 Epoch 180/500 30/30 [==============================] - 59s 2s/step - loss: 17.4884 - val_loss: 16.6101 Epoch 181/500 30/30 [==============================] - 59s 2s/step - loss: 17.3017 - val_loss: 17.3026 Epoch 182/500 30/30 [==============================] - 58s 2s/step - loss: 17.4179 - val_loss: 17.0920 Epoch 183/500 30/30 [==============================] - 58s 2s/step - loss: 17.1374 - val_loss: 17.0096 Epoch 184/500 30/30 [==============================] - 59s 2s/step - loss: 17.2827 - val_loss: 17.5058 Epoch 185/500 30/30 [==============================] - 59s 2s/step - loss: 17.3034 - val_loss: 17.1128 Epoch 186/500 30/30 [==============================] - 72s 2s/step - loss: 17.1985 - val_loss: 16.1411 Epoch 187/500 30/30 [==============================] - 102s 3s/step - loss: 17.2851 - val_loss: 17.6696 Epoch 188/500 30/30 [==============================] - 117s 4s/step - loss: 17.1215 - val_loss: 17.2290 Epoch 189/500 30/30 [==============================] - 123s 4s/step - loss: 17.4202 - val_loss: 16.9745 Epoch 190/500 30/30 [==============================] - 126s 4s/step - loss: 17.0531 - val_loss: 16.7439 Epoch 191/500 30/30 [==============================] - 125s 4s/step - loss: 17.2199 - val_loss: 16.9525 Epoch 192/500 30/30 [==============================] - 130s 4s/step - loss: 17.2730 - val_loss: 16.7329 Epoch 193/500 30/30 [==============================] - 131s 4s/step - loss: 17.0992 - val_loss: 16.7782 Epoch 194/500 30/30 [==============================] - 123s 4s/step - loss: 17.2608 - val_loss: 16.7102 Epoch 195/500 30/30 [==============================] - 
123s 4s/step - loss: 17.2919 - val_loss: 16.7896 Epoch 196/500 30/30 [==============================] - 125s 4s/step - loss: 17.2738 - val_loss: 16.4602 Epoch 197/500 30/30 [==============================] - 125s 4s/step - loss: 16.8739 - val_loss: 16.5762 Epoch 198/500 30/30 [==============================] - 128s 4s/step - loss: 17.1413 - val_loss: 16.7527 Epoch 199/500 30/30 [==============================] - 111s 4s/step - loss: 16.9642 - val_loss: 16.8084 Epoch 200/500 30/30 [==============================] - 52s 2s/step - loss: 17.0036 - val_loss: 16.4942 Epoch 201/500 30/30 [==============================] - 67s 2s/step - loss: 16.9632 - val_loss: 16.9797 Epoch 202/500 30/30 [==============================] - 58s 2s/step - loss: 17.0620 - val_loss: 17.0211 Epoch 203/500 30/30 [==============================] - 58s 2s/step - loss: 17.4539 - val_loss: 16.8192 Epoch 204/500 30/30 [==============================] - 59s 2s/step - loss: 16.9974 - val_loss: 16.3421 Epoch 205/500 30/30 [==============================] - 58s 2s/step - loss: 16.8898 - val_loss: 17.2752 Epoch 206/500 30/30 [==============================] - 58s 2s/step - loss: 16.9180 - val_loss: 15.9694 Epoch 207/500 30/30 [==============================] - 58s 2s/step - loss: 17.2808 - val_loss: 16.7605 Epoch 208/500 30/30 [==============================] - 59s 2s/step - loss: 17.0356 - val_loss: 16.7981 Epoch 209/500 30/30 [==============================] - 58s 2s/step - loss: 17.0600 - val_loss: 16.9557 Epoch 210/500 30/30 [==============================] - 59s 2s/step - loss: 17.1601 - val_loss: 17.0695 Epoch 211/500 30/30 [==============================] - 59s 2s/step - loss: 17.0459 - val_loss: 16.6873 Epoch 212/500 30/30 [==============================] - 89s 3s/step - loss: 17.1121 - val_loss: 16.7286 Epoch 213/500 30/30 [==============================] - 112s 4s/step - loss: 16.7431 - val_loss: 16.7320 Epoch 214/500 30/30 [==============================] - 118s 4s/step - loss: 16.8781 - 
val_loss: 16.9751 Epoch 215/500 30/30 [==============================] - 123s 4s/step - loss: 17.1820 - val_loss: 16.3007 Epoch 216/500 30/30 [==============================] - 125s 4s/step - loss: 16.7895 - val_loss: 16.8100 Epoch 217/500 30/30 [==============================] - 125s 4s/step - loss: 17.0252 - val_loss: 16.4287 Epoch 218/500 30/30 [==============================] - 125s 4s/step - loss: 16.7351 - val_loss: 16.7850 Epoch 219/500 30/30 [==============================] - 124s 4s/step - loss: 16.8772 - val_loss: 16.6999 Epoch 220/500 30/30 [==============================] - 127s 4s/step - loss: 16.8597 - val_loss: 16.5488 Epoch 221/500 30/30 [==============================] - 124s 4s/step - loss: 17.1371 - val_loss: 16.2532 Epoch 222/500 30/30 [==============================] - 125s 4s/step - loss: 16.8663 - val_loss: 16.5585 Epoch 223/500 30/30 [==============================] - 125s 4s/step - loss: 16.9581 - val_loss: 16.2982 Epoch 224/500 30/30 [==============================] - 125s 4s/step - loss: 16.9060 - val_loss: 16.3674 Epoch 225/500 30/30 [==============================] - 123s 4s/step - loss: 16.9360 - val_loss: 16.6738 Epoch 226/500 30/30 [==============================] - 110s 4s/step - loss: 16.6695 - val_loss: 16.8557 Epoch 227/500 30/30 [==============================] - 54s 2s/step - loss: 16.9524 - val_loss: 16.5686 Epoch 228/500 30/30 [==============================] - 68s 2s/step - loss: 16.8033 - val_loss: 16.5986 Epoch 229/500 30/30 [==============================] - 59s 2s/step - loss: 16.7346 - val_loss: 16.0810 Epoch 230/500 30/30 [==============================] - 61s 2s/step - loss: 16.6734 - val_loss: 16.3377 Epoch 231/500 30/30 [==============================] - 61s 2s/step - loss: 16.8451 - val_loss: 16.1956 Epoch 232/500 30/30 [==============================] - 59s 2s/step - loss: 16.8533 - val_loss: 16.4178 Epoch 233/500 30/30 [==============================] - 67s 2s/step - loss: 16.6677 - val_loss: 16.1700 Epoch 
234/500 30/30 [==============================] - 62s 2s/step - loss: 16.7513 - val_loss: 16.6826 Epoch 235/500 30/30 [==============================] - 75s 3s/step - loss: 16.5991 - val_loss: 16.3288 Epoch 236/500 30/30 [==============================] - 60s 2s/step - loss: 17.0266 - val_loss: 16.5422 Epoch 237/500 30/30 [==============================] - 59s 2s/step - loss: 16.7941 - val_loss: 16.2773 Epoch 238/500 30/30 [==============================] - 75s 3s/step - loss: 16.5905 - val_loss: 15.8983 Epoch 239/500 30/30 [==============================] - 79s 3s/step - loss: 16.7549 - val_loss: 16.9012 Epoch 240/500 30/30 [==============================] - 83s 3s/step - loss: 16.7844 - val_loss: 16.3224 Epoch 241/500 30/30 [==============================] - 86s 3s/step - loss: 16.6991 - val_loss: 16.3654 Epoch 242/500 30/30 [==============================] - 87s 3s/step - loss: 16.5758 - val_loss: 15.8727 Epoch 243/500 30/30 [==============================] - 87s 3s/step - loss: 16.3914 - val_loss: 16.4542 Epoch 244/500 30/30 [==============================] - 89s 3s/step - loss: 16.7021 - val_loss: 16.4562 Epoch 245/500 30/30 [==============================] - 89s 3s/step - loss: 16.4270 - val_loss: 16.3817 Epoch 246/500 30/30 [==============================] - 89s 3s/step - loss: 16.7722 - val_loss: 16.2395 Epoch 247/500 30/30 [==============================] - 89s 3s/step - loss: 16.6468 - val_loss: 16.6332 Epoch 248/500 30/30 [==============================] - 89s 3s/step - loss: 17.0123 - val_loss: 16.2401 Epoch 249/500 30/30 [==============================] - 88s 3s/step - loss: 16.4098 - val_loss: 16.2627 Epoch 250/500 30/30 [==============================] - 87s 3s/step - loss: 16.6750 - val_loss: 16.3639 Epoch 251/500 30/30 [==============================] - 88s 3s/step - loss: 16.4957 - val_loss: 17.0374 Epoch 252/500 30/30 [==============================] - 88s 3s/step - loss: 16.5535 - val_loss: 16.6554 Epoch 253/500 30/30 
[==============================] - 119s 4s/step - loss: 16.5255 - val_loss: 16.8328 Epoch 254/500 30/30 [==============================] - 119s 4s/step - loss: 16.6808 - val_loss: 16.1435 Epoch 255/500 30/30 [==============================] - 54s 2s/step - loss: 16.5841 - val_loss: 16.3919 Epoch 256/500 30/30 [==============================] - 78s 3s/step - loss: 16.5055 - val_loss: 16.5761 Epoch 257/500 30/30 [==============================] - 59s 2s/step - loss: 16.6117 - val_loss: 16.4381 Epoch 258/500 30/30 [==============================] - 59s 2s/step - loss: 16.6162 - val_loss: 16.2132 Epoch 259/500 30/30 [==============================] - 58s 2s/step - loss: 16.4880 - val_loss: 16.6501 Epoch 260/500 30/30 [==============================] - 59s 2s/step - loss: 16.5138 - val_loss: 15.8520 Epoch 261/500 30/30 [==============================] - 59s 2s/step - loss: 16.4561 - val_loss: 15.8716 Epoch 262/500 30/30 [==============================] - 59s 2s/step - loss: 16.6328 - val_loss: 16.2283 Epoch 263/500 30/30 [==============================] - 58s 2s/step - loss: 16.8187 - val_loss: 16.8967 Epoch 264/500 30/30 [==============================] - 90s 3s/step - loss: 16.5232 - val_loss: 15.7357 Epoch 265/500 30/30 [==============================] - 108s 4s/step - loss: 16.3057 - val_loss: 16.0941 Epoch 266/500 30/30 [==============================] - 117s 4s/step - loss: 16.6120 - val_loss: 16.4122 Epoch 267/500 30/30 [==============================] - 125s 4s/step - loss: 16.6497 - val_loss: 15.5423 Epoch 268/500 30/30 [==============================] - 122s 4s/step - loss: 16.4017 - val_loss: 16.8959 Epoch 269/500 30/30 [==============================] - 127s 4s/step - loss: 16.5587 - val_loss: 16.1176 Epoch 270/500 30/30 [==============================] - 124s 4s/step - loss: 16.3952 - val_loss: 16.4328 Epoch 271/500 30/30 [==============================] - 125s 4s/step - loss: 16.5917 - val_loss: 16.1204 Epoch 272/500 30/30 [==============================] 
- 123s 4s/step - loss: 16.3392 - val_loss: 16.1431 Epoch 273/500 30/30 [==============================] - 128s 4s/step - loss: 16.5220 - val_loss: 16.2746 Epoch 274/500 30/30 [==============================] - 128s 4s/step - loss: 16.6498 - val_loss: 16.3835 Epoch 275/500 30/30 [==============================] - 123s 4s/step - loss: 16.2066 - val_loss: 16.0384 Epoch 276/500 30/30 [==============================] - 81s 3s/step - loss: 16.2591 - val_loss: 16.4378 Epoch 277/500 30/30 [==============================] - 65s 2s/step - loss: 16.6943 - val_loss: 16.1523 Epoch 278/500 30/30 [==============================] - 61s 2s/step - loss: 16.3948 - val_loss: 16.1507 Epoch 279/500 30/30 [==============================] - 59s 2s/step - loss: 16.6854 - val_loss: 16.2779 Epoch 280/500 30/30 [==============================] - 58s 2s/step - loss: 16.4208 - val_loss: 16.0576 Epoch 281/500 30/30 [==============================] - 59s 2s/step - loss: 16.3797 - val_loss: 16.6038 Epoch 282/500 30/30 [==============================] - 59s 2s/step - loss: 16.8321 - val_loss: 16.0848 Epoch 283/500 30/30 [==============================] - 67s 2s/step - loss: 16.2373 - val_loss: 16.3140 Epoch 284/500 30/30 [==============================] - 56s 2s/step - loss: 16.3162 - val_loss: 15.8853 Epoch 285/500 30/30 [==============================] - 58s 2s/step - loss: 16.3769 - val_loss: 16.3856 Epoch 286/500 30/30 [==============================] - 61s 2s/step - loss: 16.4671 - val_loss: 16.0674 Epoch 287/500 30/30 [==============================] - 85s 3s/step - loss: 16.5860 - val_loss: 16.3418 Epoch 288/500 30/30 [==============================] - 106s 4s/step - loss: 16.4896 - val_loss: 16.5205 Epoch 289/500 30/30 [==============================] - 118s 4s/step - loss: 16.4469 - val_loss: 15.8535 Epoch 290/500 30/30 [==============================] - 123s 4s/step - loss: 16.6378 - val_loss: 15.7410 Epoch 291/500 30/30 [==============================] - 122s 4s/step - loss: 16.6027 - 
val_loss: 16.5198 Epoch 292/500 30/30 [==============================] - 125s 4s/step - loss: 15.9454 - val_loss: 16.8931 Epoch 293/500 30/30 [==============================] - 124s 4s/step - loss: 16.3042 - val_loss: 15.6124 Epoch 294/500 30/30 [==============================] - 124s 4s/step - loss: 16.4451 - val_loss: 15.6456 Epoch 295/500 30/30 [==============================] - 126s 4s/step - loss: 16.3229 - val_loss: 16.1610 Epoch 296/500 30/30 [==============================] - 122s 4s/step - loss: 16.3041 - val_loss: 16.1309 Epoch 297/500 30/30 [==============================] - 123s 4s/step - loss: 16.5280 - val_loss: 15.8774 Epoch 298/500 30/30 [==============================] - 122s 4s/step - loss: 16.3325 - val_loss: 16.5331 Epoch 299/500 30/30 [==============================] - 110s 4s/step - loss: 16.5183 - val_loss: 15.7422 Epoch 300/500 30/30 [==============================] - 69s 2s/step - loss: 16.5641 - val_loss: 16.7612 Epoch 301/500 30/30 [==============================] - 63s 2s/step - loss: 16.2330 - val_loss: 15.8244 Epoch 302/500 30/30 [==============================] - 64s 2s/step - loss: 16.4699 - val_loss: 15.6958 Epoch 303/500 30/30 [==============================] - 63s 2s/step - loss: 16.4143 - val_loss: 16.6897 Epoch 304/500 30/30 [==============================] - 59s 2s/step - loss: 16.2447 - val_loss: 16.1471 Epoch 305/500 30/30 [==============================] - 59s 2s/step - loss: 16.5204 - val_loss: 15.7905 Epoch 306/500 30/30 [==============================] - 59s 2s/step - loss: 16.1380 - val_loss: 16.5672 Epoch 307/500 30/30 [==============================] - 59s 2s/step - loss: 16.5557 - val_loss: 15.9381 Epoch 308/500 30/30 [==============================] - 58s 2s/step - loss: 16.4380 - val_loss: 16.5429 Epoch 309/500 30/30 [==============================] - 60s 2s/step - loss: 16.3664 - val_loss: 15.8925 Epoch 310/500 30/30 [==============================] - 58s 2s/step - loss: 16.3254 - val_loss: 15.8290 Epoch 311/500 
30/30 [==============================] - 73s 2s/step - loss: 16.4264 - val_loss: 16.0228 Epoch 312/500 30/30 [==============================] - 97s 3s/step - loss: 16.2977 - val_loss: 16.1006 Epoch 313/500 30/30 [==============================] - 114s 4s/step - loss: 16.4107 - val_loss: 16.0559 Epoch 314/500 30/30 [==============================] - 118s 4s/step - loss: 16.1044 - val_loss: 15.9039 Epoch 315/500 30/30 [==============================] - 129s 4s/step - loss: 16.3085 - val_loss: 16.3312 Epoch 316/500 30/30 [==============================] - 127s 4s/step - loss: 16.1068 - val_loss: 16.0503 Epoch 317/500 30/30 [==============================] - 126s 4s/step - loss: 16.6584 - val_loss: 16.2829 Epoch 318/500 30/30 [==============================] - 125s 4s/step - loss: 16.2703 - val_loss: 15.6388 Epoch 319/500 30/30 [==============================] - 129s 4s/step - loss: 16.2571 - val_loss: 15.7867 Epoch 320/500 30/30 [==============================] - 125s 4s/step - loss: 16.5441 - val_loss: 15.8499 Epoch 321/500 30/30 [==============================] - 123s 4s/step - loss: 16.3501 - val_loss: 16.1323 Epoch 322/500 30/30 [==============================] - 124s 4s/step - loss: 16.2824 - val_loss: 15.9564 Epoch 323/500 30/30 [==============================] - 125s 4s/step - loss: 16.3759 - val_loss: 16.3467 Epoch 324/500 30/30 [==============================] - 60s 2s/step - loss: 16.3403 - val_loss: 15.6820 Epoch 325/500 30/30 [==============================] - 69s 2s/step - loss: 16.2955 - val_loss: 16.1720 Epoch 326/500 30/30 [==============================] - 59s 2s/step - loss: 16.4078 - val_loss: 16.3941 Epoch 327/500 30/30 [==============================] - 59s 2s/step - loss: 16.0622 - val_loss: 16.0237 Epoch 328/500 30/30 [==============================] - 59s 2s/step - loss: 16.3376 - val_loss: 15.5706 Epoch 329/500 30/30 [==============================] - 62s 2s/step - loss: 16.1294 - val_loss: 16.5142 Epoch 330/500 30/30 
[==============================] - 63s 2s/step - loss: 16.0853 - val_loss: 16.1133 Epoch 331/500 30/30 [==============================] - 57s 2s/step - loss: 16.1868 - val_loss: 15.9329 Epoch 332/500 30/30 [==============================] - 58s 2s/step - loss: 16.1243 - val_loss: 15.7737 Epoch 333/500 30/30 [==============================] - 58s 2s/step - loss: 16.0936 - val_loss: 15.8534 Epoch 334/500 30/30 [==============================] - 70s 2s/step - loss: 16.3387 - val_loss: 16.0363 Epoch 335/500 30/30 [==============================] - 96s 3s/step - loss: 16.1497 - val_loss: 16.3894 Epoch 336/500 30/30 [==============================] - 114s 4s/step - loss: 15.7429 - val_loss: 16.1402 Epoch 337/500 30/30 [==============================] - 119s 4s/step - loss: 16.3378 - val_loss: 16.3067 Epoch 338/500 30/30 [==============================] - 122s 4s/step - loss: 16.1981 - val_loss: 16.1319 Epoch 339/500 30/30 [==============================] - 124s 4s/step - loss: 16.1361 - val_loss: 15.7421 Epoch 340/500 30/30 [==============================] - 125s 4s/step - loss: 16.2517 - val_loss: 15.5112 Epoch 341/500 30/30 [==============================] - 124s 4s/step - loss: 16.1154 - val_loss: 15.5062 Epoch 342/500 30/30 [==============================] - 125s 4s/step - loss: 16.1898 - val_loss: 15.5263 Epoch 343/500 30/30 [==============================] - 122s 4s/step - loss: 16.0264 - val_loss: 16.6698 Epoch 344/500 30/30 [==============================] - 125s 4s/step - loss: 16.1943 - val_loss: 15.7087 Epoch 345/500 30/30 [==============================] - 125s 4s/step - loss: 16.2535 - val_loss: 16.1479 Epoch 346/500 30/30 [==============================] - 124s 4s/step - loss: 16.5307 - val_loss: 15.6747 Epoch 347/500 30/30 [==============================] - 64s 2s/step - loss: 16.2075 - val_loss: 15.6584 Epoch 348/500 30/30 [==============================] - 69s 2s/step - loss: 16.2071 - val_loss: 15.3423 Epoch 349/500 30/30 
[==============================] - 59s 2s/step - loss: 16.0504 - val_loss: 16.2236 Epoch 350/500 30/30 [==============================] - 59s 2s/step - loss: 16.0833 - val_loss: 16.2664 Epoch 351/500 30/30 [==============================] - 59s 2s/step - loss: 16.2250 - val_loss: 15.8436 Epoch 352/500 30/30 [==============================] - 59s 2s/step - loss: 16.1694 - val_loss: 15.7174 Epoch 353/500 30/30 [==============================] - 61s 2s/step - loss: 16.3608 - val_loss: 16.8256 Epoch 354/500 30/30 [==============================] - 63s 2s/step - loss: 16.0936 - val_loss: 15.2995 Epoch 355/500 30/30 [==============================] - 59s 2s/step - loss: 16.0449 - val_loss: 16.5662 Epoch 356/500 30/30 [==============================] - 70s 2s/step - loss: 16.1806 - val_loss: 16.0976 Epoch 357/500 30/30 [==============================] - 96s 3s/step - loss: 16.2721 - val_loss: 15.5171 Epoch 358/500 30/30 [==============================] - 114s 4s/step - loss: 16.2750 - val_loss: 16.0328 Epoch 359/500 30/30 [==============================] - 121s 4s/step - loss: 16.4254 - val_loss: 16.0317 Epoch 360/500 30/30 [==============================] - 123s 4s/step - loss: 16.2188 - val_loss: 15.7162 Epoch 361/500 30/30 [==============================] - 130s 4s/step - loss: 16.0624 - val_loss: 16.2708 Epoch 362/500 30/30 [==============================] - 123s 4s/step - loss: 16.1229 - val_loss: 16.3186 Epoch 363/500 30/30 [==============================] - 122s 4s/step - loss: 16.1250 - val_loss: 15.5198 Epoch 364/500 30/30 [==============================] - 126s 4s/step - loss: 16.1816 - val_loss: 16.0486 Epoch 365/500 30/30 [==============================] - 89s 3s/step - loss: 16.2343 - val_loss: 16.1744 Epoch 366/500 30/30 [==============================] - 88s 3s/step - loss: 16.1624 - val_loss: 15.6001 Epoch 367/500 30/30 [==============================] - 88s 3s/step - loss: 16.1360 - val_loss: 16.4407 Epoch 368/500 30/30 [==============================] - 
88s 3s/step - loss: 16.0462 - val_loss: 16.1154 Epoch 369/500 30/30 [==============================] - 67s 2s/step - loss: 16.1973 - val_loss: 15.5669 Epoch 370/500 30/30 [==============================] - 57s 2s/step - loss: 16.1021 - val_loss: 15.6763 Epoch 371/500 30/30 [==============================] - 57s 2s/step - loss: 16.1306 - val_loss: 15.5349 Epoch 372/500 30/30 [==============================] - 56s 2s/step - loss: 16.2231 - val_loss: 16.4343 Epoch 373/500 30/30 [==============================] - 55s 2s/step - loss: 15.9661 - val_loss: 15.7303 Epoch 374/500 30/30 [==============================] - 54s 2s/step - loss: 16.1949 - val_loss: 15.5661 Epoch 375/500 30/30 [==============================] - 54s 2s/step - loss: 15.9551 - val_loss: 16.5234 Epoch 376/500 30/30 [==============================] - 54s 2s/step - loss: 16.0258 - val_loss: 15.4668 Epoch 377/500 30/30 [==============================] - 54s 2s/step - loss: 16.1134 - val_loss: 16.1877 Epoch 378/500 30/30 [==============================] - 54s 2s/step - loss: 15.9459 - val_loss: 16.0216 Epoch 379/500 30/30 [==============================] - 60s 2s/step - loss: 16.4525 - val_loss: 15.6702 Epoch 380/500 30/30 [==============================] - 72s 2s/step - loss: 16.0660 - val_loss: 15.3305 Epoch 381/500 30/30 [==============================] - 81s 3s/step - loss: 16.0083 - val_loss: 16.1274 Epoch 382/500 30/30 [==============================] - 87s 3s/step - loss: 16.0092 - val_loss: 16.1366 Epoch 383/500 30/30 [==============================] - 121s 4s/step - loss: 16.1354 - val_loss: 15.7858 Epoch 384/500 30/30 [==============================] - 123s 4s/step - loss: 16.1129 - val_loss: 15.7107 Epoch 385/500 30/30 [==============================] - 123s 4s/step - loss: 16.0284 - val_loss: 16.0496 Epoch 386/500 30/30 [==============================] - 126s 4s/step - loss: 15.8368 - val_loss: 16.5170 Epoch 387/500 30/30 [==============================] - 124s 4s/step - loss: 16.3342 - 
val_loss: 15.4547 Epoch 388/500 30/30 [==============================] - 123s 4s/step - loss: 16.2401 - val_loss: 15.5744 Epoch 389/500 30/30 [==============================] - 126s 4s/step - loss: 16.2276 - val_loss: 15.5983 Epoch 390/500 30/30 [==============================] - 125s 4s/step - loss: 15.9413 - val_loss: 15.6545 Epoch 391/500 30/30 [==============================] - 123s 4s/step - loss: 16.3595 - val_loss: 15.3371 Epoch 392/500 30/30 [==============================] - 63s 2s/step - loss: 15.8981 - val_loss: 16.1008 Epoch 393/500 30/30 [==============================] - 68s 2s/step - loss: 16.1883 - val_loss: 15.8930 Epoch 394/500 30/30 [==============================] - 58s 2s/step - loss: 15.9516 - val_loss: 15.9927 Epoch 395/500 30/30 [==============================] - 61s 2s/step - loss: 15.9433 - val_loss: 14.9362 Epoch 396/500 30/30 [==============================] - 60s 2s/step - loss: 16.1004 - val_loss: 16.4069 Epoch 397/500 30/30 [==============================] - 58s 2s/step - loss: 15.9822 - val_loss: 15.7637 Epoch 398/500 30/30 [==============================] - 58s 2s/step - loss: 16.1382 - val_loss: 15.9379 Epoch 399/500 30/30 [==============================] - 58s 2s/step - loss: 16.1960 - val_loss: 16.0348 Epoch 400/500 30/30 [==============================] - 57s 2s/step - loss: 16.0175 - val_loss: 16.3064 Epoch 401/500 30/30 [==============================] - 62s 2s/step - loss: 15.8997 - val_loss: 15.6500 Epoch 402/500 30/30 [==============================] - 83s 3s/step - loss: 16.2010 - val_loss: 15.8539 Epoch 403/500 30/30 [==============================] - 104s 3s/step - loss: 15.7408 - val_loss: 16.1076 Epoch 404/500 30/30 [==============================] - 115s 4s/step - loss: 16.4274 - val_loss: 15.3456 Epoch 405/500 30/30 [==============================] - 120s 4s/step - loss: 15.7428 - val_loss: 15.5534 Epoch 406/500 30/30 [==============================] - 122s 4s/step - loss: 16.2359 - val_loss: 16.0074 Epoch 407/500 
30/30 [==============================] - 124s 4s/step - loss: 16.0718 - val_loss: 16.4514 Epoch 408/500 30/30 [==============================] - 125s 4s/step - loss: 16.3895 - val_loss: 15.2282 Epoch 409/500 30/30 [==============================] - 122s 4s/step - loss: 15.9361 - val_loss: 15.5478 Epoch 410/500 30/30 [==============================] - 125s 4s/step - loss: 16.1194 - val_loss: 15.6116 Epoch 411/500 30/30 [==============================] - 126s 4s/step - loss: 16.0214 - val_loss: 16.1073 Epoch 412/500 30/30 [==============================] - 123s 4s/step - loss: 16.0557 - val_loss: 15.5114 Epoch 413/500 30/30 [==============================] - 127s 4s/step - loss: 16.1405 - val_loss: 16.4630 Epoch 414/500 30/30 [==============================] - 125s 4s/step - loss: 16.1881 - val_loss: 15.7337 Epoch 415/500 30/30 [==============================] - 70s 2s/step - loss: 15.9524 - val_loss: 15.4768 Epoch 416/500 30/30 [==============================] - 65s 2s/step - loss: 15.8019 - val_loss: 15.7502 Epoch 417/500 30/30 [==============================] - 60s 2s/step - loss: 16.3821 - val_loss: 15.6619 Epoch 418/500 30/30 [==============================] - 58s 2s/step - loss: 15.8915 - val_loss: 15.7251 Epoch 419/500 30/30 [==============================] - 58s 2s/step - loss: 15.8573 - val_loss: 16.5752 Epoch 420/500 30/30 [==============================] - 58s 2s/step - loss: 16.0249 - val_loss: 16.2398 Epoch 421/500 30/30 [==============================] - 57s 2s/step - loss: 15.9861 - val_loss: 16.3022 Epoch 422/500 30/30 [==============================] - 58s 2s/step - loss: 15.8775 - val_loss: 15.7504 Epoch 423/500 30/30 [==============================] - 58s 2s/step - loss: 16.0352 - val_loss: 15.9333 Epoch 424/500 30/30 [==============================] - 61s 2s/step - loss: 15.9813 - val_loss: 16.0950 Epoch 425/500 30/30 [==============================] - 60s 2s/step - loss: 16.0516 - val_loss: 15.4165 Epoch 426/500 30/30 
[==============================] - 71s 2s/step - loss: 16.1241 - val_loss: 15.4657 Epoch 427/500 30/30 [==============================] - 98s 3s/step - loss: 16.0654 - val_loss: 16.1920 Epoch 428/500 30/30 [==============================] - 114s 4s/step - loss: 15.9455 - val_loss: 15.2535 Epoch 429/500 30/30 [==============================] - 121s 4s/step - loss: 16.0065 - val_loss: 15.8941 Epoch 430/500 30/30 [==============================] - 124s 4s/step - loss: 15.7573 - val_loss: 15.4150 Epoch 431/500 30/30 [==============================] - 124s 4s/step - loss: 16.0947 - val_loss: 15.7753 Epoch 432/500 30/30 [==============================] - 127s 4s/step - loss: 15.8444 - val_loss: 15.5911 Epoch 433/500 30/30 [==============================] - 125s 4s/step - loss: 16.1289 - val_loss: 15.9490 Epoch 434/500 30/30 [==============================] - 125s 4s/step - loss: 15.9296 - val_loss: 15.6148 Epoch 435/500 30/30 [==============================] - 126s 4s/step - loss: 15.9802 - val_loss: 15.4892 Epoch 436/500 30/30 [==============================] - 123s 4s/step - loss: 16.0529 - val_loss: 15.2430 Epoch 437/500 30/30 [==============================] - 124s 4s/step - loss: 15.7882 - val_loss: 15.6371 Epoch 438/500 30/30 [==============================] - 119s 4s/step - loss: 16.0208 - val_loss: 15.5694 Epoch 439/500 30/30 [==============================] - 53s 2s/step - loss: 16.2243 - val_loss: 16.4516 Epoch 440/500 30/30 [==============================] - 69s 2s/step - loss: 15.8460 - val_loss: 15.2869 Epoch 441/500 30/30 [==============================] - 65s 2s/step - loss: 15.9455 - val_loss: 15.9559 Epoch 442/500 30/30 [==============================] - 57s 2s/step - loss: 15.9085 - val_loss: 15.4212 Epoch 443/500 30/30 [==============================] - 58s 2s/step - loss: 16.0805 - val_loss: 15.5691 Epoch 444/500 30/30 [==============================] - 58s 2s/step - loss: 15.8312 - val_loss: 15.5900 Epoch 445/500 30/30 
[==============================] - 58s 2s/step - loss: 16.1131 - val_loss: 14.9550 Epoch 446/500 30/30 [==============================] - 59s 2s/step - loss: 16.1825 - val_loss: 16.5839 Epoch 447/500 30/30 [==============================] - 59s 2s/step - loss: 15.8725 - val_loss: 15.4740 Epoch 448/500 30/30 [==============================] - 83s 3s/step - loss: 15.9381 - val_loss: 15.3606 Epoch 449/500 30/30 [==============================] - 107s 4s/step - loss: 15.7734 - val_loss: 15.8835 Epoch 450/500 30/30 [==============================] - 118s 4s/step - loss: 16.2426 - val_loss: 16.0760 Epoch 451/500 30/30 [==============================] - 122s 4s/step - loss: 15.7717 - val_loss: 16.1588 Epoch 452/500 30/30 [==============================] - 126s 4s/step - loss: 15.8032 - val_loss: 15.5423 Epoch 453/500 30/30 [==============================] - 125s 4s/step - loss: 16.0863 - val_loss: 16.2087 Epoch 454/500 30/30 [==============================] - 123s 4s/step - loss: 15.7231 - val_loss: 15.4152 Epoch 455/500 30/30 [==============================] - 123s 4s/step - loss: 15.9819 - val_loss: 15.6086 Epoch 456/500 30/30 [==============================] - 124s 4s/step - loss: 16.2392 - val_loss: 15.6546 Epoch 457/500 30/30 [==============================] - 125s 4s/step - loss: 15.9337 - val_loss: 15.5734 Epoch 458/500 30/30 [==============================] - 123s 4s/step - loss: 15.7483 - val_loss: 16.0871 Epoch 459/500 30/30 [==============================] - 125s 4s/step - loss: 15.9154 - val_loss: 15.7753 Epoch 460/500 30/30 [==============================] - 116s 4s/step - loss: 16.1634 - val_loss: 16.0291 Epoch 461/500 30/30 [==============================] - 52s 2s/step - loss: 16.0713 - val_loss: 15.6570 Epoch 462/500 30/30 [==============================] - 68s 2s/step - loss: 15.7077 - val_loss: 15.3641 Epoch 463/500 30/30 [==============================] - 59s 2s/step - loss: 16.0866 - val_loss: 15.8481 Epoch 464/500 30/30 
[==============================] - 57s 2s/step - loss: 15.9679 - val_loss: 15.6844 Epoch 465/500 30/30 [==============================] - 59s 2s/step - loss: 15.9050 - val_loss: 15.2170 Epoch 466/500 30/30 [==============================] - 57s 2s/step - loss: 15.7928 - val_loss: 16.0792 Epoch 467/500 30/30 [==============================] - 57s 2s/step - loss: 15.9432 - val_loss: 15.4652 Epoch 468/500 30/30 [==============================] - 59s 2s/step - loss: 16.0087 - val_loss: 15.8910 Epoch 469/500 30/30 [==============================] - 64s 2s/step - loss: 15.9682 - val_loss: 15.9137 Epoch 470/500 30/30 [==============================] - 59s 2s/step - loss: 15.6714 - val_loss: 15.9395 Epoch 471/500 30/30 [==============================] - 88s 3s/step - loss: 16.0309 - val_loss: 15.9491 Epoch 472/500 30/30 [==============================] - 109s 4s/step - loss: 15.8227 - val_loss: 15.7770 Epoch 473/500 30/30 [==============================] - 117s 4s/step - loss: 16.0340 - val_loss: 15.3767 Epoch 474/500 30/30 [==============================] - 122s 4s/step - loss: 15.8100 - val_loss: 16.0189 Epoch 475/500 30/30 [==============================] - 129s 4s/step - loss: 15.8677 - val_loss: 15.8241 Epoch 476/500 30/30 [==============================] - 129s 4s/step - loss: 15.8201 - val_loss: 15.2546 Epoch 477/500 30/30 [==============================] - 123s 4s/step - loss: 16.1264 - val_loss: 16.2662 Epoch 478/500 30/30 [==============================] - 124s 4s/step - loss: 16.1311 - val_loss: 15.2587 Epoch 479/500 30/30 [==============================] - 125s 4s/step - loss: 16.2160 - val_loss: 15.7506 Epoch 480/500 30/30 [==============================] - 123s 4s/step - loss: 15.8996 - val_loss: 16.0202 Epoch 481/500 30/30 [==============================] - 127s 4s/step - loss: 15.9867 - val_loss: 15.5650 Epoch 482/500 30/30 [==============================] - 170s 6s/step - loss: 15.7489 - val_loss: 15.4263 Epoch 483/500 30/30 
[==============================] - 63s 2s/step - loss: 16.0861 - val_loss: 15.6782 Epoch 484/500 30/30 [==============================] - 77s 3s/step - loss: 15.8524 - val_loss: 15.6728 Epoch 485/500 30/30 [==============================] - 68s 2s/step - loss: 15.9259 - val_loss: 15.5141 Epoch 486/500 30/30 [==============================] - 67s 2s/step - loss: 15.7106 - val_loss: 15.6335 Epoch 487/500 30/30 [==============================] - 63s 2s/step - loss: 15.9842 - val_loss: 15.1482 Epoch 488/500 30/30 [==============================] - 61s 2s/step - loss: 15.8998 - val_loss: 16.0844 Epoch 489/500 30/30 [==============================] - 59s 2s/step - loss: 15.8302 - val_loss: 16.6305 Epoch 490/500 30/30 [==============================] - 59s 2s/step - loss: 15.8365 - val_loss: 15.8551 Epoch 491/500 30/30 [==============================] - 57s 2s/step - loss: 16.0139 - val_loss: 15.3942 Epoch 492/500 30/30 [==============================] - 58s 2s/step - loss: 15.9906 - val_loss: 16.0351 Epoch 493/500 30/30 [==============================] - 59s 2s/step - loss: 15.7704 - val_loss: 15.5585 Epoch 494/500 30/30 [==============================] - 64s 2s/step - loss: 15.8734 - val_loss: 15.5017 Epoch 495/500 30/30 [==============================] - 86s 3s/step - loss: 15.8414 - val_loss: 16.0038 Epoch 496/500 30/30 [==============================] - 109s 4s/step - loss: 16.0293 - val_loss: 15.9147 Epoch 497/500 30/30 [==============================] - 119s 4s/step - loss: 15.7651 - val_loss: 15.6716 Epoch 498/500 30/30 [==============================] - 123s 4s/step - loss: 15.8485 - val_loss: 16.0082 Epoch 499/500 30/30 [==============================] - 126s 4s/step - loss: 15.8425 - val_loss: 14.8089 Epoch 500/500 30/30 [==============================] - 101s 3s/step - loss: 16.0761 - val_loss: 15.9947 Unfreeze all of the layers. Train on 488 samples, val on 121 samples, with batch size 16.
/var/folders/j_/grk4ythd0392dcw5z3gkgw5w0000gn/T/ipykernel_39692/4035785499.py:81: UserWarning: `Model.fit_generator` is deprecated and will be removed in a future version. Please use `Model.fit`, which supports generators. model.fit_generator(data_generator_wrapper(lines[:num_train], batch_size, input_shape, anchors, num_classes),
Epoch 51/100
2023-01-22 13:37:37.110606: E tensorflow/core/grappler/optimizers/meta_optimizer.cc:954] layout failed: INVALID_ARGUMENT: Subshape must have computed start >= end since stride is negative, but is 0 and 2 (computed from start 0 and end 9223372036854775807 over shape with rank 2 and stride-1)
30/30 [==============================] - ETA: 0s - loss: 16.2221
2023-01-22 13:43:00.326593: E tensorflow/core/grappler/optimizers/meta_optimizer.cc:954] layout failed: INVALID_ARGUMENT: Subshape must have computed start >= end since stride is negative, but is 0 and 2 (computed from start 0 and end 9223372036854775807 over shape with rank 2 and stride-1)
30/30 [==============================] - 350s 11s/step - loss: 16.2221 - val_loss: 15.0134 - lr: 1.0000e-04 Epoch 52/100 30/30 [==============================] - 346s 12s/step - loss: 14.9261 - val_loss: 14.5771 - lr: 1.0000e-04 Epoch 53/100 30/30 [==============================] - 243s 8s/step - loss: 14.5103 - val_loss: 14.5714 - lr: 1.0000e-04 Epoch 54/100 30/30 [==============================] - 231s 8s/step - loss: 14.2489 - val_loss: 13.8991 - lr: 1.0000e-04 Epoch 55/100 30/30 [==============================] - 276s 9s/step - loss: 14.1362 - val_loss: 14.2145 - lr: 1.0000e-04 Epoch 56/100 30/30 [==============================] - 334s 11s/step - loss: 13.6959 - val_loss: 13.6794 - lr: 1.0000e-04 Epoch 57/100 30/30 [==============================] - 340s 11s/step - loss: 13.5898 - val_loss: 13.1452 - lr: 1.0000e-04 Epoch 58/100 30/30 [==============================] - 336s 11s/step - loss: 13.4866 - val_loss: 13.5824 - lr: 1.0000e-04 Epoch 59/100 30/30 [==============================] - 259s 9s/step - loss: 13.4531 - val_loss: 13.1278 - lr: 1.0000e-04 Epoch 60/100 30/30 [==============================] - 219s 7s/step - loss: 13.3503 - val_loss: 13.0499 - lr: 1.0000e-04 Epoch 61/100 30/30 [==============================] - 254s 8s/step - loss: 13.2267 - val_loss: 13.0210 - lr: 1.0000e-04 Epoch 62/100 30/30 [==============================] - 414s 14s/step - loss: 13.2120 - val_loss: 14.0383 - lr: 1.0000e-04 Epoch 63/100 30/30 [==============================] - 472s 16s/step - loss: 12.9336 - val_loss: 13.2708 - lr: 1.0000e-04 Epoch 64/100 30/30 [==============================] - ETA: 0s - loss: 13.1477 Epoch 64: ReduceLROnPlateau reducing learning rate to 9.999999747378752e-06. 
30/30 [==============================] - 470s 16s/step - loss: 13.1477 - val_loss: 13.6016 - lr: 1.0000e-04 Epoch 65/100 30/30 [==============================] - 272s 9s/step - loss: 13.2001 - val_loss: 12.9789 - lr: 1.0000e-05 Epoch 66/100 30/30 [==============================] - 256s 9s/step - loss: 12.8699 - val_loss: 12.7537 - lr: 1.0000e-05 Epoch 67/100 30/30 [==============================] - 460s 15s/step - loss: 12.8529 - val_loss: 12.5797 - lr: 1.0000e-05 Epoch 68/100 30/30 [==============================] - 520s 17s/step - loss: 12.8881 - val_loss: 12.8464 - lr: 1.0000e-05 Epoch 69/100 30/30 [==============================] - 316s 10s/step - loss: 12.8289 - val_loss: 13.0487 - lr: 1.0000e-05 Epoch 70/100 30/30 [==============================] - ETA: 0s - loss: 12.7765 Epoch 70: ReduceLROnPlateau reducing learning rate to 9.999999747378752e-07. 30/30 [==============================] - 236s 8s/step - loss: 12.7765 - val_loss: 12.7764 - lr: 1.0000e-05 Epoch 71/100 30/30 [==============================] - 357s 12s/step - loss: 12.7222 - val_loss: 12.6030 - lr: 1.0000e-06 Epoch 72/100 30/30 [==============================] - 471s 16s/step - loss: 12.9312 - val_loss: 12.7407 - lr: 1.0000e-06 Epoch 73/100 30/30 [==============================] - ETA: 0s - loss: 12.7563 Epoch 73: ReduceLROnPlateau reducing learning rate to 9.999999974752428e-08. 30/30 [==============================] - 474s 16s/step - loss: 12.7563 - val_loss: 12.8981 - lr: 1.0000e-06 Epoch 74/100 30/30 [==============================] - 337s 11s/step - loss: 12.6372 - val_loss: 13.0085 - lr: 1.0000e-07 Epoch 75/100 30/30 [==============================] - 238s 8s/step - loss: 12.6892 - val_loss: 12.6015 - lr: 1.0000e-07 Epoch 76/100 30/30 [==============================] - ETA: 0s - loss: 12.7828 Epoch 76: ReduceLROnPlateau reducing learning rate to 1.0000000116860975e-08. 
30/30 [==============================] - 308s 10s/step - loss: 12.7828 - val_loss: 13.2228 - lr: 1.0000e-07 Epoch 77/100 30/30 [==============================] - 336s 11s/step - loss: 12.7876 - val_loss: 12.4209 - lr: 1.0000e-08 Epoch 78/100 30/30 [==============================] - 337s 11s/step - loss: 12.5455 - val_loss: 12.7752 - lr: 1.0000e-08 Epoch 79/100 30/30 [==============================] - 258s 8s/step - loss: 12.7785 - val_loss: 12.7235 - lr: 1.0000e-08 Epoch 80/100 30/30 [==============================] - ETA: 0s - loss: 12.7194 Epoch 80: ReduceLROnPlateau reducing learning rate to 9.999999939225292e-10. 30/30 [==============================] - 222s 7s/step - loss: 12.7194 - val_loss: 12.8656 - lr: 1.0000e-08 Epoch 81/100 30/30 [==============================] - 255s 9s/step - loss: 13.0056 - val_loss: 12.7722 - lr: 1.0000e-09 Epoch 82/100 30/30 [==============================] - 275s 9s/step - loss: 12.6045 - val_loss: 12.7747 - lr: 1.0000e-09 Epoch 83/100 30/30 [==============================] - ETA: 0s - loss: 12.8213 Epoch 83: ReduceLROnPlateau reducing learning rate to 9.999999717180686e-11. 30/30 [==============================] - 371s 12s/step - loss: 12.8213 - val_loss: 12.7278 - lr: 1.0000e-09 Epoch 84/100 30/30 [==============================] - 476s 16s/step - loss: 12.8018 - val_loss: 12.7182 - lr: 1.0000e-10 Epoch 85/100 30/30 [==============================] - 395s 13s/step - loss: 12.6051 - val_loss: 12.9475 - lr: 1.0000e-10 Epoch 86/100 30/30 [==============================] - ETA: 0s - loss: 12.7930 Epoch 86: ReduceLROnPlateau reducing learning rate to 9.99999943962493e-12. 30/30 [==============================] - 221s 7s/step - loss: 12.7930 - val_loss: 12.5336 - lr: 1.0000e-10 Epoch 87/100 30/30 [==============================] - 239s 8s/step - loss: 12.6282 - val_loss: 12.7567 - lr: 1.0000e-11 Epoch 87: early stopping
Prepare the image for OCR
import cv2 as cv
from matplotlib import pyplot as plt
def grayscale(image):
    """Convert a BGR image (OpenCV's default channel order) to single-channel grayscale."""
    gray = cv.cvtColor(image, cv.COLOR_BGR2GRAY)
    return gray
def noise_removal(image, kernel_size=1):
    """Reduce salt-and-pepper noise in a binarized image.

    NOTE(review): with the original (and default) ``kernel_size=1`` the
    dilate / erode / MORPH_CLOSE passes use a 1x1 structuring element and are
    identity operations — only the median blur has any effect.  Pass
    ``kernel_size=2`` or larger to make the morphological passes actually
    smooth the mask; the default preserves the original behavior.

    Parameters:
        image: single-channel (binary) image, as produced by ``cv.threshold``.
        kernel_size: side length of the square structuring element.

    Returns:
        The denoised image.
    """
    import numpy as np
    kernel = np.ones((kernel_size, kernel_size), np.uint8)
    image = cv.dilate(image, kernel, iterations=1)
    image = cv.erode(image, kernel, iterations=1)
    image = cv.morphologyEx(image, cv.MORPH_CLOSE, kernel)
    # Median blur is the pass that actually removes isolated speckles.
    image = cv.medianBlur(image, 3)
    return image
def thin_font(image):
    """Thin the glyph strokes of a dark-on-light binary image by one erosion pass."""
    import numpy as np
    # Invert so the text is white: erosion then shrinks the strokes.
    inverted = cv.bitwise_not(image)
    eroded = cv.erode(inverted, np.ones((2, 2), np.uint8), iterations=1)
    # Restore the original (dark text on light background) polarity.
    return cv.bitwise_not(eroded)
def thick_font(image):
    """Thicken the glyph strokes of a dark-on-light binary image by one dilation pass."""
    import numpy as np
    # Invert so the text is white: dilation then grows the strokes.
    inverted = cv.bitwise_not(image)
    dilated = cv.dilate(inverted, np.ones((2, 2), np.uint8), iterations=1)
    # Restore the original (dark text on light background) polarity.
    return cv.bitwise_not(dilated)
def remove_borders(image):
    """Crop the image to the bounding box of its largest external contour.

    Fixes: the original indexed ``sorted(contours, ...)[-1]``, which raises
    ``IndexError`` when ``findContours`` returns no contours (e.g. an all-black
    image); we now return the input unchanged in that case, and use ``max``
    instead of a full sort to pick the largest contour.

    Parameters:
        image: single-channel (binary) image.

    Returns:
        The cropped image, or the original image if no contour was found.
    """
    contours, _hierarchy = cv.findContours(image, cv.RETR_EXTERNAL, cv.CHAIN_APPROX_SIMPLE)
    if not contours:
        # Nothing detected — cropping is impossible; best effort is a no-op.
        return image
    largest = max(contours, key=cv.contourArea)
    x, y, w, h = cv.boundingRect(largest)
    return image[y:y + h, x:x + w]
# Pipeline: load -> grayscale -> binarize -> denoise -> crop borders -> save/show.
image_file = './img/img00.png'
img = cv.imread(image_file)
gray_image = grayscale(img)
# NOTE(review): maxval=150 means the binary image holds 0/150, not 0/255 —
# presumably intentional for this OCR pipeline; confirm downstream expects it.
thresh, im_bw = cv.threshold(gray_image, 100, 150, cv.THRESH_BINARY)
no_noise = noise_removal(im_bw)
# eroded_image = thin_font(no_noise)
# dilated_image = thick_font(eroded_image)
no_borders = remove_borders(no_noise)
cv.imwrite("temp/no_borders.jpg", no_borders)
# NOTE(review): `display` is defined later in this file — this works in the
# original notebook (earlier cell) but would be a NameError in a flat script.
display('temp/no_borders.jpg')
def display(im_path):
    """Show the image at *im_path* at roughly its native pixel size, without axes."""
    dpi = 80
    pixels = plt.imread(im_path)
    h, w = pixels.shape[:2]
    # Size the figure in inches so one image pixel maps to ~one screen pixel at `dpi`.
    fig = plt.figure(figsize=(w / float(dpi), h / float(dpi)))
    # A single axes occupying the entire figure area.
    axes = fig.add_axes([0, 0, 1, 1])
    # No spines, ticks, or labels around the image.
    axes.axis('off')
    axes.imshow(pixels, cmap='gray')
    plt.show()
# Show the original image, then an inverted copy (demonstration cells).
display(image_file)
inverted_image = cv.bitwise_not(img)
cv.imwrite("temp/inverted.jpg", inverted_image)
display("temp/inverted.jpg")
def grayscale(image):
    """Return the single-channel (grayscale) version of a BGR *image*."""
    result = cv.cvtColor(image, cv.COLOR_BGR2GRAY)
    return result
gray_image = grayscale(img)
cv.imwrite("temp/gray.jpg", gray_image)
# Notebook-export artifact: `True` below is the echoed return value of cv.imwrite.
True
display("temp/gray.jpg")
# NOTE(review): maxval=210 gives a 0/210 binary image rather than 0/255 —
# confirm downstream consumers (OCR) are fine with the reduced contrast.
thresh, im_bw = cv.threshold(gray_image, 170, 210, cv.THRESH_BINARY)
cv.imwrite("temp/bw_image.jpg", im_bw)
# Notebook-export artifact: echoed return value of cv.imwrite.
True
display("temp/bw_image.jpg")
def noise_removal(image, kernel_size=1):
    """Reduce salt-and-pepper noise in a binarized image.

    NOTE(review): with the original (and default) ``kernel_size=1`` the
    dilate / erode / MORPH_CLOSE passes use a 1x1 structuring element and are
    identity operations — only the median blur has any effect.  Pass
    ``kernel_size=2`` or larger to make the morphological passes actually
    smooth the mask; the default preserves the original behavior.

    Parameters:
        image: single-channel (binary) image, as produced by ``cv.threshold``.
        kernel_size: side length of the square structuring element.

    Returns:
        The denoised image.
    """
    import numpy as np
    kernel = np.ones((kernel_size, kernel_size), np.uint8)
    image = cv.dilate(image, kernel, iterations=1)
    image = cv.erode(image, kernel, iterations=1)
    image = cv.morphologyEx(image, cv.MORPH_CLOSE, kernel)
    # Median blur is the pass that actually removes isolated speckles.
    image = cv.medianBlur(image, 3)
    return image
no_noise = noise_removal(im_bw)
cv.imwrite("temp/no_noise.jpg", no_noise)
# Notebook-export artifact: echoed return value of cv.imwrite.
True
display("temp/no_noise.jpg")
def thin_font(image):
    """One erosion pass that thins the strokes of dark-on-light text."""
    import numpy as np
    kernel = np.ones((2, 2), np.uint8)
    # Work on the inverted image (white text), erode, then flip back.
    work = cv.bitwise_not(image)
    work = cv.erode(work, kernel, iterations=1)
    return cv.bitwise_not(work)
eroded_image = thin_font(no_noise)
cv.imwrite("temp/eroded_image.jpg", eroded_image)
# Notebook-export artifact: echoed return value of cv.imwrite.
True
display("temp/eroded_image.jpg")
def thick_font(image):
    """One dilation pass that thickens the strokes of dark-on-light text."""
    import numpy as np
    kernel = np.ones((2, 2), np.uint8)
    # Work on the inverted image (white text), dilate, then flip back.
    work = cv.bitwise_not(image)
    work = cv.dilate(work, kernel, iterations=1)
    return cv.bitwise_not(work)
dilated_image = thick_font(no_noise)
cv.imwrite("temp/dilated_image.jpg", dilated_image)
# Notebook-export artifact: echoed return value of cv.imwrite.
True
display("temp/dilated_image.jpg")
def remove_borders(image):
    """Crop the image to the bounding box of its largest external contour.

    Fixes: the original indexed ``sorted(contours, ...)[-1]``, which raises
    ``IndexError`` when ``findContours`` returns no contours (e.g. an all-black
    image); we now return the input unchanged in that case, and use ``max``
    instead of a full sort to pick the largest contour.

    Parameters:
        image: single-channel (binary) image.

    Returns:
        The cropped image, or the original image if no contour was found.
    """
    contours, _hierarchy = cv.findContours(image, cv.RETR_EXTERNAL, cv.CHAIN_APPROX_SIMPLE)
    if not contours:
        # Nothing detected — cropping is impossible; best effort is a no-op.
        return image
    largest = max(contours, key=cv.contourArea)
    x, y, w, h = cv.boundingRect(largest)
    return image[y:y + h, x:x + w]
no_borders = remove_borders(no_noise)
cv.imwrite("temp/no_borders.jpg", no_borders)
display('temp/no_borders.jpg')