wko-on-cloud-n/main.ipynb

{
"cells": [
{
"cell_type": "code",
"execution_count": 21,
"metadata": {},
"outputs": [],
"source": [
"\n",
"from src.Unet import Unet\n",
"from src.loss import jaccard_loss\n",
"from src.metrics import IOU\n",
"from src.consts import EPOCHS, STEPS, SEED, RGB_DIR, JPG_IMAGES, MASK_DIR, FC_DIR\n",
"from src.helpers import create_folder\n",
"from tensorflow.keras.callbacks import ModelCheckpoint\n",
"import tensorflow as tf "
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"model = Unet(num_classes=1).build_model()\n",
"\n",
"compile_params ={\n",
" 'loss':jaccard_loss(smooth=90), \n",
" 'optimizer':'rmsprop',\n",
" 'metrics':[IOU]\n",
" }\n",
" \n",
"model.compile(**compile_params)\n",
"\n",
"model_name = \"models/unet.h5\"\n",
"modelcheckpoint = ModelCheckpoint(model_name,\n",
" monitor='val_loss',\n",
" mode='auto',\n",
" verbose=1,\n",
" save_best_only=True)"
]
},
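{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# jaccard_loss and IOU come from this repo's src package, which is not shown in this\n",
"# notebook. As rough orientation only, here is a minimal sketch of what a smoothed\n",
"# Jaccard loss and IoU metric typically look like (assumed, not the project's actual\n",
"# code; the names carry a _sketch suffix so they do not shadow the real imports):\n",
"import tensorflow as tf\n",
"\n",
"def jaccard_loss_sketch(smooth=1.0):\n",
"    def loss(y_true, y_pred):\n",
"        intersection = tf.reduce_sum(y_true * y_pred)\n",
"        union = tf.reduce_sum(y_true) + tf.reduce_sum(y_pred) - intersection\n",
"        return 1.0 - (intersection + smooth) / (union + smooth)\n",
"    return loss\n",
"\n",
"def IOU_sketch(y_true, y_pred):\n",
"    y_pred = tf.cast(y_pred > 0.5, y_true.dtype)\n",
"    intersection = tf.reduce_sum(y_true * y_pred)\n",
"    union = tf.reduce_sum(y_true) + tf.reduce_sum(y_pred) - intersection\n",
"    return (intersection + 1e-7) / (union + 1e-7)"
]
},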
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Found 9399 images belonging to 1 classes.\n",
"Found 9399 images belonging to 1 classes.\n",
"Found 2349 images belonging to 1 classes.\n",
"Found 2349 images belonging to 1 classes.\n"
]
}
],
"source": [
"train_gen = create_generators('training', SEED)\n",
"val_gen = create_generators('validation', SEED)"
]
},
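{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# create_generators is defined elsewhere in the repo. Judging by the paired\n",
"# 'Found ... images belonging to 1 classes' lines above (9399 train / 2349 val,\n",
"# i.e. roughly an 80/20 split), it presumably zips an image generator with a mask\n",
"# generator built via flow_from_directory. A hedged sketch under those assumptions\n",
"# (directory names, rescaling and split are guesses, not the project's actual code):\n",
"from tensorflow.keras.preprocessing.image import ImageDataGenerator\n",
"\n",
"def create_generators_sketch(subset, seed, batch_size=32, target_size=(512, 512)):\n",
"    args = dict(rescale=1.0 / 255, validation_split=0.2)\n",
"    img_gen = ImageDataGenerator(**args).flow_from_directory(\n",
"        './images/rgb', class_mode=None, seed=seed, subset=subset,\n",
"        batch_size=batch_size, target_size=target_size)\n",
"    mask_gen = ImageDataGenerator(**args).flow_from_directory(\n",
"        './images/mask', class_mode=None, seed=seed, subset=subset,\n",
"        batch_size=batch_size, target_size=target_size, color_mode='grayscale')\n",
"    return zip(img_gen, mask_gen)"
]
},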
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Epoch 1/5\n",
"10/10 [==============================] - 1080s 109s/step - loss: 1.0869 - IOU: 0.6101 - val_loss: 1.1605 - val_IOU: 0.6003\n",
"Epoch 2/5\n",
"10/10 [==============================] - 1066s 109s/step - loss: 1.1465 - IOU: 0.6051 - val_loss: 1.1744 - val_IOU: 0.5955\n",
"Epoch 3/5\n",
"10/10 [==============================] - 1082s 109s/step - loss: 1.1440 - IOU: 0.6060 - val_loss: 1.0622 - val_IOU: 0.6341\n",
"Epoch 4/5\n",
"10/10 [==============================] - 1060s 108s/step - loss: 1.1511 - IOU: 0.6035 - val_loss: 1.3288 - val_IOU: 0.5423\n",
"Epoch 5/5\n",
"10/10 [==============================] - 1062s 108s/step - loss: 1.1654 - IOU: 0.5986 - val_loss: 1.1816 - val_IOU: 0.5930\n"
]
}
],
"source": [
"history = model.fit_generator(train_gen,\n",
" validation_data=val_gen,\n",
" epochs=EPOCHS,\n",
" steps_per_epoch=STEPS,\n",
" validation_steps = STEPS,\n",
" shuffle=True,\n",
")"
]
},
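{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# The ModelCheckpoint created earlier is never passed to the training call, so the\n",
"# best-val_loss weights are not saved automatically during training. A minimal sketch\n",
"# of how it could be wired in (same arguments as above plus callbacks; left commented\n",
"# so this cell does not retrain anything):\n",
"# history = model.fit(train_gen,\n",
"#                     validation_data=val_gen,\n",
"#                     epochs=EPOCHS,\n",
"#                     steps_per_epoch=STEPS,\n",
"#                     validation_steps=STEPS,\n",
"#                     callbacks=[modelcheckpoint])"
]
},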
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [],
"source": [
"create_folder('models', '.')\n",
"model.save(filepath=model_name)"
]
},
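{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Reloading the saved file later needs the custom loss/metric to be resolvable. A simple\n",
"# way to avoid custom_objects lookups for the jaccard_loss closure is to load with\n",
"# compile=False and recompile with the same compile_params (a sketch, not required above):\n",
"reloaded = tf.keras.models.load_model(model_name, compile=False)\n",
"reloaded.compile(**compile_params)"
]
},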
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"dict_keys(['loss', 'IOU', 'val_loss', 'val_IOU'])\n"
]
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAYgAAAEWCAYAAAB8LwAVAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/YYfK9AAAACXBIWXMAAAsTAAALEwEAmpwYAAA06UlEQVR4nO3dd5xU1dnA8d+znS2wjd52RVAQhAVEFIkVxQIKGILIqinW+GryRhNN1ERNMc1YYje+sSKKgKgYigFRbBQBaQpSF5Cy9O3lvH+cu8vs7rCNuXNnZ57v5zOfnbll7jMDM8/cc+55jhhjUEoppWqL8joApZRSoUkThFJKKb80QSillPJLE4RSSim/NEEopZTySxOEUkopvzRBKBUAIvJvEfl9I7fdLCIXuB2TUsdLE4RSIaR2ohGReBH5k4hsFZEiEVkvIneKiPhsUyfhiMh1IvJxMGNX4SfG6wCUUvV6E+gAXAKsAwYDLwNdgds8jEtFAD2DUBHD+aV9p4isFJECEfmXiLQXkfdF5LCIzBORNJ/tR4vIahE5ICILRKS3z7ocEVnm7DcFSKh1rMtEZLmz7ycicmoz4j0fuBAYZ4xZZYwpN8Z8BkwCfioiJzb7zVCqETRBqEgzDhgB9AJGAe8DvwbaYj8PtwGISC9gMvAzZ90s4B0RiROROGAG9pd8OvZX/riqA4hIDvACcCOQATwDzBSR+CbGOgL43BizzXehMeZzIA84v4nPp1STaIJQkeZxY8wuY8x24CPsF/CXxphiYDqQ42z3A+A9Y8xcY0wZ8DegFXAmMBSIBR4xxpQZY6YCi32OcQPwjDHmc2NMhTHmRaDE2a8pMoGdx1i301mvlGs0QahIs8vnfpGfx8nO/U7AlqoVxphKYBvQ2Vm33dSsdLnF53534BdO89IBETmA7TPo1MRY9wIdj7Guo7MeoBybsHzFAmVNPJ5SNWiCUMq/HdgvegCcq4a6Atuxv947+15JBHTzub8N+IMxJtXnlmiMmdzEGOYBp4tIV9+FInK6E8t/nUVbgaxa+2ZTM2kp1WSaIJTy7w3gUhE5X0RigV9gm4k+AT7F/mq/TURiRWQsMMRn3+eAm0TkdLGSRORSEUlpSgDGmHnAB8BbInKKiESLyFDgFeApY8x6Z9MpwM9E5GTneIOBHwGvN//lK6WXuSrllzHmaxGZBDyObVZaDowyxpQCOEnhOeD32A7saT77LhGR64F/Aj2xTVcfAwubEco44H7gP9g+h+3A88BffLZ5DkgD3gHaYzuwf2OM+U8zjqdUNdEJg5RSSvmjTUxKKaX80gShlFLKL00QSiml/NIEoZRSyq+wuYopMzPTZGVleR2GUkq1KEuXLt1rjGnrb13YJIisrCyWLFnidRhKKdWiiMgxB1RqE5NSSim/NEEopZTySxOEUkopv8KmD8KfsrIy8vLyKC4u9joU1yUkJNClSxdiY2sX9VRKqeYJ6wSRl5dHSkoKWVlZ1Cy8GV6MMeTn55OXl0d2drbX4SilwkRYNzEVFxeTkZER1skBQETIyMiIiDMlpVTwhHWCAMI+OVSJlNeplAqesE8QSnnu6/ch/1uvo1CqyTRBuOzAgQM8+eSTTd7vkksu4cCBA4EPSAXX4n/B5Akw83+8jkSpJtME4bJjJYjy8vJ695s1axapqakuRaWCYvUMeO8XkJgBWxbpWYRqcTRBuOyuu+7i22+/ZcCAAZx22mkMHz6c0aNH06dPHwCuuOIKBg0axCmnnMKzzz5bvV9WVhZ79+5l8+bN9O7dm+uvv55TTjmFCy+8kKKiIq9ejmqsjQtg2vXQ9XT4yTyQKPjyFa+jUqpJwvoyV1/3v7OaNTsOBfQ5+3RqzW9HnVLvNg899BCrVq1i+fLlLFiwgEsvvZRVq1ZVX476wgsvkJ6eTlFREaeddhrjxo0jIyOjxnOsX7+eyZMn89xzzzF+/HjeeustJk2aFNDXogJox5fw+tWQcSJMfB1apcGJI2DFZDj3NxAdMR871cLpGUSQDRkypMZYhccee4z+/fszdOhQtm3bxvr16+vsk52dzYABAwAYNGgQmzdvDlK0qsn2boBXroRW6TBpmk0OAANz4fBO+PYDb+NTqgki5qdMQ7/0gyUpKan6/oIFC5g3bx6ffvopiYmJnHPOOX7HMsTHx1ffj46O1iamUHVoJ7w8xt7PnQ6tOx5d1/MiSMyEL1+GXhd5E59STaRnEC5LSUnh8OHDftcdPHiQtLQ0EhMTWbduHZ999lmQo1MBU7QfXhkLRftg0lTIPLHm+pg46D/BXvJ6ZI83MSrVRJogXJaRkcGwYcPo27cvd955Z411I0eOpLy8nN69e3PXXXcxdOhQj6JUx6W0EF6bAPkbYMJr0CnH/3Y5uVBZDitfD258SjWTGGO8jiEgBg8ebGpPGLR27Vp69+7tUUTBF2mvNyRUlNkO6fVz4Pv/hlOuqH/75y+AksNwy2ego99VCBCRpcaYwf7W6RmEUs1VWQlv3wrrZ8Olf284OQDkTII96yBPZz9UoU8ThFLNYQzMvdc2F537Gzjtx43b75SxEJtoO6uVCnGaIJRqjkWPwqf/hCE3wPfubHj7Kgmtoc8VsGoalBa4Fp5SgaAJQqmmWvYyzPst9B0HI//c9L6EgblQehjWvO1OfEoFiCYIpZpi3Sx45zbocR5c8TRENeMj1O0MSO9hE41SIUwThFKNtXkRTP2hvYx1/Mt2bENziNjO6q2f2JHXSoUoTRAua265b4BHHnmEwsLCAEekmuW7r2DyVZDaDSa+CfHJx/d8AyaCRMNyLeCnQpcmCJdpgggD+zbBK+NsUpg0DZIyGt6nISkdoOcIWD4ZKuov/a6UVyKmFpNXfMt9jxgxgnbt2vHGG29QUlLCmDFjuP/++ykoKGD8+PHk5eVRUVHBvffey65du9ixYwfnnnsumZmZzJ8/3+uXEpmO7Lb1lSpK4ZqZkNo1cM+dkwvf/Ac2zIOTRgbueZUKkMhJEO/fZZsJAqlDP7j4oXo38S33PWfOHKZOncoXX3yBMYbRo0ezcOFC9uzZQ6dOnXjvvfcAW6OpTZs2PPzww8yfP5/MzMzAxq0ap/igra90ZJdNDu1ODuzz97oIktraMRGaIFQI0iamIJozZw5z5swhJyeHgQMHsm7dOtavX0+/fv2YO3cuv/rVr/joo49o06aN16GqsmKYPBF2r7Ud0l1PC/wxomNtAb9v/mPPVJQKMZFzBtHAL/1gMMZw9913c+ONN9ZZt2zZMmbNmsU999zD+eefz3333edBhAqwfQJv/Ri2fAxjn4eeF7h3rJxc+ORxWPE6DLvNveMo1Qx6BuEy33LfF110ES+88AJHjhwBYPv27ezevZsdO3aQmJjIpEmTuPPOO1m2bFmdfVWQGAPv/RzWvWsHwZ36fXeP1/Yk6DLETkcaJoUzVfiInDMIj/iW+7744ouZOHEiZ5xxBgDJycm88sorbNiwgTvvvJOoqChiY2N56qmnALjhhhsYOXIknTp10k7qYPnvg7DsJRh+Bwy9KTjHzJlkB
9/lLYauQ4JzTKUaQct9h5FIe70B9+mTMPtuGHgtjHo0eOW4Sw7D33pBvyth9OPBOaZSDi33rVRDVr5hk0PvUXDZP4I7V0N8CpwyxhbwKzkSvOMq1QBNEEqtnwszboas4bZTOio6+DHk5ELpES3gp0JK2CeIcGlCa0ikvM6A2/YFTMmFdn3sdKGxCd7E0W0oZJyo80SokBLWCSIhIYH8/Pyw//I0xpCfn09Cgkdfbi3V7rXw6vehdUeY9Jadq8Er1QX8PtUCfipkuHoVk4iMBB4FooHnjTF1BiOIyHjgd4ABVhhjJorIAOApoDVQAfzBGDOlqcfv0qULeXl57Nmzp/kvooVISEigS5cuXofRchzYBi+PhZh4yJ0Oye28jgj6XwUfPGjPIkbc73U0SrmXIEQkGngCGAHkAYtFZKYxZo3PNj2Bu4Fhxpj9IlL1KS0ErjHGrBeRTsBSEZltjDnQlBhiY2PJzs4OxMtR4aQg39ZXKi2AH86CtCyvI7JSOkDPC2HFZDjvXojWq9CVt9xsYho
"text/plain": [
"<Figure size 432x288 with 1 Axes>"
]
},
"metadata": {
"needs_background": "light"
},
"output_type": "display_data"
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAYgAAAEWCAYAAAB8LwAVAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/YYfK9AAAACXBIWXMAAAsTAAALEwEAmpwYAAA3EElEQVR4nO3dd3hUZdr48e+dXggJJPQWRKX3IooFGyIgRawIa8f66vpbXXXX8qpb3Ka+lhVRWRUQK6gICLqKvSFButIChGIgEFp68vz+eE4gwUlImTNnJnN/ritXZk6Zc2dgzj3nKfcRYwxKKaXU0SK8DkAppVRw0gShlFLKJ00QSimlfNIEoZRSyidNEEoppXzSBKGUUsonTRBK+YGIvCQif6rhtpkick59X0cpt2mCUEop5ZMmCKWUUj5pglBhw2nauUtElovIIRF5UURaiMgCETkgIh+JSJMK248WkVUikisii0Wka4V1fUVkqbPf60DcUccaJSLLnH2/EpFedYz5ehFZLyJ7ROQ9EWntLBcReVxEskVkv4isEJEezroRIrLaiW2biNxZpzdMhT1NECrcjAfOBU4ELgAWAH8AmmE/D7cBiMiJwCzgt866+cBcEYkRkRjgHWA60BR403ldnH37AtOAG4BU4DngPRGJrU2gInIW8FfgEqAVsBl4zVk9DDjd+TuSnW1ynHUvAjcYY5KAHsDHtTmuUuU0Qahw85Qx5hdjzDbgc+BbY0yGMaYAmAP0dba7FJhnjPnQGFMM/BOIB04BBgPRwBPGmGJjzFvA9xWOMRl4zhjzrTGm1BjzMlDo7FcbVwDTjDFLjTGFwL3AySKSDhQDSUAXQIwxa4wxO5z9ioFuItLYGLPXGLO0lsdVCtAEocLPLxUe5/t43sh53Br7jR0AY0wZsBVo46zbZipXutxc4XEH4HdO81KuiOQC7Zz9auPoGA5irxLaGGM+Bp4GngGyRWSqiDR2Nh0PjAA2i8inInJyLY+rFKAJQqmqbMee6AHb5o89yW8DdgBtnGXl2ld4vBX4szEmpcJPgjFmVj1jSMQ2WW0DMMY8aYzpD3TDNjXd5Sz/3hgzBmiObQp7o5bHVQrQBKFUVd4ARorI2SISDfwO20z0FfA1UALcJiLRInIhMKjCvs8DN4rISU5ncqKIjBSRpFrGMAu4WkT6OP0Xf8E2iWWKyEDn9aOBQ0ABUOb0kVwhIslO09h+oKwe74MKY5oglPLBGPMTMBF4CtiN7dC+wBhTZIwpAi4ErgL2YPsrZlfYdwlwPbYJaC+w3tm2tjF8BNwPvI29aukEXOasboxNRHuxzVA5wD+cdZOATBHZD9yI7ctQqtZEbxiklFLKF72CUEop5ZMmCKWUUj5pglBKKeWTJgillFI+RXkdgD+lpaWZ9PR0r8NQSqmQ8cMPP+w2xjTzta5BJYj09HSWLFnidRhKKRUyRGRzVeu0iUkppZRPmiCUUkr5pAlCKaWUTw2qD8KX4uJisrKyKCgo8DoUV8XFxdG2bVuio6O9DkUp1UA0+ASRlZVFUlIS6enpVC6+2XAYY8jJySErK4uOHTt6HY5SqoFo8E1MBQUFpKamNtjkACAipKamNvirJKVUYDX4BAE06ORQLhz+RqVUYIVFglBKhZBtS2H9f72OQqEJwnW5ubn8+9//rvV+I0aMIDc31/8BKRXMjIE5N8Crl8LOFV5HE/Y0QbisqgRRUlJS7X7z588nJSXFpaiUClJZ38Pun8GUwuzJUKz9al7SBOGye+65hw0bNtCnTx8GDhzIaaedxujRo+nWrRsAY8eOpX///nTv3p2pU6ce3i89PZ3du3eTmZlJ165duf766+nevTvDhg0jPz/fqz9HKXctfQWiE+GiaZC9Gj5+xOuIwlqDH+Za0UNzV7F6+36/vma31o158ILuVa5/9NFHWblyJcuWLWPx4sWMHDmSlStXHh6OOm3aNJo2bUp+fj4DBw5k/PjxpKamVnqNdevWMWvWLJ5//nkuueQS3n77bSZOnOjXv0MpzxUehFVzoMc46D4OMr+Ar5+GE4bBcWd4HV1Y0iuIABs0aFCluQpPPvkkvXv3ZvDgwWzdupV169b9ap+OHTvSp08fAPr3709mZmaAolUqgFa/A0UHoe8k+/zcRyD1eHjnZsjP9TKysBVWVxDVfdMPlMTExMOPFy9ezEcffcTXX39NQkICQ4cO9TmXITY29vDjyMhIbWJSDVPGDEg9AdqdZJ/HJMCFU+HFYTD/Lhj/vLfxhSG9gnBZUlISBw4c8Llu3759NGnShISEBNauXcs333wT4OiUChK718GWr6HvRKg4p6dNfzjjbljxBqx827v4wlRYXUF4ITU1lSFDhtCjRw/i4+Np0aLF4XXDhw9nypQpdO3alc6dOzN48GAPI1XKQxnTQSKh9+W/Xnfq/4N1i+D9O6D9ydC4deDjC1NijPE6Br8ZMGCAOfqGQWvWrKFr164eRRRY4fS3qgaktBge6wZtB8Dls3xvk7MBppwK7QbBxDkQoY0f/iIiPxhjBvhap++yUspb6z6EQ9lHOqd9Se0E5/0FNi6G76ZWvZ3yK00QSilvZcyAxOZwwrnVb9f/KjhxOHz0IGSvDUho4U4ThFLKOwd+gZ8/gD6XQ+Qx7mUiAqOfgphGMPt6KCkKTIxhTBOEUso7y1+zZTX61HDiZ6PmMPpJ2LkcFv/V3diUJgillEeMgaXTod1gaHZizffrMtL2V3z5BGz+2rXwlCYIpZRXtn4HOeugXzWd01UZ/ldIaW8rvxb4t3yOOkIThMvqWu4b4IknniAvL8/PESkVJDJesf0J3cbWft/YJBg3FfZthYX3+j00ZWmCcJkmCKV8KDwIK+fYonyxjer2Gu1PspPoMmbAmvf9G58CdCa16yqW+z733HNp3rw5b7zxBoWFhYwbN46HHnqIQ4cOcckll5CVlUVpaSn3338/v/zyC9u3b+fMM88kLS2NTz75xOs/RSn/WTUHig9VP/ehJs64G9Z/CHNvg7YDIanFsfdRNeZqghCRacAoINsY08PH+jHAI0AZUAL81hjzhbPuSuA+Z9M/GWNerndAC+7x/12qWvaE8x+tcnXFct+LFi3irbfe4rvvvsMYw+jRo/nss8/YtWsXrVu3Zt68eYCt0ZScnMxjjz3GJ598Qlpamn9jVsprGdMh7UQ7M7o+omLgwufhudPhvVthwhuVazmpenG7ieklYHg16/8L9DbG9AGuAV4AEJGmwIPAScAg4EERaeJqpAGwaNEiFi1aRN++fenXrx9r165l3bp19OzZkw8//JC7776bzz//nOTkZK9DVco9u36Grd/+ujBfXTXrDOc+bOs1LZlW/9dTh7l6BWGM+UxE0qtZf7DC00SgvDDUecCHxpg9ACLyITbRVFGopYaq+aYfCMYY7r33Xm644YZfrVu6dCnz58/nvvvu4+yzz+aBBx7wIEKlAqC6wnx1NfB6O+Fu0X3Q8QxIO95/rx3GPO+kFpFxIrIWmIe9igBoA2ytsFmWsyzkVCz3fd555zFt2jQOHrR5c
du2bWRnZ7N9+3YSEhKYOHEid911F0uXLv3Vvko1CKXF8ONrtmRGo+b+e92ICBjzDETGwJzJ9jiq3jzvpDbGzAHmiMjp2P6Ic2qzv4hMBiYDtG/f3v8B1lPFct/nn38+EyZM4OSTTwagUaNGzJgxg/Xr13PXXXcRERFBdHQ0zz77LACTJ09m+PDhtG7dWjupVcOwbpEtzFeXuQ/H0rg1XPAEvHkVfP4vGHqP/48RZlwv9+00Mb3vq5Pax7YbsX0O5wJDjTE3OMufAxYbY6ptYtJy3+Hzt6oQNety2PYD3LEaIl36fjp7Mqx4C65dZEuIq2oFbblvETlexPZSiUg/IBbIARYCw0SkidM5PcxZppQKVQd2ws8Lbd+DW8kBYMQ/7NXE7MlQdMi944QBVxOEiMwCvgY6i0iWiFwrIjeKyI3OJuOBlSKyDHgGuNRYe7DNTd87Pw+Xd1grpULUj05hvr41LMxXV3HJMPZZ2LPRdlqrOnN7FFO1wxSMMX8D/lbFummAX8asGWOQBj42uiHdGVA
"text/plain": [
"<Figure size 432x288 with 1 Axes>"
]
},
"metadata": {
"needs_background": "light"
},
"output_type": "display_data"
}
],
"source": [
"# summarize history for accuracy\n",
"import matplotlib.pyplot as plt\n",
"print(history.history.keys())\n",
"plt.plot(history.history['IOU'])\n",
"plt.plot(history.history['val_IOU'])\n",
"plt.title('model IOU')\n",
"plt.ylabel('IOU')\n",
"plt.xlabel('epoch')\n",
"plt.legend(['train', 'test'], loc='upper left')\n",
"plt.show()\n",
"# summarize history for loss\n",
"plt.plot(history.history['loss'])\n",
"plt.plot(history.history['val_loss'])\n",
"plt.title('model loss')\n",
"plt.ylabel('loss')\n",
"plt.xlabel('epoch')\n",
"plt.legend(['train', 'test'], loc='upper left')\n",
"plt.show()"
]
},
{
"cell_type": "code",
"execution_count": 29,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"['wlfr.jpeg', 'tfxr.jpeg', 'yeaw.jpeg']\n"
]
}
],
"source": [
"import random, os\n",
"import cv2\n",
"import numpy as np\n",
"\n",
"img_names = [random.choice(os.listdir('./images/rgb/img')) for _ in range(3)]\n",
"print(img_names)\n",
"\n",
"r_img = cv2.imread(os.path.join('./images/', RGB_DIR, img_names[0]))\n",
"m_img = cv2.imread(os.path.join('./images/', MASK_DIR, img_names[0]))\n",
"img = np.expand_dims(r_img,axis=0)\n",
"pred = model.predict(r_img)\n",
"fig,ax=plt.subplots(1,3,figsize=(16,8))\n",
"\n",
"ax[0].set_title('RGB Image')\n",
"ax[0].imshow(r_img)\n",
"ax[0].axis('off')\n",
"\n",
"ax[1].set_title('Original Mask')\n",
"ax[1].imshow(m_img)\n",
"ax[1].axis('off')\n",
"\n",
"# ax[2].set_title('Predicted Mask')\n",
"# ax[2].axis('off')\n",
"# ax[2].imshow(tf.keras.preprocessing.image.array_to_img(pred[0]>0.5),cmap='gray')\n",
"\n",
"plt.show()\n"
]
}
],
"metadata": {
"interpreter": {
"hash": "a0ec3e03c477d553d7e02db72be164410aea09f54984d03651765aaff9c92bc7"
},
"kernelspec": {
"display_name": "Python 3.9.0 ('venv': venv)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.0"
},
"orig_nbformat": 4
},
"nbformat": 4,
"nbformat_minor": 2
}