widzenie-komputerowe-projekt/graph.ipynb

{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"import tensorflow as tf\n",
"from tensorflow.python.framework.convert_to_constants import convert_variables_to_constants_v2"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
"model = tf.keras.models.load_model('./model')"
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"'./frozen_models\\\\frozen_graph.pb'"
]
},
"execution_count": 8,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Convert Keras model to ConcreteFunction\n",
"full_model = tf.function(lambda x: model(x))\n",
"full_model = full_model.get_concrete_function(\n",
" tf.TensorSpec(model.inputs[0].shape, model.inputs[0].dtype))\n",
"\n",
"# Get frozen ConcreteFunction\n",
"frozen_func = convert_variables_to_constants_v2(full_model)\n",
"frozen_func.graph.as_graph_def()\n",
"\n",
"\n",
"# Save frozen graph from frozen ConcreteFunction to hard drive\n",
"tf.io.write_graph(graph_or_graph_def=frozen_func.graph,\n",
" logdir=\"./frozen_models\",\n",
" name=\"frozen_graph.pb\",\n",
" as_text=False)"
]
},
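{
"cell_type": "markdown",
"metadata": {},
"source": [
"Optional sanity check (a minimal sketch, not required by the export itself): print the frozen `ConcreteFunction`'s input and output tensors. Their names are the ones `wrap_frozen_graph` expects further down, here `x:0` and `Identity:0`."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Inspect the frozen graph's input/output tensors; the tensor names\n",
"# (\"x:0\" and \"Identity:0\") are reused by wrap_frozen_graph() below.\n",
"print(\"Frozen model inputs: \", frozen_func.inputs)\n",
"print(\"Frozen model outputs:\", frozen_func.outputs)"
]
},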
{
"cell_type": "code",
"execution_count": 21,
"metadata": {},
"outputs": [],
"source": [
"def wrap_frozen_graph(graph_def, inputs, outputs, print_graph=False):\n",
" def _imports_graph_def():\n",
" tf.compat.v1.import_graph_def(graph_def, name=\"\")\n",
"\n",
" wrapped_import = tf.compat.v1.wrap_function(_imports_graph_def, [])\n",
" import_graph = wrapped_import.graph\n",
"\n",
" if print_graph == True:\n",
" print(\"-\" * 50)\n",
" print(\"Frozen model layers: \")\n",
" layers = [op.name for op in import_graph.get_operations()]\n",
" for layer in layers:\n",
" print(layer)\n",
" print(\"-\" * 50)\n",
"\n",
" return wrapped_import.prune(\n",
" tf.nest.map_structure(import_graph.as_graph_element, inputs),\n",
" tf.nest.map_structure(import_graph.as_graph_element, outputs))"
]
},
{
"cell_type": "code",
"execution_count": 32,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"--------------------------------------------------\n",
"Frozen model layers: \n",
"x\n",
"sequential/conv2d/Conv2D/ReadVariableOp/resource\n",
"sequential/conv2d/Conv2D/ReadVariableOp\n",
"sequential/conv2d/Conv2D\n",
"sequential/conv2d/BiasAdd/ReadVariableOp/resource\n",
"sequential/conv2d/BiasAdd/ReadVariableOp\n",
"sequential/conv2d/BiasAdd\n",
"sequential/conv2d/Relu\n",
"sequential/batch_normalization/ReadVariableOp/resource\n",
"sequential/batch_normalization/ReadVariableOp\n",
"sequential/batch_normalization/ReadVariableOp_1/resource\n",
"sequential/batch_normalization/ReadVariableOp_1\n",
"sequential/batch_normalization/FusedBatchNormV3/ReadVariableOp/resource\n",
"sequential/batch_normalization/FusedBatchNormV3/ReadVariableOp\n",
"sequential/batch_normalization/FusedBatchNormV3/ReadVariableOp_1/resource\n",
"sequential/batch_normalization/FusedBatchNormV3/ReadVariableOp_1\n",
"sequential/batch_normalization/FusedBatchNormV3\n",
"sequential/max_pooling2d/MaxPool\n",
"sequential/conv2d_1/Conv2D/ReadVariableOp/resource\n",
"sequential/conv2d_1/Conv2D/ReadVariableOp\n",
"sequential/conv2d_1/Conv2D\n",
"sequential/conv2d_1/BiasAdd/ReadVariableOp/resource\n",
"sequential/conv2d_1/BiasAdd/ReadVariableOp\n",
"sequential/conv2d_1/BiasAdd\n",
"sequential/conv2d_1/Relu\n",
"sequential/batch_normalization_1/ReadVariableOp/resource\n",
"sequential/batch_normalization_1/ReadVariableOp\n",
"sequential/batch_normalization_1/ReadVariableOp_1/resource\n",
"sequential/batch_normalization_1/ReadVariableOp_1\n",
"sequential/batch_normalization_1/FusedBatchNormV3/ReadVariableOp/resource\n",
"sequential/batch_normalization_1/FusedBatchNormV3/ReadVariableOp\n",
"sequential/batch_normalization_1/FusedBatchNormV3/ReadVariableOp_1/resource\n",
"sequential/batch_normalization_1/FusedBatchNormV3/ReadVariableOp_1\n",
"sequential/batch_normalization_1/FusedBatchNormV3\n",
"sequential/max_pooling2d_1/MaxPool\n",
"sequential/conv2d_2/Conv2D/ReadVariableOp/resource\n",
"sequential/conv2d_2/Conv2D/ReadVariableOp\n",
"sequential/conv2d_2/Conv2D\n",
"sequential/conv2d_2/BiasAdd/ReadVariableOp/resource\n",
"sequential/conv2d_2/BiasAdd/ReadVariableOp\n",
"sequential/conv2d_2/BiasAdd\n",
"sequential/conv2d_2/Relu\n",
"sequential/batch_normalization_2/ReadVariableOp/resource\n",
"sequential/batch_normalization_2/ReadVariableOp\n",
"sequential/batch_normalization_2/ReadVariableOp_1/resource\n",
"sequential/batch_normalization_2/ReadVariableOp_1\n",
"sequential/batch_normalization_2/FusedBatchNormV3/ReadVariableOp/resource\n",
"sequential/batch_normalization_2/FusedBatchNormV3/ReadVariableOp\n",
"sequential/batch_normalization_2/FusedBatchNormV3/ReadVariableOp_1/resource\n",
"sequential/batch_normalization_2/FusedBatchNormV3/ReadVariableOp_1\n",
"sequential/batch_normalization_2/FusedBatchNormV3\n",
"sequential/conv2d_3/Conv2D/ReadVariableOp/resource\n",
"sequential/conv2d_3/Conv2D/ReadVariableOp\n",
"sequential/conv2d_3/Conv2D\n",
"sequential/conv2d_3/BiasAdd/ReadVariableOp/resource\n",
"sequential/conv2d_3/BiasAdd/ReadVariableOp\n",
"sequential/conv2d_3/BiasAdd\n",
"sequential/conv2d_3/Relu\n",
"sequential/batch_normalization_3/ReadVariableOp/resource\n",
"sequential/batch_normalization_3/ReadVariableOp\n",
"sequential/batch_normalization_3/ReadVariableOp_1/resource\n",
"sequential/batch_normalization_3/ReadVariableOp_1\n",
"sequential/batch_normalization_3/FusedBatchNormV3/ReadVariableOp/resource\n",
"sequential/batch_normalization_3/FusedBatchNormV3/ReadVariableOp\n",
"sequential/batch_normalization_3/FusedBatchNormV3/ReadVariableOp_1/resource\n",
"sequential/batch_normalization_3/FusedBatchNormV3/ReadVariableOp_1\n",
"sequential/batch_normalization_3/FusedBatchNormV3\n",
"sequential/conv2d_4/Conv2D/ReadVariableOp/resource\n",
"sequential/conv2d_4/Conv2D/ReadVariableOp\n",
"sequential/conv2d_4/Conv2D\n",
"sequential/conv2d_4/BiasAdd/ReadVariableOp/resource\n",
"sequential/conv2d_4/BiasAdd/ReadVariableOp\n",
"sequential/conv2d_4/BiasAdd\n",
"sequential/conv2d_4/Relu\n",
"sequential/batch_normalization_4/ReadVariableOp/resource\n",
"sequential/batch_normalization_4/ReadVariableOp\n",
"sequential/batch_normalization_4/ReadVariableOp_1/resource\n",
"sequential/batch_normalization_4/ReadVariableOp_1\n",
"sequential/batch_normalization_4/FusedBatchNormV3/ReadVariableOp/resource\n",
"sequential/batch_normalization_4/FusedBatchNormV3/ReadVariableOp\n",
"sequential/batch_normalization_4/FusedBatchNormV3/ReadVariableOp_1/resource\n",
"sequential/batch_normalization_4/FusedBatchNormV3/ReadVariableOp_1\n",
"sequential/batch_normalization_4/FusedBatchNormV3\n",
"sequential/max_pooling2d_2/MaxPool\n",
"sequential/flatten/Const\n",
"sequential/flatten/Reshape\n",
"sequential/dense/MatMul/ReadVariableOp/resource\n",
"sequential/dense/MatMul/ReadVariableOp\n",
"sequential/dense/MatMul\n",
"sequential/dense/BiasAdd/ReadVariableOp/resource\n",
"sequential/dense/BiasAdd/ReadVariableOp\n",
"sequential/dense/BiasAdd\n",
"sequential/dense/Relu\n",
"sequential/dense_1/MatMul/ReadVariableOp/resource\n",
"sequential/dense_1/MatMul/ReadVariableOp\n",
"sequential/dense_1/MatMul\n",
"sequential/dense_1/BiasAdd/ReadVariableOp/resource\n",
"sequential/dense_1/BiasAdd/ReadVariableOp\n",
"sequential/dense_1/BiasAdd\n",
"sequential/dense_1/Relu\n",
"sequential/dense_2/MatMul/ReadVariableOp/resource\n",
"sequential/dense_2/MatMul/ReadVariableOp\n",
"sequential/dense_2/MatMul\n",
"sequential/dense_2/BiasAdd/ReadVariableOp/resource\n",
"sequential/dense_2/BiasAdd/ReadVariableOp\n",
"sequential/dense_2/BiasAdd\n",
"sequential/dense_2/Softmax\n",
"NoOp\n",
"Identity\n",
"--------------------------------------------------\n"
]
}
],
"source": [
" # Load frozen graph using TensorFlow 1.x functions\n",
"with tf.io.gfile.GFile(\"./frozen_models/frozen_graph_best.pb\", \"rb\") as f:\n",
" graph_def = tf.compat.v1.GraphDef()\n",
" loaded = graph_def.ParseFromString(f.read())\n",
"\n",
"# Wrap frozen graph to ConcreteFunctions\n",
"frozen_func = wrap_frozen_graph(graph_def=graph_def,\n",
" inputs=[\"x:0\"],\n",
" outputs=[\"Identity:0\"],\n",
" print_graph=True)"
]
},
{
"cell_type": "code",
"execution_count": 33,
"metadata": {},
"outputs": [],
"source": [
"import cv2 as cv"
]
},
{
"cell_type": "code",
"execution_count": 34,
"metadata": {},
"outputs": [],
"source": [
"fish = cv.imread('./new_data/train/Jellyfish/03Q7BTGDW34G.jpg')"
]
},
{
"cell_type": "code",
"execution_count": 35,
"metadata": {},
"outputs": [],
"source": [
"fish = cv.resize(fish, (227, 227), interpolation=cv.INTER_AREA)"
]
},
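{
"cell_type": "markdown",
"metadata": {},
"source": [
"A quick illustrative check: the resized OpenCV image with a batch axis prepended should match the Keras model's declared input shape (typically `(None, 227, 227, 3)` here, given the 227x227 resize and a 3-channel image)."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Compare the preprocessed image shape with the model's input signature.\n",
"print(fish[None, :].shape)\n",
"print(model.inputs[0].shape)"
]
},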
{
"cell_type": "code",
"execution_count": 28,
"metadata": {},
"outputs": [],
"source": [
"class_names=sorted(['Fish', \"Jellyfish\", 'Penguin', 'Puffin', 'Shark', 'Starfish', 'Stingray'])"
]
},
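{
"cell_type": "markdown",
"metadata": {},
"source": [
"A minimal convenience helper (the `predict_category` name is illustrative only): it mirrors the prediction logic used in the cells below, i.e. resize the OpenCV image to 227x227, add a batch dimension, run the wrapped frozen graph, and map the argmax of the output to `class_names`."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import numpy as np\n",
"\n",
"def predict_category(image_bgr):\n",
"    # Resize to the model's expected input size and add a batch dimension.\n",
"    resized = cv.resize(image_bgr, (227, 227), interpolation=cv.INTER_AREA)\n",
"    batch = tf.convert_to_tensor(resized[None, :], dtype='float32')\n",
"    # Run the wrapped frozen graph and translate the argmax into a label.\n",
"    probs = frozen_func(x=batch)[0].numpy()\n",
"    return class_names[int(np.argmax(probs))]"
]
},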
{
"cell_type": "code",
"execution_count": 57,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"array([[0., 0., 0., 0., 0., 0., 1., 0., 0., 0.]], dtype=float32)"
]
},
"execution_count": 57,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"frozen_func(x=tf.convert_to_tensor(fish[None, :], dtype=\"float32\"))[0].numpy()"
]
},
{
"cell_type": "code",
"execution_count": 23,
"metadata": {},
"outputs": [],
"source": [
"import numpy as np\n",
"from PIL import Image"
]
},
{
"cell_type": "code",
"execution_count": 18,
"metadata": {},
"outputs": [],
"source": [
"fishes = [\n",
" tf.convert_to_tensor(cv.resize(cv.imread('./new_data/train/Shark/D3U6ZGZZCQTF.jpg'), (227,227),interpolation=cv.INTER_AREA)[None, :], dtype='float32'),\n",
" tf.convert_to_tensor(cv.resize(cv.imread('./new_data/train/Shark/08XY6WGTVFYN.jpg'), (227,227), interpolation=cv.INTER_AREA)[None, :], dtype='float32')\n",
" ]"
]
},
{
"cell_type": "code",
"execution_count": 20,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"[<tf.Tensor: shape=(1, 10), dtype=float32, numpy=array([[1., 0., 0., 0., 0., 0., 0., 0., 0., 0.]], dtype=float32)>]"
]
},
"execution_count": 20,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"frozen_func(x=fishes[0])"
]
},
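{
"cell_type": "markdown",
"metadata": {},
"source": [
"Illustrative only: loop over the `fishes` list and print the predicted class name for each tensor, using the same `argmax` mapping as the final cell."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Run the wrapped graph on every preloaded tensor and report the label.\n",
"for i, fish_tensor in enumerate(fishes):\n",
"    probs = frozen_func(x=fish_tensor)[0].numpy()\n",
"    print(f\"fishes[{i}] -> {class_names[np.argmax(probs)]}\")"
]
},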
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": 49,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"[<tf.Tensor: shape=(1, 6), dtype=float32, numpy=array([[1., 0., 0., 0., 0., 0.]], dtype=float32)>]\n",
"Category: Fish\n"
]
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAfQAAAF3CAIAAADckC6rAAEAAElEQVR4nOz9vZYkSbImiMmPqpm7R0RmVf/cuQMsGHDL4w3wRKCBZUDjAAQeAAcEaDA4B28AHjyI3cUsZnrune6qzAh3N1MVERCiKqbmP1mV1XXvzi6gXSc7wsPcTE1VVH4/EUH4X/9X8Ggg0cPPwYbPUQEAQAHUasXMzAygImJaAZGIiMjMACClxMxmVqTWUjgl6gMAtI/EypwpMQKZYS2wimgB5AzGBgSQwBDAp0EICwD4I8bht+0zHKafpF3vX4kvIt7cwD/BlHYf/tJoM/G7IeJw2/bQuAAREQ0f3uZbD/Av+p3NrN32dv591ApE0Nd5+8qT4Zf5BeMqpWmqKiACAMDMKfkE1Cze0cxUFVTBzNct5rn9oMvD5zptqGpMD/erdzNUx53d9oWQ7z/062/uTESIuJQVoG2Hf+IX1FIeP9j48edP1/+WAtvknpyvnDP0PRrn/I0t+75RK0B7Xxh2R31n7wbj43lKKUHkvm5+K3lyn2fD5PH65JxrrSYCRDlnIhKRWiuUGs/1f9tB+M7ngghwGz5tEQERePK+8GT9UX/jvtx+7eH9zWhYHvwVj0qH98PhME2TmaVfvvxXDmYiQjRVM62gCoiGWK9XAAAiAOcwlIwxgfa3u6daMwO1OJtkoLuTo4AEpoAEoKZ7Hh3jjq3fXYDPNuzvHSPxPfvrv9bAnAFCHGyPrn7IH43djvR3URx+BbDO01VkPN5EZIi/gRMFl/nduNh+PBRpZuaUOQpL+B056fePEFqxwr+NaT4dQZx9K78hQX/NffCOJH6X4YItfm7/mgFzfz7+XeszTPv+w3+Fgff8/fcYy/ksIqUUM0vP3uf5PvXPUQEQwP9FYiQiALVBOUVEI4J+YHzDXEMnIlAz3Fi8E4f1Aapq2J7WLrOYm6EvTpcAI6dGBcRtnnfvFQ/affH2fb+lNn5rDN9qd6CNawA8e9yDeT78/BnfeXZ9Smnkay5fEfHZYRjv7+fH2bq5cdDsIYz9Cil+Iz+e3f8XVxUHpvONi/d/+uWdcgZxa0gBjFonDFT6fIO+jyq+l4qCufsXQ8HfWyq/PJ7Kp7v52K8jyIf3uXm73yzU78d2q4FvgBnRE+b+/Q+GQeQP3OB7b/Tbxy/z998wmVUriGlFxPRscX+BuTfODsHfzVzSGoA6F0iZUkrpdKq1+lH3Haq1Wq10OAAAqFmwbF9eIFBTNEQFI3dCJEIFA0BAlxza2bpyinliN7oT/DomaE8+317zN4+4G22sytozd8zLnmzgU6K/Y6Ptpr90OG900mfXNyZCXbNDJNfQVZ0P2jDQ2f1+St92+3yb3kL0jp/8ivsMPz95ctx2dE8BADG7wQHhVgIAs2duye9lX7+NucdUb1jYrx+/hrnjk5/3N3r88ehW+nsU9l9g7oihScCegO9v9PjjJ8+Nu9xq7t95/79T/46b3t/lKWv49sgzpZxSRsQE8H0awbBaO2+QiiIaMxMlJSKGnHNKKfNUmUspqtr0ZbPwCIz6yGgXgyg4NzQnbgRziULQ+TwAgFFKj1l5rfXJq+HIQZqO9jua4SNbvzuWBl04ds/59z56YKS/6kSFWG0TMBORb313+MvuQWaI6J7xWit0Dp6628e/4mbZb7DQg4Xd8/ffa2APAo0i6uZDf4X/3pwy//LjRmj9ZgfUyNz/Hi/W05gTtpiABx/ADBAA0ar4XyN48gv3/8bfEHWwD/4F/bTfP36Ne/3hoHRkYsJERL9Nc9fdDwhOM8ycMxNlA4HmAcDxtDefe0qqWkqBQcvbNJQuosV5IzJC966DoW8ydn0fxVX4R5N9LLTu38t6iPLuNQF+k1xGRMNbs3H3OACgRrvPHvANqn3I3J9dL3WLbIc8c0H75O50c8NR500piSqIxM4l3gKMWwBQFfhx4PEb9OYPigu+LcCeae7PjmfQXvBxETEzUfUHudyKt3iqUT7VcL/TAntyfTDNEDZxQB4/9zvHbcD8l+b57L3G+8Dfwd+/QQ++U6paw9NLBLZzwIwv8NsmsHtBfOop+X31jG+P38zZAQCMVbCaEVnCJ3d69vngWNDmJwEE0JzzPOdpmohBREpZVUWkvkwvBqjm8RAAAzADVQZUVVQbI6tE5C5eNUVkI0MzZERAQ0B3UqGBIXa/0F1g0O5+uHmvnc89+N3vxtwJRyfJDX/fnQFX7Z8YTt/LFL5xOJHIrSgAcLeYiPAT5svj4Xe3GbQDgETE3EIp/UO3A2IwMyJah0h9xzwHdjYu2q9TPkbb6Ckz6uAIdpbhq1GkUo+pBnALvn+df6/rb7xGNy74v388IMW+uU++8QvvNb7I7+hz9/1ykIyItPAIkfaoD+zF//c+OpjOiF77DfP8z0XV78OU1UwRiOA3oGXusTkKANOUpjnNcybCdV1rxVqrSPnjpz/6WWredjPfrYP73KFhiYiQDMic+ZszeHSUjSkQohqhe/fBUAkaRkiqPNyVZ1Az2Dt2/x6L8tvjmeb+rz9cY805I/qmfMst0w754OXcDsB4nHrIsZaC/RxSx0QBwLqu3zXJkbP/YmDgNwyfXjD3TS+uO9sxwLu/Gzrl++cJA2cP0/aZMP4N97+h+ZDT3zXuD9Hve5QCJ91UsW472h7dFP9+A/31jfkDwMbcIQJj/wMeRNwVAkxyPUNKnHPOecQaP/vycj5Px+PhcDCzdb2KyDSleZ7zxMuynM8fzDzP8zQl1Vrr+pd//x+cuUwp+4L6/Usppur/QWcgZg2visiG7opRMwNxfm8IYAgEaCgJGQCKQx77yYxdd7fPSATt83WN62Gvr+GAdDbDdrR6XBHvlIWHw5XW5iiML3RVcbuss49nQsgDmM40/eIG+wXI85xzNrN1XaVWYs45L8vy8H0pJQBYlsW5bTCLerkAM6Xk+Qfxp3rPxwEAQETWdV3Xtd0/t92MF/Q5N7ywKnWUTjhbOve0kcZizUcOG7rzt9xHT4Yvl6qa6Xh/n97lcrl5ruPKRYSIcs4Oqa61rusaaxXzMTMwIOaUUlhC7ZWf7CMjRw4HDDzrGfplFGwjQw+mH5z0tzFTf/T4FF+HZ8wRb8/Fdn5Pp9PhcFDV8/l8vV59wmVdR5Loi2acHuuR8uS5N+vjOkdI3PvX/8Y5evi5xefxxW8u6bP15+dK5HfN50bXDMET6CDYu2ueo6e2jJbEx2MwEafUWmtAI8YH+79pnnPOzKhqjn1kZoesOMrRTNhhkajEQEo3bj4LTAIAALDfHIAACECNPMZCDYXjrguVUg0dmFG1U48hcMqxHGZitj3rZv7jQt5/fs+123I/uf67hiL8brjWQa9ERAd7xK8P5xkKKYwBz2mKfYfh0Aa852Y8XBzoiI57To0jqvVXMKORrHHvn/kNAz0I/3za4w9Bk74CtVY3MR/OZ3Bm2M1NfsWU/vXG7/W4Z+fCDXHnFSJiqu6NxK5gjcz991Xq/0c5fg0Jfe9IRO5ed25rAOq/6pDJNu5WzhnRfEdVKyJywpQSE
fb0VF3L1eHVzJyVwcOHZs5CPPaNZm4eW3+37QiZn0sgAANFY2rs3gwMELmxXUbbcNydPYlrCkjkUBtoQEHsv+7OfMitpyuLj69/tqAP70IGgPQQlvWNW90fKkP09y2lhJPB5TE+macz3FDPmwVgllIaT2Bol0iPNaxn87xFFvbhTwmNdVPDn3sXbrYAB8P/l8bgc//OAxIrEN6qWM+H84FRIR3+9Gye9ihK/Kvf67eP73XjPJ/P4wU11UiWaelFA172O5j7d67D77ZuvyqW8/uPb98fHzknv+s+brD6SKUsziYAmIhSImYESJePD7+JK2TY05VrXVVr2JUOZs85TdNkJrWua7mG9YRoj32I2rKXCIZMOTdEYvbmHhjn73CYkmJziiGiK5iIWEX6KhiiOZzBzCXWlk2+/TAsze7zgSLhEZe8uf4XF30UWr+eg4+f33zR+TQ7rlQEBzgBqEa6//17hbsZetR652d4ZE7dj3uURXDz+O4oSO4zDPtXnjLBG3b57fW5Wa3tPhB889aNdru
"text/plain": [
"<PIL.JpegImagePlugin.JpegImageFile image mode=RGB size=500x375>"
]
},
"execution_count": 49,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"fish_path = './new_data/test/4R4JNOPP4WYE.jpg'\n",
"fish = cv.imread(fish_path)\n",
"fish = cv.resize(fish, (227, 227), interpolation=cv.INTER_AREA)\n",
"print(frozen_func(x=tf.convert_to_tensor(fish[None, :], dtype='float32')))\n",
"print(f\"Category: {class_names[np.argmax(frozen_func(x=tf.convert_to_tensor(fish[None, :], dtype='float32'))[0].numpy())]}\")\n",
"Image.open(fish_path)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "um",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.15"
},
"orig_nbformat": 4,
"vscode": {
"interpreter": {
"hash": "876e189cbbe99a9a838ece62aae1013186c4bb7e0254a10cfa2f9b2381853efb"
}
}
},
"nbformat": 4,
"nbformat_minor": 2
}