updates in notes

parent 6ca0c8d4cc
commit 0aed49a28b

.gitignore (vendored) | 4
@@ -1,4 +1,6 @@
 data
 new_data
 *.zip
 model
+*avi
+*pb
graph.ipynb | 32
@@ -282,6 +282,38 @@
     "from PIL import Image"
    ]
   },
+  {
+   "cell_type": "code",
+   "execution_count": 18,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "fishes = [\n",
+    "    tf.convert_to_tensor(cv.resize(cv.imread('./new_data/train/Shark/D3U6ZGZZCQTF.jpg'), (227,227),interpolation=cv.INTER_AREA)[None, :], dtype='float32'),\n",
+    "    tf.convert_to_tensor(cv.resize(cv.imread('./new_data/train/Shark/08XY6WGTVFYN.jpg'), (227,227), interpolation=cv.INTER_AREA)[None, :], dtype='float32')\n",
+    "    ]"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 20,
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "[<tf.Tensor: shape=(1, 10), dtype=float32, numpy=array([[1., 0., 0., 0., 0., 0., 0., 0., 0., 0.]], dtype=float32)>]"
+      ]
+     },
+     "execution_count": 20,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "frozen_func(x=fishes[0])"
+   ]
+  },
   {
    "cell_type": "code",
    "execution_count": 13,
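Note: the new graph.ipynb cells read two Shark training images with OpenCV, resize them to 227x227, add a leading batch axis, and feed one of them to the frozen graph. A minimal sketch of that preprocessing path outside the notebook (assuming `cv` is OpenCV imported as `import cv2 as cv`, and that `frozen_func` is the ConcreteFunction built in rybki.ipynb; `load_as_batch` is only an illustrative helper, not something in the repo):

    import cv2 as cv
    import numpy as np
    import tensorflow as tf

    def load_as_batch(path, size=(227, 227)):
        # read the BGR image, shrink it with INTER_AREA, add a batch axis -> shape (1, 227, 227, 3)
        img = cv.resize(cv.imread(path), size, interpolation=cv.INTER_AREA)
        return tf.convert_to_tensor(img[None, :], dtype='float32')

    x = load_as_batch('./new_data/train/Shark/D3U6ZGZZCQTF.jpg')
    pred = frozen_func(x=x)              # list with one (1, 10) softmax tensor
    print(np.argmax(pred[0].numpy()))    # index of the predicted class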
rybki.ipynb | 183
@@ -10,7 +10,8 @@
     "import matplotlib.pyplot as plt\n",
     "import keras\n",
     "import numpy as np\n",
-    "import threading"
+    "import threading\n",
+    "import tensorflow as tf"
    ]
   },
   {
@@ -19,12 +20,166 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "model = keras.models.load_model('./model')"
+    "def wrap_frozen_graph(graph_def, inputs, outputs, print_graph=False):\n",
+    "    def _imports_graph_def():\n",
+    "        tf.compat.v1.import_graph_def(graph_def, name=\"\")\n",
+    "\n",
+    "    wrapped_import = tf.compat.v1.wrap_function(_imports_graph_def, [])\n",
+    "    import_graph = wrapped_import.graph\n",
+    "\n",
+    "    if print_graph == True:\n",
+    "        print(\"-\" * 50)\n",
+    "        print(\"Frozen model layers: \")\n",
+    "        layers = [op.name for op in import_graph.get_operations()]\n",
+    "        for layer in layers:\n",
+    "            print(layer)\n",
+    "        print(\"-\" * 50)\n",
+    "\n",
+    "    return wrapped_import.prune(\n",
+    "        tf.nest.map_structure(import_graph.as_graph_element, inputs),\n",
+    "        tf.nest.map_structure(import_graph.as_graph_element, outputs))"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 5,
+   "execution_count": 3,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "--------------------------------------------------\n",
+      "Frozen model layers: \n",
+      "x\n",
+      "sequential/conv2d/Conv2D/ReadVariableOp/resource\n",
+      "sequential/conv2d/Conv2D/ReadVariableOp\n",
+      "sequential/conv2d/Conv2D\n",
+      "sequential/conv2d/BiasAdd/ReadVariableOp/resource\n",
+      "sequential/conv2d/BiasAdd/ReadVariableOp\n",
+      "sequential/conv2d/BiasAdd\n",
+      "sequential/conv2d/Relu\n",
+      "sequential/batch_normalization/ReadVariableOp/resource\n",
+      "sequential/batch_normalization/ReadVariableOp\n",
+      "sequential/batch_normalization/ReadVariableOp_1/resource\n",
+      "sequential/batch_normalization/ReadVariableOp_1\n",
+      "sequential/batch_normalization/FusedBatchNormV3/ReadVariableOp/resource\n",
+      "sequential/batch_normalization/FusedBatchNormV3/ReadVariableOp\n",
+      "sequential/batch_normalization/FusedBatchNormV3/ReadVariableOp_1/resource\n",
+      "sequential/batch_normalization/FusedBatchNormV3/ReadVariableOp_1\n",
+      "sequential/batch_normalization/FusedBatchNormV3\n",
+      "sequential/max_pooling2d/MaxPool\n",
+      "sequential/conv2d_1/Conv2D/ReadVariableOp/resource\n",
+      "sequential/conv2d_1/Conv2D/ReadVariableOp\n",
+      "sequential/conv2d_1/Conv2D\n",
+      "sequential/conv2d_1/BiasAdd/ReadVariableOp/resource\n",
+      "sequential/conv2d_1/BiasAdd/ReadVariableOp\n",
+      "sequential/conv2d_1/BiasAdd\n",
+      "sequential/conv2d_1/Relu\n",
+      "sequential/batch_normalization_1/ReadVariableOp/resource\n",
+      "sequential/batch_normalization_1/ReadVariableOp\n",
+      "sequential/batch_normalization_1/ReadVariableOp_1/resource\n",
+      "sequential/batch_normalization_1/ReadVariableOp_1\n",
+      "sequential/batch_normalization_1/FusedBatchNormV3/ReadVariableOp/resource\n",
+      "sequential/batch_normalization_1/FusedBatchNormV3/ReadVariableOp\n",
+      "sequential/batch_normalization_1/FusedBatchNormV3/ReadVariableOp_1/resource\n",
+      "sequential/batch_normalization_1/FusedBatchNormV3/ReadVariableOp_1\n",
+      "sequential/batch_normalization_1/FusedBatchNormV3\n",
+      "sequential/max_pooling2d_1/MaxPool\n",
+      "sequential/conv2d_2/Conv2D/ReadVariableOp/resource\n",
+      "sequential/conv2d_2/Conv2D/ReadVariableOp\n",
+      "sequential/conv2d_2/Conv2D\n",
+      "sequential/conv2d_2/BiasAdd/ReadVariableOp/resource\n",
+      "sequential/conv2d_2/BiasAdd/ReadVariableOp\n",
+      "sequential/conv2d_2/BiasAdd\n",
+      "sequential/conv2d_2/Relu\n",
+      "sequential/batch_normalization_2/ReadVariableOp/resource\n",
+      "sequential/batch_normalization_2/ReadVariableOp\n",
+      "sequential/batch_normalization_2/ReadVariableOp_1/resource\n",
+      "sequential/batch_normalization_2/ReadVariableOp_1\n",
+      "sequential/batch_normalization_2/FusedBatchNormV3/ReadVariableOp/resource\n",
+      "sequential/batch_normalization_2/FusedBatchNormV3/ReadVariableOp\n",
+      "sequential/batch_normalization_2/FusedBatchNormV3/ReadVariableOp_1/resource\n",
+      "sequential/batch_normalization_2/FusedBatchNormV3/ReadVariableOp_1\n",
+      "sequential/batch_normalization_2/FusedBatchNormV3\n",
+      "sequential/conv2d_3/Conv2D/ReadVariableOp/resource\n",
+      "sequential/conv2d_3/Conv2D/ReadVariableOp\n",
+      "sequential/conv2d_3/Conv2D\n",
+      "sequential/conv2d_3/BiasAdd/ReadVariableOp/resource\n",
+      "sequential/conv2d_3/BiasAdd/ReadVariableOp\n",
+      "sequential/conv2d_3/BiasAdd\n",
+      "sequential/conv2d_3/Relu\n",
+      "sequential/batch_normalization_3/ReadVariableOp/resource\n",
+      "sequential/batch_normalization_3/ReadVariableOp\n",
+      "sequential/batch_normalization_3/ReadVariableOp_1/resource\n",
+      "sequential/batch_normalization_3/ReadVariableOp_1\n",
+      "sequential/batch_normalization_3/FusedBatchNormV3/ReadVariableOp/resource\n",
+      "sequential/batch_normalization_3/FusedBatchNormV3/ReadVariableOp\n",
+      "sequential/batch_normalization_3/FusedBatchNormV3/ReadVariableOp_1/resource\n",
+      "sequential/batch_normalization_3/FusedBatchNormV3/ReadVariableOp_1\n",
+      "sequential/batch_normalization_3/FusedBatchNormV3\n",
+      "sequential/conv2d_4/Conv2D/ReadVariableOp/resource\n",
+      "sequential/conv2d_4/Conv2D/ReadVariableOp\n",
+      "sequential/conv2d_4/Conv2D\n",
+      "sequential/conv2d_4/BiasAdd/ReadVariableOp/resource\n",
+      "sequential/conv2d_4/BiasAdd/ReadVariableOp\n",
+      "sequential/conv2d_4/BiasAdd\n",
+      "sequential/conv2d_4/Relu\n",
+      "sequential/batch_normalization_4/ReadVariableOp/resource\n",
+      "sequential/batch_normalization_4/ReadVariableOp\n",
+      "sequential/batch_normalization_4/ReadVariableOp_1/resource\n",
+      "sequential/batch_normalization_4/ReadVariableOp_1\n",
+      "sequential/batch_normalization_4/FusedBatchNormV3/ReadVariableOp/resource\n",
+      "sequential/batch_normalization_4/FusedBatchNormV3/ReadVariableOp\n",
+      "sequential/batch_normalization_4/FusedBatchNormV3/ReadVariableOp_1/resource\n",
+      "sequential/batch_normalization_4/FusedBatchNormV3/ReadVariableOp_1\n",
+      "sequential/batch_normalization_4/FusedBatchNormV3\n",
+      "sequential/max_pooling2d_2/MaxPool\n",
+      "sequential/flatten/Const\n",
+      "sequential/flatten/Reshape\n",
+      "sequential/dense/MatMul/ReadVariableOp/resource\n",
+      "sequential/dense/MatMul/ReadVariableOp\n",
+      "sequential/dense/MatMul\n",
+      "sequential/dense/BiasAdd/ReadVariableOp/resource\n",
+      "sequential/dense/BiasAdd/ReadVariableOp\n",
+      "sequential/dense/BiasAdd\n",
+      "sequential/dense/Relu\n",
+      "sequential/dense_1/MatMul/ReadVariableOp/resource\n",
+      "sequential/dense_1/MatMul/ReadVariableOp\n",
+      "sequential/dense_1/MatMul\n",
+      "sequential/dense_1/BiasAdd/ReadVariableOp/resource\n",
+      "sequential/dense_1/BiasAdd/ReadVariableOp\n",
+      "sequential/dense_1/BiasAdd\n",
+      "sequential/dense_1/Relu\n",
+      "sequential/dense_2/MatMul/ReadVariableOp/resource\n",
+      "sequential/dense_2/MatMul/ReadVariableOp\n",
+      "sequential/dense_2/MatMul\n",
+      "sequential/dense_2/BiasAdd/ReadVariableOp/resource\n",
+      "sequential/dense_2/BiasAdd/ReadVariableOp\n",
+      "sequential/dense_2/BiasAdd\n",
+      "sequential/dense_2/Softmax\n",
+      "NoOp\n",
+      "Identity\n",
+      "--------------------------------------------------\n"
+     ]
+    }
+   ],
+   "source": [
+    " # Load frozen graph using TensorFlow 1.x functions\n",
+    "with tf.io.gfile.GFile(\"./frozen_models/frozen_graph2.pb\", \"rb\") as f:\n",
+    "    graph_def = tf.compat.v1.GraphDef()\n",
+    "    loaded = graph_def.ParseFromString(f.read())\n",
+    "\n",
+    "# Wrap frozen graph to ConcreteFunctions\n",
+    "frozen_func = wrap_frozen_graph(graph_def=graph_def,\n",
+    "                                inputs=[\"x:0\"],\n",
+    "                                outputs=[\"Identity:0\"],\n",
+    "                                print_graph=False)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
    "metadata": {},
    "outputs": [],
    "source": [
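Note: wrap_frozen_graph imports the serialized GraphDef into a wrapped tf.function and prunes it to the named input/output tensors, which yields a callable ConcreteFunction. A quick sanity check on the result (a sketch; the shapes shown in the comments are assumptions based on the 227x227 input and 10-class output used elsewhere in these notebooks):

    # inspect the pruned ConcreteFunction returned by wrap_frozen_graph
    print(frozen_func.inputs)     # e.g. [<tf.Tensor 'x:0' shape=(None, 227, 227, 3) dtype=float32>]
    print(frozen_func.outputs)    # e.g. [<tf.Tensor 'Identity:0' shape=(None, 10) dtype=float32>]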
@@ -58,15 +213,15 @@
     "        rectangle = cv2.rectangle(roi,(x,y),(x+w,y+h),(0,255,0),3)\n",
     "        image_to_predict = roi[y:y+h,x:x+w]\n",
     "        image_to_predict = cv2.resize(image_to_predict,(227,227))\n",
-    "        images.append((x,y,rectangle,np.expand_dims(image_to_predict,axis=0)))\n",
+    "        # images.append((x,y,rectangle,np.expand_dims(image_to_predict,axis=0)))\n",
     "        \n",
-    "        # pred = model.predict(np.expand_dims(image_to_predict, axis=0))\n",
-    "        # label = class_names[np.argmax(pred)]\n",
-    "    if images:\n",
-    "        pred = model.predict(np.vstack([image[3] for image in images]))\n",
-    "        labels = [class_names[np.argmax(pre)] for pre in pred]\n",
-    "        for i,image in enumerate(images):\n",
-    "            cv2.putText(image[2], labels[i], (image[0], image[1]-10), cv2.FONT_HERSHEY_SIMPLEX, 0.9, (36,255,12), 1)\n",
+    "        pred = frozen_func(x=tf.convert_to_tensor(image_to_predict[None, :], dtype='float32'))\n",
+    "        label = class_names[np.argmax(pred)]\n",
+    "        cv2.putText(rectangle, label, (x, y-10), cv2.FONT_HERSHEY_SIMPLEX, 0.9, (36,255,12), 1)\n",
+    "    # if images:\n",
+    "    #     pred = model.predict(np.vstack([image[3] for image in images]))\n",
+    "    #     labels = [class_names[np.argmax(pre)] for pre in pred]\n",
+    "    #     for i,image in enumerate(images):\n",
     "    roi = cv2.resize(roi, (960, 540)) \n",
     "    cv2.imshow(\"roi\", roi)\n",
     "\n",
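Note: in the rewritten loop, frozen_func returns a list of output tensors rather than a NumPy array, so np.argmax(pred) only gives the right class index because NumPy flattens the single (1, 10) tensor in that list. A slightly more explicit version of the same lookup (a sketch; image_to_predict and class_names are the variables already defined in rybki.ipynb):

    pred = frozen_func(x=tf.convert_to_tensor(image_to_predict[None, :], dtype='float32'))
    scores = pred[0].numpy()[0]                  # softmax scores, shape (10,)
    label = class_names[int(np.argmax(scores))]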
@@ -94,7 +249,7 @@
  ],
  "metadata": {
   "kernelspec": {
-   "display_name": "Python 3",
+   "display_name": "um",
    "language": "python",
    "name": "python3"
   },
@@ -108,12 +263,12 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.9.2"
+   "version": "3.9.15"
   },
   "orig_nbformat": 4,
   "vscode": {
    "interpreter": {
-    "hash": "393784674bcf6e74f2fc9b1b5fb3713f9bd5fc6f8172c594e5cfa8e8c12849bc"
+    "hash": "876e189cbbe99a9a838ece62aae1013186c4bb7e0254a10cfa2f9b2381853efb"
    }
   }
  },