feature/basic-model-setup #3

Merged
s495727 merged 9 commits from feature/basic-model-setup into main 2024-05-11 20:00:07 +02:00
3 changed files with 5 additions and 368 deletions
Showing only changes of commit 2093f84c5f

View File

@@ -4,4 +4,4 @@ from model.test_model import TestModel
 if __name__ == "__main__":
     model = TestModel()
     history = model.fit()
-    model.save()
+    model.save("model/test_model_final.keras")
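With the output path passed in explicitly, the entry-point script above controls where the trained model lands. As a hedged sketch of a round-trip check (standard Keras API only; the path is the one used in the call above, nothing else from this repo is assumed):

import tensorflow as tf

# Reload the .keras file written by the entry-point script above.
loaded = tf.keras.models.load_model("model/test_model_final.keras")
loaded.summary()  # expect the Flatten -> Dense -> Dropout -> Dense stack from TestModel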

View File

@@ -13,7 +13,7 @@ class TestModel:
         # Start of config
         self.config.layer_1 = 512
         self.config.activation_1 = "relu"
-        self.config.dropout = random.uniform(0.01, 0.80),
+        self.config.dropout = random.uniform(0.01, 0.80)
         self.config.layer_2 = 10
         self.config.activation_2 = "softmax"
         self.config.optimizer = "sgd"
@@ -26,7 +26,7 @@ class TestModel:
     def __build_model(self):
         return tf.keras.models.Sequential([
-            tf.keras.layers.Input(shape=(28,28)),
+            tf.keras.layers.Flatten(input_shape=(28, 28)),
             tf.keras.layers.Dense(self.config.layer_1, activation=self.config.activation_1),
             tf.keras.layers.Dropout(self.config.dropout),
             tf.keras.layers.Dense(self.config.layer_2, activation=self.config.activation_2)
@@ -60,6 +60,6 @@ class TestModel:
             callbacks=wandb_callbacks
         )

-    def save(self):
-        self.model.save("test_model/final_model.keras")
+    def save(self, filepath):
+        self.model.save(filepath)

View File

@@ -1,363 +0,0 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"Tracking run with wandb version 0.16.6"
],
"text/plain": [
"<IPython.core.display.HTML object>"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"text/html": [
"Run data is saved locally in <code>/mnt/c/Users/krzys/OneDrive/Studia/inz-uczenia-maszynowego/Detection-of-plant-diseases/wandb/run-20240416_232247-bfji8amn</code>"
],
"text/plain": [
"<IPython.core.display.HTML object>"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"text/html": [
"Syncing run <strong><a href='https://wandb.ai/uczenie-maszynowe-projekt/Detection%20of%20plant%20diseases/runs/bfji8amn' target=\"_blank\">floral-energy-3</a></strong> to <a href='https://wandb.ai/uczenie-maszynowe-projekt/Detection%20of%20plant%20diseases' target=\"_blank\">Weights & Biases</a> (<a href='https://wandb.me/run' target=\"_blank\">docs</a>)<br/>"
],
"text/plain": [
"<IPython.core.display.HTML object>"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"text/html": [
" View project at <a href='https://wandb.ai/uczenie-maszynowe-projekt/Detection%20of%20plant%20diseases' target=\"_blank\">https://wandb.ai/uczenie-maszynowe-projekt/Detection%20of%20plant%20diseases</a>"
],
"text/plain": [
"<IPython.core.display.HTML object>"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"text/html": [
" View run at <a href='https://wandb.ai/uczenie-maszynowe-projekt/Detection%20of%20plant%20diseases/runs/bfji8amn' target=\"_blank\">https://wandb.ai/uczenie-maszynowe-projekt/Detection%20of%20plant%20diseases/runs/bfji8amn</a>"
],
"text/plain": [
"<IPython.core.display.HTML object>"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Epoch 1/8\n",
"44/47 [===========================>..] - ETA: 0s - loss: 2.1872 - accuracy: 0.2224INFO:tensorflow:Assets written to: models/assets\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"INFO:tensorflow:Assets written to: models/assets\n",
"\u001b[34m\u001b[1mwandb\u001b[0m: Adding directory to artifact (./models)... Done. 0.1s\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"47/47 [==============================] - 2s 32ms/step - loss: 2.1734 - accuracy: 0.2344 - val_loss: 1.9111 - val_accuracy: 0.5380\n",
"Epoch 2/8\n",
"40/47 [========================>.....] - ETA: 0s - loss: 1.7703 - accuracy: 0.5437INFO:tensorflow:Assets written to: models/assets\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"INFO:tensorflow:Assets written to: models/assets\n",
"\u001b[34m\u001b[1mwandb\u001b[0m: Adding directory to artifact (./models)... Done. 0.1s\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"47/47 [==============================] - 1s 31ms/step - loss: 1.7483 - accuracy: 0.5527 - val_loss: 1.5486 - val_accuracy: 0.6880\n",
"Epoch 3/8\n",
"46/47 [============================>.] - ETA: 0s - loss: 1.4466 - accuracy: 0.6818INFO:tensorflow:Assets written to: models/assets\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"INFO:tensorflow:Assets written to: models/assets\n",
"\u001b[34m\u001b[1mwandb\u001b[0m: Adding directory to artifact (./models)... Done. 0.1s\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"47/47 [==============================] - 2s 33ms/step - loss: 1.4444 - accuracy: 0.6829 - val_loss: 1.2824 - val_accuracy: 0.7460\n",
"Epoch 4/8\n",
"44/47 [===========================>..] - ETA: 0s - loss: 1.2232 - accuracy: 0.7362INFO:tensorflow:Assets written to: models/assets\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"INFO:tensorflow:Assets written to: models/assets\n",
"\u001b[34m\u001b[1mwandb\u001b[0m: Adding directory to artifact (./models)... Done. 0.1s\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"47/47 [==============================] - 2s 32ms/step - loss: 1.2162 - accuracy: 0.7390 - val_loss: 1.0886 - val_accuracy: 0.7880\n",
"Epoch 5/8\n",
"44/47 [===========================>..] - ETA: 0s - loss: 1.0583 - accuracy: 0.7694INFO:tensorflow:Assets written to: models/assets\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"INFO:tensorflow:Assets written to: models/assets\n",
"\u001b[34m\u001b[1mwandb\u001b[0m: Adding directory to artifact (./models)... Done. 0.1s\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"47/47 [==============================] - 1s 28ms/step - loss: 1.0519 - accuracy: 0.7711 - val_loss: 0.9497 - val_accuracy: 0.8020\n",
"Epoch 6/8\n",
"41/47 [=========================>....] - ETA: 0s - loss: 0.9382 - accuracy: 0.7897INFO:tensorflow:Assets written to: models/assets\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"INFO:tensorflow:Assets written to: models/assets\n",
"\u001b[34m\u001b[1mwandb\u001b[0m: Adding directory to artifact (./models)... Done. 0.1s\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"47/47 [==============================] - 1s 28ms/step - loss: 0.9339 - accuracy: 0.7902 - val_loss: 0.8484 - val_accuracy: 0.8180\n",
"Epoch 7/8\n",
"47/47 [==============================] - ETA: 0s - loss: 0.8496 - accuracy: 0.8043INFO:tensorflow:Assets written to: models/assets\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"INFO:tensorflow:Assets written to: models/assets\n",
"\u001b[34m\u001b[1mwandb\u001b[0m: Adding directory to artifact (./models)... Done. 0.1s\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"47/47 [==============================] - 1s 27ms/step - loss: 0.8496 - accuracy: 0.8043 - val_loss: 0.7735 - val_accuracy: 0.8220\n",
"Epoch 8/8\n",
"44/47 [===========================>..] - ETA: 0s - loss: 0.7790 - accuracy: 0.8180INFO:tensorflow:Assets written to: models/assets\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"INFO:tensorflow:Assets written to: models/assets\n",
"\u001b[34m\u001b[1mwandb\u001b[0m: Adding directory to artifact (./models)... Done. 0.1s\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\r",
"47/47 [==============================] - 1s 29ms/step - loss: 0.7779 - accuracy: 0.8183 - val_loss: 0.7165 - val_accuracy: 0.8260\n"
]
},
{
"data": {
"application/vnd.jupyter.widget-view+json": {
"model_id": "316da49b179f47019f8cf5c9c72353fe"
}
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"text/html": [
"<style>\n",
" table.wandb td:nth-child(1) { padding: 0 10px; text-align: left ; width: auto;} td:nth-child(2) {text-align: left ; width: 100%}\n",
" .wandb-row { display: flex; flex-direction: row; flex-wrap: wrap; justify-content: flex-start; width: 100% }\n",
" .wandb-col { display: flex; flex-direction: column; flex-basis: 100%; flex: 1; padding: 10px; }\n",
" </style>\n",
"<div class=\"wandb-row\"><div class=\"wandb-col\"><h3>Run history:</h3><br/><table class=\"wandb\"><tr><td>batch/accuracy</td><td>▁▁▁▂▂▄▅▅▅▅▆▆▆▇▇▇▇▇▇▇▇▇▇▇████████████████</td></tr><tr><td>batch/batch_step</td><td>▁▁▁▂▂▂▂▂▂▃▃▃▃▃▃▄▄▄▄▄▅▅▅▅▅▅▆▆▆▆▆▆▇▇▇▇▇███</td></tr><tr><td>batch/learning_rate</td><td>▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁</td></tr><tr><td>batch/loss</td><td>███▇▇▆▆▆▅▅▅▄▄▄▄▄▃▃▃▃▃▂▂▂▂▂▂▂▂▂▁▁▁▁▁▁▁▁▁▁</td></tr><tr><td>epoch/accuracy</td><td>▁▅▆▇▇███</td></tr><tr><td>epoch/epoch</td><td>▁▂▃▄▅▆▇█</td></tr><tr><td>epoch/learning_rate</td><td>▁▁▁▁▁▁▁▁</td></tr><tr><td>epoch/loss</td><td>█▆▄▃▂▂▁▁</td></tr><tr><td>epoch/val_accuracy</td><td>▁▅▆▇▇███</td></tr><tr><td>epoch/val_loss</td><td>█▆▄▃▂▂▁▁</td></tr></table><br/></div><div class=\"wandb-col\"><h3>Run summary:</h3><br/><table class=\"wandb\"><tr><td>batch/accuracy</td><td>0.81726</td></tr><tr><td>batch/batch_step</td><td>395</td></tr><tr><td>batch/learning_rate</td><td>0.01</td></tr><tr><td>batch/loss</td><td>0.77969</td></tr><tr><td>epoch/accuracy</td><td>0.81825</td></tr><tr><td>epoch/epoch</td><td>7</td></tr><tr><td>epoch/learning_rate</td><td>0.01</td></tr><tr><td>epoch/loss</td><td>0.77791</td></tr><tr><td>epoch/val_accuracy</td><td>0.826</td></tr><tr><td>epoch/val_loss</td><td>0.71648</td></tr></table><br/></div></div>"
],
"text/plain": [
"<IPython.core.display.HTML object>"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"text/html": [
" View run <strong style=\"color:#cdcd00\">floral-energy-3</strong> at: <a href='https://wandb.ai/uczenie-maszynowe-projekt/Detection%20of%20plant%20diseases/runs/bfji8amn' target=\"_blank\">https://wandb.ai/uczenie-maszynowe-projekt/Detection%20of%20plant%20diseases/runs/bfji8amn</a><br/> View project at: <a href='https://wandb.ai/uczenie-maszynowe-projekt/Detection%20of%20plant%20diseases' target=\"_blank\">https://wandb.ai/uczenie-maszynowe-projekt/Detection%20of%20plant%20diseases</a><br/>Synced 5 W&B file(s), 0 media file(s), 42 artifact file(s) and 0 other file(s)"
],
"text/plain": [
"<IPython.core.display.HTML object>"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"text/html": [
"Find logs at: <code>./wandb/run-20240416_232247-bfji8amn/logs</code>"
],
"text/plain": [
"<IPython.core.display.HTML object>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"# This script needs these libraries to be installed:\n",
"# tensorflow, numpy\n",
"\n",
"import wandb\n",
"from wandb.keras import WandbMetricsLogger, WandbModelCheckpoint\n",
"\n",
"import random\n",
"import numpy as np\n",
"import tensorflow as tf\n",
"\n",
"\n",
"# Start a run, tracking hyperparameters\n",
"wandb.init(\n",
" # set the wandb project where this run will be logged\n",
" project=\"Detection of plant diseases\",\n",
"\n",
" # track hyperparameters and run metadata with wandb.config\n",
" config={\n",
" \"layer_1\": 512,\n",
" \"activation_1\": \"relu\",\n",
" \"dropout\": random.uniform(0.01, 0.80),\n",
" \"layer_2\": 10,\n",
" \"activation_2\": \"softmax\",\n",
" \"optimizer\": \"sgd\",\n",
" \"loss\": \"sparse_categorical_crossentropy\",\n",
" \"metric\": \"accuracy\",\n",
" \"epoch\": 8,\n",
" \"batch_size\": 256\n",
" }\n",
")\n",
"\n",
"# [optional] use wandb.config as your config\n",
"config = wandb.config\n",
"\n",
"# get the data\n",
"mnist = tf.keras.datasets.mnist\n",
"(x_train, y_train), (x_test, y_test) = mnist.load_data()\n",
"x_train, x_test = x_train / 255.0, x_test / 255.0\n",
"x_train, y_train = x_train[::5], y_train[::5]\n",
"x_test, y_test = x_test[::20], y_test[::20]\n",
"labels = [str(digit) for digit in range(np.max(y_train) + 1)]\n",
"\n",
"# build a model\n",
"model = tf.keras.models.Sequential([\n",
" tf.keras.layers.Flatten(input_shape=(28, 28)),\n",
" tf.keras.layers.Dense(config.layer_1, activation=config.activation_1),\n",
" tf.keras.layers.Dropout(config.dropout),\n",
" tf.keras.layers.Dense(config.layer_2, activation=config.activation_2)\n",
" ])\n",
"\n",
"# compile the model\n",
"model.compile(optimizer=config.optimizer,\n",
" loss=config.loss,\n",
" metrics=[config.metric]\n",
" )\n",
"\n",
"# WandbMetricsLogger will log train and validation metrics to wandb\n",
"# WandbModelCheckpoint will upload model checkpoints to wandb\n",
"history = model.fit(x=x_train, y=y_train,\n",
" epochs=config.epoch,\n",
" batch_size=config.batch_size,\n",
" validation_data=(x_test, y_test),\n",
" callbacks=[\n",
" WandbMetricsLogger(log_freq=5),\n",
" WandbModelCheckpoint(\"models\")\n",
" ])\n",
"\n",
"# [optional] finish the wandb run, necessary in notebooks\n",
"wandb.finish()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.12"
}
},
"nbformat": 4,
"nbformat_minor": 2
}