♻ Refactor coloring to use code for videos and photos

Adam Wysocki 2020-01-31 20:27:56 +01:00
parent 0ce31d9433
commit 2b25c21b5e
14 changed files with 717 additions and 6 deletions

3
.gitignore vendored

@@ -194,5 +194,4 @@ fabric.properties
 .idea/httpRequests
 
 # Android studio 3.1+ serialized cache file
-.idea/caches/build_file_checksums.ser
-
+.idea/caches/build_file_checksums.ser
\ No newline at end of file

14
app.py

@@ -1,12 +1,18 @@
+import sys
 from flask import Flask
 
+from main import colorize
+
 app = Flask(__name__)
 
 
-@app.route('/')
-def hello_world():
-    return 'Hello World!'
+@app.route('/<file_name>')
+def hello_world(file_name: str):
+    colorize(file_name)
 
 
 if __name__ == '__main__':
-    app.run()
+    if len(sys.argv) > 1:
+        colorize(sys.argv[1])
+    else:
+        app.run()
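
One caveat with the route above (not part of the commit): a Flask view must return a response, and the new hello_world(file_name) returns None, which Flask rejects at request time. A minimal sketch of the handler with a return value added; the 'Success' body is an assumption, mirroring the print('Success') calls in main.py:

from flask import Flask

from main import colorize

app = Flask(__name__)


# Sketch, not the committed code: same route as above, but returning a body
# so Flask does not raise "view function did not return a valid response".
@app.route('/<file_name>')
def colorize_file(file_name: str):
    colorize(file_name)  # writes the result under images/output or videos/output
    return 'Success'     # assumed response body, mirroring main.py's print('Success')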

2
constant.py Normal file

@@ -0,0 +1,2 @@
FLOAT32 = "float32"

Binary file not shown (new image; after: 42 KiB).

Binary file not shown (new image; after: 100 KiB).

115
main.py Normal file

@@ -0,0 +1,115 @@
import os
from os.path import isfile, join

import imutils
import numpy as np
import cv2

import constant
import mimetypes

# Some OpenCV distributions expose the bindings as cv2.cv2; fall back quietly.
try:
    from cv2 import cv2
except ImportError:
    pass

proto_file = "./model/colorization_deploy_v2.prototxt"
model = "./model/colorization_release_v2.caffemodel"
points_file = "./model/pts_in_hull.npy"

network = cv2.dnn.readNetFromCaffe(proto_file, model)
points = np.load(points_file)

# Load the 313 ab cluster centres as the 1x1 kernels of "class8_ab" and set
# the rebalancing scale on "conv8_313_rh".
points = points.transpose().reshape(2, 313, 1, 1)
class_layer = network.getLayerId("class8_ab")
convolution_layer = network.getLayerId("conv8_313_rh")
network.getLayer(class_layer).blobs = [points.astype(constant.FLOAT32)]
network.getLayer(convolution_layer).blobs = [np.full([1, 313], 2.606, dtype=constant.FLOAT32)]


# Convert to grayscale and back to RGB to remove small colour noise.
def clear_image(image):
    image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
    return cv2.cvtColor(image, cv2.COLOR_GRAY2RGB)


def convert_frames_to_video(file_name):
    path_in = 'videos/colorized_frames/'
    path_out = 'videos/output/' + file_name.replace('/', '_')
    frame_array = []
    size = None

    # Frames are written as frame<N>.jpg, so sort them numerically.
    files = [f for f in os.listdir(path_in) if isfile(join(path_in, f))]
    files.sort(key=lambda x: int(x[5:-4]))

    for file in files:
        img = cv2.imread(path_in + file)
        height, width, layers = img.shape
        size = (width, height)
        frame_array.append(img)

    out = cv2.VideoWriter(path_out, cv2.VideoWriter_fourcc(*'MJPG'), 30.0, size)
    for frame in frame_array:
        out.write(frame)
    out.release()


def colorize_image(image):
    # The network expects a 224x224 L channel, shifted by -50 (mean centring).
    image_scaled = image.astype(constant.FLOAT32) / 255.0
    lab = cv2.cvtColor(image_scaled, cv2.COLOR_RGB2LAB)
    resized = cv2.resize(lab, (224, 224))
    image_l_channel = cv2.split(resized)[0] - 50

    # Predict the ab channels and upscale them back to the input resolution.
    network.setInput(cv2.dnn.blobFromImage(image_l_channel))
    ab = network.forward()[0, :, :, :].transpose((1, 2, 0))
    ab = cv2.resize(ab, (image.shape[1], image.shape[0]))

    # Recombine the full-resolution L channel with the predicted ab channels.
    image_l_channel = cv2.split(lab)[0]
    colorized = np.concatenate((image_l_channel[:, :, np.newaxis], ab), axis=2)
    colorized = cv2.cvtColor(colorized, cv2.COLOR_LAB2RGB)
    colorized = np.clip(colorized, 0, 1)
    return (255 * colorized).astype("uint8")


def colorize(file_path: str) -> None:
    file_type = mimetypes.guess_type(file_path)[0].split('/')
    if file_type[0] == 'image':
        image = cv2.imread(file_path)
        colorized = colorize_image(image)
        cv2.imwrite("images/output/" + file_path.replace('/', '_'),
                    cv2.cvtColor(colorized, cv2.COLOR_RGB2BGR))
        print('Success')
    elif file_type[0] == 'video':
        vs = cv2.VideoCapture(file_path)
        count = 0
        success = True
        while success:
            success, frame = vs.read()
            if frame is None:
                break
            frame = imutils.resize(frame, 500)
            colorized = colorize_image(frame)
            cv2.imwrite("./videos/colorized_frames/frame%d.jpg" % count, colorized)
            count += 1
            key = cv2.waitKey(1) & 0xFF
            if key == ord("q"):
                break
        vs.release()
        convert_frames_to_video(file_path)
        print('Success')
    else:
        print('Wrong media format')
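
For context, the blob wiring at the top of main.py implements the decoding step of the pretrained colorization network: conv8_313_rh rescales the 313-way colour distribution (the 2.606 factor) and class8_ab projects it back to ab space using the cluster centres stored in pts_in_hull.npy. A minimal sketch of driving the module directly, bypassing Flask; the file name sample.jpg and the pre-created output directories are assumptions:

# Sketch: colorize a single file without going through app.py.
# Assumes a grayscale ./sample.jpg exists and that images/output,
# videos/colorized_frames and videos/output already exist.
from main import colorize

colorize("sample.jpg")  # the guessed MIME type picks the image vs. video path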

589
model/colorization_deploy_v2.prototxt Normal file

@@ -0,0 +1,589 @@
name: "LtoAB"
layer {
name: "data_l"
type: "Input"
top: "data_l"
input_param {
shape { dim: 1 dim: 1 dim: 224 dim: 224 }
}
}
# *****************
# ***** conv1 *****
# *****************
layer {
name: "bw_conv1_1"
type: "Convolution"
bottom: "data_l"
top: "conv1_1"
# param {lr_mult: 0 decay_mult: 0}
# param {lr_mult: 0 decay_mult: 0}
convolution_param {
num_output: 64
pad: 1
kernel_size: 3
}
}
layer {
name: "relu1_1"
type: "ReLU"
bottom: "conv1_1"
top: "conv1_1"
}
layer {
name: "conv1_2"
type: "Convolution"
bottom: "conv1_1"
top: "conv1_2"
# param {lr_mult: 0 decay_mult: 0}
# param {lr_mult: 0 decay_mult: 0}
convolution_param {
num_output: 64
pad: 1
kernel_size: 3
stride: 2
}
}
layer {
name: "relu1_2"
type: "ReLU"
bottom: "conv1_2"
top: "conv1_2"
}
layer {
name: "conv1_2norm"
type: "BatchNorm"
bottom: "conv1_2"
top: "conv1_2norm"
batch_norm_param{ }
param {lr_mult: 0 decay_mult: 0}
param {lr_mult: 0 decay_mult: 0}
param {lr_mult: 0 decay_mult: 0}
}
# *****************
# ***** conv2 *****
# *****************
layer {
name: "conv2_1"
type: "Convolution"
# bottom: "conv1_2"
bottom: "conv1_2norm"
# bottom: "pool1"
top: "conv2_1"
# param {lr_mult: 0 decay_mult: 0}
# param {lr_mult: 0 decay_mult: 0}
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
}
}
layer {
name: "relu2_1"
type: "ReLU"
bottom: "conv2_1"
top: "conv2_1"
}
layer {
name: "conv2_2"
type: "Convolution"
bottom: "conv2_1"
top: "conv2_2"
# param {lr_mult: 0 decay_mult: 0}
# param {lr_mult: 0 decay_mult: 0}
convolution_param {
num_output: 128
pad: 1
kernel_size: 3
stride: 2
}
}
layer {
name: "relu2_2"
type: "ReLU"
bottom: "conv2_2"
top: "conv2_2"
}
layer {
name: "conv2_2norm"
type: "BatchNorm"
bottom: "conv2_2"
top: "conv2_2norm"
batch_norm_param{ }
param {lr_mult: 0 decay_mult: 0}
param {lr_mult: 0 decay_mult: 0}
param {lr_mult: 0 decay_mult: 0}
}
# *****************
# ***** conv3 *****
# *****************
layer {
name: "conv3_1"
type: "Convolution"
# bottom: "conv2_2"
bottom: "conv2_2norm"
# bottom: "pool2"
top: "conv3_1"
# param {lr_mult: 0 decay_mult: 0}
# param {lr_mult: 0 decay_mult: 0}
convolution_param {
num_output: 256
pad: 1
kernel_size: 3
}
}
layer {
name: "relu3_1"
type: "ReLU"
bottom: "conv3_1"
top: "conv3_1"
}
layer {
name: "conv3_2"
type: "Convolution"
bottom: "conv3_1"
top: "conv3_2"
# param {lr_mult: 0 decay_mult: 0}
# param {lr_mult: 0 decay_mult: 0}
convolution_param {
num_output: 256
pad: 1
kernel_size: 3
}
}
layer {
name: "relu3_2"
type: "ReLU"
bottom: "conv3_2"
top: "conv3_2"
}
layer {
name: "conv3_3"
type: "Convolution"
bottom: "conv3_2"
top: "conv3_3"
# param {lr_mult: 0 decay_mult: 0}
# param {lr_mult: 0 decay_mult: 0}
convolution_param {
num_output: 256
pad: 1
kernel_size: 3
stride: 2
}
}
layer {
name: "relu3_3"
type: "ReLU"
bottom: "conv3_3"
top: "conv3_3"
}
layer {
name: "conv3_3norm"
type: "BatchNorm"
bottom: "conv3_3"
top: "conv3_3norm"
batch_norm_param{ }
param {lr_mult: 0 decay_mult: 0}
param {lr_mult: 0 decay_mult: 0}
param {lr_mult: 0 decay_mult: 0}
}
# *****************
# ***** conv4 *****
# *****************
layer {
name: "conv4_1"
type: "Convolution"
# bottom: "conv3_3"
bottom: "conv3_3norm"
# bottom: "pool3"
top: "conv4_1"
# param {lr_mult: 0 decay_mult: 0}
# param {lr_mult: 0 decay_mult: 0}
convolution_param {
num_output: 512
kernel_size: 3
stride: 1
pad: 1
dilation: 1
}
}
layer {
name: "relu4_1"
type: "ReLU"
bottom: "conv4_1"
top: "conv4_1"
}
layer {
name: "conv4_2"
type: "Convolution"
bottom: "conv4_1"
top: "conv4_2"
# param {lr_mult: 0 decay_mult: 0}
# param {lr_mult: 0 decay_mult: 0}
convolution_param {
num_output: 512
kernel_size: 3
stride: 1
pad: 1
dilation: 1
}
}
layer {
name: "relu4_2"
type: "ReLU"
bottom: "conv4_2"
top: "conv4_2"
}
layer {
name: "conv4_3"
type: "Convolution"
bottom: "conv4_2"
top: "conv4_3"
# param {lr_mult: 0 decay_mult: 0}
# param {lr_mult: 0 decay_mult: 0}
convolution_param {
num_output: 512
kernel_size: 3
stride: 1
pad: 1
dilation: 1
}
}
layer {
name: "relu4_3"
type: "ReLU"
bottom: "conv4_3"
top: "conv4_3"
}
layer {
name: "conv4_3norm"
type: "BatchNorm"
bottom: "conv4_3"
top: "conv4_3norm"
batch_norm_param{ }
param {lr_mult: 0 decay_mult: 0}
param {lr_mult: 0 decay_mult: 0}
param {lr_mult: 0 decay_mult: 0}
}
# *****************
# ***** conv5 *****
# *****************
layer {
name: "conv5_1"
type: "Convolution"
# bottom: "conv4_3"
bottom: "conv4_3norm"
# bottom: "pool4"
top: "conv5_1"
# param {lr_mult: 0 decay_mult: 0}
# param {lr_mult: 0 decay_mult: 0}
convolution_param {
num_output: 512
kernel_size: 3
stride: 1
pad: 2
dilation: 2
}
}
layer {
name: "relu5_1"
type: "ReLU"
bottom: "conv5_1"
top: "conv5_1"
}
layer {
name: "conv5_2"
type: "Convolution"
bottom: "conv5_1"
top: "conv5_2"
# param {lr_mult: 0 decay_mult: 0}
# param {lr_mult: 0 decay_mult: 0}
convolution_param {
num_output: 512
kernel_size: 3
stride: 1
pad: 2
dilation: 2
}
}
layer {
name: "relu5_2"
type: "ReLU"
bottom: "conv5_2"
top: "conv5_2"
}
layer {
name: "conv5_3"
type: "Convolution"
bottom: "conv5_2"
top: "conv5_3"
# param {lr_mult: 0 decay_mult: 0}
# param {lr_mult: 0 decay_mult: 0}
convolution_param {
num_output: 512
kernel_size: 3
stride: 1
pad: 2
dilation: 2
}
}
layer {
name: "relu5_3"
type: "ReLU"
bottom: "conv5_3"
top: "conv5_3"
}
layer {
name: "conv5_3norm"
type: "BatchNorm"
bottom: "conv5_3"
top: "conv5_3norm"
batch_norm_param{ }
param {lr_mult: 0 decay_mult: 0}
param {lr_mult: 0 decay_mult: 0}
param {lr_mult: 0 decay_mult: 0}
}
# *****************
# ***** conv6 *****
# *****************
layer {
name: "conv6_1"
type: "Convolution"
bottom: "conv5_3norm"
top: "conv6_1"
convolution_param {
num_output: 512
kernel_size: 3
pad: 2
dilation: 2
}
}
layer {
name: "relu6_1"
type: "ReLU"
bottom: "conv6_1"
top: "conv6_1"
}
layer {
name: "conv6_2"
type: "Convolution"
bottom: "conv6_1"
top: "conv6_2"
convolution_param {
num_output: 512
kernel_size: 3
pad: 2
dilation: 2
}
}
layer {
name: "relu6_2"
type: "ReLU"
bottom: "conv6_2"
top: "conv6_2"
}
layer {
name: "conv6_3"
type: "Convolution"
bottom: "conv6_2"
top: "conv6_3"
convolution_param {
num_output: 512
kernel_size: 3
pad: 2
dilation: 2
}
}
layer {
name: "relu6_3"
type: "ReLU"
bottom: "conv6_3"
top: "conv6_3"
}
layer {
name: "conv6_3norm"
type: "BatchNorm"
bottom: "conv6_3"
top: "conv6_3norm"
batch_norm_param{ }
param {lr_mult: 0 decay_mult: 0}
param {lr_mult: 0 decay_mult: 0}
param {lr_mult: 0 decay_mult: 0}
}
# *****************
# ***** conv7 *****
# *****************
layer {
name: "conv7_1"
type: "Convolution"
bottom: "conv6_3norm"
top: "conv7_1"
convolution_param {
num_output: 512
kernel_size: 3
pad: 1
dilation: 1
}
}
layer {
name: "relu7_1"
type: "ReLU"
bottom: "conv7_1"
top: "conv7_1"
}
layer {
name: "conv7_2"
type: "Convolution"
bottom: "conv7_1"
top: "conv7_2"
convolution_param {
num_output: 512
kernel_size: 3
pad: 1
dilation: 1
}
}
layer {
name: "relu7_2"
type: "ReLU"
bottom: "conv7_2"
top: "conv7_2"
}
layer {
name: "conv7_3"
type: "Convolution"
bottom: "conv7_2"
top: "conv7_3"
convolution_param {
num_output: 512
kernel_size: 3
pad: 1
dilation: 1
}
}
layer {
name: "relu7_3"
type: "ReLU"
bottom: "conv7_3"
top: "conv7_3"
}
layer {
name: "conv7_3norm"
type: "BatchNorm"
bottom: "conv7_3"
top: "conv7_3norm"
batch_norm_param{ }
param {lr_mult: 0 decay_mult: 0}
param {lr_mult: 0 decay_mult: 0}
param {lr_mult: 0 decay_mult: 0}
}
# *****************
# ***** conv8 *****
# *****************
layer {
name: "conv8_1"
type: "Deconvolution"
bottom: "conv7_3norm"
top: "conv8_1"
convolution_param {
num_output: 256
kernel_size: 4
pad: 1
dilation: 1
stride: 2
}
}
layer {
name: "relu8_1"
type: "ReLU"
bottom: "conv8_1"
top: "conv8_1"
}
layer {
name: "conv8_2"
type: "Convolution"
bottom: "conv8_1"
top: "conv8_2"
convolution_param {
num_output: 256
kernel_size: 3
pad: 1
dilation: 1
}
}
layer {
name: "relu8_2"
type: "ReLU"
bottom: "conv8_2"
top: "conv8_2"
}
layer {
name: "conv8_3"
type: "Convolution"
bottom: "conv8_2"
top: "conv8_3"
convolution_param {
num_output: 256
kernel_size: 3
pad: 1
dilation: 1
}
}
layer {
name: "relu8_3"
type: "ReLU"
bottom: "conv8_3"
top: "conv8_3"
}
# *******************
# ***** Softmax *****
# *******************
layer {
name: "conv8_313"
type: "Convolution"
bottom: "conv8_3"
top: "conv8_313"
convolution_param {
num_output: 313
kernel_size: 1
stride: 1
dilation: 1
}
}
layer {
name: "conv8_313_rh"
type: "Scale"
bottom: "conv8_313"
top: "conv8_313_rh"
scale_param {
bias_term: false
filler { type: 'constant' value: 2.606 }
}
}
layer {
name: "class8_313_rh"
type: "Softmax"
bottom: "conv8_313_rh"
top: "class8_313_rh"
}
# ********************
# ***** Decoding *****
# ********************
layer {
name: "class8_ab"
type: "Convolution"
bottom: "class8_313_rh"
top: "class8_ab"
convolution_param {
num_output: 2
kernel_size: 1
stride: 1
dilation: 1
}
}
layer {
name: "Silence"
type: "Silence"
bottom: "class8_ab"
}
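
The prototxt declares a single 1x1x224x224 L-channel input (data_l); three stride-2 convolutions downsample it to 28x28, and the stride-2 deconvolution conv8_1 brings it back up, so class8_ab emits the two ab channels at 56x56, one quarter of the input resolution. A minimal shape check, assuming the model files sit under ./model as in main.py:

# Sketch: verify the LtoAB input/output geometry declared above.
import numpy as np
import cv2

net = cv2.dnn.readNetFromCaffe("./model/colorization_deploy_v2.prototxt",
                               "./model/colorization_release_v2.caffemodel")

# Wire in the decoding blobs exactly as main.py does.
points = np.load("./model/pts_in_hull.npy").transpose().reshape(2, 313, 1, 1)
net.getLayer(net.getLayerId("class8_ab")).blobs = [points.astype("float32")]
net.getLayer(net.getLayerId("conv8_313_rh")).blobs = [np.full([1, 313], 2.606, dtype="float32")]

net.setInput(np.zeros((1, 1, 224, 224), dtype=np.float32))  # dummy L channel
ab = net.forward()  # resolves to the "class8_ab" blob
print(ab.shape)     # (1, 2, 56, 56)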

Binary files not shown (3 files).

BIN
model/pts_in_hull.npy Normal file

Binary file not shown.

Binary files not shown (3 further files).