code clarity

This commit is contained in:
Cezary Pukownik 2019-10-27 14:34:02 +01:00
parent eaaad76a2a
commit 4d9ee930a1
5 changed files with 141 additions and 103 deletions

View File

@ -5,54 +5,70 @@ import pickle
from midi_processing import extract_data, analyze_data
parser = argparse.ArgumentParser()
parser.add_argument('midi_pack', help='folder name for midi pack in midi_packs folder', type=str)
parser.add_argument('name', help='name for experiment', type=str)
parser.add_argument('--b', help='lengh of sequence in bars', type=int)
parser.add_argument('-a', help='analize data', action='store_true')
args = parser.parse_args()
'''SETTINGS'''
MIDI_PACK_NAME = args.midi_pack
EXPERIMENT_NAME = args.name
BARS_IN_SEQ = args.b
midi_folder_path = os.path.join('midi_packs', MIDI_PACK_NAME)
# analyze data set for interesting instruments
if args.a:
analyze_data(midi_folder_path)
sys.exit()
'''MODEL WORKFLOW DIALOG'''
number_of_instruments = int(input('Please specify number of instruments\n'))
model_workflow = dict()
input_list = []
for i in range(number_of_instruments):
input_string = input('Please specify a workflow step\n')
tokens = input_string.split()
if tokens[-1] == 'melody':
model_workflow[i] = (tokens[0], tokens[1])
else:
model_workflow[i] = ((tokens[1], tokens[0]), tokens[2])
# make folder for new experiment if no exist
def make_folder_if_not_exist(path):
    """Create the directory *path*, doing nothing if it already exists.

    Best-effort: any OS-level failure (directory already present, missing
    parent, permissions) is swallowed, preserving the original tolerant
    behaviour.  The bare ``except:`` was narrowed to ``OSError`` so that
    KeyboardInterrupt/SystemExit are no longer silently eaten, and the
    leftover duplicate ``os.mkdir`` of a hard-coded path was removed.
    """
    try:
        os.mkdir(path)
    except OSError:
        # already exists or cannot be created — intentionally ignored
        pass
# extract process
def parse_argv():
    """Parse the command-line options of the extraction script."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('midi_pack', type=str,
                            help='folder name for midi pack in midi_packs folder')
    arg_parser.add_argument('--n', type=str, help='name for experiment')
    arg_parser.add_argument('--b', type=int, help='lengh of sequence in bars')
    arg_parser.add_argument('-a', action='store_true', help='analize data')
    return arg_parser.parse_args()
def ask_for_workflow():
    """Interactively build the model workflow and persist it to disk.

    Asks the user for a number of instruments, then one workflow step per
    instrument.  A step ending in 'm' is a melody for its first token; a
    step ending in 'a' is an arrangement pairing two instruments.  The
    mapping is pickled to training_sets/<EXPERIMENT_NAME>/workflow.pkl.

    Returns:
        dict: step index -> (instrument, 'melody') or
              ((second_instrument, instrument), 'arrangment')

    Raises:
        ValueError: when a step line is empty, does not end with 'm'/'a',
            or an arrangement step names fewer than two instruments.
    """
    number_of_instruments = int(input('Please specify number of instruments\n'))
    model_workflow = dict()
    for i in range(number_of_instruments):
        input_string = input('Please specify a workflow step <Instrument> [<Second Instrument>] <mode> {m : melody, a : arrangment}\n')
        tokens = input_string.split()
        # guard against an empty line: tokens[-1] would raise IndexError
        if not tokens:
            raise ValueError("The step definition must end with {'m', 'a'}")
        if tokens[-1] == 'm':
            model_workflow[i] = (tokens[0], 'melody')
        elif tokens[-1] == 'a':
            # an arrangement needs two instrument names before the mode flag
            if len(tokens) < 3:
                raise ValueError('An arrangement step needs two instrument names')
            # (second, first) order is what extract_from_folder expects
            model_workflow[i] = ((tokens[1], tokens[0]), 'arrangment')
        else:
            # fixed message typo ("definitiom"); removed stray semicolon
            raise ValueError("The step definition must end with {'m', 'a'}")
    make_folder_if_not_exist(os.path.join('training_sets', EXPERIMENT_NAME))
    # persist the workflow so the training/generation scripts can reload it;
    # use a context manager so the file handle is not leaked
    with open(os.path.join('training_sets', EXPERIMENT_NAME, 'workflow.pkl'), 'wb') as workflow_file:
        pickle.dump(model_workflow, workflow_file)
    return model_workflow
def extract_from_folder(model_workflow):
for key, (instrument, how) in model_workflow.items():
if how == 'melody':
instrument_name = instrument
else:
instrument_name = instrument[1]
make_folder_if_not_exist(os.path.join('training_sets', EXPERIMENT_NAME))
save_path = os.path.join('training_sets', EXPERIMENT_NAME, instrument_name.lower() + '_data.pkl')
x_train, y_train, program = extract_data(midi_folder_path=midi_folder_path, how=how,
instrument=instrument, bar_in_seq=BARS_IN_SEQ)
x_train, y_train, program = extract_data(midi_folder_path=os.path.join('midi_packs', MIDI_PACK_NAME),
how=how,
instrument=instrument,
bar_in_seq=BARS_IN_SEQ)
pickle.dump((x_train, y_train, program), open(save_path,'wb'))
pickle.dump(model_workflow, open(os.path.join('training_sets', EXPERIMENT_NAME, 'workflow.pkl'),'wb'))
if __name__ == '__main__':
    # Script entry point: read CLI arguments, fill in defaults, then either
    # analyze the midi pack or run the interactive extraction workflow.
    args = parse_argv()
    MIDI_PACK_NAME = args.midi_pack
    # fall back to the midi pack name / four bars when options were omitted
    EXPERIMENT_NAME = args.n or MIDI_PACK_NAME
    BARS_IN_SEQ = args.b or 4
    ANALIZE = args.a
    if not ANALIZE:
        extract_from_folder(ask_for_workflow())
    else:
        analyze_data(os.path.join('midi_packs', MIDI_PACK_NAME))

View File

@ -7,19 +7,27 @@ import pickle
parser = argparse.ArgumentParser()
parser.add_argument('n', help='name for experiment', type=str)
parser.add_argument('s', help='session name', type=str)
parser.add_argument('--i', help='number of midis to generate', type=int)
parser.add_argument('--l', help='latent_dim_of_model', type=int)
parser.add_argument('--m', help="mode {'from_seq', 'from_state}'", type=str)
args = parser.parse_args()
EXPERIMENT_NAME = args.n
SESSION_NAME = args.s
GENERETIONS_COUNT = args.i
LATENT_DIM = args.l
MODE = args.m
if GENERETIONS_COUNT == None:
if not GENERETIONS_COUNT:
GENERETIONS_COUNT = 1
if not LATENT_DIM:
LATENT_DIM = 256
if not MODE:
MODE = 'from_seq'
model_workflow = pickle.load(open(os.path.join('training_sets', EXPERIMENT_NAME, 'workflow.pkl'),'rb'))
band = dict()
@ -45,7 +53,7 @@ for instrument in tqdm(band):
band[instrument][0] = model
band[instrument][1] = program
for midi_counter in range(GENERETIONS_COUNT):
for midi_counter in tqdm(range(GENERETIONS_COUNT)):
''' MAKE MULTIINSTRUMENTAL MUSIC !!!'''
notes = dict()
@ -76,7 +84,11 @@ for midi_counter in range(GENERETIONS_COUNT):
os.mkdir(os.path.join('generated_music', EXPERIMENT_NAME))
except:
pass
try:
os.mkdir(os.path.join('generated_music', EXPERIMENT_NAME, SESSION_NAME))
except:
pass
save_path = os.path.join('generated_music', EXPERIMENT_NAME, f'{EXPERIMENT_NAME}_{midi_counter}_{MODE}_{LATENT_DIM}.mid')
save_path = os.path.join('generated_music', EXPERIMENT_NAME, SESSION_NAME, f'{EXPERIMENT_NAME}_{midi_counter}_{MODE}_{LATENT_DIM}.mid')
generated_midi.save(save_path)
print(f'Generated succefuly to {save_path}')
# print(f'Generated succefuly to {save_path}')

View File

@ -10,8 +10,6 @@ from random import randint
import pretty_midi as pm
from tqdm import tqdm
# TODO: Stream class is no longer needed <- remove from code and make just SingleTrack.notes instead of SingleTrack.stream.notes
class Stream():

View File

@ -52,7 +52,6 @@ class Seq2SeqTransformer():
self.x_vocab_size = len(self.x_vocab)
self.y_vocab_size = len(self.y_vocab)
self.x_transform_dict = dict(
[(char, i) for i, char in enumerate(self.x_vocab)])
self.y_transform_dict = dict(

View File

@ -3,48 +3,37 @@ import sys
import pickle
import keras
import argparse
import warnings
from model import Seq2SeqModel
from extract import make_folder_if_not_exist
# TODO:
# FIXME:
def parse_argv():
parser = argparse.ArgumentParser()
parser.add_argument('n', help='name for experiment', type=str)
parser.add_argument('--b', help='batch_size', type=int)
parser.add_argument('--l', help='latent_dim', type=int)
parser.add_argument('--e', help='epochs', type=int)
parser.add_argument('--r', help='reset, use when you want to reset waights and train from scratch', action='store_true')
parser.add_argument('--i', help='refrance to instrument to train, if you want to train only one instument')
parser.add_argument('-r', help='reset, use when you want to reset waights and train from scratch', action='store_true')
args = parser.parse_args()
return args
def load_workflow(experiment_name=None):
    """Load the pickled model workflow for an experiment.

    Args:
        experiment_name: experiment folder under ``training_sets``; defaults
            to the module-level ``EXPERIMENT_NAME`` when omitted, so existing
            zero-argument callers keep working.

    Returns:
        dict: the workflow mapping produced by the extraction script.

    Raises:
        FileNotFoundError: when training_sets/<experiment>/workflow.pkl
            does not exist.
    """
    if experiment_name is None:
        experiment_name = EXPERIMENT_NAME
    workflow_path = os.path.join('training_sets', experiment_name, 'workflow.pkl')
    if not os.path.isfile(workflow_path):
        # fixed message typo ("trainig_sets") so it names the real folder
        raise FileNotFoundError(f'There is no workflow.pkl file in training_sets/{experiment_name}/ folder')
    # use a context manager so the file handle is closed (original leaked it)
    with open(workflow_path, 'rb') as workflow_file:
        return pickle.load(workflow_file)
'''HYPER PARAMETERS'''
EXPERIMENT_NAME = args.n
BATCH_SIZE = args.b
LATENT_DIM = args.l
EPOCHS = args.e
RESET = args.r
INSTRUMENT = args.i
if BATCH_SIZE == None:
BATCH_SIZE = 32
if LATENT_DIM == None:
LATENT_DIM = 256
if EPOCHS == None:
EPOCHS = 1
if RESET == None:
RESET = False
## TODO: raise error if file not found
model_workflow = pickle.load(open(os.path.join('training_sets', EXPERIMENT_NAME, 'workflow.pkl'),'rb'))
tbCallBack = keras.callbacks.TensorBoard(log_dir='./Graph', histogram_freq=0, write_graph=True, write_images=True)
def train_models(model_workflow):
instruments = [instrument if how == 'melody' else instrument[1] for key, (instrument, how) in model_workflow.items()]
# make_folder_if_not_exist(os.mkdir(os.path.join('models',EXPERIMENT_NAME)))
# make folder for new experiment
try:
os.mkdir(os.path.join('models',EXPERIMENT_NAME))
except:
pass
# init models
found = False
for instrument in instruments:
@ -58,9 +47,33 @@ for instrument in instruments:
model.load(model_path)
print(f'Training: {instrument}')
train_history = model.fit(BATCH_SIZE, EPOCHS, callbacks=[tbCallBack])
model.fit(BATCH_SIZE, EPOCHS, callbacks=[])
model.save(model_path)
found = True
if not found:
raise ValueError(f'Instrument not found. Use one of the {instruments}')
if __name__ == '__main__':
    # Entry point: silence warnings, read the CLI options, apply defaults,
    # then train every model described by the stored workflow.
    warnings.filterwarnings("ignore")
    args = parse_argv()
    EXPERIMENT_NAME = args.n
    INSTRUMENT = args.i
    # fall back to the default hyper-parameters when options are omitted
    BATCH_SIZE = args.b or 32
    LATENT_DIM = args.l or 256
    EPOCHS = args.e or 1
    RESET = args.r or False
    train_models(load_workflow())