seq2seq model to master #2

Merged
s444337 merged 1 commit from seq2seq_model into master 2019-10-28 10:47:52 +01:00
5 changed files with 141 additions and 103 deletions

View File

@@ -5,54 +5,70 @@ import pickle
 from midi_processing import extract_data, analyze_data
-parser = argparse.ArgumentParser()
-parser.add_argument('midi_pack', help='folder name for midi pack in midi_packs folder', type=str)
-parser.add_argument('name', help='name for experiment', type=str)
-parser.add_argument('--b', help='lengh of sequence in bars', type=int)
-parser.add_argument('-a', help='analize data', action='store_true')
-args = parser.parse_args()
-'''SETTINGS'''
-MIDI_PACK_NAME = args.midi_pack
-EXPERIMENT_NAME = args.name
-BARS_IN_SEQ = args.b
-midi_folder_path = os.path.join('midi_packs', MIDI_PACK_NAME)
-# analyze data set for intresting intruments
-if args.a:
-    analyze_data(midi_folder_path)
-    sys.exit()
-'''MODEL WORKFLOW DIALOG'''
-number_of_instruments = int(input('Please specify number of instruments\n'))
-model_workflow = dict()
-input_list = []
-for i in range(number_of_instruments):
-    input_string = input('Please specify a workflow step\n')
-    tokens = input_string.split()
-    if tokens[-1] == 'melody':
-        model_workflow[i] = (tokens[0], tokens[1])
-    else:
-        model_workflow[i] = ((tokens[1], tokens[0]), tokens[2])
-# make folder for new experiment if no exist
-try:
-    os.mkdir(os.path.join('training_sets', EXPERIMENT_NAME))
-except:
-    pass
-# extract process
-for key, (instrument, how) in model_workflow.items():
+def make_folder_if_not_exist(path):
+    try:
+        os.mkdir(path)
+    except:
+        pass
+
+def parse_argv():
+    parser = argparse.ArgumentParser()
+    parser.add_argument('midi_pack', help='folder name for midi pack in midi_packs folder', type=str)
+    parser.add_argument('--n', help='name for experiment', type=str)
+    parser.add_argument('--b', help='lengh of sequence in bars', type=int)
+    parser.add_argument('-a', help='analize data', action='store_true')
+    args = parser.parse_args()
+    return args
+
+def ask_for_workflow():
+    '''MODEL WORKFLOW DIALOG'''
+    number_of_instruments = int(input('Please specify number of instruments\n'))
+    model_workflow = dict()
+    for i in range(number_of_instruments):
+        input_string = input('Please specify a workflow step <Instrument> [<Second Instrument>] <mode> {m : melody, a : arrangment}\n')
+        tokens = input_string.split()
+        if tokens[-1] == 'm':
+            model_workflow[i] = (tokens[0], 'melody')
+        elif tokens[-1] == 'a':
+            model_workflow[i] = ((tokens[1], tokens[0]), 'arrangment')
+        else:
+            raise ValueError("The step definitiom must end with {'m', 'a'}");
+    make_folder_if_not_exist(os.path.join('training_sets', EXPERIMENT_NAME))
+    pickle.dump(model_workflow, open(os.path.join('training_sets', EXPERIMENT_NAME, 'workflow.pkl'),'wb'))
+    return model_workflow
+
+def extract_from_folder(model_workflow):
+    for key, (instrument, how) in model_workflow.items():
         if how == 'melody':
             instrument_name = instrument
         else:
             instrument_name = instrument[1]
+        make_folder_if_not_exist(os.path.join('training_sets', EXPERIMENT_NAME))
         save_path = os.path.join('training_sets', EXPERIMENT_NAME, instrument_name.lower() + '_data.pkl')
-    x_train, y_train, program = extract_data(midi_folder_path=midi_folder_path, how=how,
-                                             instrument=instrument, bar_in_seq=BARS_IN_SEQ)
+        x_train, y_train, program = extract_data(midi_folder_path=os.path.join('midi_packs', MIDI_PACK_NAME),
+                                                 how=how,
+                                                 instrument=instrument,
+                                                 bar_in_seq=BARS_IN_SEQ)
         pickle.dump((x_train, y_train, program), open(save_path,'wb'))
-pickle.dump(model_workflow, open(os.path.join('training_sets', EXPERIMENT_NAME, 'workflow.pkl'),'wb'))
+
+if __name__ == '__main__':
+    args = parse_argv()
+    MIDI_PACK_NAME = args.midi_pack
+    EXPERIMENT_NAME = args.n
+    BARS_IN_SEQ = args.b
+    if not EXPERIMENT_NAME:
+        EXPERIMENT_NAME = MIDI_PACK_NAME
+    if not BARS_IN_SEQ:
+        BARS_IN_SEQ = 4
+    ANALIZE = args.a
+    if ANALIZE:
+        analyze_data(os.path.join('midi_packs', MIDI_PACK_NAME))
+    else:
+        extract_from_folder(ask_for_workflow())
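
For context, the reworked extraction flow can be exercised end to end as sketched below. The midi pack name rock and the dialog answers are made-up illustrations, and the file name extract.py is inferred from the "from extract import make_folder_if_not_exist" line in the training script further down.

    $ python extract.py rock --b 4
    Please specify number of instruments
    2
    Please specify a workflow step <Instrument> [<Second Instrument>] <mode> {m : melody, a : arrangment}
    Piano m
    Please specify a workflow step <Instrument> [<Second Instrument>] <mode> {m : melody, a : arrangment}
    Piano Bass a

Because --n is omitted, the experiment name falls back to the pack name, so these answers pickle {0: ('Piano', 'melody'), 1: (('Bass', 'Piano'), 'arrangment')} to training_sets/rock/workflow.pkl before extract_from_folder() extracts and pickles the training data for each step.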

View File

@@ -7,19 +7,27 @@ import pickle
 parser = argparse.ArgumentParser()
 parser.add_argument('n', help='name for experiment', type=str)
+parser.add_argument('s', help='session name', type=str)
 parser.add_argument('--i', help='number of midis to generate', type=int)
 parser.add_argument('--l', help='latent_dim_of_model', type=int)
 parser.add_argument('--m', help="mode {'from_seq', 'from_state}'", type=str)
 args = parser.parse_args()
 EXPERIMENT_NAME = args.n
+SESSION_NAME = args.s
 GENERETIONS_COUNT = args.i
 LATENT_DIM = args.l
 MODE = args.m
-if GENERETIONS_COUNT == None:
+if not GENERETIONS_COUNT:
     GENERETIONS_COUNT = 1
+if not LATENT_DIM:
+    LATENT_DIM = 256
+if not MODE:
+    MODE = 'from_seq'
 model_workflow = pickle.load(open(os.path.join('training_sets', EXPERIMENT_NAME, 'workflow.pkl'),'rb'))
 band = dict()

@@ -45,7 +53,7 @@ for instrument in tqdm(band):
     band[instrument][0] = model
     band[instrument][1] = program
-for midi_counter in range(GENERETIONS_COUNT):
+for midi_counter in tqdm(range(GENERETIONS_COUNT)):
     ''' MAKE MULTIINSTRUMENTAL MUSIC !!!'''
     notes = dict()

@@ -76,7 +84,11 @@ for midi_counter in range(GENERETIONS_COUNT):
         os.mkdir(os.path.join('generated_music', EXPERIMENT_NAME))
     except:
         pass
-    save_path = os.path.join('generated_music', EXPERIMENT_NAME, f'{EXPERIMENT_NAME}_{midi_counter}_{MODE}_{LATENT_DIM}.mid')
+    try:
+        os.mkdir(os.path.join('generated_music', EXPERIMENT_NAME, SESSION_NAME))
+    except:
+        pass
+    save_path = os.path.join('generated_music', EXPERIMENT_NAME, SESSION_NAME, f'{EXPERIMENT_NAME}_{midi_counter}_{MODE}_{LATENT_DIM}.mid')
     generated_midi.save(save_path)
-    print(f'Generated succefuly to {save_path}')
+    # print(f'Generated succefuly to {save_path}')
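
The generation script (its file name is not visible in this diff; generate.py is assumed purely for illustration) now takes a session name as a second positional argument and groups its output per session. A hypothetical run:

    $ python generate.py rock session_01 --i 5

writes five files of the form generated_music/rock/session_01/rock_0_from_seq_256.mid, using the new defaults MODE = 'from_seq' and LATENT_DIM = 256 when --m and --l are not given.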

View File

@@ -10,8 +10,6 @@ from random import randint
 import pretty_midi as pm
 from tqdm import tqdm
 # TODO: Stream class is no logner needed <- remore from code and make just SingleTrack.notes instead on SingleTrack.stream.notes
 class Stream():

View File

@@ -52,7 +52,6 @@ class Seq2SeqTransformer():
         self.x_vocab_size = len(self.x_vocab)
         self.y_vocab_size = len(self.y_vocab)
         self.x_transform_dict = dict(
             [(char, i) for i, char in enumerate(self.x_vocab)])
         self.y_transform_dict = dict(

View File

@@ -3,50 +3,39 @@ import sys
 import pickle
 import keras
 import argparse
+import warnings
 from model import Seq2SeqModel
+from extract import make_folder_if_not_exist
-parser = argparse.ArgumentParser()
-parser.add_argument('n', help='name for experiment', type=str)
-parser.add_argument('--b', help='batch_size', type=int)
-parser.add_argument('--l', help='latent_dim', type=int)
-parser.add_argument('--e', help='epochs', type=int)
-parser.add_argument('--r', help='reset, use when you want to reset waights and train from scratch', action='store_true')
-parser.add_argument('--i', help='refrance to instrument to train, if you want to train only one instument')
-args = parser.parse_args()
-'''HYPER PARAMETERS'''
-EXPERIMENT_NAME = args.n
-BATCH_SIZE = args.b
-LATENT_DIM = args.l
-EPOCHS = args.e
-RESET = args.r
-INSTRUMENT = args.i
-if BATCH_SIZE == None:
-    BATCH_SIZE = 32
-if LATENT_DIM == None:
-    LATENT_DIM = 256
-if EPOCHS == None:
-    EPOCHS = 1
-if RESET == None:
-    RESET = False
-## TODO: raise error if file not found
-model_workflow = pickle.load(open(os.path.join('training_sets', EXPERIMENT_NAME, 'workflow.pkl'),'rb'))
-tbCallBack = keras.callbacks.TensorBoard(log_dir='./Graph', histogram_freq=0, write_graph=True, write_images=True)
-instruments = [instrument if how == 'melody' else instrument[1] for key, (instrument, how) in model_workflow.items()]
-# make folder for new experiment
-try:
-    os.mkdir(os.path.join('models',EXPERIMENT_NAME))
-except:
-    pass
-# init models
-found = False
-for instrument in instruments:
+
+# TODO:
+# FIXME:
+
+def parse_argv():
+    parser = argparse.ArgumentParser()
+    parser.add_argument('n', help='name for experiment', type=str)
+    parser.add_argument('--b', help='batch_size', type=int)
+    parser.add_argument('--l', help='latent_dim', type=int)
+    parser.add_argument('--e', help='epochs', type=int)
+    parser.add_argument('--i', help='refrance to instrument to train, if you want to train only one instument')
+    parser.add_argument('-r', help='reset, use when you want to reset waights and train from scratch', action='store_true')
+    args = parser.parse_args()
+    return args
+
+def load_workflow():
+    workflow_path = os.path.join('training_sets', EXPERIMENT_NAME, 'workflow.pkl')
+    if os.path.isfile(workflow_path):
+        model_workflow = pickle.load(open(workflow_path,'rb'))
+    else:
+        raise FileNotFoundError(f'There is no workflow.pkl file in trainig_sets/{EXPERIMENT_NAME}/ folder')
+    return model_workflow
+
+def train_models(model_workflow):
+    instruments = [instrument if how == 'melody' else instrument[1] for key, (instrument, how) in model_workflow.items()]
+    # make_folder_if_not_exist(os.mkdir(os.path.join('models',EXPERIMENT_NAME)))
+    found = False
+    for instrument in instruments:
         if INSTRUMENT == None or INSTRUMENT == instrument:
             data_path = os.path.join('training_sets', EXPERIMENT_NAME, instrument.lower() + '_data.pkl')

@@ -58,9 +47,33 @@ for instrument in instruments:
             model.load(model_path)
             print(f'Training: {instrument}')
-            train_history = model.fit(BATCH_SIZE, EPOCHS, callbacks=[tbCallBack])
+            model.fit(BATCH_SIZE, EPOCHS, callbacks=[])
             model.save(model_path)
             found = True
     if not found:
         raise ValueError(f'Instrument not found. Use one of the {instruments}')
+
+if __name__ == '__main__':
+    warnings.filterwarnings("ignore")
+    args = parse_argv()
+    EXPERIMENT_NAME = args.n
+    BATCH_SIZE = args.b
+    LATENT_DIM = args.l
+    EPOCHS = args.e
+    RESET = args.r
+    INSTRUMENT = args.i
+    # default settings if not args passed
+    if not BATCH_SIZE:
+        BATCH_SIZE = 32
+    if not LATENT_DIM:
+        LATENT_DIM = 256
+    if not EPOCHS:
+        EPOCHS = 1
+    if not RESET:
+        RESET = False
+    train_models(load_workflow())
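
Finally, a hypothetical training run for the same experiment (the trainer's file name is not shown in the diff either; train.py is assumed):

    $ python train.py rock --e 10 --i Piano

trains only the Piano model for ten epochs with the default batch size of 32 and latent dim of 256. Leaving --i out trains every instrument derived from workflow.pkl, and -r, per its help string, resets the weights and trains from scratch.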