# praca-magisterska/docs/images/plots.py

#%% Change working directory from the workspace root to the ipynb file location. Turn this addition off with the DataScience.changeDirOnImportExport setting
# ms-python.python added
import os
try:
    os.chdir(os.path.join(os.getcwd(), 'docs\\images'))
    print(os.getcwd())
except:
    pass
#%%
import music21
from music21.midi import MidiFile
import numpy as np
import matplotlib.pyplot as plt
import mido
#%% [markdown]
# # midi messages
#%%
filepath = '/home/altarin/praca-magisterska/docs/images/seq2seq_generated_midi_7.mid'
mid = mido.MidiFile(filepath)
for i, track in enumerate(mid.tracks):
    for msg in track:
        print(msg)
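#%%
# A hedged sketch (not in the original script): the loop above prints every
# event; mido messages can instead be filtered by their `type` attribute to
# keep only the note events. Reuses `mid` loaded from `filepath` above.
for i, track in enumerate(mid.tracks):
    notes = [msg for msg in track if msg.type in ('note_on', 'note_off')]
    print('track', i, ':', len(notes), 'note events')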
#%% [markdown]
# # Linear regression
#%%
x = np.arange(0,10)
y = np.arange(0,10) + np.random.random(10)-0.5
y_hat = np.arange(0,10)
plt.scatter(x, y, c='k')
plt.plot(x,y_hat, c='r')
# plt.labels
# plt.savefig('linear_reg.png')
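#%%
# A minimal sketch (assumption, not the original figure): fit the regression
# line with least squares via np.polyfit instead of hard-coding y_hat = x.
# np.polyfit with deg=1 returns the slope and intercept of the fitted line.
slope, intercept = np.polyfit(x, y, deg=1)
plt.figure()
plt.scatter(x, y, c='k')
plt.plot(x, slope * x + intercept, c='r')
# plt.savefig('linear_reg_fit.png')  # hypothetical filename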
#%%
from math import exp
x = np.arange(-10,10, 0.1)
# y = np.arange(0,10)
tanh = lambda x: (exp(x) - exp(-x))/(exp(x) + exp(-x))
y_hat = [tanh(yy) for yy in x]
# plt.scatter(x, y, c='k')
plt.plot(x, y_hat)
# plt.show()
# plt.labels
plt.savefig('tanh.png')
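#%%
# Equivalent sketch using NumPy's vectorized tanh in place of the scalar
# math.exp formula above; it avoids the Python-level list comprehension and
# the exp() overflow that occurs for large |x|.
plt.figure()
plt.plot(x, np.tanh(x))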
#%%
# Gradient descent
# f(x) = x^2,  f'(x) = 2x
#%%
func = lambda x: x**2
func_dx = lambda x:2*x
x = np.arange(-20,20,0.1)
y = func(x)
point_x = -6
point_y = func(point_x)
learning_points_xs = np.arange(point_x, 0, 0.8)
learning_points_ys = func(learning_points_xs)
fig, ax = plt.subplots()
ax.plot(x, y, c='k')
for px in learning_points_xs[0:1]:
    slope = func_dx(px)
    # tangent line ("styczna") at px: y = f'(px) * x - px^2
    intercept = -px**2
    styczna = lambda x: slope * x + intercept
    dx = styczna(x)
    ax.plot(x, dx, c='r', zorder=1)
ax.scatter(x=point_x, y=point_y, c='r', zorder=6)  # starting point
# ax.scatter(x=0, y=0, c='g', zorder=6) #min
# ax.scatter(x=learning_points_xs, y=learning_points_ys, c='y', zorder=5)
plt.ylim((-20,80))
plt.xlim((-20,20))
# plt.xlabel('x')
# plt.ylabel('f(x) = x^2')
plt.savefig('gradient_descent_1.png')
# https://towardsdatascience.com/understanding-the-mathematics-behind-gradient-descent-dde5dc9be06e?
# https://medium.com/code-heroku/gradient-descent-for-machine-learning-3d871fa48b4c
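#%%
# A minimal sketch of the update rule the figure illustrates (assumed values:
# learning rate 0.1, 20 steps): each step moves x against the gradient,
# x <- x - lr * f'(x), so for f(x) = x^2 the iterate approaches the minimum at 0.
lr = 0.1
px = point_x  # start from the same point as the plotted tangent (-6)
for _ in range(20):
    px = px - lr * func_dx(px)  # gradient step
print(px)  # close to 0 after 20 steps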
#%% sound wave
# import the pyplot and wavfile modules
import matplotlib.pyplot as plt
from scipy.io import wavfile
# Read the wav file (mono)
samplingFrequency, signalData = wavfile.read('foo.wav')
# Plot the signal read from wav file
# plt.title('Spectrogram of a wav file with piano music')
plt.plot(signalData)
plt.xlabel('Samples')
plt.ylabel('Amplitude')
plt.savefig('waveform_axis.png')
# plt.show()
# print(samplingFrequency)
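#%%
# The commented-out title above mentions a spectrogram; a hedged sketch of
# that variant (same mono 'foo.wav', hypothetical output filename):
# plt.specgram computes and draws the short-time spectrum directly from the
# raw signal and its sampling frequency.
plt.figure()
plt.specgram(signalData, Fs=samplingFrequency)
plt.xlabel('Time [s]')
plt.ylabel('Frequency [Hz]')
# plt.savefig('spectrogram.png')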
# %%
import pandas as pd
import matplotlib.pyplot as plt
import os
# %%
cols = ['epoch', 'val_loss', 'loss']
guitar_df = pd.read_csv('offspring_history/guitar_history.csv', header=None)
bass_df = pd.read_csv('offspring_history/bass_history.csv', header=None)
drums_df = pd.read_csv('offspring_history/drums_history.csv', header=None)
melody_df = pd.read_csv('offspring_history/melody_history.csv', header=None)
guitar_df.columns = cols
bass_df.columns = cols
drums_df.columns = cols
melody_df.columns = cols
fig, axs = plt.subplots(2, 2, figsize=(10,10))
l11, l12 = axs[0][0].plot(guitar_df[['loss', 'val_loss']])
l21, l22 = axs[0][1].plot(bass_df[['loss', 'val_loss']])
l31, l32 = axs[1][0].plot(drums_df[['loss', 'val_loss']])
l41, l42 = axs[1][1].plot(melody_df[['loss', 'val_loss']])
axs[0][0].set_title('Guitar')
axs[0][1].set_title('Bass')
axs[1][0].set_title('Drums')
axs[1][1].set_title('Melody')
axs[0][0].set_xlabel('epochs')
axs[0][1].set_xlabel('epochs')
axs[1][0].set_xlabel('epochs')
axs[1][1].set_xlabel('epochs')
axs[0][0].set_ylabel('loss')
axs[0][1].set_ylabel('loss')
axs[1][0].set_ylabel('loss')
axs[1][1].set_ylabel('loss')
axs[0][0].legend(labels=('loss', 'val_loss'))
plt.savefig('training_losses.png')
#%%
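# An equivalent, loop-based sketch of the four-panel figure above (same data,
# intended to be behaviour-preserving): iterating over (name, DataFrame)
# pairs removes the repeated set_title/set_xlabel/set_ylabel calls.
dfs = {'Guitar': guitar_df, 'Bass': bass_df, 'Drums': drums_df, 'Melody': melody_df}
fig, axs = plt.subplots(2, 2, figsize=(10, 10))
for ax, (name, df) in zip(axs.flat, dfs.items()):
    ax.plot(df['loss'], label='loss')
    ax.plot(df['val_loss'], label='val_loss')
    ax.set_title(name)
    ax.set_xlabel('epochs')
    ax.set_ylabel('loss')
    ax.legend()
# plt.savefig('training_losses.png')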