#%% Change working directory from the workspace root to the ipynb file location. Turn this addition off with the DataScience.changeDirOnImportExport setting
# ms-python.python added
import os
try:
    os.chdir(os.path.join(os.getcwd(), 'docs', 'images'))
    print(os.getcwd())
except OSError:
    pass

#%%
import music21
from music21.midi import MidiFile

import numpy as np
import matplotlib.pyplot as plt
import mido

#%% [markdown]
# # MIDI messages

#%%
filepath = '/home/altarin/praca-magisterska/docs/images/seq2seq_generated_midi_7.mid'
mid = mido.MidiFile(filepath)
# Dump every message in every track of the generated file
for i, track in enumerate(mid.tracks):
    print('Track {}: {}'.format(i, track.name))
    for msg in track:
        print(msg)

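#%%
# Added sketch (not in the original notebook): summarise note_on events per
# track instead of dumping raw messages. Reuses the `mid` object from above;
# `note_counts` is an illustrative name.
note_counts = {}
for i, track in enumerate(mid.tracks):
    # A note_on with velocity 0 is effectively a note_off, so skip those
    note_counts[i] = sum(1 for msg in track
                         if msg.type == 'note_on' and msg.velocity > 0)
print(note_counts)
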
#%% [markdown]
# # Linear regression

#%%
# Noisy samples scattered around the line y = x, with y_hat as the ideal fit
x = np.arange(0, 10)
y = np.arange(0, 10) + np.random.random(10) - 0.5
y_hat = np.arange(0, 10)
plt.scatter(x, y, c='k')
plt.plot(x, y_hat, c='r')
plt.xlabel('x')
plt.ylabel('y')
# plt.savefig('linear_reg.png')

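#%%
# Added sketch (not in the original notebook): instead of assuming the ideal
# line y = x, estimate slope and intercept from the noisy samples with
# ordinary least squares via np.polyfit.
slope, intercept = np.polyfit(x, y, deg=1)
plt.figure()
plt.scatter(x, y, c='k')
plt.plot(x, slope * x + intercept, c='r',
         label='fit: {:.2f}x + {:.2f}'.format(slope, intercept))
plt.legend()
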
#%%
# Plot the hyperbolic tangent activation function; np.exp makes the
# lambda work element-wise on the whole array at once
x = np.arange(-10, 10, 0.1)
tanh = lambda x: (np.exp(x) - np.exp(-x)) / (np.exp(x) + np.exp(-x))
y_hat = tanh(x)
plt.figure()
plt.plot(x, y_hat)
plt.xlabel('x')
plt.ylabel('tanh(x)')
plt.savefig('tanh.png')

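#%%
# Added sketch (not in the original notebook): the derivative of tanh, which
# is what backpropagation actually uses: tanh'(x) = 1 - tanh(x)**2.
dtanh = lambda x: 1 - np.tanh(x) ** 2
plt.figure()
plt.plot(x, np.tanh(x), label='tanh(x)')
plt.plot(x, dtanh(x), label="tanh'(x)")
plt.legend()
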
#%%
# Gradient descent
# Objective:   f(x)  = x**2
# Derivative:  f'(x) = 2*x
# Update rule: x_new = x - learning_rate * f'(x)

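#%%
# Added sketch (not in the original notebook): a minimal run of the update
# rule above on f(x) = x**2; the learning rate and step count are
# illustrative values.
lr = 0.1
x_t = -6.0  # same starting point as the plot below
for step in range(20):
    x_t = x_t - lr * 2 * x_t  # x <- x - lr * f'(x)
print(x_t)  # approaches the minimum at x = 0
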
#%%
func = lambda x: x ** 2
func_dx = lambda x: 2 * x

x = np.arange(-20, 20, 0.1)
y = func(x)

# Starting point of the descent
point_x = -6
point_y = func(point_x)

# x positions visited on the way down towards the minimum at 0
learning_points_xs = np.arange(point_x, 0, 0.8)
learning_points_ys = func(learning_points_xs)

fig, ax = plt.subplots()
ax.plot(x, y, c='k')

# Draw the tangent line ("styczna") at the first learning point only
for px in learning_points_xs[0:1]:
    slope = func_dx(px)
    # Tangent at px: y = f'(px)*x + b, with b = f(px) - f'(px)*px = -px**2
    intercept = -px ** 2
    styczna = lambda t: slope * t + intercept
    ax.plot(x, styczna(x), c='r', zorder=1)

ax.scatter(x=point_x, y=point_y, c='r', zorder=6)  # start
# ax.scatter(x=0, y=0, c='g', zorder=6)  # minimum
# ax.scatter(x=learning_points_xs, y=learning_points_ys, c='y', zorder=5)
plt.ylim((-20, 80))
plt.xlim((-20, 20))
plt.xlabel('x')
plt.ylabel('f(x) = x^2')

plt.savefig('gradient_descent_1.png')

# https://towardsdatascience.com/understanding-the-mathematics-behind-gradient-descent-dde5dc9be06e?
# https://medium.com/code-heroku/gradient-descent-for-machine-learning-3d871fa48b4c

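#%%
# Added sketch (not in the original notebook): run the update rule for real
# and overlay the visited points on the parabola, instead of the evenly
# spaced `learning_points_xs` above. `lr` and the step count are illustrative.
lr = 0.1
xs = [point_x]
for _ in range(15):
    xs.append(xs[-1] - lr * func_dx(xs[-1]))
xs = np.array(xs)

fig, ax = plt.subplots()
ax.plot(x, func(x), c='k')
ax.scatter(xs, func(xs), c='y', zorder=5)
ax.scatter(x=0, y=0, c='g', zorder=6)  # minimum
plt.ylim((-20, 80))
plt.xlim((-20, 20))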