ium_s434700/learning/ml.py

import datetime

import numpy as np
import pandas as pd
import torch
import torch.nn as nn

# Hyperparameters
INPUT_DIM = 1
OUTPUT_DIM = 1
LEARNING_RATE = 0.01
EPOCHS = 100

# Load the training data; the input feature is the month of each match date.
dataset = pd.read_csv('datasets/train_set.csv')
x_values = [datetime.datetime.strptime(item, "%Y-%m-%d").month
            for item in dataset['date'].values]
x_train = np.array(x_values, dtype=np.float32).reshape(-1, 1)

# Target: ratio of the two match results, oriented so it is always <= 1.
y_values = [min(r1 / r2, r2 / r1)
            for r1, r2 in zip(dataset['result_1'].values,
                              dataset['result_2'].values)]
y_train = np.array(y_values, dtype=np.float32).reshape(-1, 1)


class LinearRegression(nn.Module):
    """Single-layer linear model mapping the month to the expected result ratio."""

    def __init__(self, input_size, output_size):
        super().__init__()
        self.linear = nn.Linear(input_size, output_size)

    def forward(self, x):
        return self.linear(x)

model = LinearRegression(INPUT_DIM, OUTPUT_DIM)
criterion = nn.MSELoss()
optimizer = torch.optim.SGD(model.parameters(), lr=LEARNING_RATE)

# Full-batch gradient descent for EPOCHS iterations.
for epoch in range(EPOCHS):
    inputs = torch.from_numpy(x_train)
    labels = torch.from_numpy(y_train)
    optimizer.zero_grad()
    outputs = model(inputs)
    loss = criterion(outputs, labels)
    loss.backward()
    optimizer.step()
    print('epoch {}, loss {}'.format(epoch, loss.item()))

# Persist the trained weights.
torch.save(model.state_dict(), 'model.pt')
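
# A minimal inference sketch (illustrative only): reload the saved weights and
# predict the result ratio for a single month. The month value 6 used below is
# an assumed example input, not something taken from the dataset.
inference_model = LinearRegression(INPUT_DIM, OUTPUT_DIM)
inference_model.load_state_dict(torch.load('model.pt'))
inference_model.eval()
with torch.no_grad():
    sample_month = torch.tensor([[6.0]])              # shape (1, INPUT_DIM)
    predicted_ratio = inference_model(sample_month)
    print('predicted ratio for month 6: {}'.format(predicted_ratio.item()))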