mieszkania5/main.py

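"""Linear regression baseline for the mieszkania5 challenge.

Fits a single-feature linear model with SGD on train/train.tsv and writes
one prediction per line to test-A/out.tsv for the rows of test-A/in.tsv.
"""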
import torch
import torch.nn as nn
import numpy as np
import pandas as pd
# Paths for the challenge data: training set, test inputs, and prediction output.
train = 'train/train.tsv'
in_file = 'test-A/in.tsv'
out_file = 'test-A/out.tsv'
learning_rate = 0.00001
class linearRegression(nn.Module):
    """A single nn.Linear layer fitting y = w * x + b."""

    def __init__(self, input_dim, output_dim):
        super(linearRegression, self).__init__()
        self.linear = nn.Linear(input_dim, output_dim)

    def forward(self, x):
        return self.linear(x)
inputDim = 1   # one feature column
outputDim = 1  # one target column
model = linearRegression(inputDim, outputDim)

# Training set: column 0 holds the target, column 8 the single feature used here.
data = pd.read_csv(train, sep='\t', header=None)
y_values = data[0].tolist()
x_values = data[8].tolist()
x_train = np.array(x_values, dtype=np.float32).reshape(-1, 1)
y_train = np.array(y_values, dtype=np.float32).reshape(-1, 1)
# Run on GPU when available; keep the device so data tensors can follow the model.
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
model.to(device)

criterion = nn.MSELoss()
optimizer = torch.optim.SGD(model.parameters(), lr=learning_rate)
# The whole training set fits in memory, so the tensors are built once and reused.
inputs = torch.from_numpy(x_train).to(device)
labels = torch.from_numpy(y_train).to(device)

for epoch in range(1000):
    optimizer.zero_grad()
    outputs = model(inputs)            # forward pass
    loss = criterion(outputs, labels)  # mean squared error
    loss.backward()                    # backpropagation
    optimizer.step()                   # SGD weight update
# Test inputs have no target column, so the same feature sits in column 7.
result = pd.read_csv(in_file, sep='\t', header=None)
end_x = np.array(result[7].tolist(), dtype=np.float32).reshape(-1, 1)

# Inference only: no gradients needed; predictions come back to the CPU
# before the conversion to NumPy so this also works when CUDA is used.
with torch.no_grad():
    predictions = model(torch.from_numpy(end_x).to(device)).cpu().numpy()

# Write one prediction per line.
with open(out_file, 'w') as output:
    for p in predictions:
        output.write(str(p[0]) + '\n')