try 2 change to ipynb
This commit is contained in:
parent
77795dcbd3
commit
475af24a8d
dev-0/out.tsv (normal file, 462 lines added)
@@ -0,0 +1,462 @@
462 predicted prices, one value per line: 93925.83, 65075.145, 100406.98, 81483.12, 102840.83, …, 136313.08, 111427.664
linear_regression.ipynb (normal file, 236 lines added)
@@ -0,0 +1,236 @@
{
 "metadata": {
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.8.0-final"
  },
  "orig_nbformat": 2,
  "kernelspec": {
   "name": "python3",
   "display_name": "Python 3.8.0 64-bit ('tau': conda)",
   "metadata": {
    "interpreter": {
     "hash": "99b9bc2e2925de034137bab8ac26137a7eaafe59960ece65892d3f1bd8bee5d4"
    }
   }
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2,
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "import pandas as pd\n",
    "import torch\n",
    "import torch.nn as tnn"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "filedir = '/home/ubuntu/Pulpit/TAU/mieszkania5'\n",
    "\n",
    "#train size\n",
    "learningRate = 0.000001 #the best score so far was obtained with this value; the lowest value at which the regression output tensors (the outputs variable in the training section) are not [nan] is 0.00001\n",
    "epochs = 20000\n",
    "\n",
    "#treainfile\n",
    "trainfile = filedir + '/train/train.tsv'\n",
    "\n",
    "#data files\n",
    "dev0in = filedir + '/dev-0/in.tsv'\n",
    "dev0out = filedir + '/dev-0/out.tsv' \n",
    "testAin = filedir + '/test-A/in.tsv'\n",
    "testAout = filedir + '/test-A/out.tsv'"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "model regresji gotowy\n"
     ]
    }
   ],
   "source": [
    "class linearRegression(tnn.Module):\n",
    "    def __init__(self, dim_i, dim_o):\n",
    "        super(linearRegression, self).__init__()\n",
    "        self.linear = tnn.Linear(dim_i, dim_o)\n",
    "\n",
    "    def forward(self, x):\n",
    "        out = self.linear(x)\n",
    "        return out\n",
    "\n",
    "model = linearRegression(1, 1)\n",
    "device = torch.device('cpu')\n",
    "model.to(device)\n",
    "criterion = tnn.MSELoss() \n",
    "optimizer = torch.optim.SGD(model.parameters(), lr=learningRate)\n",
    "\n",
    "print('model regresji gotowy')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "dane treningowe wczytane\n"
     ]
    }
   ],
   "source": [
    "#training data\n",
    "trainfile_read = pd.read_csv(trainfile, sep='\\t', header=None, index_col=None)\n",
    "train_data_sizes = np.array(trainfile_read[8].tolist(), dtype=np.float32).reshape(-1, 1)\n",
    "train_data_prices = np.array(trainfile_read[0].tolist(), dtype=np.float32).reshape(-1, 1)\n",
    "\n",
    "print(\"dane treningowe wczytane\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {
    "tags": []
   },
   "outputs": [
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "trening zakonczony\n"
     ]
    }
   ],
   "source": [
    "#training\n",
    "for epoch in range(epochs):\n",
    "    inputs = torch.from_numpy(train_data_sizes).to(device)\n",
    "    labels = torch.from_numpy(train_data_prices).to(device)\n",
    "    \n",
    "    optimizer.zero_grad()\n",
    "\n",
    "    outputs = model(inputs)\n",
    "\n",
    "    loss = criterion(outputs, labels)\n",
    "\n",
    "    loss.backward()\n",
    "\n",
    "    optimizer.step()\n",
    "\n",
    "print('trening zakonczony')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "dane do przewidzenia wczytane\n"
     ]
    }
   ],
   "source": [
    "#data to predict\n",
    "devfile_read = pd.read_csv(dev0in, sep='\\t', header=None, index_col=None)\n",
    "testfile_in = pd.read_csv(testAin, sep='\\t', header=None, index_col=None)\n",
    "dev_data_sizes = np.array(devfile_read[7].tolist(), dtype=np.float32).reshape(-1, 1)\n",
    "test_data_sizes = np.array(testfile_in[7].tolist(), dtype=np.float32).reshape(-1, 1)\n",
    "\n",
    "print('dane do przewidzenia wczytane')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "zapisywanie wyników dev-0\nwyniki zapisane\n"
     ]
    }
   ],
   "source": [
    "#predicting prices for dev-0\n",
    "pred_data = model(torch.from_numpy(dev_data_sizes).requires_grad_()).data.numpy()\n",
    "\n",
    "print('zapisywanie wyników dev-0')\n",
    "\n",
    "dev_of = open(dev0out, 'w')\n",
    "for i in pred_data:\n",
    "    dev_of.write(str(i[0])+'\\n')\n",
    "dev_of.close()\n",
    "\n",
    "print('wyniki zapisane')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "zapisywanie wyników test-A\n"
     ]
    }
   ],
   "source": [
    "#predicting prices for test-A\n",
    "pred_data = model(torch.from_numpy(test_data_sizes).requires_grad_()).data.numpy()\n",
    "\n",
    "print('zapisywanie wyników test-A')\n",
    "\n",
    "test_of = open(testAout, 'w')\n",
    "for i in pred_data:\n",
    "    test_of.write(str(i[0])+'\\n')\n",
    "test_of.close()\n",
    "\n",
    "print('wyniki zapisane')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ]
}
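The hunks below apply the same changes to the existing Python script version of this pipeline. The inline comment on learningRate notes that anything above 0.00001 makes the regression outputs collapse to [nan]; that is expected when raw flat sizes and prices in the tens of thousands are fed straight into plain SGD. A common remedy is to z-score the feature and the target and train on the normalized values, which tolerates a much larger learning rate. The sketch below only illustrates that idea and is not part of the commit; it reuses the train.tsv path and column layout assumed by the notebook (column 8 = size, column 0 = price).

# Illustrative sketch (not committed code): the same 1-D linear regression trained on
# z-scored data, so a much larger learning rate stays numerically stable.
import numpy as np
import pandas as pd
import torch
import torch.nn as tnn

trainfile = '/home/ubuntu/Pulpit/TAU/mieszkania5/train/train.tsv'  # path taken from the notebook
train = pd.read_csv(trainfile, sep='\t', header=None, index_col=None)
x = np.array(train[8].tolist(), dtype=np.float32).reshape(-1, 1)   # flat size (column 8, as in the notebook)
y = np.array(train[0].tolist(), dtype=np.float32).reshape(-1, 1)   # price (column 0, as in the notebook)

# standardize feature and target; keep the statistics to undo the scaling later
x_mean, x_std = x.mean(), x.std()
y_mean, y_std = y.mean(), y.std()
x_n = torch.from_numpy((x - x_mean) / x_std)
y_n = torch.from_numpy((y - y_mean) / y_std)

model = tnn.Linear(1, 1)
criterion = tnn.MSELoss()
optimizer = torch.optim.SGD(model.parameters(), lr=0.01)  # stable here, unlike anything above 0.00001 on raw values

for epoch in range(2000):
    optimizer.zero_grad()
    loss = criterion(model(x_n), y_n)
    loss.backward()
    optimizer.step()

# map predictions back to the original price scale before writing them out
with torch.no_grad():
    preds = model(x_n).numpy() * y_std + y_mean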
@@ -4,18 +4,20 @@ import torch
import torch.nn as tnn

filedir = '/home/ubuntu/Pulpit/TAU/mieszkania5'

#train size
learningRate = 0.0001
epochs = 10000
learningRate = 0.000001 #the best score so far was obtained with this value; the lowest value at which the regression output tensors (the outputs variable in the training section) are not [nan] is 0.00001
epochs = 20000

#treainfile
trainfile = filedir + '/train/train.tsv'

#data files
dev0in = filedir + '/dev-0/in.tsv'
dev0out = filedir + '/dev-0/out.tsv'
testAin = filedir + '/test-A/in.tsv'
testAout = filedir + '/test-A/out.tsv'


class linearRegression(tnn.Module):
    def __init__(self, dim_i, dim_o):
        super(linearRegression, self).__init__()
@@ -28,27 +30,22 @@ class linearRegression(tnn.Module):
model = linearRegression(1, 1)
device = torch.device('cpu')
model.to(device)
print('model regresji gotowy')

#training data
trainfile_read = pd.raed_csv(trainfile, sep='\t', header=None, index_col=None)
train_data_sizes = np.array(trainfile_read[8].tolist(), dtype=np.float32).reshape(-1, 1)
train_data_prices = np.array(trainfile_read[0].tolist(), dtype=np.float32).reshape(-1, 1)

#data to predict
devfile_read = pd.raed_csv(dev0in, sep='\t', header=None, index_col=None)
testfile_in = pd.raed_csv(testAin, sep='\t', header=None, index_col=None)
dev_data_sizes = np.array(devfile_read[7].tolist(), dtype=np.float32)
test_data_sizes = np.array(testfile_in[7].tolist(), dtype=np.float32)
criterion = tnn.MSELoss()
optimizer = torch.optim.SGD(model.parameters(), lr=learningRate)

print('dane wczytane')
print('model regresji gotowy')

#training data
trainfile_read = pd.read_csv(trainfile, sep='\t', header=None, index_col=None)
train_data_sizes = np.array(trainfile_read[8].tolist(), dtype=np.float32).reshape(-1, 1)
train_data_prices = np.array(trainfile_read[0].tolist(), dtype=np.float32).reshape(-1, 1)

print("dane treningowe wczytane")

#training
for epoch in range(epochs):
    inputs = torch.from_numpy(train_data_sizes)
    labels = torch.from_numpy(train_data_prices)
    inputs = torch.from_numpy(train_data_sizes).to(device)
    labels = torch.from_numpy(train_data_prices).to(device)

    optimizer.zero_grad()

@@ -59,23 +56,37 @@ for epoch in range(epochs):
    loss.backward()

    optimizer.step()
print('trening zakonczony')
#predicting prices

predicted = model(torch.from_numpy(dev_data_sizes).requires_grad_()).data.numpy()
print('trening zakonczony')

#data to predict
devfile_read = pd.read_csv(dev0in, sep='\t', header=None, index_col=None)
testfile_in = pd.read_csv(testAin, sep='\t', header=None, index_col=None)
dev_data_sizes = np.array(devfile_read[7].tolist(), dtype=np.float32).reshape(-1, 1)
test_data_sizes = np.array(testfile_in[7].tolist(), dtype=np.float32).reshape(-1, 1)

print('dane do przewidzenia wczytane')

#predicting prices for dev-0
pred_data = model(torch.from_numpy(dev_data_sizes).requires_grad_()).data.numpy()

print('zapisywanie wyników dev-0')

dev_of = open(dev0out, 'w')
for i in predicted:
for i in pred_data:
    dev_of.write(str(i[0])+'\n')
dev_of.close()

predicted = model(torch.from_numpy(test_data_sizes).requires_grad_()).data.numpy()
print('wyniki zapisane')

#predicting prices for test-A
pred_data = model(torch.from_numpy(test_data_sizes).requires_grad_()).data.numpy()

print('zapisywanie wyników test-A')

test_of = open(testAout, 'w')
for i in predicted:
for i in pred_data:
    test_of.write(str(i[0])+'\n')
test_of.close()

print('wyniki zapisane')
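Both prediction steps build the input tensor with .requires_grad_() and then strip the gradient again through .data.numpy(). Gradients are not needed at inference time, so an equivalent and slightly cheaper pattern is to run the forward pass under torch.no_grad(). A minimal sketch, assuming the notebook's model, dev_data_sizes and dev0out are already defined; it writes the same dev-0/out.tsv format:

import torch

# forward pass without gradient tracking; numerically identical to the committed version
with torch.no_grad():
    pred_data = model(torch.from_numpy(dev_data_sizes)).numpy()

# one prediction per line, matching the dev-0/out.tsv layout
with open(dev0out, 'w') as dev_of:
    for p in pred_data:
        dev_of.write(str(p[0]) + '\n')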
test-A/out.tsv (normal file, 418 lines added)
@@ -0,0 +1,418 @@
418 predicted prices, one value per line: 97877.414, 100625.75, 83055.555, 75699.305, 73771.375, …, 112425.81, 103141.63