first try

Michal Maciaszek 2020-12-08 20:14:00 +01:00
parent 242c10da9f
commit bb5d311df5
6 changed files with 975 additions and 0 deletions

dev-0/out.tsv Normal file (462 lines)

@@ -0,0 +1,462 @@
753233.5625
717278.0
761310.75
737726.625
581629.3125
633398.5625
734318.5
742838.8125
635378.125
736022.625
626772.6875
726735.5
755602.1875
660751.5625
752824.625
737743.6875
728132.8125
1178018.125
735153.5
534563.3125
741015.5
903060.0
739771.5
954096.4375
753063.125
688970.6875
977016.0
737845.9375
751359.125
768007.75
727502.3125
730910.4375
761072.25
647391.75
761072.25
909365.0
789121.0
577675.875
721061.0
793636.75
727894.25
942082.875
891472.375
739243.25
784162.1875
577062.4375
624591.5
644341.5
725832.375
624216.5625
620229.0625
899992.625
716937.125
747951.0
776238.3125
729206.375
636997.0
704648.0
687266.625
526043.0
707684.125
582992.5625
613992.25
1065286.125
802480.75
657002.625
845082.1875
780805.1875
752364.5
682154.5
705690.375
715948.8125
739430.75
919589.3125
725951.625
985195.5
764139.5
970915.5
730160.625
943786.875
895732.5
538721.1875
724094.1875
725798.25
899992.625
738646.875
532859.25
754767.25
712165.8125
534563.3125
794128.0
777976.5
594205.25
942082.875
886360.25
911921.0625
880225.625
754750.1875
737897.0625
547684.5
951114.375
732614.5
738749.125
724043.125
526434.9375
525617.0
952818.375
724366.875
724486.125
733466.5
732273.6875
774173.5
895851.8125
752040.75
724298.6875
732273.6875
735681.75
738067.5
720345.25
570348.5
709609.75
939185.9375
1076822.5
824463.125
744542.875
701444.375
717448.375
663409.875
728712.1875
758175.3125
642961.1875
1082383.625
945965.1875
740282.75
734829.75
886258.0
529451.125
894999.75
717278.0
741475.5625
636465.875
787144.25
580402.375
609218.0
764991.5625
536437.75
658297.6875
682648.6875
648260.8125
971563.0
897538.8125
737266.5625
992863.75
736022.625
967575.5
717278.0
907916.5
763117.125
727195.625
663409.875
735187.625
675338.25
735937.375
730910.4375
737845.9375
708042.0
978924.5625
888916.3125
763287.5
613992.25
724043.125
552166.1875
737743.6875
753063.125
705789.75
554228.125
951404.0
710666.25
739345.5
730041.375
698104.4375
548195.75
514114.625
713733.5
700953.125
901696.75
738646.875
613259.5
762946.6875
742838.8125
803523.125
770069.625
727502.3125
595738.875
728320.25
771807.75
732614.5
745531.25
759368.125
776919.9375
727502.3125
880140.375
708076.0625
753063.125
904116.5
724094.1875
742838.8125
632736.875
662285.1875
723634.125
754767.25
778624.0
677601.75
534563.3125
763287.5
555898.0625
743520.4375
783054.5
739788.5625
732614.5
740197.5
546679.125
503890.28125
556443.375
548076.4375
734318.5
589076.0625
717278.0
730041.375
707735.25
721435.875
708416.875
724094.1875
629328.75
762810.375
583742.375
734318.5
1117450.125
723412.5625
634440.9375
538244.0625
526179.3125
633827.4375
705349.625
554500.75
733125.6875
733636.875
639893.875
737556.25
665284.3125
743861.25
723276.25
708076.0625
802855.6875
729632.375
733602.875
739430.75
642719.75
697491.0
663409.875
619513.375
528939.875
594546.0625
703798.875
552967.125
805888.875
523060.90625
612816.4375
1039725.25
710938.875
707053.625
919418.875
1036317.125
696249.9375
747030.8125
663665.5
677059.375
670226.125
736022.625
992522.9375
912773.125
947467.6875
522634.90625
747899.875
592501.1875
717278.0
640626.625
521459.09375
761583.4375
718641.25
1005473.75
749229.0
734318.5
920373.1875
908683.375
700595.25
728081.6875
740282.75
735698.8125
636145.0
923849.4375
792648.375
961338.6875
804627.875
717278.0
645176.5
553188.625
520930.84375
573092.0
544872.8125
733926.625
522345.21875
706181.6875
720305.375
570348.5
717278.0
743690.875
772370.125
616088.25
768399.625
904440.25
976675.1875
580572.8125
532859.25
544787.625
713921.0
1184289.0
726411.75
910217.0
886019.375
732614.5
704548.6875
956888.1875
760901.8125
599147.0
1111295.625
753063.125
917033.25
735085.375
892051.75
773136.9375
949188.75
557482.875
729206.375
606324.0
783906.5625
606324.0
570246.25
745585.25
732819.0
728422.5
738578.6875
533336.375
742458.125
611197.5625
538244.0625
717295.0
616497.1875
712063.5625
651481.5
760561.0
599147.0
669183.75
906808.875
753233.5625
560124.125
746076.5
1146211.75
570246.25
734301.5
739430.75
708842.875
724946.25
845593.375
580061.5625
722816.125
627624.6875
906808.875
726343.5625
660001.75
541652.1875
915138.8125
615642.25
733994.75
636806.625
895831.875
558249.6875
703884.125
740569.5
704071.5625
540646.75
539573.1875
771807.75
574438.1875
556716.0
636806.625
675338.25
574659.75
895050.875
749859.5
743486.375
733276.1875
1011949.125
969518.125
936459.5
1255859.375
714173.6875
566940.375
725713.0625
654889.5625
807490.75
613157.25
580794.3125
634492.0
797453.8125
598499.5
992673.375
744031.625
580760.25
591700.3125
895542.1875
724043.125
595298.75
764395.125
731114.875
546662.0625
738698.0
738698.0
733279.0625
738425.3125
796669.9375
795920.1875
644324.4375
689822.75
663069.0625
787076.125
992352.5
617380.4375
835519.5
736056.6875
559749.25
630893.5625
580095.6875
638019.4375
654395.4375
675048.5625
868899.125
763696.5
714074.375
730893.375
713852.8125
585667.9375
713852.8125
733347.25
574012.1875
776545.0625
597408.875
776545.0625
557107.9375
744815.5
1171488.75
775045.5

BIN
geval Executable file

Binary file not shown.

BIN
model.pkl Normal file

Binary file not shown.

predict.py Normal file (37 lines)

@@ -0,0 +1,37 @@
import pickle

import pandas as pd
import torch

PREDICT_FILE_PATH = 'test-A/in.tsv'


def read_data_file(filepath):
    df = pd.read_csv(filepath, sep='\t', header=None, index_col=None)
    # Feature columns: 7 and 10 here, versus 8 and 11 in train.tsv, which
    # also carries the target price in column 0.
    dataframe = df.iloc[:, [7, 10]]
    dataframe.columns = ['biggy', 'type']
    dataframe['bias'] = 1
    dataframe['biggy'] = dataframe['biggy'].astype(float)
    return dataframe


def dataframe_to_arrays(dataframe):
    dataframe1 = dataframe.copy(deep=True)
    # Encode the categorical 'type' column as integer codes, as in train.py.
    dataframe1["type"] = dataframe1["type"].astype('category').cat.codes
    return dataframe1


def main():
    # Load the weight vector written by train.py.
    w = pickle.load(open('model.pkl', 'rb'))
    data = dataframe_to_arrays(read_data_file(PREDICT_FILE_PATH))
    for _, row in data.iterrows():
        # Feature order matches training: [biggy, type code, bias].
        x = torch.tensor([float(row['biggy']), float(row['type']), 1.0])
        y = x @ w
        print(y.item())


main()
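
A possible follow-up, not part of this commit: the row-by-row prediction loop can be collapsed into a single matrix-vector product. A minimal sketch, assuming the same column layout of test-A/in.tsv and the model.pkl produced by train.py:

# Sketch only: vectorised variant of the prediction loop in predict.py.
import pickle

import pandas as pd
import torch

w = pickle.load(open('model.pkl', 'rb'))

df = pd.read_csv('test-A/in.tsv', sep='\t', header=None, index_col=None)
features = df.iloc[:, [7, 10]].copy()
features.columns = ['biggy', 'type']
features['type'] = features['type'].astype('category').cat.codes
features['biggy'] = features['biggy'].astype(float)
features['bias'] = 1.0

inputs = torch.from_numpy(features[['biggy', 'type', 'bias']].to_numpy(dtype='float32'))
predictions = inputs @ w        # one matmul instead of a Python loop
for value in predictions.detach().tolist():
    print(value)

As in predict.py, the category codes here come from the prediction file alone, so the encoding only matches training if the set of type values is the same.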

test-A/out.tsv Normal file (418 lines)

@@ -0,0 +1,418 @@
758158.25
761583.4375
739686.3125
639161.125
728115.75
810830.625
545639.625
880205.6875
1147683.0
735784.0
756454.25
758175.3125
735000.125
717278.0
734318.5
736022.625
806911.3125
834196.125
703645.5
939049.625
778624.0
705349.625
646369.3125
718982.0
618062.0625
735698.8125
760987.0
1111420.75
607704.25
735681.75
1005644.125
944298.125
1056765.75
620621.0
636145.0
776238.3125
922145.375
747951.0
725798.25
681779.625
761072.25
563344.8125
532007.1875
754767.25
715573.9375
738646.875
756641.6875
713409.75
704838.375
628286.375
753114.25
610243.3125
627454.3125
757834.5
734318.5
946036.25
665812.5625
622410.25
643881.375
723412.5625
620958.9375
923849.4375
736022.625
976675.1875
718078.875
653185.5
701069.5
560771.6875
917033.25
719322.875
893176.4375
575460.625
941742.0
933835.25
730399.25
759027.375
724435.0
910217.0
735698.8125
746246.9375
816215.5
797147.125
674264.6875
760731.375
707053.625
745105.25
724230.5
757323.3125
735341.0
717278.0
746570.6875
894829.375
757152.875
917033.25
549899.8125
1070398.25
828893.625
942082.875
534733.6875
530643.9375
742838.8125
737726.625
741134.75
753063.125
724605.375
731080.8125
759879.375
622969.75
622066.5625
543083.5625
1022684.6875
746332.125
703901.125
629328.75
625730.3125
703815.9375
543236.9375
743350.0
820563.75
539505.0625
822608.625
726548.0
730586.625
736908.6875
681029.8125
886019.375
628647.125
737845.9375
752824.625
754767.25
936118.625
566122.4375
717278.0
718982.0
760390.625
983340.9375
730910.4375
737726.625
558420.0625
716235.625
561811.125
710666.25
761242.625
762265.0625
800776.75
544787.625
728183.9375
1022684.6875
759879.375
738220.8125
705042.875
725798.25
712268.0625
766695.625
532859.25
732614.5
1072085.25
1051210.625
736942.75
825826.375
766695.625
537971.375
734318.5
657871.6875
767240.875
595298.75
753693.625
747951.0
742838.8125
754750.1875
518715.5625
774023.0625
738527.5625
718982.0
533285.25
727604.5625
549899.8125
639893.875
677042.3125
626448.875
717278.0
946002.1875
778624.0
743179.625
718198.1875
721026.875
683057.625
982449.0625
742838.8125
765673.1875
841012.375
558300.8125
551603.875
1031205.0
706917.3125
570518.875
725798.25
920441.375
727672.75
741134.75
676340.75
570348.5
722185.625
733125.6875
597613.375
744491.75
696249.9375
906519.1875
721776.6875
1299764.25
735903.3125
644665.25
541209.125
950091.9375
720686.125
997123.875
961338.6875
696249.9375
708076.0625
897907.9375
736022.625
919027.0
906808.875
703901.125
651481.5
720856.5
748479.25
1029500.9375
727502.3125
727502.3125
736022.625
772540.5
716425.9375
725798.25
1139844.375
654634.0
968683.1875
1203314.625
656593.625
741305.125
573756.5625
583980.9375
736022.625
729325.625
578868.75
663921.0625
613992.25
717278.0
726070.875
794471.75
849683.125
705349.625
648260.8125
722731.0
961338.6875
697831.8125
669203.6875
781827.625
654634.0
512580.96875
721231.375
722407.1875
784639.3125
757152.875
934585.0
727246.6875
732273.6875
766695.625
690674.75
768399.625
766695.625
727195.625
802480.75
1147512.625
975823.125
751359.125
713529.0625
745735.75
578868.75
642961.1875
555011.9375
688970.6875
764991.5625
738016.375
696249.9375
549899.8125
549899.8125
580402.375
551603.875
583980.9375
715642.125
708451.0
744525.875
776511.0
816624.4375
643642.8125
604429.625
534563.3125
734318.5
538823.4375
760561.0
712847.4375
629158.375
724366.875
722901.375
644665.25
624878.25
710291.375
659831.375
736022.625
543697.0
561828.1875
580572.8125
535347.125
549559.0
745122.25
623174.1875
798050.25
646164.8125
734318.5
522242.96875
639945.0
553307.875
556716.0
540033.3125
712336.25
651481.5
583980.9375
756982.5
624196.625
672782.1875
715573.9375
1073826.25
585565.6875
595057.25
620808.4375
724435.0
741305.125
674486.25
522242.96875
754920.5625
910029.5625
750847.875
559544.75
825826.375
786803.5
732273.6875
591700.3125
562492.75
922176.5625
724094.1875
757834.5
1077595.25
788848.375
686605.0
733602.875
1009802.0625
720686.125
727502.3125
1090847.0
732392.9375
530030.5
740776.875
751427.25
675338.25
735153.5
769081.25
1038021.25
551603.875
560635.375
734318.5
714426.4375
530473.5625
879714.375
546474.625
695786.9375
737726.625
540646.75
649862.625
538295.1875
845474.125
714892.3125
1046030.25
568184.3125
644648.1875
985826.0
734744.5625
725661.9375
1041278.875
1041278.875
1099898.375
771194.3125
1002767.1875
554807.5
745991.3125
731609.125
736022.625
892903.75
733449.5
663409.875
736806.5
782424.0625
743588.625
743588.625
1152573.625
721538.125
709453.4375
1077725.75
769047.1875
751785.125
736976.875
520981.96875
713835.75
712114.6875
736124.875
783242.0
593574.75
764718.875

train.py Normal file (58 lines)

@@ -0,0 +1,58 @@
import pickle

import pandas as pd
import torch

TRAIN_FILE_PATH = 'train/train.tsv'


# Data preparation helpers.
def read_data_file(filepath):
    df = pd.read_csv(filepath, sep='\t', header=None, index_col=None)
    # Column 0 holds the target price; columns 8 and 11 hold the two features.
    dataframe = df.iloc[:, [0, 8, 11]]
    dataframe.columns = ['price', 'biggy', 'type']
    # Strip the spaces embedded in the 'biggy' values so they parse as floats.
    dataframe['biggy'] = dataframe['biggy'].astype(str).str.replace(" ", "").astype(float)
    dataframe['bias'] = 1
    return dataframe


def dataframe_to_arrays(dataframe):
    dataframe1 = dataframe.copy(deep=True)
    # Encode the categorical 'type' column as integer codes.
    dataframe1["type"] = dataframe1["type"].astype('category').cat.codes
    inputs_array = dataframe1[input_cols].to_numpy()
    targets_array = dataframe1[output_cols].to_numpy()
    return inputs_array, targets_array


data = read_data_file(TRAIN_FILE_PATH)
input_cols = data.columns.values[1:]    # ['biggy', 'type', 'bias']
output_cols = data.columns.values[:1]   # ['price']

inputs_array_training, targets_array_training = dataframe_to_arrays(data)
inputs_training = torch.from_numpy(inputs_array_training).type(torch.float32)
# Flatten the targets to shape (n,) so the residual below stays element-wise
# instead of broadcasting to an (n, n) matrix.
targets_training = torch.from_numpy(targets_array_training).type(torch.float32).flatten()

# Initial weights for [biggy, type, bias] and a tiny learning rate, since the
# cost is an unnormalised sum of squared errors.
w = torch.tensor([7201.61492633873, 1.0, 7201.500], requires_grad=True)
learning_rate = torch.tensor(0.000000000005)

print("training started")
for i in range(10000):
    y_predicted = inputs_training @ w
    cost = torch.sum((y_predicted - targets_training) ** 2)
    cost.backward()
    with torch.no_grad():
        w -= learning_rate * w.grad
        # Clear the accumulated gradient before the next iteration.
        w.grad.zero_()
    print(w)

pickle.dump(w, open('model.pkl', 'wb'))
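
For comparison, and also not part of this commit: with only three parameters the model has a closed-form least-squares solution, which could serve as a sanity check on the gradient-descent weights. A rough sketch, assuming the inputs_training and targets_training tensors built in train.py are in scope (e.g. appended at the end of the script):

# Sketch only: exact least-squares fit for comparison with the trained w.
with torch.no_grad():
    # Solves min_w ||inputs_training @ w - targets_training||^2 directly.
    lstsq = torch.linalg.lstsq(inputs_training, targets_training.unsqueeze(1))
    w_exact = lstsq.solution.squeeze(1)
    print("closed-form weights:", w_exact)
    rmse = torch.sqrt(torch.mean((inputs_training @ w_exact - targets_training) ** 2))
    print("closed-form RMSE:", rmse.item())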