Compare commits

...

2 Commits

SHA1        Message  Date
b8a409e014  weszlo   2021-05-13 22:06:31 +02:00
7616b2d9f5  test     2021-05-13 21:05:06 +02:00
4 changed files with 161 additions and 5272 deletions

BIN  Wynik.bmp
Binary file not shown. (Before: 65 KiB)

[Per-row diff of a 0/1 predictions file (about 5,270 rows): roughly 50 small hunks, each adding or removing single-value rows so that individual predictions flip between 0 and 1. The raw rows are omitted here.]
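The hunks summarized above only flip individual rows. If both versions of the predictions file are checked out locally, the number of changed predictions can be counted with a short script like the hedged sketch below; old_out.tsv and new_out.tsv are placeholder file names, not paths from this repository.

# Hedged helper, not part of the commit: count how many 0/1 predictions
# differ between two saved versions of the output file.
def read_labels(path):
    with open(path) as f:
        return [line.strip() for line in f if line.strip()]

old_labels = read_labels("old_out.tsv")   # placeholder path
new_labels = read_labels("new_out.tsv")   # placeholder path

flipped = sum(a != b for a, b in zip(old_labels, new_labels))
print(f"{flipped} of {len(old_labels)} predictions changed")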

main.py (79 changed lines)

@@ -1,24 +1,65 @@
+from gensim.test.utils import common_texts
+from gensim.models import Word2Vec
 from sklearn import preprocessing
-from sklearn.feature_extraction.text import TfidfVectorizer
-from sklearn.naive_bayes import MultinomialNB
-from sklearn.pipeline import make_pipeline
-import pandas as pd
 import numpy as np
-eng = preprocessing.LabelEncoder()
-with open("train/in.tsv") as myFile:
-    tmp = myFile.readlines()
-with open("train/expected.tsv") as finFile:
-    fin = finFile.readlines()
-fin = eng.fit_transform(fin)
-with open("test-A/in.tsv") as tFile:
-    fic = tFile.readlines()
-gnb = make_pipeline(TfidfVectorizer(),MultinomialNB())
-model = gnb.fit(tmp, fin)
-fin_pred = model.predict(fic)
-fin_pred = np.array(fin_pred)
-np.set_printoptions(threshold = np.inf)
-eFile = np.array2string(fin_pred.flatten(), precision = 2, separator = '\n',suppress_small = True)
-myFile = open("test-A/out.tsv", "a")
-myFile.write(eFile)
+import gensim
+import torch
+
+class NeuralNetworkModel(torch.nn.Module):
+    def __init__(self):
+        super(NeuralNetworkModel, self).__init__()
+        self.fc1 = torch.nn.Linear(maxim, 500)
+        self.fc2 = torch.nn.Linear(500, 1)
+
+    def forward(self, x):
+        x = self.fc1(x)
+        x = torch.relu(x)
+        x = self.fc2(x)
+        x = torch.sigmoid(x)
+        return x
+
+with open("train/in.tsv") as xd:
+    x1 = xd.readlines()
+with open("train/expected.tsv") as xdd:
+    y1 = xdd.readlines()
+with open("test-A/in.tsv") as xddd:
+    x = xddd.readlines()
+maxim = 500
+bLen = 5
+exp = Word2Vec(x1, min_count = 1, vector_size = 500, workers = 3, window = 3, sg = 1)
+exp1 = Word2Vec(x, min_count = 1, vector_size = 500, workers = 3, window = 3, sg = 1)
+exp2 = NeuralNetworkModel()
+x1 = exp.wv
+x = exp1.wv
+crt = torch.nn.BCELoss()
+miz = torch.optim.SGD(exp2.parameters(), lr = 0.1)
+for each in range(10):
+    lossScore, accScore, sums = 0
+    exp2.train()
+    for i in range(0, y1.shape[0], bLen):
+        x = x1[i : i + bLen]
+        x = torch.tensor(x.astype(np.float32).todense())
+        y = y1[i : i + bLen]
+        y = torch.tensor(y.astype(np.float32)).reshape(-1, 1)
+        y2 = exp2(x)
+        accScore += torch.sum((y2 > 0.5) == y).item()
+        sums += y.shape[0]
+        miz.zero_grad()
+        loss = crt(y2, Y)
+        loss.backward()
+        miz.step()
+        lossScore += loss.item() * Y.shape[0]
+with open('test-A/out.tsv', 'w') as file:
+    for each in y2:
+        file.write("%f\n" % each)
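As committed, the rewritten main.py cannot run to completion: lossScore, accScore, sums = 0 is not a valid multiple assignment, y1 is a plain list of lines and has no .shape, indexing exp.wv with a slice of row positions does not yield the per-document features the loop expects, loss = crt(y2, Y) references an undefined Y, and only the outputs of the last training batch are written to test-A/out.tsv. A minimal corrected sketch of the same idea (Word2Vec features fed to a small sigmoid classifier, keeping the commit's vector_size = 500, batch size 5, lr = 0.1 and 10 epochs) could look as follows; the per-document averaging of word vectors and all helper names are assumptions introduced for illustration, not part of the commit.

# Hedged sketch, not the committed implementation: it keeps the commit's
# hyperparameters but fixes the errors noted above.
import numpy as np
import torch
from gensim.models import Word2Vec

EMB_DIM = 500   # vector_size in the commit
BATCH = 5       # bLen in the commit

def read_tokens(path):
    # One whitespace-tokenised document per line.
    with open(path) as f:
        return [line.split() for line in f]

train_docs = read_tokens("train/in.tsv")
test_docs = read_tokens("test-A/in.tsv")
with open("train/expected.tsv") as f:
    y_train = np.array([float(line.strip()) for line in f], dtype=np.float32)

# One Word2Vec model, trained on the training documents only; the commit
# trains a second, independent model on the test set instead.
w2v = Word2Vec(train_docs, min_count=1, vector_size=EMB_DIM,
               workers=3, window=3, sg=1)

def doc_vector(tokens):
    # Average the vectors of in-vocabulary tokens; zero vector if none.
    vecs = [w2v.wv[t] for t in tokens if t in w2v.wv]
    return np.mean(vecs, axis=0) if vecs else np.zeros(EMB_DIM)

X_train = np.stack([doc_vector(d) for d in train_docs]).astype(np.float32)
X_test = np.stack([doc_vector(d) for d in test_docs]).astype(np.float32)

class NeuralNetworkModel(torch.nn.Module):
    def __init__(self):
        super().__init__()
        self.fc1 = torch.nn.Linear(EMB_DIM, 500)
        self.fc2 = torch.nn.Linear(500, 1)

    def forward(self, x):
        return torch.sigmoid(self.fc2(torch.relu(self.fc1(x))))

model = NeuralNetworkModel()
criterion = torch.nn.BCELoss()
optimizer = torch.optim.SGD(model.parameters(), lr=0.1)

for epoch in range(10):
    model.train()
    for i in range(0, len(X_train), BATCH):
        xb = torch.from_numpy(X_train[i:i + BATCH])
        yb = torch.from_numpy(y_train[i:i + BATCH]).reshape(-1, 1)
        optimizer.zero_grad()
        out = model(xb)
        loss = criterion(out, yb)   # the commit references an undefined Y here
        loss.backward()
        optimizer.step()

# Write predictions for the whole test set, not just the last training batch.
model.eval()
with torch.no_grad():
    preds = model(torch.from_numpy(X_test))
with open("test-A/out.tsv", "w") as f:
    for p in preds:
        f.write("%f\n" % p.item())

Training a single Word2Vec model and reusing it for the test documents also avoids the mismatch between the two independently trained embedding spaces in the committed version.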

File diff suppressed because it is too large.