neural network

This commit is contained in:
s434766 2021-05-23 19:11:17 +02:00
parent a16904d4cf
commit 5ae7c95f2c
5 changed files with 11209 additions and 196 deletions

View File

@ -21,7 +21,7 @@
1
1
1
1
0
0
1
0
@ -91,7 +91,7 @@
0
0
1
1
0
1
0
1
@ -163,7 +163,7 @@
0
0
1
1
0
0
1
0
@ -187,7 +187,7 @@
1
1
1
0
1
1
1
1
@ -244,7 +244,7 @@
1
1
1
1
0
1
1
1
@ -275,7 +275,7 @@
0
1
0
0
1
1
1
1
@ -426,7 +426,7 @@
1
1
0
0
1
1
1
1
@ -524,15 +524,15 @@
1
1
1
1
0
1
0
1
0
1
1
1
1
1
0
0
0
@ -593,7 +593,7 @@
0
1
1
1
0
1
0
0
@ -741,7 +741,7 @@
1
0
1
0
1
1
1
1
@ -762,7 +762,7 @@
0
1
1
0
1
0
0
0
@ -816,7 +816,7 @@
1
1
0
1
0
0
1
0
@ -827,7 +827,7 @@
0
1
1
1
0
1
1
1
@ -886,9 +886,7 @@
1
0
1
0
1
0
1
0
1
@ -904,6 +902,8 @@
1
1
1
1
1
0
1
0
@ -911,7 +911,7 @@
1
1
0
0
1
0
1
1
@ -933,7 +933,7 @@
1
1
0
0
1
0
1
0
@ -959,7 +959,7 @@
0
1
1
1
0
1
0
0
@ -1036,7 +1036,7 @@
0
0
0
0
1
1
1
0
@ -1065,11 +1065,11 @@
1
1
1
0
0
1
0
1
0
0
1
0
1
@ -1133,7 +1133,7 @@
0
0
1
1
0
0
1
1
@ -1247,7 +1247,7 @@
1
0
0
0
1
1
1
1
@ -1313,7 +1313,7 @@
1
0
1
0
1
1
1
1
@ -1347,7 +1347,7 @@
1
1
0
1
0
1
1
1
@ -1552,7 +1552,7 @@
1
0
0
0
1
0
1
1
@ -1575,7 +1575,7 @@
0
0
1
1
0
0
1
1
@ -1590,18 +1590,18 @@
1
0
1
0
1
1
1
1
0
1
0
1
0
1
1
1
0
1
1
1
@ -1623,7 +1623,7 @@
1
1
0
0
1
0
0
1
@ -1662,7 +1662,7 @@
0
1
0
1
0
1
1
0
@ -1783,7 +1783,7 @@
0
1
1
1
0
0
0
1
@ -1814,7 +1814,7 @@
1
1
1
1
0
0
0
1
@ -1861,7 +1861,7 @@
1
1
1
1
0
0
1
0
@ -1869,7 +1869,7 @@
1
0
1
1
0
1
1
1
@ -1917,17 +1917,17 @@
1
0
1
1
0
0
1
0
1
0
1
1
1
1
1
0
1
1
1
@ -1972,7 +1972,7 @@
1
0
1
1
0
1
1
1
@ -1999,7 +1999,7 @@
1
0
0
1
0
1
0
1
@ -2019,7 +2019,7 @@
1
1
1
1
0
1
1
0
@ -2053,7 +2053,7 @@
1
1
0
0
1
1
1
1
@ -2107,7 +2107,7 @@
1
0
1
0
1
1
1
1
@ -2132,10 +2132,10 @@
0
1
0
1
0
1
1
1
0
0
0
@ -2249,7 +2249,7 @@
0
1
0
0
1
0
1
0
@ -2271,7 +2271,7 @@
1
0
0
1
0
0
1
1
@ -2364,7 +2364,7 @@
1
0
1
1
0
0
1
1
@ -2375,18 +2375,18 @@
0
1
0
0
1
1
1
1
1
0
1
0
1
0
0
0
0
1
0
0
@ -2469,7 +2469,7 @@
1
1
1
0
1
1
1
1
@ -2484,11 +2484,11 @@
1
0
1
0
1
0
1
0
1
1
1
0
1
1
@ -2557,7 +2557,7 @@
1
0
1
0
1
1
0
0
@ -2591,7 +2591,7 @@
1
0
1
0
1
0
0
1
@ -2679,11 +2679,11 @@
0
1
1
0
1
1
1
0
1
1
1
1
0
@ -2714,7 +2714,7 @@
1
0
0
0
1
0
1
1
@ -2753,7 +2753,7 @@
1
1
0
0
1
0
0
1
@ -2770,7 +2770,7 @@
1
1
1
1
0
1
1
1
@ -2783,7 +2783,7 @@
0
0
0
0
1
0
0
1
@ -2896,7 +2896,7 @@
0
1
0
1
0
0
1
1
@ -2976,21 +2976,21 @@
1
1
0
0
1
0
1
0
1
0
1
0
0
1
0
1
1
0
1
1
0
1
1
0
@ -3154,7 +3154,7 @@
1
1
1
1
0
0
0
1
@ -3170,7 +3170,6 @@
1
0
1
0
1
1
1
@ -3180,6 +3179,7 @@
1
1
1
0
1
1
0
@ -3191,16 +3191,16 @@
0
0
0
1
0
0
0
1
1
1
1
1
1
1
0
1
1
0
@ -3278,7 +3278,7 @@
0
1
0
1
0
1
0
0
@ -3396,7 +3396,7 @@
1
1
1
0
1
0
0
1
@ -3484,7 +3484,7 @@
1
1
1
0
1
1
1
1
@ -3507,7 +3507,7 @@
1
1
1
0
1
0
0
1
@ -3518,7 +3518,7 @@
1
0
1
0
1
1
1
1
@ -3541,7 +3541,7 @@
1
0
1
0
1
0
0
1
@ -3610,19 +3610,19 @@
1
1
0
0
1
1
1
1
0
0
0
1
1
0
1
0
1
0
1
1
1
@ -3630,7 +3630,7 @@
1
1
0
1
0
0
1
1
@ -3684,7 +3684,7 @@
1
1
1
1
0
1
0
1
@ -3725,7 +3725,7 @@
1
1
1
1
0
1
1
1
@ -3773,7 +3773,7 @@
0
0
1
1
0
1
0
0
@ -3784,7 +3784,7 @@
0
0
1
1
0
1
1
1
@ -3809,7 +3809,7 @@
0
0
0
1
0
1
1
0
@ -3867,7 +3867,7 @@
1
1
0
1
0
0
1
0
@ -3922,7 +3922,7 @@
1
1
1
0
1
1
0
1
@ -3983,7 +3983,7 @@
1
0
1
0
1
1
1
1
@ -4051,7 +4051,7 @@
1
1
1
0
1
1
1
0
@ -4065,7 +4065,7 @@
1
1
0
0
1
0
1
0
@ -4073,7 +4073,7 @@
1
1
1
0
1
0
0
0
@ -4086,10 +4086,10 @@
1
1
1
0
1
1
1
0
1
1
1
@ -4244,7 +4244,7 @@
0
1
1
1
0
1
1
1
@ -4298,7 +4298,7 @@
1
1
1
0
1
0
1
0
@ -4389,8 +4389,8 @@
1
1
0
1
1
0
0
0
1
1
@ -4447,7 +4447,7 @@
0
1
1
0
1
1
1
1
@ -4498,7 +4498,7 @@
1
0
1
0
1
0
0
1
@ -4625,7 +4625,7 @@
1
0
1
0
1
1
1
0
@ -4675,7 +4675,7 @@
1
1
1
1
0
0
1
1
@ -4707,7 +4707,7 @@
1
0
0
1
0
1
1
0
@ -4725,7 +4725,7 @@
0
1
1
0
1
0
1
1
@ -4749,7 +4749,7 @@
1
1
1
1
0
1
0
1
@ -4760,7 +4760,7 @@
0
1
1
0
1
1
0
1
@ -4906,7 +4906,7 @@
1
0
1
0
1
1
1
0
@ -4929,7 +4929,7 @@
0
0
0
1
0
0
1
1
@ -4955,7 +4955,7 @@
1
0
0
0
1
1
1
0
@ -4987,7 +4987,7 @@
1
1
0
1
0
1
1
1
@ -5048,7 +5048,7 @@
0
1
0
0
1
1
1
1
@ -5264,7 +5264,7 @@
1
1
0
1
0
1
1
0
@ -5273,7 +5273,7 @@
1
0
1
0
1
1
0
1
@ -5339,7 +5339,7 @@
1
1
1
0
1
1
1
1
@ -5379,7 +5379,7 @@
0
0
1
0
1
1
1
0
@ -5443,7 +5443,7 @@
1
1
1
0
1
1
1
1

1 1
21 1
22 1
23 1
24 1 0
25 0
26 1
27 0
91 0
92 0
93 1
94 1 0
95 1
96 0
97 1
163 0
164 0
165 1
166 1 0
167 0
168 1
169 0
187 1
188 1
189 1
190 0 1
191 1
192 1
193 1
244 1
245 1
246 1
247 1 0
248 1
249 1
250 1
275 0
276 1
277 0
278 0 1
279 1
280 1
281 1
426 1
427 1
428 0
429 0 1
430 1
431 1
432 1
524 1
525 1
526 1
1
527 0
1
528 0
529 1
530 0
531 1
532 1
533 1
534 1
535 1
536 0
537 0
538 0
593 0
594 1
595 1
596 1 0
597 1
598 0
599 0
741 1
742 0
743 1
744 0 1
745 1
746 1
747 1
762 0
763 1
764 1
765 0 1
766 0
767 0
768 0
816 1
817 1
818 0
819 1 0
820 0
821 1
822 0
827 0
828 1
829 1
830 1 0
831 1
832 1
833 1
886 1
887 0
888 1
0
889 1
0
890 1
891 0
892 1
902 1
903 1
904 1
905 1
906 1
907 0
908 1
909 0
911 1
912 1
913 0
914 0 1
915 0
916 1
917 1
933 1
934 1
935 0
936 0 1
937 0
938 1
939 0
959 0
960 1
961 1
962 1 0
963 1
964 0
965 0
1036 0
1037 0
1038 0
1039 0 1
1040 1
1041 1
1042 0
1065 1
1066 1
1067 1
0
0
1068 1
1069 0
1070 1
1071 0
1072 0
1073 1
1074 0
1075 1
1133 0
1134 0
1135 1
1136 1 0
1137 0
1138 1
1139 1
1247 1
1248 0
1249 0
1250 0 1
1251 1
1252 1
1253 1
1313 1
1314 0
1315 1
1316 0 1
1317 1
1318 1
1319 1
1347 1
1348 1
1349 0
1350 1 0
1351 1
1352 1
1353 1
1552 1
1553 0
1554 0
1555 0 1
1556 0
1557 1
1558 1
1575 0
1576 0
1577 1
1578 1 0
1579 0
1580 1
1581 1
1590 1
1591 0
1592 1
0
1593 1
1594 1
1595 1
1596 1
0
1597 1
1598 0
1599 1
1600 0
1601 1
1602 1
1603 1
1604 0
1605 1
1606 1
1607 1
1623 1
1624 1
1625 0
1626 0 1
1627 0
1628 0
1629 1
1662 0
1663 1
1664 0
1665 1 0
1666 1
1667 1
1668 0
1783 0
1784 1
1785 1
1786 1 0
1787 0
1788 0
1789 1
1814 1
1815 1
1816 1
1817 1 0
1818 0
1819 0
1820 1
1861 1
1862 1
1863 1
1864 1 0
1865 0
1866 1
1867 0
1869 1
1870 0
1871 1
1872 1 0
1873 1
1874 1
1875 1
1917 1
1918 0
1919 1
1
1920 0
1921 0
1
1922 0
1923 1
1924 0
1925 1
1926 1
1927 1
1928 1
1929 1
1930 0
1931 1
1932 1
1933 1
1972 1
1973 0
1974 1
1975 1 0
1976 1
1977 1
1978 1
1999 1
2000 0
2001 0
2002 1 0
2003 1
2004 0
2005 1
2019 1
2020 1
2021 1
2022 1 0
2023 1
2024 1
2025 0
2053 1
2054 1
2055 0
2056 0 1
2057 1
2058 1
2059 1
2107 1
2108 0
2109 1
2110 0 1
2111 1
2112 1
2113 1
2132 0
2133 1
2134 0
1
2135 0
2136 1
2137 1
2138 1
2139 0
2140 0
2141 0
2249 0
2250 1
2251 0
2252 0 1
2253 0
2254 1
2255 0
2271 1
2272 0
2273 0
2274 1 0
2275 0
2276 1
2277 1
2364 1
2365 0
2366 1
2367 1 0
2368 0
2369 1
2370 1
2375 0
2376 1
2377 0
0
2378 1
2379 1
2380 1
2381 1
2382 1
0
2383 1
2384 0
2385 1
2386 0
2387 0
2388 0
2389 0
2390 1
2391 0
2392 0
2469 1
2470 1
2471 1
2472 0 1
2473 1
2474 1
2475 1
2484 1
2485 0
2486 1
0
2487 1
0
2488 1
2489 0 1
2490 1
2491 1
2492 0
2493 1
2494 1
2557 1
2558 0
2559 1
2560 0 1
2561 1
2562 0
2563 0
2591 1
2592 0
2593 1
2594 0 1
2595 0
2596 0
2597 1
2679 0
2680 1
2681 1
0
2682 1
2683 1
2684 1
2685 0 1
2686 1
2687 1
2688 1
2689 0
2714 1
2715 0
2716 0
2717 0 1
2718 0
2719 1
2720 1
2753 1
2754 1
2755 0
2756 0 1
2757 0
2758 0
2759 1
2770 1
2771 1
2772 1
2773 1 0
2774 1
2775 1
2776 1
2783 0
2784 0
2785 0
2786 0 1
2787 0
2788 0
2789 1
2896 0
2897 1
2898 0
2899 1 0
2900 0
2901 1
2902 1
2976 1
2977 1
2978 0
0
2979 1
0
2980 1
2981 0
2982 1
2983 0
2984 1
2985 0
2986 0
2987 1
2988 0
2989 1
2990 1
2991 0
2992 1
2993 1 0
2994 1
2995 1
2996 0
3154 1
3155 1
3156 1
3157 1 0
3158 0
3159 0
3160 1
3170 1
3171 0
3172 1
0
3173 1
3174 1
3175 1
3179 1
3180 1
3181 1
3182 0
3183 1
3184 1
3185 0
3191 0
3192 0
3193 0
3194 1
3195 0
3196 0
0
1
3197 1
3198 1
3199 1
3200 1
3201 1
3202 1
3203 0
3204 1
3205 1
3206 0
3278 0
3279 1
3280 0
3281 1 0
3282 1
3283 0
3284 0
3396 1
3397 1
3398 1
3399 0 1
3400 0
3401 0
3402 1
3484 1
3485 1
3486 1
3487 0 1
3488 1
3489 1
3490 1
3507 1
3508 1
3509 1
3510 0 1
3511 0
3512 0
3513 1
3518 1
3519 0
3520 1
3521 0 1
3522 1
3523 1
3524 1
3541 1
3542 0
3543 1
3544 0 1
3545 0
3546 0
3547 1
3610 1
3611 1
3612 0
0
3613 1
3614 1
3615 1
3616 1
3617 0
3618 0
3619 0
3620 1
3621 1
3622 0
3623 1
3624 0
3625 1 0
3626 1
3627 1
3628 1
3630 1
3631 1
3632 0
3633 1 0
3634 0
3635 1
3636 1
3684 1
3685 1
3686 1
3687 1 0
3688 1
3689 0
3690 1
3725 1
3726 1
3727 1
3728 1 0
3729 1
3730 1
3731 1
3773 0
3774 0
3775 1
3776 1 0
3777 1
3778 0
3779 0
3784 0
3785 0
3786 1
3787 1 0
3788 1
3789 1
3790 1
3809 0
3810 0
3811 0
3812 1 0
3813 1
3814 1
3815 0
3867 1
3868 1
3869 0
3870 1 0
3871 0
3872 1
3873 0
3922 1
3923 1
3924 1
3925 0 1
3926 1
3927 0
3928 1
3983 1
3984 0
3985 1
3986 0 1
3987 1
3988 1
3989 1
4051 1
4052 1
4053 1
4054 0 1
4055 1
4056 1
4057 0
4065 1
4066 1
4067 0
4068 0 1
4069 0
4070 1
4071 0
4073 1
4074 1
4075 1
4076 0 1
4077 0
4078 0
4079 0
4086 1
4087 1
4088 1
0
4089 1
4090 1
4091 1
4092 0
4093 1
4094 1
4095 1
4244 0
4245 1
4246 1
4247 1 0
4248 1
4249 1
4250 1
4298 1
4299 1
4300 1
4301 0 1
4302 0
4303 1
4304 0
4389 1
4390 1
4391 0
4392 1 0
4393 1 0
4394 0
4395 1
4396 1
4447 0
4448 1
4449 1
4450 0 1
4451 1
4452 1
4453 1
4498 1
4499 0
4500 1
4501 0 1
4502 0
4503 0
4504 1
4625 1
4626 0
4627 1
4628 0 1
4629 1
4630 1
4631 0
4675 1
4676 1
4677 1
4678 1 0
4679 0
4680 1
4681 1
4707 1
4708 0
4709 0
4710 1 0
4711 1
4712 1
4713 0
4725 0
4726 1
4727 1
4728 0 1
4729 0
4730 1
4731 1
4749 1
4750 1
4751 1
4752 1 0
4753 1
4754 0
4755 1
4760 0
4761 1
4762 1
4763 0 1
4764 1
4765 0
4766 1
4906 1
4907 0
4908 1
4909 0 1
4910 1
4911 1
4912 0
4929 0
4930 0
4931 0
4932 1 0
4933 0
4934 1
4935 1
4955 1
4956 0
4957 0
4958 0 1
4959 1
4960 1
4961 0
4987 1
4988 1
4989 0
4990 1 0
4991 1
4992 1
4993 1
5048 0
5049 1
5050 0
5051 0 1
5052 1
5053 1
5054 1
5264 1
5265 1
5266 0
5267 1 0
5268 1
5269 1
5270 0
5273 1
5274 0
5275 1
5276 0 1
5277 1
5278 0
5279 1
5339 1
5340 1
5341 1
5342 0 1
5343 1
5344 1
5345 1
5379 0
5380 0
5381 1
5382 0 1
5383 1
5384 1
5385 0
5443 1
5444 1
5445 1
5446 0 1
5447 1
5448 1
5449 1

5452
dev-0/out2.tsv Normal file

File diff suppressed because it is too large Load Diff

116
neural.py Normal file
View File

@ -0,0 +1,116 @@
import gensim
import nltk
import pandas as pd
import numpy as np
import os
import io
import gzip
import torch
def read_data_gz(baseUrl):
    """Read a gzip-compressed TSV file into a pandas DataFrame.

    :param baseUrl: path to the ``.gz`` file containing tab-separated data.
    :return: DataFrame with integer-indexed columns (``header=None``).
    """
    # Context manager guarantees the handle is closed even on error
    # (the original left the file returned by gzip.open unclosed).
    with gzip.open(baseUrl, 'r') as f:
        data_unzip = f.read()
    # on_bad_lines='skip' replaces the deprecated error_bad_lines=False
    # (removed in pandas 2.0) with the same skip-malformed-rows behavior.
    data = pd.read_table(io.StringIO(data_unzip.decode('utf-8')),
                         on_bad_lines='skip', header=None)
    return data
def preprocess(data):
    """Tokenize each document, keeping only lowercased alphabetic tokens.

    :param data: iterable of raw text strings (one document each).
    :return: list of token lists, one per input document.

    NOTE(review): the original mutated each token list with ``del doc[i]``
    inside a ``while`` loop and then advanced ``i`` unconditionally, which
    skips the element that shifts into the deleted slot. Filtering with a
    comprehension removes that hazard while keeping the intended result:
    alphabetic words only, lowercased.
    """
    return [
        [token.lower() for token in nltk.word_tokenize(text) if token.isalpha()]
        for text in data
    ]
class NeuralNetworkModel(torch.nn.Module):
    """Binary classifier: 100-dim document vector -> 200 hidden (ReLU) -> sigmoid."""

    def __init__(self):
        super(NeuralNetworkModel, self).__init__()
        # Layer creation order is kept (fc1 then fc2) so seeded random
        # initialization stays reproducible.
        self.fc1 = torch.nn.Linear(100, 200)
        self.fc2 = torch.nn.Linear(200, 1)

    def forward(self, x):
        # Hidden layer with ReLU, then a single sigmoid probability in (0, 1).
        hidden = torch.relu(self.fc1(x))
        return torch.sigmoid(self.fc2(hidden))
# ---------------------------------------------------------------------------
# Training / inference script: mean-pooled word2vec document vectors fed to a
# small feed-forward classifier; writes 0/1 predictions for dev-0 and test-A.
# ---------------------------------------------------------------------------
data_train = read_data_gz('train/train.tsv.gz')
# on_bad_lines='skip' replaces the deprecated error_bad_lines=False
# (removed in pandas 2.0) with the same skip-malformed-rows behavior.
data_dev = pd.read_table('dev-0/in.tsv', on_bad_lines='skip', header=None)
data_test = pd.read_table('test-A/in.tsv', on_bad_lines='skip', header=None)

# Pre-trained word embeddings in text word2vec format; vectors are
# 100-dimensional (matches NeuralNetworkModel's input layer).
model = gensim.models.KeyedVectors.load_word2vec_format('pl-embeddings-cbow.txt', binary=False)

y_train = data_train[0].values          # labels: column 0 of the training TSV
x_train = data_train[1].values          # raw training documents: column 1
x_dev = data_dev[0].values
x_test = data_test[0].values

x_train_tokenize = preprocess(x_train)
x_dev_tokenize = preprocess(x_dev)
x_test_tokenize = preprocess(x_test)


def _mean_document_vectors(tokenized_docs):
    """Mean-pool the embedding vectors of each document's in-vocabulary tokens.

    Documents with no in-vocabulary token fall back to the zero vector, so the
    result always has shape (n_docs, 100), dtype float32.
    """
    return np.array(
        [np.mean([model[w] for w in doc if w in model] or [np.zeros(100)], axis=0)
         for doc in tokenized_docs],
        dtype=np.float32)


x_train_vectors = _mean_document_vectors(x_train_tokenize)
# float32 arrays convert directly; the original converted dtype twice.
x_dev_tensor = torch.tensor(_mean_document_vectors(x_dev_tokenize))
x_test_tensor = torch.tensor(_mean_document_vectors(x_test_tokenize))

model_nn = NeuralNetworkModel()
criterion = torch.nn.BCELoss()
optimizer = torch.optim.SGD(model_nn.parameters(), lr=0.01)
batch_size = 10

print('Trenowanie modelu...')
for epoch in range(6):
    loss_score = 0
    acc_score = 0
    items_total = 0
    model_nn.train()
    for i in range(0, y_train.shape[0], batch_size):
        X = torch.tensor(x_train_vectors[i:i + batch_size])
        Y = torch.tensor(y_train[i:i + batch_size].astype(np.float32)).reshape(-1, 1)
        Y_predictions = model_nn(X)
        acc_score += torch.sum((Y_predictions > 0.5) == Y).item()
        items_total += Y.shape[0]
        optimizer.zero_grad()
        loss = criterion(Y_predictions, Y)
        loss.backward()
        optimizer.step()
        loss_score += loss.item() * Y.shape[0]
    # The original accumulated these statistics but never reported them.
    print('epoch %d: loss=%.4f acc=%.4f'
          % (epoch, loss_score / items_total, acc_score / items_total))


def _write_predictions(tensor, path):
    """Classify `tensor` and write newline-separated 0/1 labels to `path`."""
    model_nn.eval()                     # no dropout/batchnorm here, but correct practice
    with torch.no_grad():               # inference only: skip autograd bookkeeping
        probs = model_nn(tensor).cpu().numpy()
    labels = np.asarray(probs > 0.5, dtype=np.int32)
    labels.tofile(path, sep='\n')


_write_predictions(x_dev_tensor, 'dev-0/out.tsv')
_write_predictions(x_test_tensor, 'test-A/out.tsv')

View File

@ -7,7 +7,7 @@
0
1
1
1
0
1
1
1
@ -50,7 +50,7 @@
1
0
0
1
0
0
1
1
@ -66,7 +66,7 @@
1
0
0
0
1
1
1
0
@ -237,7 +237,7 @@
0
1
0
1
0
1
0
1
@ -323,7 +323,7 @@
0
1
1
0
1
0
1
1
@ -353,7 +353,7 @@
0
0
1
0
1
0
1
1
@ -555,7 +555,7 @@
0
1
1
0
1
0
1
0
@ -651,7 +651,7 @@
1
1
1
0
1
1
1
1
@ -780,7 +780,7 @@
1
0
0
0
1
1
0
1
@ -822,7 +822,7 @@
1
1
1
0
1
1
0
0
@ -919,7 +919,7 @@
1
0
0
0
1
1
1
0
@ -1012,7 +1012,7 @@
0
0
1
1
0
1
1
1
@ -1170,7 +1170,7 @@
1
1
0
1
0
1
0
0
@ -1179,7 +1179,7 @@
0
1
0
1
0
1
1
0
@ -1291,7 +1291,7 @@
1
1
0
0
1
0
1
1
@ -1325,17 +1325,17 @@
1
0
0
1
0
1
1
0
1
1
0
1
0
0
1
0
1
0
0
1
@ -1357,7 +1357,7 @@
1
1
1
0
1
1
1
1
@ -1451,7 +1451,7 @@
1
1
1
0
1
0
1
1
@ -1606,7 +1606,7 @@
0
1
0
1
0
1
1
1
@ -1620,7 +1620,7 @@
1
0
1
1
0
1
1
1
@ -1682,7 +1682,7 @@
1
1
1
0
1
1
0
1
@ -1789,7 +1789,7 @@
1
0
1
0
1
1
0
1
@ -1798,7 +1798,7 @@
0
1
1
0
1
1
1
1
@ -1939,15 +1939,15 @@
1
0
1
1
0
1
1
0
1
0
0
1
1
0
0
1
0
0
@ -1973,7 +1973,7 @@
1
0
1
0
1
1
1
1
@ -2041,7 +2041,7 @@
0
0
1
1
0
1
0
0
@ -2181,7 +2181,7 @@
1
1
1
0
1
0
1
1
@ -2209,7 +2209,7 @@
0
1
1
0
1
1
1
1
@ -2293,7 +2293,7 @@
1
1
1
1
0
1
0
1
@ -2315,7 +2315,7 @@
1
1
1
1
0
0
0
1
@ -2370,7 +2370,7 @@
1
1
0
0
1
1
0
1
@ -2390,7 +2390,7 @@
0
1
1
0
1
1
0
1
@ -2454,7 +2454,7 @@
0
1
1
1
0
0
1
0
@ -2548,7 +2548,7 @@
1
1
1
1
0
1
0
1
@ -2572,7 +2572,7 @@
0
1
1
0
1
0
1
0
@ -2689,9 +2689,9 @@
1
1
1
0
1
0
1
1
1
0
1
@ -2746,7 +2746,7 @@
0
0
1
0
1
1
1
1
@ -2763,7 +2763,7 @@
1
1
1
0
1
1
1
0
@ -2775,7 +2775,7 @@
1
0
0
0
1
0
0
1
@ -2835,7 +2835,7 @@
1
1
1
0
1
1
0
1
@ -2861,7 +2861,7 @@
1
1
1
1
0
1
1
0
@ -2933,7 +2933,7 @@
1
0
1
0
1
1
1
1
@ -2995,7 +2995,7 @@
1
0
1
0
1
0
0
0
@ -3017,7 +3017,7 @@
0
0
0
0
1
1
1
1
@ -3158,7 +3158,7 @@
1
0
1
0
1
1
1
1
@ -3244,7 +3244,7 @@
1
1
0
1
0
0
1
1
@ -3310,7 +3310,7 @@
1
1
1
0
1
1
1
1
@ -3364,7 +3364,7 @@
1
0
1
1
0
1
1
1
@ -3502,7 +3502,7 @@
1
1
1
0
1
0
1
1
@ -3569,7 +3569,7 @@
1
1
1
1
0
1
1
1
@ -3826,7 +3826,7 @@
0
0
0
0
1
0
1
0
@ -3906,7 +3906,7 @@
1
1
0
0
1
1
1
0
@ -3931,7 +3931,7 @@
1
1
0
1
0
1
1
0
@ -3947,7 +3947,7 @@
0
1
0
1
0
1
1
1
@ -3967,7 +3967,7 @@
1
1
0
1
0
1
1
1
@ -4049,7 +4049,7 @@
1
1
0
1
0
1
1
0
@ -4060,7 +4060,7 @@
1
1
0
0
1
1
1
1
@ -4078,7 +4078,7 @@
1
1
0
0
1
1
1
0
@ -4160,14 +4160,14 @@
0
0
0
1
0
1
1
1
0
1
0
0
1
0
0
0
@ -4178,7 +4178,7 @@
1
1
0
0
1
0
1
1
@ -4336,7 +4336,7 @@
0
0
1
0
1
1
1
1
@ -4394,7 +4394,7 @@
1
0
1
0
1
1
0
0
@ -4518,7 +4518,7 @@
0
0
1
0
1
1
1
1
@ -4547,7 +4547,7 @@
1
1
1
1
0
1
1
1
@ -4642,7 +4642,7 @@
0
1
1
1
0
1
1
1
@ -4666,7 +4666,7 @@
1
0
1
0
1
0
1
0
@ -4763,7 +4763,7 @@
1
1
1
0
1
1
1
0
@ -4796,7 +4796,7 @@
0
0
1
1
0
0
1
1
@ -4849,7 +4849,7 @@
1
1
1
0
1
0
1
1
@ -5136,7 +5136,7 @@
1
0
0
1
0
1
1
0
@ -5168,7 +5168,7 @@
1
0
1
0
1
1
1
1
@ -5176,7 +5176,7 @@
1
1
0
0
1
1
0
1
@ -5231,7 +5231,7 @@
0
1
1
0
1
0
0
1
@ -5288,7 +5288,6 @@
1
1
1
0
1
1
1
@ -5297,9 +5296,10 @@
1
1
1
0
1
0
1
1
0
0
1
@ -5391,9 +5391,9 @@
1
1
0
0
1
1
0
1
1
0
@ -5411,7 +5411,7 @@
1
0
0
0
1
1
1
1
@ -5438,7 +5438,7 @@
0
0
1
0
1
1
1
0

1 1
7 0
8 1
9 1
10 1 0
11 1
12 1
13 1
50 1
51 0
52 0
53 1 0
54 0
55 1
56 1
66 1
67 0
68 0
69 0 1
70 1
71 1
72 0
237 0
238 1
239 0
240 1 0
241 1
242 0
243 1
323 0
324 1
325 1
326 0 1
327 0
328 1
329 1
353 0
354 0
355 1
356 0 1
357 0
358 1
359 1
555 0
556 1
557 1
558 0 1
559 0
560 1
561 0
651 1
652 1
653 1
654 0 1
655 1
656 1
657 1
780 1
781 0
782 0
783 0 1
784 1
785 0
786 1
822 1
823 1
824 1
825 0 1
826 1
827 0
828 0
919 1
920 0
921 0
922 0 1
923 1
924 1
925 0
1012 0
1013 0
1014 1
1015 1 0
1016 1
1017 1
1018 1
1170 1
1171 1
1172 0
1173 1 0
1174 1
1175 0
1176 0
1179 0
1180 1
1181 0
1182 1 0
1183 1
1184 1
1185 0
1291 1
1292 1
1293 0
1294 0 1
1295 0
1296 1
1297 1
1325 1
1326 0
1327 0
1
1328 0
1329 1
1330 1
1331 0 1
1332 1
1333 0
1334 1
1335 0
1336 0
1337 1
1338 0 1
1339 0
1340 0
1341 1
1357 1
1358 1
1359 1
1360 0 1
1361 1
1362 1
1363 1
1451 1
1452 1
1453 1
1454 0 1
1455 0
1456 1
1457 1
1606 0
1607 1
1608 0
1609 1 0
1610 1
1611 1
1612 1
1620 1
1621 0
1622 1
1623 1 0
1624 1
1625 1
1626 1
1682 1
1683 1
1684 1
1685 0 1
1686 1
1687 0
1688 1
1789 1
1790 0
1791 1
1792 0 1
1793 1
1794 0
1795 1
1798 0
1799 1
1800 1
1801 0 1
1802 1
1803 1
1804 1
1939 1
1940 0
1941 1
1942 1 0
1943 1
1944 1
1945 0
1946 1
1947 0
1948 0
1949 1 0
1950 1 0
1951 1
1952 0
1953 0
1973 1
1974 0
1975 1
1976 0 1
1977 1
1978 1
1979 1
2041 0
2042 0
2043 1
2044 1 0
2045 1
2046 0
2047 0
2181 1
2182 1
2183 1
2184 0 1
2185 0
2186 1
2187 1
2209 0
2210 1
2211 1
2212 0 1
2213 1
2214 1
2215 1
2293 1
2294 1
2295 1
2296 1 0
2297 1
2298 0
2299 1
2315 1
2316 1
2317 1
2318 1 0
2319 0
2320 0
2321 1
2370 1
2371 1
2372 0
2373 0 1
2374 1
2375 0
2376 1
2390 0
2391 1
2392 1
2393 0 1
2394 1
2395 0
2396 1
2454 0
2455 1
2456 1
2457 1 0
2458 0
2459 1
2460 0
2548 1
2549 1
2550 1
2551 1 0
2552 1
2553 0
2554 1
2572 0
2573 1
2574 1
2575 0 1
2576 0
2577 1
2578 0
2689 1
2690 1
2691 1
0
2692 1
2693 0 1
2694 1
2695 1
2696 0
2697 1
2746 0
2747 0
2748 1
2749 0 1
2750 1
2751 1
2752 1
2763 1
2764 1
2765 1
2766 0 1
2767 1
2768 1
2769 0
2775 1
2776 0
2777 0
2778 0 1
2779 0
2780 0
2781 1
2835 1
2836 1
2837 1
2838 0 1
2839 1
2840 0
2841 1
2861 1
2862 1
2863 1
2864 1 0
2865 1
2866 1
2867 0
2933 1
2934 0
2935 1
2936 0 1
2937 1
2938 1
2939 1
2995 1
2996 0
2997 1
2998 0 1
2999 0
3000 0
3001 0
3017 0
3018 0
3019 0
3020 0 1
3021 1
3022 1
3023 1
3158 1
3159 0
3160 1
3161 0 1
3162 1
3163 1
3164 1
3244 1
3245 1
3246 0
3247 1 0
3248 0
3249 1
3250 1
3310 1
3311 1
3312 1
3313 0 1
3314 1
3315 1
3316 1
3364 1
3365 0
3366 1
3367 1 0
3368 1
3369 1
3370 1
3502 1
3503 1
3504 1
3505 0 1
3506 0
3507 1
3508 1
3569 1
3570 1
3571 1
3572 1 0
3573 1
3574 1
3575 1
3826 0
3827 0
3828 0
3829 0 1
3830 0
3831 1
3832 0
3906 1
3907 1
3908 0
3909 0 1
3910 1
3911 1
3912 0
3931 1
3932 1
3933 0
3934 1 0
3935 1
3936 1
3937 0
3947 0
3948 1
3949 0
3950 1 0
3951 1
3952 1
3953 1
3967 1
3968 1
3969 0
3970 1 0
3971 1
3972 1
3973 1
4049 1
4050 1
4051 0
4052 1 0
4053 1
4054 1
4055 0
4060 1
4061 1
4062 0
4063 0 1
4064 1
4065 1
4066 1
4078 1
4079 1
4080 0
4081 0 1
4082 1
4083 1
4084 0
4160 0
4161 0
4162 0
4163 1 0
4164 1
4165 1
4166 1
4167 0
4168 1
4169 0
4170 0 1
4171 0
4172 0
4173 0
4178 1
4179 1
4180 0
4181 0 1
4182 0
4183 1
4184 1
4336 0
4337 0
4338 1
4339 0 1
4340 1
4341 1
4342 1
4394 1
4395 0
4396 1
4397 0 1
4398 1
4399 0
4400 0
4518 0
4519 0
4520 1
4521 0 1
4522 1
4523 1
4524 1
4547 1
4548 1
4549 1
4550 1 0
4551 1
4552 1
4553 1
4642 0
4643 1
4644 1
4645 1 0
4646 1
4647 1
4648 1
4666 1
4667 0
4668 1
4669 0 1
4670 0
4671 1
4672 0
4763 1
4764 1
4765 1
4766 0 1
4767 1
4768 1
4769 0
4796 0
4797 0
4798 1
4799 1 0
4800 0
4801 1
4802 1
4849 1
4850 1
4851 1
4852 0 1
4853 0
4854 1
4855 1
5136 1
5137 0
5138 0
5139 1 0
5140 1
5141 1
5142 0
5168 1
5169 0
5170 1
5171 0 1
5172 1
5173 1
5174 1
5176 1
5177 1
5178 0
5179 0 1
5180 1
5181 0
5182 1
5231 0
5232 1
5233 1
5234 0 1
5235 0
5236 0
5237 1
5288 1
5289 1
5290 1
0
5291 1
5292 1
5293 1
5296 1
5297 1
5298 1
5299 0 1
5300 0
5301 1
5302 1
5303 0
5304 0
5305 1
5391 1
5392 1
5393 0
0
5394 1
5395 1
5396 0
5397 1
5398 1
5399 0
5411 1
5412 0
5413 0
5414 0 1
5415 1
5416 1
5417 1
5438 0
5439 0
5440 1
5441 0 1
5442 1
5443 1
5444 0

5445
test-A/out2.tsv Normal file

File diff suppressed because it is too large Load Diff