".py" version of LogReg

Dominik Strzako 2021-05-22 18:52:56 +02:00
parent 265216824e
commit 6dbb5168eb
5 changed files with 343 additions and 225 deletions

LogReg.py (new file)
@@ -0,0 +1,112 @@
import pandas as pd
import numpy as np
import torch
from nltk.tokenize import word_tokenize
import gensim.downloader as api
# Load X and y for the train set, and X for the dev and test sets
X_train = pd.read_table('train/in.tsv', sep='\t', error_bad_lines=False, quoting=3, header=None, names=['content', 'id'], usecols=['content'])
y_train = pd.read_table('train/expected.tsv', sep='\t', error_bad_lines=False, quoting=3, header=None, names=['label'])
X_dev = pd.read_table('dev-0/in.tsv', sep='\t', error_bad_lines=False, header=None, quoting=3, names=['content', 'id'], usecols=['content'])
X_test = pd.read_table('test-A/in.tsv', sep='\t', error_bad_lines=False, header=None, quoting=3, names=['content', 'id'], usecols=['content'])
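# Each in.tsv row is expected to hold tab-separated (content, id) columns; only the text column is kept.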
# lowercase the datasets
# https://www.datacamp.com/community/tutorials/case-conversion-python
X_train = X_train.content.str.lower()
X_dev = X_dev.content.str.lower()
X_test = X_test.content.str.lower()
y_train = y_train['label']  # DataFrame to Series?
# tokenize the datasets
#https://www.nltk.org/_modules/nltk/tokenize.html
X_train = [word_tokenize(doc) for doc in X_train]
X_dev = [word_tokenize(doc) for doc in X_dev]
X_test = [word_tokenize(doc) for doc in X_test]
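# Hypothetical example: word_tokenize("it's a test") -> ['it', "'s", 'a', 'test']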
# word2vec, following Jakub's advice
# https://radimrehurek.com/gensim/auto_examples/howtos/run_downloader_api.html
# https://www.kaggle.com/kstathou/word-embeddings-logistic-regression
w2v = api.load('word2vec-google-news-300')
def document_vector(doc):
    """Create document vectors by averaging word vectors. Remove out-of-vocabulary words."""
    return np.mean([w2v[w] for w in doc if w in w2v] or [np.zeros(300)], axis=0)
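# Hypothetical example: document_vector(['good', 'movie']) averages w2v['good'] and w2v['movie'];
# a document whose tokens are all out-of-vocabulary falls back to np.zeros(300).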
X_train = [document_vector(doc) for doc in X_train]
X_dev = [document_vector(doc) for doc in X_dev]
X_test = [document_vector(doc) for doc in X_test]
# Neural network from lab class 8
#https://git.wmi.amu.edu.pl/filipg/aitech-eks-pub/src/branch/master/cw/08_regresja_logistyczna.ipynb
class NeuralNetwork(torch.nn.Module):
    def __init__(self, hidden_size):
        super(NeuralNetwork, self).__init__()
        self.l1 = torch.nn.Linear(300, hidden_size)  # word2vec-google-news-300 always yields 300-dimensional inputs
        self.l2 = torch.nn.Linear(hidden_size, 1)

    def forward(self, x):
        x = self.l1(x)
        x = torch.relu(x)
        x = self.l2(x)
        x = torch.sigmoid(x)
        return x
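# Shape flow: (batch, 300) -> l1 -> (batch, hidden_size) -> relu -> l2 -> (batch, 1) -> sigmoid = P(label == 1)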
model = NeuralNetwork(600)
criterion = torch.nn.BCELoss()
optimizer = torch.optim.SGD(model.parameters(), lr = 0.1)
batch_size = 15
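# BCELoss expects probabilities in (0, 1), which the sigmoid in forward() provides;
# torch.nn.BCEWithLogitsLoss on raw logits would be a numerically safer alternative.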
# Model training from lab class 8
#https://git.wmi.amu.edu.pl/filipg/aitech-eks-pub/src/branch/master/cw/08_regresja_logistyczna.ipynb
for epoch in range(5):
    model.train()
    for i in range(0, y_train.shape[0], batch_size):
        X = X_train[i:i+batch_size]
        X = torch.tensor(X)
        y = y_train[i:i+batch_size]
        y = torch.tensor(y.astype(np.float32).to_numpy()).reshape(-1, 1)
        outputs = model(X.float())
        loss = criterion(outputs, y)
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
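# The final slice may be shorter than batch_size; Python slicing simply truncates, so no padding is needed.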
y_dev = []
y_test = []
# Predictions
# model.eval() notifies all layers that we are in eval mode
model.eval()
# torch.no_grad() deactivates the autograd engine, which reduces memory usage and speeds up inference
with torch.no_grad():
    for i in range(0, len(X_dev), batch_size):
        X = X_dev[i:i+batch_size]
        X = torch.tensor(X)
        outputs = model(X.float())
        y = (outputs > 0.5).reshape(-1)  # flatten (batch, 1) to 1-D so the collected labels form a flat list
        y_dev.extend(y)
    for i in range(0, len(X_test), batch_size):
        X = X_test[i:i+batch_size]
        X = torch.tensor(X)
        outputs = model(X.float())
        y = (outputs > 0.5).reshape(-1)
        y_test.extend(y)
# Generate the output files
y_dev = np.asarray(y_dev, dtype=np.int32)
y_test = np.asarray(y_test, dtype=np.int32)
y_dev_df = pd.DataFrame({'label':y_dev})
y_test_df = pd.DataFrame({'label':y_test})
y_dev_df.to_csv(r'dev-0/out.tsv', sep='\t', index=False, header=False)
y_test_df.to_csv(r'test-A/out.tsv', sep='\t', index=False, header=False)
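A minimal sanity check for the dev predictions, assuming dev-0/expected.tsv exists and holds the gold labels in the same one-column format (a sketch, not part of this commit):

# Hypothetical check: compare the dev predictions against dev-0/expected.tsv.
y_gold = pd.read_table('dev-0/expected.tsv', sep='\t', header=None, names=['label'])['label']
accuracy = (y_dev == y_gold.to_numpy()).mean()
print(f'dev-0 accuracy: {accuracy:.4f}')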

(Jupyter notebook, filename not shown in this view)
@@ -2,43 +2,20 @@
 "cells": [
 {
 "cell_type": "code",
-"execution_count": 38,
+"execution_count": 61,
 "metadata": {},
 "outputs": [],
 "source": [
 "import pandas as pd\n",
 "import numpy as np\n",
 "import torch\n",
-"import csv\n",
 "from nltk.tokenize import word_tokenize\n",
-"#from gensim.models import Word2Vec\n",
 "import gensim.downloader as api"
 ]
 },
 {
 "cell_type": "code",
-"execution_count": 39,
-"metadata": {},
-"outputs": [],
-"source": [
-"# Neural network from lab class 8\n",
-"class NeuralNetwork(torch.nn.Module): \n",
-" def __init__(self, hidden_size):\n",
-" super(NeuralNetwork, self).__init__()\n",
-" self.l1 = torch.nn.Linear(300, hidden_size) # Google's word2vec-google-news-300 always yields 300-dimensional inputs\n",
-" self.l2 = torch.nn.Linear(hidden_size, 1)\n",
-"\n",
-" def forward(self, x):\n",
-" x = self.l1(x)\n",
-" x = torch.relu(x)\n",
-" x = self.l2(x)\n",
-" x = torch.sigmoid(x)\n",
-" return x"
-]
-},
-{
-"cell_type": "code",
-"execution_count": 40,
+"execution_count": 62,
 "metadata": {},
 "outputs": [],
 "source": [
@@ -51,34 +28,35 @@
 },
 {
 "cell_type": "code",
-"execution_count": 41,
+"execution_count": 63,
 "metadata": {},
 "outputs": [],
 "source": [
-"# Data preprocessing\n",
-"# lowercase\n",
+"# lowercase the datasets\n",
 "# https://www.datacamp.com/community/tutorials/case-conversion-python\n",
 "X_train = X_train.content.str.lower()\n",
-"y_train = y_train['label']\n",
 "X_dev = X_dev.content.str.lower()\n",
-"X_test = X_test.content.str.lower()"
+"X_test = X_test.content.str.lower()\n",
+"\n",
+"y_train = y_train['label'] # DataFrame to Series?"
 ]
 },
 {
 "cell_type": "code",
-"execution_count": 42,
+"execution_count": 64,
 "metadata": {},
 "outputs": [],
 "source": [
-"# tokenize\n",
-"X_train = [word_tokenize(content) for content in X_train]\n",
-"X_dev = [word_tokenize(content) for content in X_dev]\n",
-"X_test = [word_tokenize(content) for content in X_test]"
+"# tokenize the datasets\n",
+"#https://www.nltk.org/_modules/nltk/tokenize.html\n",
+"X_train = [word_tokenize(doc) for doc in X_train]\n",
+"X_dev = [word_tokenize(doc) for doc in X_dev]\n",
+"X_test = [word_tokenize(doc) for doc in X_test]"
 ]
 },
 {
 "cell_type": "code",
-"execution_count": 44,
+"execution_count": 67,
 "metadata": {},
 "outputs": [],
 "source": [
@@ -86,9 +64,36 @@
 "# https://radimrehurek.com/gensim/auto_examples/howtos/run_downloader_api.html\n",
 "# https://www.kaggle.com/kstathou/word-embeddings-logistic-regression\n",
 "w2v = api.load('word2vec-google-news-300')\n",
-"X_train = [np.mean([w2v[w] for w in content if w in w2v] or [np.zeros(300)], axis=0) for content in X_train]\n",
-"X_dev = [np.mean([w2v[w] for w in content if w in w2v] or [np.zeros(300)], axis=0) for content in X_dev]\n",
-"X_test = [np.mean([w2v[w] for w in content if w in w2v] or [np.zeros(300)], axis=0) for content in X_test]"
+"\n",
+"def document_vector(doc):\n",
+" \"\"\"Create document vectors by averaging word vectors. Remove out-of-vocabulary words.\"\"\"\n",
+" return np.mean([w2v[w] for w in doc if w in w2v] or [np.zeros(300)], axis=0)\n",
+"\n",
+"X_train = [document_vector(doc) for doc in X_train]\n",
+"X_dev = [document_vector(doc) for doc in X_dev]\n",
+"X_test = [document_vector(doc) for doc in X_test]"
+]
+},
+{
+"cell_type": "code",
+"execution_count": null,
+"metadata": {},
+"outputs": [],
+"source": [
+"# Neural network from lab class 8\n",
+"#https://git.wmi.amu.edu.pl/filipg/aitech-eks-pub/src/branch/master/cw/08_regresja_logistyczna.ipynb\n",
+"class NeuralNetwork(torch.nn.Module): \n",
+" def __init__(self, hidden_size):\n",
+" super(NeuralNetwork, self).__init__()\n",
+" self.l1 = torch.nn.Linear(300, hidden_size) # word2vec-google-news-300 always yields 300-dimensional inputs\n",
+" self.l2 = torch.nn.Linear(hidden_size, 1)\n",
+"\n",
+" def forward(self, x):\n",
+" x = self.l1(x)\n",
+" x = torch.relu(x)\n",
+" x = self.l2(x)\n",
+" x = torch.sigmoid(x)\n",
+" return x"
 ]
 },
 {
@@ -112,6 +117,7 @@
 "outputs": [],
 "source": [
 "# Model training from lab class 8\n",
+"#https://git.wmi.amu.edu.pl/filipg/aitech-eks-pub/src/branch/master/cw/08_regresja_logistyczna.ipynb\n",
 "for epoch in range(5):\n",
 " model.train()\n",
 " for i in range(0, y_train.shape[0], batch_size):\n",

dev-0/out.tsv (inferred from the script's output paths)
[diff omitted: regenerated 0/1 predictions; scattered single labels flip between 0 and 1, and a handful of label lines are added or removed]


test-A/out.tsv (inferred from the script's output paths)
[diff omitted: regenerated 0/1 predictions; scattered single labels flip between 0 and 1, and a handful of label lines are added or removed]
