update script

Zosia 2021-05-18 22:46:19 +02:00
parent ad632af707
commit aa6998f037
4 changed files with 34352 additions and 34338 deletions

File diff suppressed because it is too large


@ -25,7 +25,7 @@
},
{
"cell_type": "code",
"execution_count": 38,
"execution_count": 68,
"metadata": {},
"outputs": [],
"source": [
@ -33,12 +33,12 @@
" all_data = lzma.open(filename).read().decode('UTF-8').split('\\n')\n",
" return [line.split('\\t') for line in all_data][:-1]\n",
"\n",
"train_data = read_data('train/train.tsv.xz')[::500]"
"train_data = read_data('train/train.tsv.xz')[::250]"
]
},
{
"cell_type": "code",
"execution_count": 39,
"execution_count": 69,
"metadata": {},
"outputs": [
{
@ -51,7 +51,7 @@
" 'nowią część kultury. U nas już nikt ich nie chce oglądać. Chciałam osiągnąć coś wprost przeciwnego: przywrócić kobietom zaufanie do samych siebie, do własnych mo!liwości. Katharłne Hepburn powłedziala. kłedyi, łe najtrudnłej$ze to ..aprzedawanłe debie jak bukietu Awłeźych kwiatów\". Czy pant nie myllt. tak aamo7 Jestem bardziej odprężona niż Katharine. Gwiazdy jej generacji były większymi gwiazdami i musiały być całkiem nadzwyczajne. Nasze pokolenie jest banalniejsze. Jako kobieta i jako aktorka najlepiej czuję się w tłumie. --. Jest szalona rolnica między tym co ludzie o panł myl\\'ą. a tllm. kim panł jeBt naprClwdę. Ja tego nie widzę. Był taki okres w naszym ż\\'yciu, że Tom i ja n e mieliśmy pieniędzy. NIe pracowałam. Zyliśmy z koie zności bardzo skrom- -. -... .. nie. Zresztą dotychC\" as zy- . popiół znad ruin miasta. Ogromny teren, obejmuJący około 58 km t został zamieniony w dymiące pogorzelisko. Ulice miasta pokryte były zwęglonymi zwłokami mieszkańc6w, kt6re w wielu miejscach tworzyły makabryczne stosy. Wśród ofiar znaleźli się wszyscy dostojnicy przybyli poprzedniego dnia z Fort de France. Przez pierwsze dwa dni trwała akcja ratunkowa, nie udało się jednak znale:fć ani jednej żywej istoty. Dopiero w niedzielę, 11 maja, usłyszano czyjeŚ jęki. Odrzucając głazy i gorący jeszcze popiół, odnaleziono mocno poparzonego i całkowicie wyczerpanego młodego człowieka. Okazało si że jest to więzień pochodzący z leo Precheur. Skazano go na tygodniowy pobyt w karnej celi (ciemnicy) za samowolne opuszczenie więzienia. Ta niesubordynacja okazała się dla Sylbarisa zbawienna. Grube mury celi, Rołożonej u pod!1 óża g?ry, uchroniły go od zrmażdźenla i od spalenia\\'. Uratowany tak opisał nieprawdopodobną tragedię miasta: To btllo okolo 8 rano... nagle usłyszałem ogromny huk, a potem pTzeraźliwe krzyki ludzi. W sZ]lScy . l .\\' , P walali: pa ę nę.... umIeram.... o kilku minutach. WSZ1łstkie ucichły. Wszystkie... :z 1D1/;qtkiem mo;ego... Ogień pochłonął miasto i jego mieszkańców. Spełniła się klątwa rzucona przez wodza karaibskiego przed nies łna ćwierć wiekiem. ANDRZEJ VORBRODT jemy o wiele skromniej, niż większość ludzi z Hollywood. Moje. dzieci chodzą do publicznej szkoły, nie chcę, by wyrastały na .snobów. Myślę, że każda aktorka chyba że gra wyłącznie kr6lowe i księżniczki musi pozostawać w kontakcie z normalnymi ludźmi i z normalnym życiem. Zresztą, gdybym nagle zdecydowała się żyć luksusowo, Tom niechybnie opuściłby mnie\\' w mgnieniu oka. Wydawalo mł się nłer4%, e ma pant paC2. UC\"ic winy z powodu awołch ]Jłeniędzy... Nic podobnego. Jestem dumna ze sposobu, w jaki wydaję moje pieniądze. Używam ich na cele? w które wierzę i o ktore walczę. - czy t,o prawda. te sfinanaowała pant calkouńcie kampanię elektoralną Toma przy pomocy płenłędZ1l zarobionych na aerobiku\\' Tak. czy zna pani włelko\\' swojej fortuny? ..:.. Mniej więcej. Przed Tomem byl Vad\\'m; Paryt. cyganeria artystyczna, latwe tycie... Była pant kim innym. Jak doszlo do takiej zmiany? Dwadzie cia lat temu nie wiedziałam kim jestem. Byłam całkiem apolityczna. Kiedy wybuchła wojna w Wietnamie, n!e wiedziałam nawet gdzie leży Wietnam. A kiedy zrozumiałam, co naprawdę się dzieje w Wietnamie nie umiałam się wyłączyć j przestać walczyć o to, co Ic-uważalam za swój 000- wiązek. To calkowicle zmieniło']"
]
},
"execution_count": 39,
"execution_count": 69,
"metadata": {},
"output_type": "execute_result"
}
@ -62,7 +62,7 @@
},
{
"cell_type": "code",
"execution_count": 40,
"execution_count": 70,
"metadata": {},
"outputs": [
{
@ -80,7 +80,7 @@
},
{
"cell_type": "code",
"execution_count": 41,
"execution_count": 71,
"metadata": {},
"outputs": [],
"source": [
@ -89,7 +89,7 @@
},
{
"cell_type": "code",
"execution_count": 42,
"execution_count": 72,
"metadata": {},
"outputs": [
{
@ -486,7 +486,7 @@
" 'ciemnicy']"
]
},
"execution_count": 42,
"execution_count": 72,
"metadata": {},
"output_type": "execute_result"
}
@ -497,7 +497,7 @@
},
{
"cell_type": "code",
"execution_count": 43,
"execution_count": 73,
"metadata": {},
"outputs": [
{
@ -840,7 +840,7 @@
" 'gorący']"
]
},
"execution_count": 43,
"execution_count": 73,
"metadata": {},
"output_type": "execute_result"
}
@ -852,7 +852,7 @@
},
{
"cell_type": "code",
"execution_count": 44,
"execution_count": 74,
"metadata": {},
"outputs": [],
"source": [
@ -862,7 +862,7 @@
},
{
"cell_type": "code",
"execution_count": 45,
"execution_count": 75,
"metadata": {},
"outputs": [],
"source": [
@ -874,16 +874,16 @@
},
{
"cell_type": "code",
"execution_count": 46,
"execution_count": 76,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"215"
"430"
]
},
"execution_count": 46,
"execution_count": 76,
"metadata": {},
"output_type": "execute_result"
}
@ -894,7 +894,7 @@
},
{
"cell_type": "code",
"execution_count": 47,
"execution_count": 77,
"metadata": {},
"outputs": [
{
@ -925,9 +925,9 @@
" <th>______</th>\n",
" <th>____x</th>\n",
" <th>__ch</th>\n",
" <th>__n_</th>\n",
" <th>__naie</th>\n",
" <th>__o</th>\n",
" <th>_a</th>\n",
" <th>_b</th>\n",
" <th>...</th>\n",
" <th>франкф</th>\n",
" <th>фялофс</th>\n",
@ -944,12 +944,12 @@
" <tbody>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>0.000000</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.000000</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
@ -968,12 +968,12 @@
" </tr>\n",
" <tr>\n",
" <th>1</th>\n",
" <td>0.000000</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.000000</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
@ -992,12 +992,12 @@
" </tr>\n",
" <tr>\n",
" <th>2</th>\n",
" <td>0.000000</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.000000</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
@ -1016,12 +1016,12 @@
" </tr>\n",
" <tr>\n",
" <th>3</th>\n",
" <td>0.000000</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.000000</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
@ -1040,12 +1040,12 @@
" </tr>\n",
" <tr>\n",
" <th>4</th>\n",
" <td>0.000000</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.000000</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
@ -1064,12 +1064,12 @@
" </tr>\n",
" <tr>\n",
" <th>5</th>\n",
" <td>0.000000</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.000000</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
@ -1088,12 +1088,12 @@
" </tr>\n",
" <tr>\n",
" <th>6</th>\n",
" <td>0.040798</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.064346</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
@ -1112,12 +1112,12 @@
" </tr>\n",
" <tr>\n",
" <th>7</th>\n",
" <td>0.000000</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.000000</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
@ -1136,12 +1136,12 @@
" </tr>\n",
" <tr>\n",
" <th>8</th>\n",
" <td>0.000000</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.000000</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
@ -1160,12 +1160,12 @@
" </tr>\n",
" <tr>\n",
" <th>9</th>\n",
" <td>0.000000</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.000000</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
" <td>0.0</td>\n",
@ -1184,38 +1184,38 @@
" </tr>\n",
" </tbody>\n",
"</table>\n",
"<p>10 rows × 25509 columns</p>\n",
"<p>10 rows × 42788 columns</p>\n",
"</div>"
],
"text/plain": [
" __ ___ ____ _____ ______ ____x __ch __o _a _b ... \\\n",
"0 0.000000 0.0 0.0 0.0 0.0 0.000000 0.0 0.0 0.0 0.0 ... \n",
"1 0.000000 0.0 0.0 0.0 0.0 0.000000 0.0 0.0 0.0 0.0 ... \n",
"2 0.000000 0.0 0.0 0.0 0.0 0.000000 0.0 0.0 0.0 0.0 ... \n",
"3 0.000000 0.0 0.0 0.0 0.0 0.000000 0.0 0.0 0.0 0.0 ... \n",
"4 0.000000 0.0 0.0 0.0 0.0 0.000000 0.0 0.0 0.0 0.0 ... \n",
"5 0.000000 0.0 0.0 0.0 0.0 0.000000 0.0 0.0 0.0 0.0 ... \n",
"6 0.040798 0.0 0.0 0.0 0.0 0.064346 0.0 0.0 0.0 0.0 ... \n",
"7 0.000000 0.0 0.0 0.0 0.0 0.000000 0.0 0.0 0.0 0.0 ... \n",
"8 0.000000 0.0 0.0 0.0 0.0 0.000000 0.0 0.0 0.0 0.0 ... \n",
"9 0.000000 0.0 0.0 0.0 0.0 0.000000 0.0 0.0 0.0 0.0 ... \n",
" __ ___ ____ _____ ______ ____x __ch __n_ __naie __o ... франкф \\\n",
"0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 ... 0.0 \n",
"1 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 ... 0.0 \n",
"2 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 ... 0.0 \n",
"3 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 ... 0.0 \n",
"4 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 ... 0.0 \n",
"5 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 ... 0.0 \n",
"6 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 ... 0.0 \n",
"7 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 ... 0.0 \n",
"8 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 ... 0.0 \n",
"9 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 ... 0.0 \n",
"\n",
" франкф фялофс что шшяшшш щвашш ьввдвн ьлало эавкде юрвдич ях \n",
"0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 \n",
"1 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 \n",
"2 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 \n",
"3 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 \n",
"4 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 \n",
"5 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 \n",
"6 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 \n",
"7 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 \n",
"8 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 \n",
"9 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 \n",
" фялофс что шшяшшш щвашш ьввдвн ьлало эавкде юрвдич ях \n",
"0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 \n",
"1 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 \n",
"2 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 \n",
"3 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 \n",
"4 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 \n",
"5 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 \n",
"6 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 \n",
"7 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 \n",
"8 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 \n",
"9 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 \n",
"\n",
"[10 rows x 25509 columns]"
"[10 rows x 42788 columns]"
]
},
"execution_count": 47,
"execution_count": 77,
"metadata": {},
"output_type": "execute_result"
}
@ -1226,16 +1226,17 @@
},
{
"cell_type": "code",
"execution_count": 48,
"execution_count": 78,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"array([1., 0., 0., ..., 0., 0., 0.])"
"array([0.47377066, 0. , 0. , ..., 0. , 0. ,\n",
" 0. ])"
]
},
"execution_count": 48,
"execution_count": 78,
"metadata": {},
"output_type": "execute_result"
}
@ -1246,7 +1247,7 @@
},
{
"cell_type": "code",
"execution_count": 49,
"execution_count": 79,
"metadata": {},
"outputs": [],
"source": [
@ -1255,7 +1256,7 @@
},
{
"cell_type": "code",
"execution_count": 50,
"execution_count": 80,
"metadata": {},
"outputs": [
{
@ -1264,7 +1265,7 @@
"LinearRegression()"
]
},
"execution_count": 50,
"execution_count": 80,
"metadata": {},
"output_type": "execute_result"
}
@ -1276,18 +1277,18 @@
},
{
"cell_type": "code",
"execution_count": 51,
"execution_count": 81,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"array([1985.49452053, 1967.30958903, 1919.55479387, 1842.49999998,\n",
" 1930.67808218, 1967.49999998, 2012.5887978 , 2013.1958904 ,\n",
" 1938.49999998, 1876.93852457])"
"array([1985.49452053, 1925.6178082 , 1967.30958903, 1937.49999998,\n",
" 1919.55479387, 1932.77459015, 1842.49999998, 1932.08333332,\n",
" 1930.67808218, 2000.49999998])"
]
},
"execution_count": 51,
"execution_count": 81,
"metadata": {},
"output_type": "execute_result"
}
@ -1298,7 +1299,7 @@
},
{
"cell_type": "code",
"execution_count": 52,
"execution_count": 82,
"metadata": {},
"outputs": [],
"source": [
@ -1312,7 +1313,7 @@
},
{
"cell_type": "code",
"execution_count": 53,
"execution_count": 83,
"metadata": {},
"outputs": [],
"source": [
@ -1321,18 +1322,18 @@
},
{
"cell_type": "code",
"execution_count": 54,
"execution_count": 84,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"array([1856.34950057, 1998.14824651, 1980.9029765 , 1937.6572196 ,\n",
" 1909.521258 , 1946.78126886, 1915.19790703, 1899.90957018,\n",
" 1897.26065759, 1954.66458951])"
"array([1889.28635713, 1950.9440436 , 1957.26235075, 1959.53052259,\n",
" 1914.96228803, 1948.17090442, 1951.19472106, 1917.66714928,\n",
" 1912.14525243, 1936.7929999 ])"
]
},
"execution_count": 54,
"execution_count": 84,
"metadata": {},
"output_type": "execute_result"
}
@ -1343,7 +1344,7 @@
},
{
"cell_type": "code",
"execution_count": 66,
"execution_count": 92,
"metadata": {},
"outputs": [],
"source": [
@ -1355,7 +1356,7 @@
},
{
"cell_type": "code",
"execution_count": 56,
"execution_count": 86,
"metadata": {},
"outputs": [],
"source": [
@ -1365,14 +1366,14 @@
},
{
"cell_type": "code",
"execution_count": 57,
"execution_count": 94,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"47.658531615811334\n"
"38.80250628936373\n"
]
}
],
@ -1387,7 +1388,7 @@
},
{
"cell_type": "code",
"execution_count": 65,
"execution_count": 88,
"metadata": {},
"outputs": [],
"source": [
@ -1401,7 +1402,7 @@
},
{
"cell_type": "code",
"execution_count": 61,
"execution_count": 89,
"metadata": {},
"outputs": [],
"source": [
@ -1410,18 +1411,18 @@
},
{
"cell_type": "code",
"execution_count": 62,
"execution_count": 90,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"array([1983.16061382, 1913.76206771, 1922.81889879, 1940.76034008,\n",
" 1950.58829951, 1890.32262944, 1935.14295119, 1932.10534702,\n",
" 1969.90671923, 1935.72959798])"
"array([1970.80482572, 1891.52353205, 1914.05051655, 1921.30242974,\n",
" 1908.01225049, 1912.69373127, 1911.11153893, 1948.74997295,\n",
" 1925.77888352, 1923.62798817])"
]
},
"execution_count": 62,
"execution_count": 90,
"metadata": {},
"output_type": "execute_result"
}
@ -1432,7 +1433,7 @@
},
{
"cell_type": "code",
"execution_count": 67,
"execution_count": 93,
"metadata": {},
"outputs": [],
"source": [
@ -1441,6 +1442,13 @@
" f.write(str(round(i, 11)) + '\\n')\n",
"f.close()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {


@ -16,56 +16,56 @@ import pandas as pd
from sklearn.linear_model import LinearRegression
# In[38]:
# In[68]:
def read_data(filename):
all_data = lzma.open(filename).read().decode('UTF-8').split('\n')
return [line.split('\t') for line in all_data][:-1]
train_data = read_data('train/train.tsv.xz')[::500]
train_data = read_data('train/train.tsv.xz')[::250]
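Halving the stride from 500 to 250 doubles the training sample; the notebook output further down goes from 215 to 430 rows accordingly. A minimal sketch of the slicing (N is hypothetical, chosen only so the counts match those outputs):
N = 107500                                   # hypothetical corpus size
rows = list(range(N))
print(len(rows[::500]), len(rows[::250]))    # -> 215 430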
# In[39]:
# In[69]:
train_data[0]
# In[40]:
# In[70]:
stop_words = get_stop_words('pl') + ['a', 'u', 'i', 'z', 'w', 'o']
print(stop_words)
# In[41]:
# In[71]:
train_data_tokenized = [list(set(gensim.utils.tokenize(x[4], lowercase = True))) for x in train_data]
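A minimal sketch of this tokenization step on a toy sentence; note that the set() wrapper keeps only distinct tokens per document, so each term can occur at most once per text downstream:
import gensim
tokens = set(gensim.utils.tokenize('Ala ma kota, a kot ma Alę.', lowercase=True))
print(sorted(tokens))  # ['a', 'ala', 'alę', 'kot', 'kota', 'ma']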
# In[42]:
# In[72]:
train_data_tokenized[0]
# In[43]:
# In[73]:
train_data_stemmatized = [list(set([w[:6] for w in set(i) - set(stop_words)])) for i in train_data_tokenized]
train_data_stemmatized[0]
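The w[:6] truncation acts as a crude prefix stemmer for inflected Polish. A sketch with hypothetical tokens (stop_words here stands in for the get_stop_words('pl') list built above):
stop_words = ['i']
tokens = ['zrobiłem', 'zrobiła', 'i']
print(sorted(set(w[:6] for w in set(tokens) - set(stop_words))))  # ['zrobił'] -- both verb forms collapse to one feature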
# In[44]:
# In[74]:
vectorizer = TfidfVectorizer()
vectors = vectorizer.fit_transform([' '.join(i) for i in train_data_stemmatized])
# In[45]:
# In[75]:
feature_names = vectorizer.get_feature_names()
@ -74,44 +74,44 @@ denselist = dense.tolist()
df = pd.DataFrame(denselist, columns=feature_names)
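A hedged memory note: denselist materializes the full document-term matrix, which at 430 rows x 42788 columns (the shape reported below) is roughly 147 MB of float64. LinearRegression also accepts scipy sparse input, so a leaner sketch could skip the dense detour entirely:
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.linear_model import LinearRegression
docs = ['ala ma kota', 'kot ma ale']                 # toy corpus
vec = TfidfVectorizer()
X = vec.fit_transform(docs)                          # stays sparse (CSR)
model = LinearRegression().fit(X, [1920.0, 1980.0])  # sparse X works directly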
# In[46]:
# In[76]:
len(train_data)
# In[47]:
# In[77]:
df[:10]
# In[48]:
# In[78]:
vectorizer.transform(['__ ma kota']).toarray()[0]
# In[49]:
# In[79]:
train_Y = [(float(x[0]) + float(x[1])) / 2 for x in train_data]
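The regression target is the midpoint of each document's publication-year interval, taken from the first two TSV columns. A one-line sketch with a hypothetical row:
row = ['1985.0', '1985.99', 'id', 'meta', 'text']  # hypothetical row layout
y = (float(row[0]) + float(row[1])) / 2            # -> 1985.495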
# In[50]:
# In[80]:
model = LinearRegression() # define the model
model.fit(df, train_Y) # fit the model to the training data
# In[51]:
# In[81]:
model.predict(df[:10])
# In[52]:
# In[82]:
with open('dev-0/in.tsv', "r", encoding="utf-8") as f:
@ -122,19 +122,19 @@ dev_0_data_stemmatized = [list(set([w[:6] for w in set(i) - set(stop_words)])) f
dev_0_data = [' '.join(i) for i in dev_0_data_stemmatized]
# In[53]:
# In[83]:
y_predicted = model.predict(vectorizer.transform(dev_0_data).toarray())
# In[54]:
# In[84]:
y_predicted[:10]
# In[66]:
# In[92]:
f = open("dev-0/out.tsv", "a")
@ -143,14 +143,14 @@ for i in y_predicted:
f.close()
# In[56]:
# In[86]:
with open('dev-0/expected.tsv', "r", encoding="utf-8") as f:
e = [line.rstrip() for line in f]
# In[57]:
# In[94]:
import math
@ -161,7 +161,7 @@ for i in range(len(y_predicted)):
print(math.sqrt(sum(t)/len(y_predicted)))
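The hidden lines of this hunk presumably accumulate squared errors in t; a self-contained sketch of the same RMSE computation with hypothetical values:
import math
y_pred = [1889.3, 1950.9]  # hypothetical predictions
e = ['1900', '1945']       # hypothetical expected years (strings, as read from the file)
rmse = math.sqrt(sum((p - float(x)) ** 2 for p, x in zip(y_pred, e)) / len(y_pred))
print(rmse)  # ~8.64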
# In[65]:
# In[88]:
with open('test-A/in.tsv', "r", encoding="utf-8") as f:
@ -172,19 +172,19 @@ test_A_data_stemmatized = [list(set([w[:6] for w in set(i) - set(stop_words)]))
test_A_data = [' '.join(i) for i in test_A_data_stemmatized]
# In[61]:
# In[89]:
y_test_predicted = model.predict(vectorizer.transform(test_A_data).toarray())
# In[62]:
# In[90]:
y_test_predicted[:10]
# In[67]:
# In[93]:
f = open("test-A/out.tsv", "a")
@ -192,3 +192,9 @@ for i in y_test_predicted:
f.write(str(round(i, 11)) + '\n')
f.close()
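One caveat worth flagging (an observation, not part of the commit): both out.tsv files are opened with mode "a", so re-running a cell appends duplicate rows. A sketch of an overwrite-safe variant:
with open('test-A/out.tsv', 'w') as f:  # 'w' truncates the file on each run
    for i in y_test_predicted:
        f.write(str(round(i, 11)) + '\n')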
# In[ ]:

File diff suppressed because it is too large