warsztaty2/P7. WRMF (Implicit ALS).ipynb

1813 lines
564 KiB
Plaintext
Raw Permalink Normal View History

2020-06-16 19:40:37 +02:00
{
"cells": [
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [],
"source": [
"import helpers\n",
"import pandas as pd\n",
"import numpy as np\n",
"import scipy.sparse as sparse\n",
"from collections import defaultdict\n",
"from itertools import chain\n",
"import random\n",
"import time\n",
"import matplotlib.pyplot as plt\n",
"import implicit\n",
"import evaluation_measures as ev\n",
"\n",
"# Load the MovieLens-100k train/test split and convert to sparse user-item CSR\n",
"# matrices; the *_code_id / *_id_code dicts presumably map between internal\n",
"# matrix codes and raw dataset ids - confirm against helpers.data_to_csr.\n",
"train_read=pd.read_csv('./Datasets/ml-100k/train.csv', sep='\\t', header=None)\n",
"test_read=pd.read_csv('./Datasets/ml-100k/test.csv', sep='\\t', header=None)\n",
"train_ui, test_ui, user_code_id, user_id_code, item_code_id, item_id_code = helpers.data_to_csr(train_read, test_read)"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [],
"source": [
"# WRMF confidence scaling: confidence = alpha * rating\n",
"alpha = 30\n",
"# NOTE(review): scales train_ui in place - re-running this cell multiplies by\n",
"# alpha again; a fresh kernel run is required for reproducible results\n",
"train_ui*=alpha\n",
"# item-user view of the matrix, as fed to model.fit below\n",
"train_iu=train_ui.transpose().tocsr()"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"WARNING:root:OpenBLAS detected. Its highly recommend to set the environment variable 'export OPENBLAS_NUM_THREADS=1' to disable its internal multithreading\n"
]
},
{
"data": {
"application/vnd.jupyter.widget-view+json": {
"model_id": "1b4550ffe25e47cc8be9484358e7dba9",
"version_major": 2,
"version_minor": 0
},
"text/plain": [
"HBox(children=(FloatProgress(value=0.0, max=10.0), HTML(value='')))"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"\n"
]
}
],
"source": [
"# Implicit ALS (WRMF): 200 latent factors, L2 regularization 0.1, 10 ALS sweeps\n",
"model = implicit.als.AlternatingLeastSquares(factors=200, regularization=0.1, iterations=10)\n",
"# fit is given the transposed (item x user) matrix built above\n",
"model.fit(train_iu)"
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {},
"outputs": [],
"source": [
"def top_k_recommendations(model, user_code_id, item_code_id, topK=10):\n",
"    \"\"\"Build a DataFrame with one row per user: [user_id, item_1, score_1, ...].\n",
"\n",
"    Recommends topK items per user with items already present in the training\n",
"    matrix filtered out. Reads train_ui from the enclosing (notebook) scope.\n",
"    Internal item codes in the odd columns are mapped back to raw item ids.\n",
"    \"\"\"\n",
"    recommendations=[]\n",
"    for u in range(train_ui.shape[0]):\n",
"        # bug fix: N was hard-coded to 10, silently ignoring the topK parameter\n",
"        u_recommended_items=model.recommend(u, train_ui, N=topK, filter_already_liked_items=True)\n",
"        # flatten the (item, score) pairs into one row prefixed by the raw user id\n",
"        recommendations.append([user_code_id[u]]+list(chain(*u_recommended_items)))\n",
"    reco=pd.DataFrame(recommendations)\n",
"    # odd columns hold internal item codes; translate them to raw item ids\n",
"    reco.iloc[:,1::2]=reco.iloc[:,1::2].applymap(lambda x: item_code_id[x])\n",
"    return reco\n",
"\n",
"def estimate(model, user_code_id, item_code_id, test_ui):\n",
"    \"\"\"Score every (user, item) pair present in test_ui.\n",
"\n",
"    Returns a list of rows [raw user id, raw item id, predicted score], where\n",
"    the score comes from model.rank_items on the single selected item.\n",
"    \"\"\"\n",
"    result=[]\n",
"    for user, item in zip(*test_ui.nonzero()):\n",
"        result.append([user_code_id[user], item_code_id[item], \n",
"                       model.rank_items(userid=user, user_items=train_ui, selected_items=[item])[0][1]])\n",
"    return result"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [],
"source": [
"# Persist top-10 recommendations and per-pair score estimates; the evaluation\n",
"# cell below reads these CSVs back instead of reusing in-memory objects\n",
"reco=top_k_recommendations(model, user_code_id, item_code_id, topK=10)\n",
"reco.to_csv('Recommendations generated/ml-100k/Ready_ImplicitALS_reco.csv', index=False, header=False)\n",
"\n",
"estimations_df=pd.DataFrame(estimate(model, user_code_id, item_code_id, test_ui))\n",
"estimations_df.to_csv('Recommendations generated/ml-100k/Ready_ImplicitALS_estimations.csv', index=False, header=False)"
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"943it [00:00, 9361.55it/s]\n"
]
},
{
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>RMSE</th>\n",
" <th>MAE</th>\n",
" <th>precision</th>\n",
" <th>recall</th>\n",
" <th>F_1</th>\n",
" <th>F_05</th>\n",
" <th>precision_super</th>\n",
" <th>recall_super</th>\n",
" <th>NDCG</th>\n",
" <th>mAP</th>\n",
" <th>MRR</th>\n",
" <th>LAUC</th>\n",
" <th>HR</th>\n",
" <th>Reco in test</th>\n",
" <th>Test coverage</th>\n",
" <th>Shannon</th>\n",
" <th>Gini</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>3.274971</td>\n",
" <td>3.076091</td>\n",
" <td>0.246766</td>\n",
" <td>0.180284</td>\n",
" <td>0.171181</td>\n",
" <td>0.194935</td>\n",
" <td>0.16309</td>\n",
" <td>0.205416</td>\n",
" <td>0.29822</td>\n",
" <td>0.166226</td>\n",
" <td>0.520157</td>\n",
" <td>0.587509</td>\n",
" <td>0.878049</td>\n",
" <td>0.999788</td>\n",
" <td>0.508658</td>\n",
" <td>5.756947</td>\n",
" <td>0.819903</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"</div>"
],
"text/plain": [
" RMSE MAE precision recall F_1 F_05 \\\n",
"0 3.274971 3.076091 0.246766 0.180284 0.171181 0.194935 \n",
"\n",
" precision_super recall_super NDCG mAP MRR LAUC \\\n",
"0 0.16309 0.205416 0.29822 0.166226 0.520157 0.587509 \n",
"\n",
" HR Reco in test Test coverage Shannon Gini \n",
"0 0.878049 0.999788 0.508658 5.756947 0.819903 "
]
},
"execution_count": 10,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"import evaluation_measures as ev\n",
"# fix: `imp` is deprecated since Python 3.4; importlib.reload is the replacement\n",
"import importlib\n",
"importlib.reload(ev)\n",
"\n",
"# Evaluate from the CSVs written above, so this cell is self-contained\n",
"estimations_df=pd.read_csv('Recommendations generated/ml-100k/Ready_ImplicitALS_estimations.csv', header=None)\n",
"reco=np.loadtxt('Recommendations generated/ml-100k/Ready_ImplicitALS_reco.csv', delimiter=',')\n",
"\n",
"ev.evaluate(test=pd.read_csv('./Datasets/ml-100k/test.csv', sep='\\t', header=None),\n",
"            estimations_df=estimations_df, \n",
"            reco=reco,\n",
"            super_reactions=[4,5])\n",
"#also you can just type ev.evaluate_all(estimations_df, reco) - I put above values as default"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Hyperparameter tuning"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Number of latent factors"
]
},
{
"cell_type": "code",
"execution_count": 11,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
" 0%| | 0/15 [00:00<?, ?it/s]\n",
"943it [00:00, 10055.83it/s]\n",
" 7%|▋ | 1/15 [00:01<00:15, 1.12s/it]\n",
"943it [00:00, 10056.90it/s]\n",
" 13%|█▎ | 2/15 [00:02<00:14, 1.15s/it]\n",
"943it [00:00, 9674.18it/s]\n",
" 20%|██ | 3/15 [00:03<00:14, 1.25s/it]\n",
"943it [00:00, 9548.09it/s]\n",
" 27%|██▋ | 4/15 [00:05<00:14, 1.30s/it]\n",
"943it [00:00, 9846.42it/s]\n",
" 33%|███▎ | 5/15 [00:06<00:13, 1.37s/it]\n",
"943it [00:00, 9848.78it/s]\n",
" 40%|████ | 6/15 [00:08<00:12, 1.44s/it]\n",
"943it [00:00, 9745.47it/s]\n",
" 47%|████▋ | 7/15 [00:10<00:12, 1.54s/it]\n",
"943it [00:00, 9744.82it/s]\n",
" 53%|█████▎ | 8/15 [00:12<00:11, 1.68s/it]\n",
"0it [00:00, ?it/s]\u001b[A\n",
"943it [00:00, 9272.28it/s]\u001b[A\n",
" 60%|██████ | 9/15 [00:14<00:10, 1.80s/it]\n",
"0it [00:00, ?it/s]\u001b[A\n",
"943it [00:00, 9147.55it/s]\u001b[A\n",
" 67%|██████▋ | 10/15 [00:16<00:09, 1.94s/it]\n",
"943it [00:00, 9648.29it/s]\n",
" 73%|███████▎ | 11/15 [00:18<00:08, 2.09s/it]\n",
"0it [00:00, ?it/s]\u001b[A\n",
"943it [00:00, 9180.40it/s]\u001b[A\n",
" 80%|████████ | 12/15 [00:21<00:06, 2.24s/it]\n",
"943it [00:00, 9706.96it/s]\n",
" 87%|████████▋ | 13/15 [00:24<00:04, 2.44s/it]\n",
"0it [00:00, ?it/s]\u001b[A\n",
"943it [00:00, 9002.64it/s]\u001b[A\n",
" 93%|█████████▎| 14/15 [00:27<00:02, 2.62s/it]\n",
"943it [00:00, 9838.66it/s]\n",
"100%|██████████| 15/15 [00:30<00:00, 2.06s/it]\n"
]
},
{
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>Factors</th>\n",
" <th>RMSE</th>\n",
" <th>MAE</th>\n",
" <th>precision</th>\n",
" <th>recall</th>\n",
" <th>F_1</th>\n",
" <th>F_05</th>\n",
" <th>precision_super</th>\n",
" <th>recall_super</th>\n",
" <th>NDCG</th>\n",
" <th>mAP</th>\n",
" <th>MRR</th>\n",
" <th>LAUC</th>\n",
" <th>HR</th>\n",
" <th>Reco in test</th>\n",
" <th>Test coverage</th>\n",
" <th>Shannon</th>\n",
" <th>Gini</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>25</td>\n",
" <td>2.844919</td>\n",
" <td>2.620125</td>\n",
" <td>0.112513</td>\n",
" <td>0.118879</td>\n",
" <td>0.097878</td>\n",
" <td>0.100372</td>\n",
" <td>0.066738</td>\n",
" <td>0.129517</td>\n",
" <td>0.125779</td>\n",
" <td>0.052940</td>\n",
" <td>0.218140</td>\n",
" <td>0.556162</td>\n",
" <td>0.656416</td>\n",
" <td>0.996501</td>\n",
" <td>0.709957</td>\n",
" <td>6.457332</td>\n",
" <td>0.647711</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>50</td>\n",
" <td>2.898551</td>\n",
" <td>2.678081</td>\n",
" <td>0.144645</td>\n",
" <td>0.135718</td>\n",
" <td>0.116720</td>\n",
" <td>0.123745</td>\n",
" <td>0.085086</td>\n",
" <td>0.147738</td>\n",
" <td>0.162899</td>\n",
" <td>0.073388</td>\n",
" <td>0.281801</td>\n",
" <td>0.564729</td>\n",
" <td>0.744433</td>\n",
" <td>0.999894</td>\n",
" <td>0.649351</td>\n",
" <td>6.257955</td>\n",
" <td>0.706753</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>75</td>\n",
" <td>2.946453</td>\n",
" <td>2.728472</td>\n",
" <td>0.168081</td>\n",
" <td>0.149663</td>\n",
" <td>0.131850</td>\n",
" <td>0.141945</td>\n",
" <td>0.103112</td>\n",
" <td>0.160522</td>\n",
" <td>0.192246</td>\n",
" <td>0.090485</td>\n",
" <td>0.335233</td>\n",
" <td>0.571818</td>\n",
" <td>0.797455</td>\n",
" <td>0.999788</td>\n",
" <td>0.608225</td>\n",
" <td>6.053205</td>\n",
" <td>0.755007</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>100</td>\n",
" <td>2.983337</td>\n",
" <td>2.766327</td>\n",
" <td>0.186957</td>\n",
" <td>0.159899</td>\n",
" <td>0.143370</td>\n",
" <td>0.156146</td>\n",
" <td>0.118670</td>\n",
" <td>0.179116</td>\n",
" <td>0.213595</td>\n",
" <td>0.103269</td>\n",
" <td>0.365040</td>\n",
" <td>0.577007</td>\n",
" <td>0.829268</td>\n",
" <td>1.000000</td>\n",
" <td>0.561328</td>\n",
" <td>5.906052</td>\n",
" <td>0.789284</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>125</td>\n",
" <td>3.013649</td>\n",
" <td>2.798792</td>\n",
" <td>0.195440</td>\n",
" <td>0.158716</td>\n",
" <td>0.144700</td>\n",
" <td>0.159963</td>\n",
" <td>0.119957</td>\n",
" <td>0.174794</td>\n",
" <td>0.227569</td>\n",
" <td>0.114459</td>\n",
" <td>0.399596</td>\n",
" <td>0.576467</td>\n",
" <td>0.815483</td>\n",
" <td>0.999894</td>\n",
" <td>0.532468</td>\n",
" <td>5.782155</td>\n",
" <td>0.813818</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>150</td>\n",
" <td>3.042673</td>\n",
" <td>2.830717</td>\n",
" <td>0.210180</td>\n",
" <td>0.164005</td>\n",
" <td>0.151327</td>\n",
" <td>0.169325</td>\n",
" <td>0.135944</td>\n",
" <td>0.181649</td>\n",
" <td>0.243943</td>\n",
" <td>0.125804</td>\n",
" <td>0.425139</td>\n",
" <td>0.579170</td>\n",
" <td>0.847296</td>\n",
" <td>1.000000</td>\n",
" <td>0.506494</td>\n",
" <td>5.746312</td>\n",
" <td>0.823647</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>175</td>\n",
" <td>3.064257</td>\n",
" <td>2.852890</td>\n",
" <td>0.215589</td>\n",
" <td>0.165439</td>\n",
" <td>0.153855</td>\n",
" <td>0.172823</td>\n",
" <td>0.136052</td>\n",
" <td>0.185028</td>\n",
" <td>0.254184</td>\n",
" <td>0.130538</td>\n",
" <td>0.457603</td>\n",
" <td>0.579917</td>\n",
" <td>0.857900</td>\n",
" <td>1.000000</td>\n",
" <td>0.503608</td>\n",
" <td>5.631072</td>\n",
" <td>0.838636</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>200</td>\n",
" <td>3.084422</td>\n",
" <td>2.874424</td>\n",
" <td>0.219618</td>\n",
" <td>0.162595</td>\n",
" <td>0.153513</td>\n",
" <td>0.174288</td>\n",
" <td>0.140451</td>\n",
" <td>0.186207</td>\n",
" <td>0.257191</td>\n",
" <td>0.136220</td>\n",
" <td>0.449647</td>\n",
" <td>0.578527</td>\n",
" <td>0.844115</td>\n",
" <td>1.000000</td>\n",
" <td>0.488456</td>\n",
" <td>5.644481</td>\n",
" <td>0.840826</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>225</td>\n",
" <td>3.097204</td>\n",
" <td>2.888454</td>\n",
" <td>0.227678</td>\n",
" <td>0.168651</td>\n",
" <td>0.159966</td>\n",
" <td>0.181384</td>\n",
" <td>0.150858</td>\n",
" <td>0.195783</td>\n",
" <td>0.267800</td>\n",
" <td>0.143532</td>\n",
" <td>0.464535</td>\n",
" <td>0.581580</td>\n",
" <td>0.861082</td>\n",
" <td>0.999788</td>\n",
" <td>0.468254</td>\n",
" <td>5.566533</td>\n",
" <td>0.850247</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>250</td>\n",
" <td>3.111146</td>\n",
" <td>2.902813</td>\n",
" <td>0.232131</td>\n",
" <td>0.169147</td>\n",
" <td>0.159200</td>\n",
" <td>0.181969</td>\n",
" <td>0.147854</td>\n",
" <td>0.189227</td>\n",
" <td>0.268282</td>\n",
" <td>0.144025</td>\n",
" <td>0.458117</td>\n",
" <td>0.581832</td>\n",
" <td>0.853659</td>\n",
" <td>0.999894</td>\n",
" <td>0.466089</td>\n",
" <td>5.603329</td>\n",
" <td>0.846631</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>275</td>\n",
" <td>3.124854</td>\n",
" <td>2.916841</td>\n",
" <td>0.234571</td>\n",
" <td>0.171622</td>\n",
" <td>0.162598</td>\n",
" <td>0.185026</td>\n",
" <td>0.149356</td>\n",
" <td>0.185706</td>\n",
" <td>0.274896</td>\n",
" <td>0.150449</td>\n",
" <td>0.474372</td>\n",
" <td>0.583088</td>\n",
" <td>0.858961</td>\n",
" <td>1.000000</td>\n",
" <td>0.466811</td>\n",
" <td>5.535912</td>\n",
" <td>0.852984</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>300</td>\n",
" <td>3.135253</td>\n",
" <td>2.928828</td>\n",
" <td>0.229162</td>\n",
" <td>0.164930</td>\n",
" <td>0.157395</td>\n",
" <td>0.179806</td>\n",
" <td>0.149249</td>\n",
" <td>0.191109</td>\n",
" <td>0.265386</td>\n",
" <td>0.142651</td>\n",
" <td>0.454700</td>\n",
" <td>0.579712</td>\n",
" <td>0.848356</td>\n",
" <td>1.000000</td>\n",
" <td>0.463203</td>\n",
" <td>5.546113</td>\n",
" <td>0.853711</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>325</td>\n",
" <td>3.143012</td>\n",
" <td>2.936466</td>\n",
" <td>0.236267</td>\n",
" <td>0.164353</td>\n",
" <td>0.159323</td>\n",
" <td>0.184114</td>\n",
" <td>0.155579</td>\n",
" <td>0.187519</td>\n",
" <td>0.275057</td>\n",
" <td>0.150313</td>\n",
" <td>0.471154</td>\n",
" <td>0.579462</td>\n",
" <td>0.854719</td>\n",
" <td>0.999788</td>\n",
" <td>0.462482</td>\n",
" <td>5.542926</td>\n",
" <td>0.856801</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>350</td>\n",
" <td>3.149240</td>\n",
" <td>2.943003</td>\n",
" <td>0.238494</td>\n",
" <td>0.170851</td>\n",
" <td>0.161939</td>\n",
" <td>0.185788</td>\n",
" <td>0.155794</td>\n",
" <td>0.195913</td>\n",
" <td>0.278565</td>\n",
" <td>0.152289</td>\n",
" <td>0.479033</td>\n",
" <td>0.582719</td>\n",
" <td>0.868505</td>\n",
" <td>1.000000</td>\n",
" <td>0.474026</td>\n",
" <td>5.543713</td>\n",
" <td>0.854430</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>375</td>\n",
" <td>3.158746</td>\n",
" <td>2.953578</td>\n",
" <td>0.231071</td>\n",
" <td>0.157607</td>\n",
" <td>0.153557</td>\n",
" <td>0.178314</td>\n",
" <td>0.148069</td>\n",
" <td>0.174575</td>\n",
" <td>0.266733</td>\n",
" <td>0.144945</td>\n",
" <td>0.460581</td>\n",
" <td>0.576058</td>\n",
" <td>0.846235</td>\n",
" <td>1.000000</td>\n",
" <td>0.467532</td>\n",
" <td>5.522817</td>\n",
" <td>0.855222</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"</div>"
],
"text/plain": [
" Factors RMSE MAE precision recall F_1 F_05 \\\n",
"0 25 2.844919 2.620125 0.112513 0.118879 0.097878 0.100372 \n",
"0 50 2.898551 2.678081 0.144645 0.135718 0.116720 0.123745 \n",
"0 75 2.946453 2.728472 0.168081 0.149663 0.131850 0.141945 \n",
"0 100 2.983337 2.766327 0.186957 0.159899 0.143370 0.156146 \n",
"0 125 3.013649 2.798792 0.195440 0.158716 0.144700 0.159963 \n",
"0 150 3.042673 2.830717 0.210180 0.164005 0.151327 0.169325 \n",
"0 175 3.064257 2.852890 0.215589 0.165439 0.153855 0.172823 \n",
"0 200 3.084422 2.874424 0.219618 0.162595 0.153513 0.174288 \n",
"0 225 3.097204 2.888454 0.227678 0.168651 0.159966 0.181384 \n",
"0 250 3.111146 2.902813 0.232131 0.169147 0.159200 0.181969 \n",
"0 275 3.124854 2.916841 0.234571 0.171622 0.162598 0.185026 \n",
"0 300 3.135253 2.928828 0.229162 0.164930 0.157395 0.179806 \n",
"0 325 3.143012 2.936466 0.236267 0.164353 0.159323 0.184114 \n",
"0 350 3.149240 2.943003 0.238494 0.170851 0.161939 0.185788 \n",
"0 375 3.158746 2.953578 0.231071 0.157607 0.153557 0.178314 \n",
"\n",
" precision_super recall_super NDCG mAP MRR LAUC \\\n",
"0 0.066738 0.129517 0.125779 0.052940 0.218140 0.556162 \n",
"0 0.085086 0.147738 0.162899 0.073388 0.281801 0.564729 \n",
"0 0.103112 0.160522 0.192246 0.090485 0.335233 0.571818 \n",
"0 0.118670 0.179116 0.213595 0.103269 0.365040 0.577007 \n",
"0 0.119957 0.174794 0.227569 0.114459 0.399596 0.576467 \n",
"0 0.135944 0.181649 0.243943 0.125804 0.425139 0.579170 \n",
"0 0.136052 0.185028 0.254184 0.130538 0.457603 0.579917 \n",
"0 0.140451 0.186207 0.257191 0.136220 0.449647 0.578527 \n",
"0 0.150858 0.195783 0.267800 0.143532 0.464535 0.581580 \n",
"0 0.147854 0.189227 0.268282 0.144025 0.458117 0.581832 \n",
"0 0.149356 0.185706 0.274896 0.150449 0.474372 0.583088 \n",
"0 0.149249 0.191109 0.265386 0.142651 0.454700 0.579712 \n",
"0 0.155579 0.187519 0.275057 0.150313 0.471154 0.579462 \n",
"0 0.155794 0.195913 0.278565 0.152289 0.479033 0.582719 \n",
"0 0.148069 0.174575 0.266733 0.144945 0.460581 0.576058 \n",
"\n",
" HR Reco in test Test coverage Shannon Gini \n",
"0 0.656416 0.996501 0.709957 6.457332 0.647711 \n",
"0 0.744433 0.999894 0.649351 6.257955 0.706753 \n",
"0 0.797455 0.999788 0.608225 6.053205 0.755007 \n",
"0 0.829268 1.000000 0.561328 5.906052 0.789284 \n",
"0 0.815483 0.999894 0.532468 5.782155 0.813818 \n",
"0 0.847296 1.000000 0.506494 5.746312 0.823647 \n",
"0 0.857900 1.000000 0.503608 5.631072 0.838636 \n",
"0 0.844115 1.000000 0.488456 5.644481 0.840826 \n",
"0 0.861082 0.999788 0.468254 5.566533 0.850247 \n",
"0 0.853659 0.999894 0.466089 5.603329 0.846631 \n",
"0 0.858961 1.000000 0.466811 5.535912 0.852984 \n",
"0 0.848356 1.000000 0.463203 5.546113 0.853711 \n",
"0 0.854719 0.999788 0.462482 5.542926 0.856801 \n",
"0 0.868505 1.000000 0.474026 5.543713 0.854430 \n",
"0 0.846235 1.000000 0.467532 5.522817 0.855222 "
]
},
"execution_count": 11,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"from tqdm import tqdm\n",
"result=[]\n",
"# confidence scaling used during factor tuning\n",
"# NOTE(review): this is 100 here but 30 in the base-model cell above - confirm\n",
"# which alpha the factor comparison is meant to use\n",
"alpha = 100\n",
"# idiom fix: tqdm iterates np.arange directly; the [i for i in ...] wrapper was redundant\n",
"for factors in tqdm(np.arange(25,400,25)):\n",
"    # reload and rebuild from scratch each iteration so runs are independent\n",
"    train_read=pd.read_csv('./Datasets/ml-100k/train.csv', sep='\\t', header=None)\n",
"    test_read=pd.read_csv('./Datasets/ml-100k/test.csv', sep='\\t', header=None)\n",
"    train_ui, test_ui, user_code_id, user_id_code, item_code_id, item_id_code = helpers.data_to_csr(train_read, test_read)\n",
"    \n",
"    train_ui*=alpha\n",
"    train_iu=train_ui.transpose().tocsr()\n",
"    \n",
"    model = implicit.als.AlternatingLeastSquares(factors=factors, regularization=0.1, iterations=10)\n",
"    model.fit(train_iu, show_progress=False)\n",
"    \n",
"    reco=top_k_recommendations(model, user_code_id, item_code_id, topK=10)\n",
"    estimations_df=pd.DataFrame(estimate(model, user_code_id, item_code_id, test_ui))\n",
"    \n",
"    to_append=ev.evaluate(test=pd.read_csv('./Datasets/ml-100k/test.csv', sep='\\t', header=None),\n",
"                          estimations_df=estimations_df, \n",
"                          reco=np.array(reco),\n",
"                          super_reactions=[4,5])\n",
"    to_append.insert(0, \"Factors\", factors)\n",
"    result.append(to_append)\n",
"    \n",
"result=pd.concat(result)\n",
"result"
]
},
{
"cell_type": "code",
"execution_count": 12,
"metadata": {},
"outputs": [],
"source": [
"# cleanup: removed dead commented-out matplotlib import (already imported at the top)\n",
"# Plot every evaluation metric as a function of the number of latent factors\n",
"metrics=list(result.columns[[i not in ['Factors'] for i in result.columns]])\n",
"\n",
"charts_per_row=6\n",
"charts_per_column=3\n",
"\n",
"fig, axes = plt.subplots(nrows=charts_per_row, ncols=charts_per_column,figsize=(18, 7*charts_per_row ))\n",
"import itertools\n",
"# (row, col) positions in the subplot grid, assigned to metrics in order\n",
"to_iter=[i for i in itertools.product(range(charts_per_row), range(charts_per_column))]\n",
"\n",
"for i in range(len(metrics)):\n",
"    df=result[['Factors', metrics[i]]]\n",
"    df.plot(ax=axes[to_iter[i]], title=metrics[i], x=0, y=1)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Alpha"
]
},
{
"cell_type": "code",
"execution_count": 13,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
" 0%| | 0/13 [00:00<?, ?it/s]\n",
"943it [00:00, 10163.95it/s]\n",
" 8%|▊ | 1/13 [00:03<00:42, 3.51s/it]\n",
"943it [00:00, 9455.21it/s]\n",
" 15%|█▌ | 2/13 [00:06<00:38, 3.46s/it]\n",
"0it [00:00, ?it/s]\u001b[A\n",
"943it [00:00, 9093.94it/s]\u001b[A\n",
" 23%|██▎ | 3/13 [00:10<00:34, 3.46s/it]\n",
"943it [00:00, 9595.32it/s]\n",
" 31%|███ | 4/13 [00:13<00:31, 3.51s/it]\n",
"943it [00:00, 9455.26it/s]\n",
" 38%|███▊ | 5/13 [00:17<00:28, 3.61s/it]\n",
"943it [00:00, 9355.27it/s]\n",
" 46%|████▌ | 6/13 [00:21<00:25, 3.69s/it]\n",
"0it [00:00, ?it/s]\u001b[A\n",
"943it [00:00, 8472.03it/s]\u001b[A\n",
" 54%|█████▍ | 7/13 [00:25<00:22, 3.79s/it]\n",
"943it [00:00, 9499.24it/s]\n",
" 62%|██████▏ | 8/13 [00:29<00:19, 3.87s/it]\n",
"0it [00:00, ?it/s]\u001b[A\n",
"943it [00:00, 8672.07it/s]\u001b[A\n",
" 69%|██████▉ | 9/13 [00:34<00:16, 4.07s/it]\n",
"0it [00:00, ?it/s]\u001b[A\n",
"943it [00:00, 8752.60it/s]\u001b[A\n",
" 77%|███████▋ | 10/13 [00:38<00:12, 4.15s/it]\n",
"0it [00:00, ?it/s]\u001b[A\n",
"943it [00:00, 8081.43it/s]\u001b[A\n",
" 85%|████████▍ | 11/13 [00:42<00:08, 4.21s/it]\n",
"0it [00:00, ?it/s]\u001b[A\n",
"943it [00:00, 7386.88it/s]\u001b[A\n",
" 92%|█████████▏| 12/13 [00:47<00:04, 4.27s/it]\n",
"0it [00:00, ?it/s]\u001b[A\n",
"943it [00:00, 8754.84it/s]\u001b[A\n",
"100%|██████████| 13/13 [00:52<00:00, 4.03s/it]\n"
]
},
{
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>Alpha</th>\n",
" <th>RMSE</th>\n",
" <th>MAE</th>\n",
" <th>precision</th>\n",
" <th>recall</th>\n",
" <th>F_1</th>\n",
" <th>F_05</th>\n",
" <th>precision_super</th>\n",
" <th>recall_super</th>\n",
" <th>NDCG</th>\n",
" <th>mAP</th>\n",
" <th>MRR</th>\n",
" <th>LAUC</th>\n",
" <th>HR</th>\n",
" <th>Reco in test</th>\n",
" <th>Test coverage</th>\n",
" <th>Shannon</th>\n",
" <th>Gini</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>1</td>\n",
" <td>3.667964</td>\n",
" <td>3.489818</td>\n",
" <td>0.094380</td>\n",
" <td>0.070257</td>\n",
" <td>0.065728</td>\n",
" <td>0.074445</td>\n",
" <td>0.051502</td>\n",
" <td>0.070147</td>\n",
" <td>0.113450</td>\n",
" <td>0.049410</td>\n",
" <td>0.251617</td>\n",
" <td>0.531850</td>\n",
" <td>0.569459</td>\n",
" <td>0.993955</td>\n",
" <td>0.737374</td>\n",
" <td>6.690952</td>\n",
" <td>0.542274</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>3</td>\n",
" <td>3.627294</td>\n",
" <td>3.448283</td>\n",
" <td>0.148780</td>\n",
" <td>0.103792</td>\n",
" <td>0.099159</td>\n",
" <td>0.114233</td>\n",
" <td>0.093455</td>\n",
" <td>0.110666</td>\n",
" <td>0.181431</td>\n",
" <td>0.089470</td>\n",
" <td>0.371873</td>\n",
" <td>0.548831</td>\n",
" <td>0.712619</td>\n",
" <td>0.994380</td>\n",
" <td>0.711400</td>\n",
" <td>6.639888</td>\n",
" <td>0.567716</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>7</td>\n",
" <td>3.554798</td>\n",
" <td>3.373876</td>\n",
" <td>0.219936</td>\n",
" <td>0.150798</td>\n",
" <td>0.145514</td>\n",
" <td>0.168680</td>\n",
" <td>0.144635</td>\n",
" <td>0.171680</td>\n",
" <td>0.263177</td>\n",
" <td>0.145537</td>\n",
" <td>0.472968</td>\n",
" <td>0.572621</td>\n",
" <td>0.816543</td>\n",
" <td>0.997773</td>\n",
" <td>0.631313</td>\n",
" <td>6.305570</td>\n",
" <td>0.698148</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>10</td>\n",
" <td>3.510181</td>\n",
" <td>3.327658</td>\n",
" <td>0.247826</td>\n",
" <td>0.166669</td>\n",
" <td>0.161966</td>\n",
" <td>0.188961</td>\n",
" <td>0.161266</td>\n",
" <td>0.184196</td>\n",
" <td>0.298395</td>\n",
" <td>0.172603</td>\n",
" <td>0.527587</td>\n",
" <td>0.580683</td>\n",
" <td>0.845175</td>\n",
" <td>0.998091</td>\n",
" <td>0.588745</td>\n",
" <td>6.108851</td>\n",
" <td>0.748855</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>30</td>\n",
" <td>3.355511</td>\n",
" <td>3.164379</td>\n",
" <td>0.275928</td>\n",
" <td>0.193550</td>\n",
" <td>0.186070</td>\n",
" <td>0.214375</td>\n",
" <td>0.183262</td>\n",
" <td>0.224550</td>\n",
" <td>0.332232</td>\n",
" <td>0.198190</td>\n",
" <td>0.556671</td>\n",
" <td>0.594247</td>\n",
" <td>0.876988</td>\n",
" <td>0.999788</td>\n",
" <td>0.479798</td>\n",
" <td>5.570110</td>\n",
" <td>0.844550</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>50</td>\n",
" <td>3.273365</td>\n",
" <td>3.076256</td>\n",
" <td>0.265005</td>\n",
" <td>0.186234</td>\n",
" <td>0.180388</td>\n",
" <td>0.207058</td>\n",
" <td>0.176288</td>\n",
" <td>0.219204</td>\n",
" <td>0.310446</td>\n",
" <td>0.177237</td>\n",
" <td>0.521655</td>\n",
" <td>0.590528</td>\n",
" <td>0.884411</td>\n",
" <td>0.999894</td>\n",
" <td>0.476912</td>\n",
" <td>5.495722</td>\n",
" <td>0.854628</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>70</td>\n",
" <td>3.220208</td>\n",
" <td>3.018475</td>\n",
" <td>0.247508</td>\n",
" <td>0.174676</td>\n",
" <td>0.167792</td>\n",
" <td>0.192897</td>\n",
" <td>0.162661</td>\n",
" <td>0.203997</td>\n",
" <td>0.296658</td>\n",
" <td>0.167366</td>\n",
" <td>0.522179</td>\n",
" <td>0.584690</td>\n",
" <td>0.869565</td>\n",
" <td>1.000000</td>\n",
" <td>0.464646</td>\n",
" <td>5.532033</td>\n",
" <td>0.853659</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>90</td>\n",
" <td>3.177784</td>\n",
" <td>2.972935</td>\n",
" <td>0.244327</td>\n",
" <td>0.174771</td>\n",
" <td>0.165438</td>\n",
" <td>0.190194</td>\n",
" <td>0.161266</td>\n",
" <td>0.198930</td>\n",
" <td>0.289373</td>\n",
" <td>0.160604</td>\n",
" <td>0.498875</td>\n",
" <td>0.584710</td>\n",
" <td>0.871686</td>\n",
" <td>0.999894</td>\n",
" <td>0.461760</td>\n",
" <td>5.495266</td>\n",
" <td>0.857651</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>110</td>\n",
" <td>3.144328</td>\n",
" <td>2.937850</td>\n",
" <td>0.234995</td>\n",
" <td>0.164141</td>\n",
" <td>0.158510</td>\n",
" <td>0.183029</td>\n",
" <td>0.151717</td>\n",
" <td>0.185931</td>\n",
" <td>0.268423</td>\n",
" <td>0.144579</td>\n",
" <td>0.450377</td>\n",
" <td>0.579336</td>\n",
" <td>0.856840</td>\n",
" <td>0.999894</td>\n",
" <td>0.453824</td>\n",
" <td>5.518412</td>\n",
" <td>0.857772</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>130</td>\n",
" <td>3.112180</td>\n",
" <td>2.902455</td>\n",
" <td>0.228950</td>\n",
" <td>0.164122</td>\n",
" <td>0.156619</td>\n",
" <td>0.179336</td>\n",
" <td>0.147210</td>\n",
" <td>0.188450</td>\n",
" <td>0.265865</td>\n",
" <td>0.142502</td>\n",
" <td>0.459092</td>\n",
" <td>0.579307</td>\n",
" <td>0.856840</td>\n",
" <td>1.000000</td>\n",
" <td>0.453824</td>\n",
" <td>5.475153</td>\n",
" <td>0.864239</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>150</td>\n",
" <td>3.092145</td>\n",
" <td>2.882119</td>\n",
" <td>0.219830</td>\n",
" <td>0.154060</td>\n",
" <td>0.148340</td>\n",
" <td>0.171206</td>\n",
" <td>0.141416</td>\n",
" <td>0.174732</td>\n",
" <td>0.256054</td>\n",
" <td>0.136146</td>\n",
" <td>0.449823</td>\n",
" <td>0.574245</td>\n",
" <td>0.841994</td>\n",
" <td>1.000000</td>\n",
" <td>0.461760</td>\n",
" <td>5.489169</td>\n",
" <td>0.859261</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>170</td>\n",
" <td>3.072217</td>\n",
" <td>2.860201</td>\n",
" <td>0.221951</td>\n",
" <td>0.155889</td>\n",
" <td>0.149070</td>\n",
" <td>0.172317</td>\n",
" <td>0.138412</td>\n",
" <td>0.167305</td>\n",
" <td>0.251275</td>\n",
" <td>0.133171</td>\n",
" <td>0.426381</td>\n",
" <td>0.575125</td>\n",
" <td>0.827147</td>\n",
" <td>0.999894</td>\n",
" <td>0.467532</td>\n",
" <td>5.562501</td>\n",
" <td>0.853246</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>190</td>\n",
" <td>3.056080</td>\n",
" <td>2.843538</td>\n",
" <td>0.208696</td>\n",
" <td>0.145416</td>\n",
" <td>0.140349</td>\n",
" <td>0.162318</td>\n",
" <td>0.134227</td>\n",
" <td>0.157974</td>\n",
" <td>0.238459</td>\n",
" <td>0.122803</td>\n",
" <td>0.423420</td>\n",
" <td>0.569854</td>\n",
" <td>0.818664</td>\n",
" <td>0.999894</td>\n",
" <td>0.473304</td>\n",
" <td>5.438078</td>\n",
" <td>0.867098</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"</div>"
],
"text/plain": [
" Alpha RMSE MAE precision recall F_1 F_05 \\\n",
"0 1 3.667964 3.489818 0.094380 0.070257 0.065728 0.074445 \n",
"0 3 3.627294 3.448283 0.148780 0.103792 0.099159 0.114233 \n",
"0 7 3.554798 3.373876 0.219936 0.150798 0.145514 0.168680 \n",
"0 10 3.510181 3.327658 0.247826 0.166669 0.161966 0.188961 \n",
"0 30 3.355511 3.164379 0.275928 0.193550 0.186070 0.214375 \n",
"0 50 3.273365 3.076256 0.265005 0.186234 0.180388 0.207058 \n",
"0 70 3.220208 3.018475 0.247508 0.174676 0.167792 0.192897 \n",
"0 90 3.177784 2.972935 0.244327 0.174771 0.165438 0.190194 \n",
"0 110 3.144328 2.937850 0.234995 0.164141 0.158510 0.183029 \n",
"0 130 3.112180 2.902455 0.228950 0.164122 0.156619 0.179336 \n",
"0 150 3.092145 2.882119 0.219830 0.154060 0.148340 0.171206 \n",
"0 170 3.072217 2.860201 0.221951 0.155889 0.149070 0.172317 \n",
"0 190 3.056080 2.843538 0.208696 0.145416 0.140349 0.162318 \n",
"\n",
" precision_super recall_super NDCG mAP MRR LAUC \\\n",
"0 0.051502 0.070147 0.113450 0.049410 0.251617 0.531850 \n",
"0 0.093455 0.110666 0.181431 0.089470 0.371873 0.548831 \n",
"0 0.144635 0.171680 0.263177 0.145537 0.472968 0.572621 \n",
"0 0.161266 0.184196 0.298395 0.172603 0.527587 0.580683 \n",
"0 0.183262 0.224550 0.332232 0.198190 0.556671 0.594247 \n",
"0 0.176288 0.219204 0.310446 0.177237 0.521655 0.590528 \n",
"0 0.162661 0.203997 0.296658 0.167366 0.522179 0.584690 \n",
"0 0.161266 0.198930 0.289373 0.160604 0.498875 0.584710 \n",
"0 0.151717 0.185931 0.268423 0.144579 0.450377 0.579336 \n",
"0 0.147210 0.188450 0.265865 0.142502 0.459092 0.579307 \n",
"0 0.141416 0.174732 0.256054 0.136146 0.449823 0.574245 \n",
"0 0.138412 0.167305 0.251275 0.133171 0.426381 0.575125 \n",
"0 0.134227 0.157974 0.238459 0.122803 0.423420 0.569854 \n",
"\n",
" HR Reco in test Test coverage Shannon Gini \n",
"0 0.569459 0.993955 0.737374 6.690952 0.542274 \n",
"0 0.712619 0.994380 0.711400 6.639888 0.567716 \n",
"0 0.816543 0.997773 0.631313 6.305570 0.698148 \n",
"0 0.845175 0.998091 0.588745 6.108851 0.748855 \n",
"0 0.876988 0.999788 0.479798 5.570110 0.844550 \n",
"0 0.884411 0.999894 0.476912 5.495722 0.854628 \n",
"0 0.869565 1.000000 0.464646 5.532033 0.853659 \n",
"0 0.871686 0.999894 0.461760 5.495266 0.857651 \n",
"0 0.856840 0.999894 0.453824 5.518412 0.857772 \n",
"0 0.856840 1.000000 0.453824 5.475153 0.864239 \n",
"0 0.841994 1.000000 0.461760 5.489169 0.859261 \n",
"0 0.827147 0.999894 0.467532 5.562501 0.853246 \n",
"0 0.818664 0.999894 0.473304 5.438078 0.867098 "
]
},
"execution_count": 13,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"from tqdm import tqdm\n",
"result=[]\n",
"# bug fix: `factors` previously leaked in from the last iteration of the\n",
"# factors-tuning cell (375 on a stale kernel, NameError on a fresh one);\n",
"# pin it to the base model's value so this cell is self-contained\n",
"factors = 200\n",
"for alpha in tqdm([1, 3, 7]+[i for i in np.arange(10,200,20)]):\n",
"    # reload and rebuild from scratch each iteration so runs are independent\n",
"    train_read=pd.read_csv('./Datasets/ml-100k/train.csv', sep='\\t', header=None)\n",
"    test_read=pd.read_csv('./Datasets/ml-100k/test.csv', sep='\\t', header=None)\n",
"    train_ui, test_ui, user_code_id, user_id_code, item_code_id, item_id_code = helpers.data_to_csr(train_read, test_read)\n",
"    \n",
"    train_ui*=alpha\n",
"    train_iu=train_ui.transpose().tocsr()\n",
"    \n",
"    model = implicit.als.AlternatingLeastSquares(factors=factors, regularization=0.1, iterations=10)\n",
"    model.fit(train_iu, show_progress=False)\n",
"    \n",
"    reco=top_k_recommendations(model, user_code_id, item_code_id, topK=10)\n",
"    estimations_df=pd.DataFrame(estimate(model, user_code_id, item_code_id, test_ui))\n",
"    \n",
"    to_append=ev.evaluate(test=pd.read_csv('./Datasets/ml-100k/test.csv', sep='\\t', header=None),\n",
"                          estimations_df=estimations_df, \n",
"                          reco=np.array(reco),\n",
"                          super_reactions=[4,5])\n",
"    to_append.insert(0, \"Alpha\", alpha)\n",
"    result.append(to_append)\n",
"    \n",
"result=pd.concat(result)\n",
"result"
]
},
{
"cell_type": "code",
"execution_count": 14,
"metadata": {},
"outputs": [],
"source": [
"### import matplotlib.pyplot as plt\n",
"\n",
"metrics=list(result.columns[[i not in ['Alpha'] for i in result.columns]])\n",
"\n",
"charts_per_row=6\n",
"charts_per_column=3\n",
"\n",
"fig, axes = plt.subplots(nrows=charts_per_row, ncols=charts_per_column,figsize=(18, 7*charts_per_row ))\n",
"import itertools\n",
"to_iter=[i for i in itertools.product(range(charts_per_row), range(charts_per_column))]\n",
"\n",
"for i in range(len(metrics)):\n",
" df=result[['Alpha', metrics[i]]]\n",
" df.plot(ax=axes[to_iter[i]], title=metrics[i], x=0, y=1)"
]
},
{
"cell_type": "code",
"execution_count": 15,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"943it [00:00, 9091.58it/s]\n",
"943it [00:00, 9747.68it/s]\n",
"943it [00:00, 9361.64it/s]\n",
"943it [00:00, 9223.93it/s]\n",
"943it [00:00, 8151.04it/s]\n",
"943it [00:00, 8836.62it/s]\n",
"943it [00:00, 8294.04it/s]\n",
"943it [00:00, 9648.19it/s]\n",
"943it [00:00, 9412.17it/s]\n",
"943it [00:00, 9747.61it/s]\n",
"943it [00:00, 9849.07it/s]\n",
"943it [00:00, 9822.92it/s]\n",
"943it [00:00, 9949.66it/s]\n",
"943it [00:00, 10058.44it/s]\n",
"943it [00:00, 10868.01it/s]\n",
"943it [00:00, 10623.97it/s]\n",
"943it [00:00, 8674.55it/s]\n",
"943it [00:00, 6832.84it/s]\n"
]
},
{
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>Model</th>\n",
" <th>RMSE</th>\n",
" <th>MAE</th>\n",
" <th>precision</th>\n",
" <th>recall</th>\n",
" <th>F_1</th>\n",
" <th>F_05</th>\n",
" <th>precision_super</th>\n",
" <th>recall_super</th>\n",
" <th>NDCG</th>\n",
" <th>mAP</th>\n",
" <th>MRR</th>\n",
" <th>LAUC</th>\n",
" <th>HR</th>\n",
" <th>Reco in test</th>\n",
" <th>Test coverage</th>\n",
" <th>Shannon</th>\n",
" <th>Gini</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Self_RP3Beta</td>\n",
" <td>3.704589</td>\n",
" <td>3.529397</td>\n",
" <td>0.286744</td>\n",
" <td>0.196524</td>\n",
" <td>0.191117</td>\n",
" <td>0.221375</td>\n",
" <td>0.213948</td>\n",
" <td>0.251263</td>\n",
" <td>0.344598</td>\n",
" <td>0.207836</td>\n",
" <td>0.587953</td>\n",
" <td>0.595770</td>\n",
" <td>0.885472</td>\n",
" <td>0.998197</td>\n",
" <td>0.193362</td>\n",
" <td>4.291821</td>\n",
" <td>0.960775</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Self_P3</td>\n",
" <td>3.702446</td>\n",
" <td>3.527273</td>\n",
" <td>0.282185</td>\n",
" <td>0.192092</td>\n",
" <td>0.186749</td>\n",
" <td>0.216980</td>\n",
" <td>0.204185</td>\n",
" <td>0.240096</td>\n",
" <td>0.339114</td>\n",
" <td>0.204905</td>\n",
" <td>0.572157</td>\n",
" <td>0.593544</td>\n",
" <td>0.875928</td>\n",
" <td>1.000000</td>\n",
" <td>0.077201</td>\n",
" <td>3.875892</td>\n",
" <td>0.974947</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Ready_ImplicitALS</td>\n",
" <td>3.274971</td>\n",
" <td>3.076091</td>\n",
" <td>0.246766</td>\n",
" <td>0.180284</td>\n",
" <td>0.171181</td>\n",
" <td>0.194935</td>\n",
" <td>0.163090</td>\n",
" <td>0.205416</td>\n",
" <td>0.298220</td>\n",
" <td>0.166226</td>\n",
" <td>0.520157</td>\n",
" <td>0.587509</td>\n",
" <td>0.878049</td>\n",
" <td>0.999788</td>\n",
" <td>0.508658</td>\n",
" <td>5.756947</td>\n",
" <td>0.819903</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Self_TopPop</td>\n",
" <td>2.508258</td>\n",
" <td>2.217909</td>\n",
" <td>0.188865</td>\n",
" <td>0.116919</td>\n",
" <td>0.118732</td>\n",
" <td>0.141584</td>\n",
" <td>0.130472</td>\n",
" <td>0.137473</td>\n",
" <td>0.214651</td>\n",
" <td>0.111707</td>\n",
" <td>0.400939</td>\n",
" <td>0.555546</td>\n",
" <td>0.765642</td>\n",
" <td>1.000000</td>\n",
" <td>0.038961</td>\n",
" <td>3.159079</td>\n",
" <td>0.987317</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Ready_SVD</td>\n",
" <td>0.949165</td>\n",
" <td>0.746667</td>\n",
" <td>0.093955</td>\n",
" <td>0.044969</td>\n",
" <td>0.051197</td>\n",
" <td>0.065474</td>\n",
" <td>0.083906</td>\n",
" <td>0.073996</td>\n",
" <td>0.104672</td>\n",
" <td>0.048211</td>\n",
" <td>0.220757</td>\n",
" <td>0.519187</td>\n",
" <td>0.483563</td>\n",
" <td>0.997985</td>\n",
" <td>0.204906</td>\n",
" <td>4.408913</td>\n",
" <td>0.954288</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Self_SVD</td>\n",
" <td>0.916330</td>\n",
" <td>0.720153</td>\n",
" <td>0.103393</td>\n",
" <td>0.044455</td>\n",
" <td>0.053177</td>\n",
" <td>0.070073</td>\n",
" <td>0.093884</td>\n",
" <td>0.079366</td>\n",
" <td>0.107792</td>\n",
" <td>0.051281</td>\n",
" <td>0.200210</td>\n",
" <td>0.518957</td>\n",
" <td>0.475080</td>\n",
" <td>0.853022</td>\n",
" <td>0.147186</td>\n",
" <td>3.911356</td>\n",
" <td>0.971196</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Ready_Baseline</td>\n",
" <td>0.949459</td>\n",
" <td>0.752487</td>\n",
" <td>0.091410</td>\n",
" <td>0.037652</td>\n",
" <td>0.046030</td>\n",
" <td>0.061286</td>\n",
" <td>0.079614</td>\n",
" <td>0.056463</td>\n",
" <td>0.095957</td>\n",
" <td>0.043178</td>\n",
" <td>0.198193</td>\n",
" <td>0.515501</td>\n",
" <td>0.437964</td>\n",
" <td>1.000000</td>\n",
" <td>0.033911</td>\n",
" <td>2.836513</td>\n",
" <td>0.991139</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Ready_SVDBiased</td>\n",
" <td>0.938146</td>\n",
" <td>0.739917</td>\n",
" <td>0.086532</td>\n",
" <td>0.037067</td>\n",
" <td>0.044832</td>\n",
" <td>0.058877</td>\n",
" <td>0.078004</td>\n",
" <td>0.057865</td>\n",
" <td>0.094583</td>\n",
" <td>0.043013</td>\n",
" <td>0.202391</td>\n",
" <td>0.515202</td>\n",
" <td>0.433722</td>\n",
" <td>0.996076</td>\n",
" <td>0.166667</td>\n",
" <td>4.168354</td>\n",
" <td>0.964092</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Self_GlobalAvg</td>\n",
" <td>1.125760</td>\n",
" <td>0.943534</td>\n",
" <td>0.061188</td>\n",
" <td>0.025968</td>\n",
" <td>0.031383</td>\n",
" <td>0.041343</td>\n",
" <td>0.040558</td>\n",
" <td>0.032107</td>\n",
" <td>0.067695</td>\n",
" <td>0.027470</td>\n",
" <td>0.171187</td>\n",
" <td>0.509546</td>\n",
" <td>0.384942</td>\n",
" <td>1.000000</td>\n",
" <td>0.025974</td>\n",
" <td>2.711772</td>\n",
" <td>0.992003</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Ready_Random</td>\n",
" <td>1.510030</td>\n",
" <td>1.211848</td>\n",
" <td>0.050053</td>\n",
" <td>0.022367</td>\n",
" <td>0.025984</td>\n",
" <td>0.033727</td>\n",
" <td>0.030687</td>\n",
" <td>0.023255</td>\n",
" <td>0.055392</td>\n",
" <td>0.021602</td>\n",
" <td>0.137690</td>\n",
" <td>0.507713</td>\n",
" <td>0.338282</td>\n",
" <td>0.987911</td>\n",
" <td>0.187590</td>\n",
" <td>5.111878</td>\n",
" <td>0.906685</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Ready_I-KNN</td>\n",
" <td>1.030386</td>\n",
" <td>0.813067</td>\n",
" <td>0.026087</td>\n",
" <td>0.006908</td>\n",
" <td>0.010593</td>\n",
" <td>0.016046</td>\n",
" <td>0.021137</td>\n",
" <td>0.009522</td>\n",
" <td>0.024214</td>\n",
" <td>0.008958</td>\n",
" <td>0.048068</td>\n",
" <td>0.499885</td>\n",
" <td>0.154825</td>\n",
" <td>0.402333</td>\n",
" <td>0.434343</td>\n",
" <td>5.133650</td>\n",
" <td>0.877999</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Ready_I-KNNWithZScore</td>\n",
" <td>0.957701</td>\n",
" <td>0.752387</td>\n",
" <td>0.003712</td>\n",
" <td>0.001994</td>\n",
" <td>0.002380</td>\n",
" <td>0.002919</td>\n",
" <td>0.003433</td>\n",
" <td>0.002401</td>\n",
" <td>0.005137</td>\n",
" <td>0.002158</td>\n",
" <td>0.016458</td>\n",
" <td>0.497349</td>\n",
" <td>0.027572</td>\n",
" <td>0.389926</td>\n",
" <td>0.067821</td>\n",
" <td>2.475747</td>\n",
" <td>0.992793</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Ready_I-KNNWithMeans</td>\n",
" <td>0.935327</td>\n",
" <td>0.737424</td>\n",
" <td>0.002545</td>\n",
" <td>0.000755</td>\n",
" <td>0.001105</td>\n",
" <td>0.001602</td>\n",
" <td>0.002253</td>\n",
" <td>0.000930</td>\n",
" <td>0.003444</td>\n",
" <td>0.001362</td>\n",
" <td>0.011760</td>\n",
" <td>0.496724</td>\n",
" <td>0.021209</td>\n",
" <td>0.482821</td>\n",
" <td>0.059885</td>\n",
" <td>2.232578</td>\n",
" <td>0.994487</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Ready_I-KNNBaseline</td>\n",
" <td>0.935327</td>\n",
" <td>0.737424</td>\n",
" <td>0.002545</td>\n",
" <td>0.000755</td>\n",
" <td>0.001105</td>\n",
" <td>0.001602</td>\n",
" <td>0.002253</td>\n",
" <td>0.000930</td>\n",
" <td>0.003444</td>\n",
" <td>0.001362</td>\n",
" <td>0.011760</td>\n",
" <td>0.496724</td>\n",
" <td>0.021209</td>\n",
" <td>0.482821</td>\n",
" <td>0.059885</td>\n",
" <td>2.232578</td>\n",
" <td>0.994487</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Ready_U-KNN</td>\n",
" <td>1.023495</td>\n",
" <td>0.807913</td>\n",
" <td>0.000742</td>\n",
" <td>0.000205</td>\n",
" <td>0.000305</td>\n",
" <td>0.000449</td>\n",
" <td>0.000536</td>\n",
" <td>0.000198</td>\n",
" <td>0.000845</td>\n",
" <td>0.000274</td>\n",
" <td>0.002744</td>\n",
" <td>0.496441</td>\n",
" <td>0.007423</td>\n",
" <td>0.602121</td>\n",
" <td>0.010823</td>\n",
" <td>2.089186</td>\n",
" <td>0.995706</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Self_TopRated</td>\n",
" <td>2.508258</td>\n",
" <td>2.217909</td>\n",
" <td>0.000954</td>\n",
" <td>0.000188</td>\n",
" <td>0.000298</td>\n",
" <td>0.000481</td>\n",
" <td>0.000644</td>\n",
" <td>0.000223</td>\n",
" <td>0.001043</td>\n",
" <td>0.000335</td>\n",
" <td>0.003348</td>\n",
" <td>0.496433</td>\n",
" <td>0.009544</td>\n",
" <td>0.699046</td>\n",
" <td>0.005051</td>\n",
" <td>1.945910</td>\n",
" <td>0.995669</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Self_BaselineUI</td>\n",
" <td>0.967585</td>\n",
" <td>0.762740</td>\n",
" <td>0.000954</td>\n",
" <td>0.000170</td>\n",
" <td>0.000278</td>\n",
" <td>0.000463</td>\n",
" <td>0.000644</td>\n",
" <td>0.000189</td>\n",
" <td>0.000752</td>\n",
" <td>0.000168</td>\n",
" <td>0.001677</td>\n",
" <td>0.496424</td>\n",
" <td>0.009544</td>\n",
" <td>0.600530</td>\n",
" <td>0.005051</td>\n",
" <td>1.803126</td>\n",
" <td>0.996380</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Self_IKNN</td>\n",
" <td>1.018363</td>\n",
" <td>0.808793</td>\n",
" <td>0.000318</td>\n",
" <td>0.000108</td>\n",
" <td>0.000140</td>\n",
" <td>0.000189</td>\n",
" <td>0.000000</td>\n",
" <td>0.000000</td>\n",
" <td>0.000214</td>\n",
" <td>0.000037</td>\n",
" <td>0.000368</td>\n",
" <td>0.496391</td>\n",
" <td>0.003181</td>\n",
" <td>0.392153</td>\n",
" <td>0.115440</td>\n",
" <td>4.174741</td>\n",
" <td>0.965327</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"</div>"
],
"text/plain": [
" Model RMSE MAE precision recall F_1 \\\n",
"0 Self_RP3Beta 3.704589 3.529397 0.286744 0.196524 0.191117 \n",
"0 Self_P3 3.702446 3.527273 0.282185 0.192092 0.186749 \n",
"0 Ready_ImplicitALS 3.274971 3.076091 0.246766 0.180284 0.171181 \n",
"0 Self_TopPop 2.508258 2.217909 0.188865 0.116919 0.118732 \n",
"0 Ready_SVD 0.949165 0.746667 0.093955 0.044969 0.051197 \n",
"0 Self_SVD 0.916330 0.720153 0.103393 0.044455 0.053177 \n",
"0 Ready_Baseline 0.949459 0.752487 0.091410 0.037652 0.046030 \n",
"0 Ready_SVDBiased 0.938146 0.739917 0.086532 0.037067 0.044832 \n",
"0 Self_GlobalAvg 1.125760 0.943534 0.061188 0.025968 0.031383 \n",
"0 Ready_Random 1.510030 1.211848 0.050053 0.022367 0.025984 \n",
"0 Ready_I-KNN 1.030386 0.813067 0.026087 0.006908 0.010593 \n",
"0 Ready_I-KNNWithZScore 0.957701 0.752387 0.003712 0.001994 0.002380 \n",
"0 Ready_I-KNNWithMeans 0.935327 0.737424 0.002545 0.000755 0.001105 \n",
"0 Ready_I-KNNBaseline 0.935327 0.737424 0.002545 0.000755 0.001105 \n",
"0 Ready_U-KNN 1.023495 0.807913 0.000742 0.000205 0.000305 \n",
"0 Self_TopRated 2.508258 2.217909 0.000954 0.000188 0.000298 \n",
"0 Self_BaselineUI 0.967585 0.762740 0.000954 0.000170 0.000278 \n",
"0 Self_IKNN 1.018363 0.808793 0.000318 0.000108 0.000140 \n",
"\n",
" F_05 precision_super recall_super NDCG mAP MRR \\\n",
"0 0.221375 0.213948 0.251263 0.344598 0.207836 0.587953 \n",
"0 0.216980 0.204185 0.240096 0.339114 0.204905 0.572157 \n",
"0 0.194935 0.163090 0.205416 0.298220 0.166226 0.520157 \n",
"0 0.141584 0.130472 0.137473 0.214651 0.111707 0.400939 \n",
"0 0.065474 0.083906 0.073996 0.104672 0.048211 0.220757 \n",
"0 0.070073 0.093884 0.079366 0.107792 0.051281 0.200210 \n",
"0 0.061286 0.079614 0.056463 0.095957 0.043178 0.198193 \n",
"0 0.058877 0.078004 0.057865 0.094583 0.043013 0.202391 \n",
"0 0.041343 0.040558 0.032107 0.067695 0.027470 0.171187 \n",
"0 0.033727 0.030687 0.023255 0.055392 0.021602 0.137690 \n",
"0 0.016046 0.021137 0.009522 0.024214 0.008958 0.048068 \n",
"0 0.002919 0.003433 0.002401 0.005137 0.002158 0.016458 \n",
"0 0.001602 0.002253 0.000930 0.003444 0.001362 0.011760 \n",
"0 0.001602 0.002253 0.000930 0.003444 0.001362 0.011760 \n",
"0 0.000449 0.000536 0.000198 0.000845 0.000274 0.002744 \n",
"0 0.000481 0.000644 0.000223 0.001043 0.000335 0.003348 \n",
"0 0.000463 0.000644 0.000189 0.000752 0.000168 0.001677 \n",
"0 0.000189 0.000000 0.000000 0.000214 0.000037 0.000368 \n",
"\n",
" LAUC HR Reco in test Test coverage Shannon Gini \n",
"0 0.595770 0.885472 0.998197 0.193362 4.291821 0.960775 \n",
"0 0.593544 0.875928 1.000000 0.077201 3.875892 0.974947 \n",
"0 0.587509 0.878049 0.999788 0.508658 5.756947 0.819903 \n",
"0 0.555546 0.765642 1.000000 0.038961 3.159079 0.987317 \n",
"0 0.519187 0.483563 0.997985 0.204906 4.408913 0.954288 \n",
"0 0.518957 0.475080 0.853022 0.147186 3.911356 0.971196 \n",
"0 0.515501 0.437964 1.000000 0.033911 2.836513 0.991139 \n",
"0 0.515202 0.433722 0.996076 0.166667 4.168354 0.964092 \n",
"0 0.509546 0.384942 1.000000 0.025974 2.711772 0.992003 \n",
"0 0.507713 0.338282 0.987911 0.187590 5.111878 0.906685 \n",
"0 0.499885 0.154825 0.402333 0.434343 5.133650 0.877999 \n",
"0 0.497349 0.027572 0.389926 0.067821 2.475747 0.992793 \n",
"0 0.496724 0.021209 0.482821 0.059885 2.232578 0.994487 \n",
"0 0.496724 0.021209 0.482821 0.059885 2.232578 0.994487 \n",
"0 0.496441 0.007423 0.602121 0.010823 2.089186 0.995706 \n",
"0 0.496433 0.009544 0.699046 0.005051 1.945910 0.995669 \n",
"0 0.496424 0.009544 0.600530 0.005051 1.803126 0.996380 \n",
"0 0.496391 0.003181 0.392153 0.115440 4.174741 0.965327 "
]
},
"execution_count": 15,
"metadata": {},
"output_type": "execute_result"
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAABCAAAAkoCAYAAAC6aM7IAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAgAElEQVR4nOzdeVzU1f7H8dcBFBAVFVxB3BVU3BJ3y1xKy7K9zBa3rG7rrdt6W+5t+XW7t70sb4vZaubSplZqaWq5J4IKKioigqAgCiL7+f0xoxeJklRmGHg/H495NN/vOd+Zz5Aevn7mc84x1lpERERERERERCqTl7sDEBEREREREZHqTwkIEREREREREal0SkCIiIiIiIiISKVTAkJEREREREREKp0SECIiIiIiIiJS6ZSAEBEREREREZFKpwSEiIiIiIhINWWMmWaMebwC/bYYY4a4ICSpwZSAkCrPGJNojDlmjMkxxuw3xswwxtR1ts0wxlhjzKVlrnnFeX6887i2MeZFY0yy83V2G2Ne/p33OP54w6UfVETEQzjHzAJjTHCZ89HOsbd1qXP/cJ7rU6bveGNMcZlxN8cY08I1n0JEpGaw1t5mrX26Av26WGuXuSAkqcGUgBBPcYm1ti7QA+gJPFKqbTtw8/EDY4wPcDWws1SfR4DeQB+gHnA+sLG89yj1uPPsfwwRkWpjNzD2+IExJhLwL93BGGOAG4FMSo3TpawqM+7WtdamVGbQIiKeyHl/K+LxlIAQj2Kt3Q98jyMRcdw3wEBjTEPn8UggBthfqk8U8IW1NsU6JFprP3RJ0CIi1dNHwE2ljm8Gyo6rg4EWwD3AdcaY2i6KTUTEIzgryh4xxmw1xhwyxrxvjPEzxgxxVu4+ZIzZD7xvjPEyxjxsjNlpjMkwxnxujGlU6rUGGWN+McZkGWP2lqoEnmGMecb5PNgYM9/ZJ9MYs8IY41UqluHO577OiuIU5+MVY4yvs+14bPcbY9KNManGmAmu/tmJZ1ICQjyKMSYUGAUklDqdB3wNXOc8vonf3gSvBu4zxvzFGBPp/FZORERO32qgvjEmwhjjDVwLfFymz804ksSznMejXRifiIinGAdcCLQDOgKPOc83AxoBrYApwN3AZcB5OJK7h4CpAMaYMOBb4HWgMY4v66LLea/7gWRnn6bAo4Atp9/fgX7O1+mOo4r4sVLtzYBAIASYBEwt9WWgyO9SAkI8xZfGmGxgL5AOPFmm/UPgJmNMII5B+csy7c8Bz+MY4NcD+4wxZcuBv3Rmg48/bjnrn0JEpHo5XgUxAogH9h1vMMbUwTEd7lNrbSEwh99Ow+hXZtzdiYhIzfOGtXavtTYTeJb/TW8rAZ601uZba48BtwJ/t9YmW2vzgX8AVzmnZ4wDllhrZ1prC621Gdba8hIQhUBzoJWz3wprbXkJiHHAU9badGvtAeCfOKbUlX6dp5yvsRDIATqd6Q9Cqj8lIMRTXGatrQcMAcKBkxY+s9auxJHJfQyY7xykS7cXW2unWmsHAg1wDO7TjTERZd6jQanHO5X4eUREqoOPgOuB8fy28uxyoAhY6Dz+BBhljGlcqs/qMuNuu8oOWESkCtpb6vkeHNUNAAestXml2loBXxxP2gJxQDGOSoaWnLz+2e/5D45K4kXGmF3GmId/p18LZyzlxQWQYa0tKnWcC9StwPtLDacEhHgUa+1PwAzghXKaP8ZRVvaHaztYa49Za6fiKFvrfLZjFBGpKay1e3AsRnkRMK9M8804bkaTnPOXZwO1KLVwpYiIAI7kwXFhwPHFeMtWJuwFRpVJ3PpZa/c5206ZxLXWZltr77fWtgUuwTFFeVg5XVNwJDzKi0vktCkBIZ7oFWCEMaZHmfOv4SgDXl72AmPMvc4Fc/yNMT7O6Rf1+O1OGCIi8udMAoZaa4+WOhcCDMOx5kMP/jeH+HnK3w1DRKQmu8MYE+pcUPJR/rduTl
nTgGeNMa0AjDGNjTFjnG2fAMONMdc473WDyrlXxhgz2hjT3rke2hEcFRTF5bzXTOAx53sEA0/w23V+RP40JSDE4zjnoX0IPF7mfKa19offmcd2DHgRx84YB4E7gCuttbtK9fmmzF70X1TSRxARqTastTuttevLnB4MRFtrF1lr9x9/4EgUdzPGdHX2619m3M0xxkS59AOIiLjfp8AiYJfz8czv9HsVx8Lri5xro60G+gJYa5NwVKPdj2Pr42gcid+yOgBLcKzZsAp401q7rJx+z+BYNy0GiAV+/YO4RCrMlP9vNREREREREalMxphEYLK1dom7YxFxBVVAiIiIiIiIiEilUwJCRERERERERCqdpmCIiIiIiIiISKVTBYSIiIiIiIiIVDolIERERERERESk0vm4O4DyBAcH29atW7s7DBGRk2zYsOGgtbaxu+NwBY3DIlIV1aRxGDQWi0jVdCZjcZVMQLRu3Zr168tuKS4i4l7GmD3ujsFVNA6LSFVUk8Zh0FgsIlXTmYzFmoIhIiIiIiIiIpVOCQgRERERERERqXRKQIiIiIiIiIhIpauSa0CUp7CwkOTkZPLy8twdisv5+fkRGhpKrVq13B2KiNRgNWEc1ngrIlVdTRiLK5PGeRH38pgERHJyMvXq1aN169YYY9wdjstYa8nIyCA5OZk2bdq4OxwRqcGq+zis8VZEPEF1H4srk8Z5EffzmCkYeXl5BAUF1biB1hhDUFCQstwi4nbVfRzWeCsip2KMGWmM2WaMSTDGPFxO+zhjTIzz8YsxpnuZdm9jzEZjzPzTjaG6j8WVSeO8iPt5TAICqLEDbU393CJS9VT38ai6fz4ROX3GGG9gKjAK6AyMNcZ0LtNtN3CetbYb8DTwdpn2e4C4sxDLmb5EjaWfnYh7eVQCwt28vb3p0aMHXbt25ZJLLiErKwuAxMREjDE8/vjjJ/oePHiQWrVqceeddwKwbds2hgwZQo8ePYiIiGDKlCkALFu2jMDAQHr06HHisWTJEtd/OBERD2CM4cYbbzxxXFRUROPGjRk9evRJ/caMGUP//v1POvePf/yDkJCQk8bb4+O4iEgF9AESrLW7rLUFwGfAmNIdrLW/WGsPOQ9XA6HH24wxocDFwLsuitdjrF+/nrvvvvt321NSUrjqqqtcGJGIVBYlIP4Ef39/oqOj2bx5M40aNWLq1Kkn2tq2bcv8+f+rpps9ezZdunQ5cXz33Xfz17/+lejoaOLi4rjrrrtOtA0ePJjo6OgTj+HDh7vmA4mIeJiAgAA2b97MsWPHAFi8eDEhISEn9cnKyuLXX38lKyuL3bt3n9R2fBw+/mjQoIHLYhcRjxcC7C11nOw893smAd+WOn4FeBAoOfuhVS3FxcV/qn/v3r157bXXfre9RYsWzJkz50zDEpEqQAmI09S/f3/27dt34tjf35+IiAjWr18PwKxZs7jmmmtOtKemphIaeiIJTmRkpOuCFRGpRkaNGsWCBQsAmDlzJmPHjj2pfe7cuVxyySVcd911fPbZZ+4IUUSqp/Jq9225HY05H0cC4iHn8Wgg3Vq74ZRvYswUY8x6Y8z6AwcOnEm8lSIxMZHw8HBuvvlmunXrxlVXXUVubi6tW7fmqaeeYtCgQcyePZtFixbRv39/evXqxdVXX01OTg4A69atY8CAAXTv3p0+ffqQnZ3NsmXLTlSy/fTTTyeq1Hr27El2djaJiYl07doVcKyBMWHCBCIjI+nZsydLly4FYMaMGVxxxRWMHDmSDh068OCDD7rnByQif8hjdsEo7Z/fbGFrypGz+pqdW9TnyUu6nLojjqzuDz/8wKRJk046f/xmt1mzZnh7e9OiRQtSUlIAx7duQ4cOZcCAAVxwwQVMmDDhxDdvK1asoEePHideZ+7cubRr1+4sfTIRkbPPnePwddddx1NPPcXo0aOJiYlh4sSJrFix4kT7zJkzefLJJ2natClXXXUVjzzyyIm2l19+mY8//hiAhg0bnr
hxFRGpgGSgZanjUCClbCdjTDcc0yxGWWsznKcHApcaYy4C/ID6xpiPrbU3lL3eWvs2zrUjevfuXW6C4zh3jcXbtm3
"text/plain": [
"<Figure size 1296x3024 with 18 Axes>"
]
},
"metadata": {
"needs_background": "light"
},
"output_type": "display_data"
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAABCAAAAkoCAYAAAC6aM7IAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAgAElEQVR4nOzdeXyU5dX/8c/JHpIQtgDZgLDJEkiAgCAo7gUFUSSKtdaK1lqrtmpb7dNabat9am3V2mr92U19WlHBDRUVtOC+sMi+LxFCWELYkgBZr98fM8EQgwSy3JmZ7/v1mldm7vuamTNRL++cOde5zDmHiIiIiIiIiEhzCvM6ABEREREREREJfkpAiIiIiIiIiEizUwJCRERERERERJqdEhAiIiIiIiIi0uyUgBARERERERGRZqcEhIiIiIiIiIg0OyUgREREREREgpSZPW5mdzVg3EozO7MFQpIQpgSEtHpmlmdmh8ysxMx2mNmTZhbvP/ekmTkzu6jOcx72H/+O/3GUmf3RzPL9r7PZzB46xnvU3P7Soh9URCRA+OfMcjPrVOf4Ev/c26PWsXv8x0bUGfsdM6uqM++WmFlKy3wKEZHQ4Jy7wTn3mwaMG+icm98CIUkIUwJCAsVE51w8kA0MAX5W69w64OqaB2YWAeQCG2uN+RmQA4wAEoCzgM/re49at5ua/mOIiASNzcAVNQ/MbBAQW3uAmRlwFbCHWvN0LR/XmXfjnXMFzRm0iEgg8l/figQ8JSAkoDjndgBv4UtE1HgVGG1m7f2PxwHLgB21xgwHXnLOFTifPOfc0y0StIhIcPo/4Nu1Hl8N1J1XTwdSgB8CU80sqoViExEJCP6Ksp+Z2Soz22tm/zKzGDM701+5e4eZ7QD+ZWZhZnanmW00syIze97MOtR6rTFm9pGZ7TOzrbUqgZ80s3v99zuZ2Wv+MXvM7H0zC6sVy7n++9H+iuIC/+1hM4v2n6uJ7XYz22Vm283smpb+3UlgUgJCAoqZpQHjgQ21Dh8GZgFT/Y+/zVcvgj8BbjOzG81skP9bOREROXmfAG3NrL+ZhQOXA/+uM+ZqfEni5/yPJ7RgfCIigeJK4BtAL6Av8Av/8a5AB6A7cD1wC3AxMBZfcncv8CiAmXUD3gD+DCTh+7JuST3vdTuQ7x/TBfgfwNUz7ufASP/rZOGrIv5FrfNdgUQgFbgWeLTWl4Eix6QEhASKl82sGNgK7ALurnP+aeDbZpaIb1J+uc75/wXuxzfBLwS2mVndcuCX/dngmtt3m/xTiIgEl5oqiPOANcC2mhNm1gbfcrhnnHMVwEy+ugxjZJ15dyMiIqHnL865rc65PcB9fLm8rRq42zlX5pw7BHwP+LlzLt85VwbcA0zxL8+4EnjbOTfdOVfhnCtyztWXgKgAkoHu/nHvO+fqS0BcCfzaObfLOVcI/Arfkrrar/Nr/2vMBkqAUxr7i5DgpwSEBIqLnXMJwJlAP+CoxmfOuQ/wZXJ/Abzmn6Rrn69yzj3qnBsNtMM3uf/TzPrXeY92tW5/a8bPIyISDP4P+CbwHb5aeXYJUAnM9j/+DzDezJJqjfmkzrzbq7kDFhFphbbWuv8FvuoGgELn3OFa57oDL9UkbYHVQBW+SoZ0ju5/diwP4KsknmNmm8zszmOMS/HHUl9cAEXOucpajw8C8Q14fwlxSkBIQHHOvQs8CfyhntP/xldW9rW9HZxzh5xzj+IrWxvQ1DGKiIQK59wX+JpRXgC8WOf01fguRrf41y/PACKp1bhSREQAX/KgRjegphlv3cqErcD4OonbGOfcNv+54yZxnXPFzrnbnXM9gYn4liifU8/QAnwJj/riEjlpSkBIIHoYOM/MsuscfwRfGfB7dZ9gZj/yN8yJNbMI//KLBL66E4aIiJyYa4GznXOltY6lAufg6/mQzZdriO+n/t0wRERC2Q/MLM3fUPJ/+LJvTl2PA/eZWXcAM0
sys0n+c/8BzjWzy/zXuh3ruVbGzCaYWW9/P7QD+Cooqup5r+nAL/zv0Qn4JV/t8yNywpSAkIDjX4f2NHBXneN7nHPvHGMd2yHgj/h2xtgN/AC41Dm3qdaYV+vsRf9SM30EEZGg4Zzb6JxbWOfw6cAS59wc59yOmhu+RPFgM8v0jxtVZ94tMbPhLfoBRES89wwwB9jkv917jHF/wtd4fY6/N9onwKkAzrkt+KrRbse39fESfInfuvoAb+Pr2fAx8Jhzbn494+7F1zdtGbAcWPw1cYk0mNX/t5qIiIiIiIg0JzPLA65zzr3tdSwiLUEVECIiIiIiIiLS7JSAEBEREREREZFmpyUYIiIiIiIiItLsVAEhIiIiIiIiIs1OCQgRERERERERaXYRXgdQn06dOrkePXp4HYaIyFEWLVq02zmX5HUcLUHzsIi0RqE0D4PmYhFpnRozF7fKBESPHj1YuLDuluIiIt4ysy+8jqGlaB4WkdYolOZh0FwsIq1TY+ZiLcEQERERERERkWanBISIiIiIiIiINDslIERERERERESk2bXKHhAi4r2Kigry8/M5fPiw16G0uJiYGNLS0oiMjPQ6FBGRegX7HK15WESaQrDPlc2tOeZiJSBEpF75+fkkJCTQo0cPzMzrcFqMc46ioiLy8/PJyMjwOpzjMrMY4D0gGt+cPtM5d3edMWcCrwCb/YdedM79uiXjFJGmFcxzdKDNwyLSegXzXNncmmsu1hIMEanX4cOH6dixY8hN1mZGx44dAylTXgac7ZzLArKBcWY2sp5x7zvnsv03JR9EAlwwz9EBOA+LSCsVzHNlc2uuuVgVECJyTKE6WQfS53bOOaDE/zDSf3PeRSQiLSWQ5qoTFcyfTURaluaTk9ccvztVQIhIqxUeHk52djaZmZlMnDiRffv2AZCXl4eZcddddx0Zu3v3biIjI7npppsAWLt2LWeeeSbZ2dn079+f66+/HoD58+eTmJhIdnb2kdvbb7/d8h+uCZlZuJktAXYBc51zn9YzbJSZLTWzN8xsYAuHKCJByMy46qqrjjyurKwkKSmJCRMmHDVu0qRJjBo16qhj99xzD6mpqUfNxTVzvIiIfL2FCxdyyy23HPN8QUEBU6ZMacGIGk4JCBFptWJjY1myZAkrVqygQ4cOPProo0fO9ezZk9dee+3I4xkzZjBw4Jd/V99yyy3ceuutLFmyhNWrV3PzzTcfOXf66aezZMmSI7dzzz23ZT5QM3HOVTnnsoE0YISZZdYZshjo7l+m8Wfg5fpex8yuN7OFZrawsLCweYMWkYAXFxfHihUrOHToEABz584lNTX1qDH79u1j8eLF7Nu3j82bNx91rmaOrrm1a9euxWIXEWlNqqqqTmh8Tk4OjzzyyDHPp6SkMHPmzMaG1SyUgBCRgDBq1Ci2bdt25HFsbCz9+/dn4cKFADz33HNcdtllR85v376dtLS0I48HDRrUcsF6xDm3D5gPjKtz/IBzrsR/fzYQaWad6nn+E865HOdcTlJSUkuELCIBbvz48bz++usATJ8+nSuuuOKo8y+88AITJ05k6tSpPPvss16EKCLiqby8PPr168fVV1/N4MGDmTJlCgcPHqRHjx78+te/ZsyYMcyYMYM5c+YwatQohg4dSm5uLiUlvhW2CxYs4LTTTiMrK4sRI0ZQXFzM/Pnzj1Sbvfvuu0cqyYYMGUJxcTF5eXlkZvq+jzp8+DDXXHMNgwYNYsiQIcybNw+AJ598ksmTJzNu3Dj69OnDT3/60xb5fagHhIgc169eXcmqggNN+poDUtpy98SGrQSoqqrinXfe4dprrz3qeM0FbdeuXQkPDyclJYWCggLA983a2WefzWmnncb555/PNddcc+Tbtffff5/s7Owjr/PCCy/Qq1evJvpkLcvMkoAK59w+M4sFzgXurzOmK7DTOefMbAS+5HNRy0crIs3Byzl66tSp/PrXv2bChAksW7aMadOm8f777x85P336dO6++2
66dOnClClT+NnPfnbk3EMPPcS///1vANq3b3/kolhEpDl4OVeuXbuWf/zjH4wePZpp06bx2GOPAb5tLj/44AN2797
"text/plain": [
"<Figure size 1296x3024 with 18 Axes>"
]
},
"metadata": {
"needs_background": "light"
},
"output_type": "display_data"
}
],
"source": [
"import evaluation_measures as ev\n",
"\n",
"dir_path=\"Recommendations generated/ml-100k/\"\n",
"super_reactions=[4,5]\n",
"test=pd.read_csv('./Datasets/ml-100k/test.csv', sep='\\t', header=None)\n",
"\n",
"ev.evaluate_all(test, dir_path, super_reactions)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.3"
}
},
"nbformat": 4,
"nbformat_minor": 4
}