warsztaty-B/P5. Graph-based.ipynb

2550 lines
626 KiB
Plaintext
Raw Normal View History

2020-06-13 15:34:33 +02:00
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Self-made RP3-beta"
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"import helpers\n",
"import pandas as pd\n",
"import numpy as np\n",
"import scipy.sparse as sparse\n",
"from collections import defaultdict\n",
"from itertools import chain\n",
"import random\n",
"import time\n",
"import matplotlib.pyplot as plt\n",
"\n",
"# Load the MovieLens 100k train/test split (tab-separated, no header) and\n",
"# convert to CSR matrices plus user/item id <-> matrix-index mappings\n",
"train_read=pd.read_csv('./Datasets/ml-100k/train.csv', sep='\\t', header=None)\n",
"test_read=pd.read_csv('./Datasets/ml-100k/test.csv', sep='\\t', header=None)\n",
"train_ui, test_ui, user_code_id, user_id_code, item_code_id, item_id_code = helpers.data_to_csr(train_read, test_read)"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"class RP3Beta():\n",
"    def fit(self, train_ui, alpha, beta):\n",
"        \"\"\"Fit the RP3-beta graph model.\n",
"\n",
"        We weight our edges by the user's explicit ratings, so if a user rated\n",
"        a movie highly we'll follow that path with higher probability.\n",
"\n",
"        train_ui : scipy.sparse user-item rating matrix\n",
"        alpha    : exponent applied to the transition probabilities\n",
"        beta     : strength of the item-popularity penalty\n",
"        \"\"\"\n",
"        self.train_ui=train_ui\n",
"        self.train_iu=train_ui.transpose()\n",
"\n",
"        self.alpha = alpha\n",
"        self.beta = beta\n",
"\n",
"        # Define Pui (user -> item transition probabilities).\n",
"        # Guard the row sums against users with no ratings to avoid dividing\n",
"        # by zero -- the same guard was previously applied only to Piu below.\n",
"        user_sums=np.vectorize(lambda x: x if x>0 else 1)(self.train_ui.sum(axis=1))\n",
"        Pui=sparse.csr_matrix(self.train_ui/user_sums)\n",
"\n",
"        # Define Piu (item -> user transition probabilities)\n",
"        to_divide=np.vectorize(lambda x: x if x>0 else 1)(self.train_iu.sum(axis=1)) # to avoid dividing by zero\n",
"        Piu=sparse.csr_matrix(self.train_iu/to_divide)\n",
"        item_orders=(self.train_ui>0).sum(axis=0)\n",
"\n",
"        Pui = Pui.power(self.alpha)\n",
"        Piu = Piu.power(self.alpha)\n",
"\n",
"        # Three-step random walk: user -> item -> user -> item\n",
"        P3=Pui*Piu*Pui\n",
"\n",
"        # Popularity penalty: divide each item column by popularity^beta\n",
"        # (zero-popularity items guarded to avoid dividing by zero)\n",
"        P3/=np.power(np.vectorize(lambda x: x if x>0 else 1)(item_orders), self.beta)\n",
"\n",
"        self.estimations=np.array(P3)\n",
"\n",
"    def recommend(self, user_code_id, item_code_id, topK=10):\n",
"        \"\"\"Return top-K unseen items per user as [user, item1, score1, item2, score2, ...].\"\"\"\n",
"        top_k = defaultdict(list)\n",
"        for nb_user, user in enumerate(self.estimations):\n",
"            # items this user already rated in the training set (CSR row slice)\n",
"            user_rated=self.train_ui.indices[self.train_ui.indptr[nb_user]:self.train_ui.indptr[nb_user+1]]\n",
"            for item, score in enumerate(user):\n",
"                if item not in user_rated and not np.isnan(score):\n",
"                    top_k[user_code_id[nb_user]].append((item_code_id[item], score))\n",
"        result=[]\n",
"        # Let's choose k best items in the format: (user, item1, score1, item2, score2, ...)\n",
"        for uid, item_scores in top_k.items():\n",
"            item_scores.sort(key=lambda x: x[1], reverse=True)\n",
"            result.append([uid]+list(chain(*item_scores[:topK])))\n",
"        return result\n",
"\n",
"    def estimate(self, user_code_id, item_code_id, test_ui):\n",
"        \"\"\"Score every (user, item) pair present in the test matrix; NaN scores fall back to 1.\"\"\"\n",
"        result=[]\n",
"        for user, item in zip(*test_ui.nonzero()):\n",
"            result.append([user_code_id[user], item_code_id[item],\n",
"                           self.estimations[user,item] if not np.isnan(self.estimations[user,item]) else 1])\n",
"        return result"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
"# Fit a plain P3 baseline: alpha=1 keeps raw transition probabilities,\n",
"# beta=0 disables the popularity penalty\n",
"model=RP3Beta()\n",
"model.fit(train_ui, alpha=1, beta=0)"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
"# Save top-10 recommendations and test-set score estimates to CSV\n",
"# for the evaluation step below\n",
"top_n=pd.DataFrame(model.recommend(user_code_id, item_code_id, topK=10))\n",
"\n",
"top_n.to_csv('Recommendations generated/ml-100k/Self_P3_reco.csv', index=False, header=False)\n",
"\n",
"estimations=pd.DataFrame(model.estimate(user_code_id, item_code_id, test_ui))\n",
"estimations.to_csv('Recommendations generated/ml-100k/Self_P3_estimations.csv', index=False, header=False)"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"943it [00:00, 4936.65it/s]\n"
]
},
{
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>RMSE</th>\n",
" <th>MAE</th>\n",
" <th>precision</th>\n",
" <th>recall</th>\n",
" <th>F_1</th>\n",
" <th>F_05</th>\n",
" <th>precision_super</th>\n",
" <th>recall_super</th>\n",
" <th>NDCG</th>\n",
" <th>mAP</th>\n",
" <th>MRR</th>\n",
" <th>LAUC</th>\n",
" <th>HR</th>\n",
" <th>F_2</th>\n",
" <th>Whole_average</th>\n",
" <th>Reco in test</th>\n",
" <th>Test coverage</th>\n",
" <th>Shannon</th>\n",
" <th>Gini</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>3.702446</td>\n",
" <td>3.527273</td>\n",
" <td>0.282185</td>\n",
" <td>0.192092</td>\n",
" <td>0.186749</td>\n",
" <td>0.21698</td>\n",
" <td>0.204185</td>\n",
" <td>0.240096</td>\n",
" <td>0.339114</td>\n",
" <td>0.204905</td>\n",
" <td>0.572157</td>\n",
" <td>0.593544</td>\n",
" <td>0.875928</td>\n",
" <td>0.181702</td>\n",
" <td>0.340803</td>\n",
" <td>1.0</td>\n",
" <td>0.077201</td>\n",
" <td>3.875892</td>\n",
" <td>0.974947</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"</div>"
],
"text/plain": [
" RMSE MAE precision recall F_1 F_05 \\\n",
"0 3.702446 3.527273 0.282185 0.192092 0.186749 0.21698 \n",
"\n",
" precision_super recall_super NDCG mAP MRR LAUC \\\n",
"0 0.204185 0.240096 0.339114 0.204905 0.572157 0.593544 \n",
"\n",
" HR F_2 Whole_average Reco in test Test coverage Shannon \\\n",
"0 0.875928 0.181702 0.340803 1.0 0.077201 3.875892 \n",
"\n",
" Gini \n",
"0 0.974947 "
]
},
"execution_count": 5,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Evaluate the saved recommendations and estimations against the test set;\n",
"# ratings 4-5 are treated as \"super\" (strongly positive) reactions\n",
"import evaluation_measures as ev\n",
"estimations_df=pd.read_csv('Recommendations generated/ml-100k/Self_P3_estimations.csv', header=None)\n",
"reco=np.loadtxt('Recommendations generated/ml-100k/Self_P3_reco.csv', delimiter=',')\n",
"\n",
"ev.evaluate(test=pd.read_csv('./Datasets/ml-100k/test.csv', sep='\\t', header=None),\n",
" estimations_df=estimations_df, \n",
" reco=reco,\n",
" super_reactions=[4,5])"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Let's check hyperparameters"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"##### Alpha"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
" 0%| | 0/8 [00:00<?, ?it/s]\n",
"0it [00:00, ?it/s]\u001b[A\n",
"943it [00:00, 5127.30it/s]\u001b[A\n",
" 12%|█▎ | 1/8 [00:18<02:12, 18.88s/it]\n",
"0it [00:00, ?it/s]\u001b[A\n",
"943it [00:00, 4905.08it/s]\u001b[A\n",
" 25%|██▌ | 2/8 [00:37<01:53, 18.91s/it]\n",
"0it [00:00, ?it/s]\u001b[A\n",
"943it [00:00, 5077.03it/s]\u001b[A\n",
" 38%|███▊ | 3/8 [00:56<01:33, 18.75s/it]\n",
"0it [00:00, ?it/s]\u001b[A\n",
"943it [00:00, 5032.37it/s]\u001b[A\n",
" 50%|█████ | 4/8 [01:15<01:15, 18.81s/it]\n",
"0it [00:00, ?it/s]\u001b[A\n",
"943it [00:00, 5154.37it/s]\u001b[A\n",
" 62%|██████▎ | 5/8 [01:34<00:56, 18.88s/it]\n",
"0it [00:00, ?it/s]\u001b[A\n",
"943it [00:00, 5117.24it/s]\u001b[A\n",
" 75%|███████▌ | 6/8 [01:53<00:38, 19.02s/it]\n",
"0it [00:00, ?it/s]\u001b[A\n",
"943it [00:00, 5012.20it/s]\u001b[A\n",
" 88%|████████▊ | 7/8 [02:12<00:18, 18.88s/it]\n",
"0it [00:00, ?it/s]\u001b[A\n",
"943it [00:00, 5098.59it/s]\u001b[A\n",
"100%|██████████| 8/8 [02:30<00:00, 18.83s/it]\n"
]
},
{
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>Alpha</th>\n",
" <th>RMSE</th>\n",
" <th>MAE</th>\n",
" <th>precision</th>\n",
" <th>recall</th>\n",
" <th>F_1</th>\n",
" <th>F_05</th>\n",
" <th>precision_super</th>\n",
" <th>recall_super</th>\n",
" <th>NDCG</th>\n",
" <th>mAP</th>\n",
" <th>MRR</th>\n",
" <th>LAUC</th>\n",
" <th>HR</th>\n",
" <th>F_2</th>\n",
" <th>Whole_average</th>\n",
" <th>Reco in test</th>\n",
" <th>Test coverage</th>\n",
" <th>Shannon</th>\n",
" <th>Gini</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>0.2</td>\n",
" <td>268.177832</td>\n",
" <td>211.732649</td>\n",
" <td>0.262672</td>\n",
" <td>0.166858</td>\n",
" <td>0.166277</td>\n",
" <td>0.197184</td>\n",
" <td>0.187661</td>\n",
" <td>0.203252</td>\n",
" <td>0.320910</td>\n",
" <td>0.196132</td>\n",
" <td>0.563378</td>\n",
" <td>0.580866</td>\n",
" <td>0.850477</td>\n",
" <td>0.159293</td>\n",
" <td>0.321247</td>\n",
" <td>1.000000</td>\n",
" <td>0.060606</td>\n",
" <td>3.669627</td>\n",
" <td>0.979636</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>0.4</td>\n",
" <td>10.546689</td>\n",
" <td>7.792373</td>\n",
" <td>0.268505</td>\n",
" <td>0.172669</td>\n",
" <td>0.171569</td>\n",
" <td>0.202643</td>\n",
" <td>0.192489</td>\n",
" <td>0.212653</td>\n",
" <td>0.326760</td>\n",
" <td>0.200172</td>\n",
" <td>0.565148</td>\n",
" <td>0.583801</td>\n",
" <td>0.854719</td>\n",
" <td>0.164747</td>\n",
" <td>0.326323</td>\n",
" <td>1.000000</td>\n",
" <td>0.064214</td>\n",
" <td>3.726996</td>\n",
" <td>0.978426</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>0.6</td>\n",
" <td>3.143988</td>\n",
" <td>2.948790</td>\n",
" <td>0.274655</td>\n",
" <td>0.180502</td>\n",
" <td>0.177820</td>\n",
" <td>0.208730</td>\n",
" <td>0.198176</td>\n",
" <td>0.222746</td>\n",
" <td>0.332872</td>\n",
" <td>0.203290</td>\n",
" <td>0.568872</td>\n",
" <td>0.587738</td>\n",
" <td>0.870626</td>\n",
" <td>0.171652</td>\n",
" <td>0.333140</td>\n",
" <td>1.000000</td>\n",
" <td>0.065657</td>\n",
" <td>3.785282</td>\n",
" <td>0.977090</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>0.8</td>\n",
" <td>3.670728</td>\n",
" <td>3.495735</td>\n",
" <td>0.281972</td>\n",
" <td>0.189868</td>\n",
" <td>0.185300</td>\n",
" <td>0.216071</td>\n",
" <td>0.203541</td>\n",
" <td>0.236751</td>\n",
" <td>0.339867</td>\n",
" <td>0.206688</td>\n",
" <td>0.573729</td>\n",
" <td>0.592432</td>\n",
" <td>0.874867</td>\n",
" <td>0.179823</td>\n",
" <td>0.340076</td>\n",
" <td>1.000000</td>\n",
" <td>0.070707</td>\n",
" <td>3.832415</td>\n",
" <td>0.975998</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>1.0</td>\n",
" <td>3.702446</td>\n",
" <td>3.527273</td>\n",
" <td>0.282185</td>\n",
" <td>0.192092</td>\n",
" <td>0.186749</td>\n",
" <td>0.216980</td>\n",
" <td>0.204185</td>\n",
" <td>0.240096</td>\n",
" <td>0.339114</td>\n",
" <td>0.204905</td>\n",
" <td>0.572157</td>\n",
" <td>0.593544</td>\n",
" <td>0.875928</td>\n",
" <td>0.181702</td>\n",
" <td>0.340803</td>\n",
" <td>1.000000</td>\n",
" <td>0.077201</td>\n",
" <td>3.875892</td>\n",
" <td>0.974947</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>1.2</td>\n",
" <td>3.704441</td>\n",
" <td>3.529251</td>\n",
" <td>0.280912</td>\n",
" <td>0.193633</td>\n",
" <td>0.187311</td>\n",
" <td>0.216872</td>\n",
" <td>0.203004</td>\n",
" <td>0.240588</td>\n",
" <td>0.338049</td>\n",
" <td>0.203453</td>\n",
" <td>0.571830</td>\n",
" <td>0.594313</td>\n",
" <td>0.883351</td>\n",
" <td>0.182776</td>\n",
" <td>0.341341</td>\n",
" <td>1.000000</td>\n",
" <td>0.085859</td>\n",
" <td>3.910718</td>\n",
" <td>0.974073</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>1.4</td>\n",
" <td>3.704580</td>\n",
" <td>3.529388</td>\n",
" <td>0.273595</td>\n",
" <td>0.190651</td>\n",
" <td>0.183874</td>\n",
" <td>0.212183</td>\n",
" <td>0.199464</td>\n",
" <td>0.239118</td>\n",
" <td>0.329550</td>\n",
" <td>0.195433</td>\n",
" <td>0.566171</td>\n",
" <td>0.592793</td>\n",
" <td>0.871686</td>\n",
" <td>0.179766</td>\n",
" <td>0.336190</td>\n",
" <td>1.000000</td>\n",
" <td>0.107504</td>\n",
" <td>3.961915</td>\n",
" <td>0.972674</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>1.6</td>\n",
" <td>3.704591</td>\n",
" <td>3.529399</td>\n",
" <td>0.263097</td>\n",
" <td>0.186255</td>\n",
" <td>0.178709</td>\n",
" <td>0.205170</td>\n",
" <td>0.191094</td>\n",
" <td>0.232920</td>\n",
" <td>0.317439</td>\n",
" <td>0.184917</td>\n",
" <td>0.552349</td>\n",
" <td>0.590545</td>\n",
" <td>0.868505</td>\n",
" <td>0.175419</td>\n",
" <td>0.328868</td>\n",
" <td>0.999576</td>\n",
" <td>0.156566</td>\n",
" <td>4.060156</td>\n",
" <td>0.969203</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"</div>"
],
"text/plain": [
" Alpha RMSE MAE precision recall F_1 F_05 \\\n",
"0 0.2 268.177832 211.732649 0.262672 0.166858 0.166277 0.197184 \n",
"0 0.4 10.546689 7.792373 0.268505 0.172669 0.171569 0.202643 \n",
"0 0.6 3.143988 2.948790 0.274655 0.180502 0.177820 0.208730 \n",
"0 0.8 3.670728 3.495735 0.281972 0.189868 0.185300 0.216071 \n",
"0 1.0 3.702446 3.527273 0.282185 0.192092 0.186749 0.216980 \n",
"0 1.2 3.704441 3.529251 0.280912 0.193633 0.187311 0.216872 \n",
"0 1.4 3.704580 3.529388 0.273595 0.190651 0.183874 0.212183 \n",
"0 1.6 3.704591 3.529399 0.263097 0.186255 0.178709 0.205170 \n",
"\n",
" precision_super recall_super NDCG mAP MRR LAUC \\\n",
"0 0.187661 0.203252 0.320910 0.196132 0.563378 0.580866 \n",
"0 0.192489 0.212653 0.326760 0.200172 0.565148 0.583801 \n",
"0 0.198176 0.222746 0.332872 0.203290 0.568872 0.587738 \n",
"0 0.203541 0.236751 0.339867 0.206688 0.573729 0.592432 \n",
"0 0.204185 0.240096 0.339114 0.204905 0.572157 0.593544 \n",
"0 0.203004 0.240588 0.338049 0.203453 0.571830 0.594313 \n",
"0 0.199464 0.239118 0.329550 0.195433 0.566171 0.592793 \n",
"0 0.191094 0.232920 0.317439 0.184917 0.552349 0.590545 \n",
"\n",
" HR F_2 Whole_average Reco in test Test coverage Shannon \\\n",
"0 0.850477 0.159293 0.321247 1.000000 0.060606 3.669627 \n",
"0 0.854719 0.164747 0.326323 1.000000 0.064214 3.726996 \n",
"0 0.870626 0.171652 0.333140 1.000000 0.065657 3.785282 \n",
"0 0.874867 0.179823 0.340076 1.000000 0.070707 3.832415 \n",
"0 0.875928 0.181702 0.340803 1.000000 0.077201 3.875892 \n",
"0 0.883351 0.182776 0.341341 1.000000 0.085859 3.910718 \n",
"0 0.871686 0.179766 0.336190 1.000000 0.107504 3.961915 \n",
"0 0.868505 0.175419 0.328868 0.999576 0.156566 4.060156 \n",
"\n",
" Gini \n",
"0 0.979636 \n",
"0 0.978426 \n",
"0 0.977090 \n",
"0 0.975998 \n",
"0 0.974947 \n",
"0 0.974073 \n",
"0 0.972674 \n",
"0 0.969203 "
]
},
"execution_count": 6,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"from tqdm import tqdm\n",
"result=[]\n",
"for alpha in tqdm([round(i,1) for i in np.arange(0.2,1.6001,0.2)]):\n",
" model=RP3Beta()\n",
" model.fit(train_ui, alpha=alpha, beta=0)\n",
" reco=pd.DataFrame(model.recommend(user_code_id, item_code_id, topK=10))\n",
" estimations_df=pd.DataFrame(model.estimate(user_code_id, item_code_id, test_ui))\n",
" to_append=ev.evaluate(test=pd.read_csv('./Datasets/ml-100k/test.csv', sep='\\t', header=None),\n",
" estimations_df=estimations_df, \n",
" reco=np.array(reco),\n",
" super_reactions=[4,5])\n",
" to_append.insert(0, \"Alpha\", alpha)\n",
" result.append(to_append)\n",
" \n",
"result=pd.concat(result)\n",
"result"
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {},
"outputs": [
{
"ename": "IndexError",
"evalue": "list index out of range",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mIndexError\u001b[0m Traceback (most recent call last)",
"\u001b[0;32m<ipython-input-8-e01206177978>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[1;32m 10\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mi\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mrange\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmetrics\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 11\u001b[0m \u001b[0mdf\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mresult\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m'Alpha'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmetrics\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 12\u001b[0;31m \u001b[0mdf\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mplot\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0max\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0maxes\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mto_iter\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtitle\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mmetrics\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mx\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0my\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m",
"\u001b[0;31mIndexError\u001b[0m: list index out of range"
]
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAABCQAAAkoCAYAAACzg26yAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAgAElEQVR4nOzde3xV1Z3//9cn9xxu4YQACSSAiso1oIBYpCqiFUvQttbq1LZTO+NMq+102vl9x077aztOfcxUextb2mk77dhOLd6mF2LxXmz1OyKiEq6iKPcECOEWIBeSfL5/nJ14iIkQyDk7Oef9fDzOg3PWXvvksxWXO5+91meZuyMiIiIiIiIikkwZYQcgIiIiIiIiIulHCQkRERERERERSTolJEREREREREQk6ZSQEBEREREREZGkU0JCRERERERERJJOCQkRERERERERSTolJERERERERKSDmf2Hmf3/p9BvvZldloSQJEUpISEpx8y2mlmDmR0xs91mdp+ZDQyO3WdmbmbXdjrnu0H7Xwafc8zs22a2M/ierWb2vW5+RvvrB0m9UBGRfiQYN5vNbFin9leD8XdsXNvXg7aLOvX9SzNr7TT2HjGzkuRchYhIenD3v3X3fzmFfpPc/dkkhCQpSgkJSVUV7j4QmAZMB74Ud+x14OPtH8wsC7gBeDOuz5eAGcAsYBBwGfBKVz8j7nV7r1+FiEhq2QLc1P7BzKYAkfgOZmbExuj9xI3VcV7oNPYOdPfqRAYtItIfBfe4In2aEhKS0tx9N/AEscREu0rgEjMbGny+GlgD7I7rMxP4rbtXe8xWd/9lUoIWEUld/82JSYZPAJ3H1rlAMfA54EYzy0lSbCIi/UIw4+xLZrbBzA6Y2X+ZWZ6ZXRbM7v1HM9sN/JeZZZjZHWb2ppnVmdlDZhaN+65LzOx/zeygme2Imy18n5l9I3g/zMweDfrsN7PnzCwjLpb5wftcM/uemVUHr++ZWW5wrD22L5rZXjOrMbNPJvufnfQ9SkhISjOz0cACYHNccyPwe+DG4PPHeecN8QrgC2b2GTObEjyxExGRM7MCGGxmE8wsk9g4/KtOfT5BLHH8UPC5IonxiYj0Fx8F3gecDZwLfCVoHwlEgTHArcBngeuAS4ES4ACwGMDMxgCPAd8Hiog9wFvdxc/6IrAz6DMC+CfAu+j3ZWB28D3lxGYafyXu+EhgCDAK+BSwOO4BoaQpJSQkVf3OzOqBHcBe4Gudjv8S+LiZFRAboH/X6fi/At8kNtivAnaZ2Se6+BkH415/3etXISKSetpnSVwJbAR2tR8wswjwYeDX7n4ceIR3LtuY3WnsfRMRkfTzA3ff4e77gbt4ezlcG/A1d29y9wbgb4Evu/tOd28Cvg5cHyzn+AvgaXdf4u7H3b3O3btKSBwnNnNtTNDvOXfvKiHxUeBOd9/r7rXAPwMf6/Q9dwbfsQw4Apx3pv8gpH9TQkJS1XXu3l774XzghCJq7v48sSzvl4FHgwE7/niruy929zlAAbGB/udmNqHTzyiIe/00gdcjIpIq/pvYTfBf8s7ZaR8AWoBlwef7gQVmVhTXZ0WnsffsRAcsItIH7Yh7v43Y7AeAWndvjDs2BvhtexKXWCK4ldhMh1JOrKHWnXuIzTZ+0szeMrM7uulXEsTSVVwAde7eEvf5GDDwFH6+pDAlJCSlufufgPuAb3Vx+FfEpqC9a20Id29w98XEprhN7O0YRUTSibtvI1bc8hrgN50Of4LYzen2YP3zw0A2sQSGiIi8rTTufRnQXty388yFHcCCToncPHffFRw7aVLX3evd/YvufhawiNiy5iu66FpNLAHSVVwiXVJCQtLB94Arzay8U/u9xKYM/7nzCWb2+aD4Tr6ZZQXLNQYBryY+XBGRlPcpYJ67H41rGwVcASwktv64fQ3yN+l6tw0RkXR2m5mNDgpUfhl4sJt+/wHcFdSLwMyKzO
za4Nj9wHwzuyG43y00s2mdv8DMFprZOUFNtUPEZli0dfGzlgBfCX7GMOCrvLNOkMgJlJCQlBesYfslsUExvn2/uz/TzRq4Y8C3ie28sQ+4DfiQu78V16fSzI7EvX6boEsQEUkp7v6mu6/q1DwXWO3uT7r77vYXseTxVDObHPS7uNPYe8TMZib1AkREwvdr4EngLWLLLr7RTb9/B5YSW25RT6y48EUA7r6d2Gy1LxLbank1sURwZ+OBp4nVfHgB+KG7L++i3zeI1V5bA6wFXnmXuEQAsK5/FxMREREREZG+xsy2An/l7k+HHYvImdIMCRERERERERFJOiUkRERERERERCTptGRDRERERERERJJOMyREREREREREJOmUkBARERERERGRpMsKO4DeMGzYMB87dmzYYYiIvMPLL7+8z92Lwo4jGTQWi0hfpHFYRCR83Y3FKZGQGDt2LKtWdd7OXEQkfGa2LewYkkVjsYj0RRqHRUTC191YrCUbIiIiIiIiIpJ0SkiIiIiIiIiISNIpISEiIiIiIiIiSZcSNSREpG84fvw4O3fupLGxMexQki4vL4/Ro0eTnZ0ddigiIu+QDuOzxmEROVPpMFYmWk/HYiUkRKTX7Ny5k0GDBjF27FjMLOxwksbdqaurY+fOnYwbNy7scERE3iHVx+dkjcNmdjXw70Am8J/u/m+djn8B+CugBagFbnH3bcGxu4H3E5uh/BTwd0A+8DBwNtAKVLr7HUH/vwTuAXYFX/8Dd//PhF2ciKT8WJlopzMWa8mGiPSaxsZGCgsL024ANzMKCwuVTReRPivVx+dkjMNmlgksBhYAE4GbzGxip26vAjPcfSrwCHB3cO57gDnAVGAyMBO4NDjnW+5+PjAdmGNmC+K+70F3nxa8lIwQSbBUHysT7XTGYiUkRKRXpesAnq7XLSL9R6qPU0m4vlnAZnd/y92bgQeAa+M7uPtydz8WfFwBjG4/BOQBOUAukA3scfdj7r48OLcZeCXuHBEJQaqPlYnW039+SkiISErJzMxk2rRpTJ48mYqKCg4ePAjA1q1bMTO+8pWvdPTdt28f2dnZ3H777QBs2rSJyy67jGnTpjFhwgRuvfVWAJ599lmGDBnCtGnTOl5PP/108i9ORKQfMzNuvvnmjs8tLS0UFRWxcOHCE/pdd911zJ49+4S2r3/964waNeqEcbh9fE+iUcCOuM87g7bufAp4DMDdXwCWAzXB6wl33xjf2cwKgArgmbjmD5nZGjN7xMxKz/wSRCQdrVq1is997nPdHq+urub6669PYkRvU0JCRFJKfn4+q1evZt26dUSjURYvXtxxbNy4cfzhD3/o+Pzwww8zadKkjs+f+9zn+Pu//3tWr17Nxo0b+exnP9txbO7cuaxevbrjNX/+/ORckIhIihgwYADr1q2joaEBgKeeeopRo078ff7gwYO8/PLLHDp0iLfeeuuEY+3jc/uroKAgabH3lJndDMwgVgMCMzsHmEBs9sMoYJ6ZzY3rnwUsAe519/YLrwTGBss/ngJ+0c3PutXMVpnZqtra2kRdkoj0Ia2trT3qP2PGDO69995uj5eUlPDII4+caVinRQkJEUlZF198Mbt27er4HIlEmDBhAqtWrQLgwQcf5IYbbug4XlNTw+jRb8+UnTJlSvKCFRFJA9dcc01HYnjJkiXcdNNNJxz/zW9+Q0VFBTfeeCMPPPBAGCG+m11A/CyF0bxdcLKDmc0HvgwscvemoPkDwAp3P+LuR4jNnLg47rSfAG+4+/faG9y9Lu78/wQu7Cood/+Ju89w9xlFRUWneWki0lds3bqV888/n49+9KNMmDCB66+/nmPHjjF27Fj+8R//kQsuuICHH36YJ598kosvvpgLLriAD3/4wxw5cgSAl156ife85z2Ul5cza9Ys6uvrefbZZztmo/3pT3/qmGk2ffp06uvr2bp1K5MnTwZidTQ++clPMmXKFKZPn87y5csBuO+++/jgBz/I1Vdfzf
jx4/k//+f/9Mr1apcNEUmIf65cz4bqw736nRNLBvO1ikkn70gsc/zMM8/wqU996oT29pvcESNGkJmZSUlJCdXV1UD
"text/plain": [
"<Figure size 1296x3024 with 18 Axes>"
]
},
"metadata": {
"needs_background": "light"
},
"output_type": "display_data"
}
],
"source": [
"metrics=list(result.columns[[i not in ['Alpha'] for i in result.columns]])\n",
"\n",
"# Lay one chart per metric out on a grid. The number of rows is computed from\n",
"# the metric count: the previous hardcoded 6x3 grid gave 18 axes for 19\n",
"# metrics and raised IndexError on the last one.\n",
"charts_per_column=3\n",
"charts_per_row=-(-len(metrics)//charts_per_column)  # ceiling division\n",
"\n",
"fig, axes = plt.subplots(nrows=charts_per_row, ncols=charts_per_column, figsize=(18, 7*charts_per_row))\n",
"import itertools\n",
"to_iter=[i for i in itertools.product(range(charts_per_row), range(charts_per_column))]\n",
"\n",
"for i in range(len(metrics)):\n",
"    df=result[['Alpha', metrics[i]]]\n",
"    df.plot(ax=axes[to_iter[i]], title=metrics[i], x=0, y=1)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"##### Beta"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
" 0%| | 0/10 [00:00<?, ?it/s]\n",
"0it [00:00, ?it/s]\u001b[A\n",
"943it [00:00, 5110.40it/s]\u001b[A\n",
" 10%|█ | 1/10 [00:19<02:58, 19.79s/it]\n",
"0it [00:00, ?it/s]\u001b[A\n",
"943it [00:00, 5154.22it/s]\u001b[A\n",
" 20%|██ | 2/10 [00:39<02:37, 19.71s/it]\n",
"0it [00:00, ?it/s]\u001b[A\n",
"943it [00:00, 5027.65it/s]\u001b[A\n",
" 30%|███ | 3/10 [00:58<02:16, 19.48s/it]\n",
"0it [00:00, ?it/s]\u001b[A\n",
"943it [00:00, 5020.67it/s]\u001b[A\n",
" 40%|████ | 4/10 [01:17<01:55, 19.33s/it]\n",
"0it [00:00, ?it/s]\u001b[A\n",
"943it [00:00, 4962.05it/s]\u001b[A\n",
" 50%|█████ | 5/10 [01:36<01:36, 19.20s/it]\n",
"0it [00:00, ?it/s]\u001b[A\n",
"943it [00:00, 5076.40it/s]\u001b[A\n",
" 60%|██████ | 6/10 [01:54<01:16, 19.01s/it]\n",
"0it [00:00, ?it/s]\u001b[A\n",
"943it [00:00, 5041.93it/s]\u001b[A\n",
" 70%|███████ | 7/10 [02:13<00:56, 18.95s/it]\n",
"0it [00:00, ?it/s]\u001b[A\n",
"943it [00:00, 5050.76it/s]\u001b[A\n",
" 80%|████████ | 8/10 [02:32<00:37, 18.90s/it]\n",
"0it [00:00, ?it/s]\u001b[A\n",
"458it [00:00, 4577.78it/s]\u001b[A\n",
"943it [00:00, 4472.91it/s]\u001b[A\n",
" 90%|█████████ | 9/10 [02:51<00:18, 18.91s/it]\n",
"0it [00:00, ?it/s]\u001b[A\n",
"943it [00:00, 5422.18it/s]\u001b[A\n",
"100%|██████████| 10/10 [03:10<00:00, 19.02s/it]\n"
]
},
{
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>Beta</th>\n",
" <th>RMSE</th>\n",
" <th>MAE</th>\n",
" <th>precision</th>\n",
" <th>recall</th>\n",
" <th>F_1</th>\n",
" <th>F_05</th>\n",
" <th>precision_super</th>\n",
" <th>recall_super</th>\n",
" <th>NDCG</th>\n",
" <th>mAP</th>\n",
" <th>MRR</th>\n",
" <th>LAUC</th>\n",
" <th>HR</th>\n",
" <th>F_2</th>\n",
" <th>Whole_average</th>\n",
" <th>Reco in test</th>\n",
" <th>Test coverage</th>\n",
" <th>Shannon</th>\n",
" <th>Gini</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>0.0</td>\n",
" <td>3.702446</td>\n",
" <td>3.527273</td>\n",
" <td>0.282185</td>\n",
" <td>0.192092</td>\n",
" <td>0.186749</td>\n",
" <td>0.216980</td>\n",
" <td>0.204185</td>\n",
" <td>0.240096</td>\n",
" <td>0.339114</td>\n",
" <td>0.204905</td>\n",
" <td>0.572157</td>\n",
" <td>0.593544</td>\n",
" <td>0.875928</td>\n",
" <td>0.181702</td>\n",
" <td>0.340803</td>\n",
" <td>1.000000</td>\n",
" <td>0.077201</td>\n",
" <td>3.875892</td>\n",
" <td>0.974947</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>0.1</td>\n",
" <td>3.703312</td>\n",
" <td>3.528128</td>\n",
" <td>0.290138</td>\n",
" <td>0.197597</td>\n",
" <td>0.192259</td>\n",
" <td>0.223336</td>\n",
" <td>0.210944</td>\n",
" <td>0.246153</td>\n",
" <td>0.347768</td>\n",
" <td>0.212034</td>\n",
" <td>0.581038</td>\n",
" <td>0.596328</td>\n",
" <td>0.884411</td>\n",
" <td>0.187030</td>\n",
" <td>0.347420</td>\n",
" <td>1.000000</td>\n",
" <td>0.085137</td>\n",
" <td>3.957416</td>\n",
" <td>0.972784</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>0.2</td>\n",
" <td>3.703825</td>\n",
" <td>3.528636</td>\n",
" <td>0.297137</td>\n",
" <td>0.201202</td>\n",
" <td>0.196067</td>\n",
" <td>0.228169</td>\n",
" <td>0.218026</td>\n",
" <td>0.252767</td>\n",
" <td>0.355655</td>\n",
" <td>0.219909</td>\n",
" <td>0.588904</td>\n",
" <td>0.598160</td>\n",
" <td>0.886532</td>\n",
" <td>0.190538</td>\n",
" <td>0.352756</td>\n",
" <td>1.000000</td>\n",
" <td>0.094517</td>\n",
" <td>4.053212</td>\n",
" <td>0.969980</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>0.3</td>\n",
" <td>3.704130</td>\n",
" <td>3.528939</td>\n",
" <td>0.303499</td>\n",
" <td>0.204749</td>\n",
" <td>0.199901</td>\n",
" <td>0.232829</td>\n",
" <td>0.225107</td>\n",
" <td>0.260797</td>\n",
" <td>0.363757</td>\n",
" <td>0.226825</td>\n",
" <td>0.599969</td>\n",
" <td>0.599964</td>\n",
" <td>0.888653</td>\n",
" <td>0.194073</td>\n",
" <td>0.358344</td>\n",
" <td>1.000000</td>\n",
" <td>0.105339</td>\n",
" <td>4.147779</td>\n",
" <td>0.966948</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>0.4</td>\n",
" <td>3.704313</td>\n",
" <td>3.529120</td>\n",
" <td>0.308908</td>\n",
" <td>0.208811</td>\n",
" <td>0.203854</td>\n",
" <td>0.237241</td>\n",
" <td>0.229614</td>\n",
" <td>0.266918</td>\n",
" <td>0.370758</td>\n",
" <td>0.232673</td>\n",
" <td>0.609385</td>\n",
" <td>0.602014</td>\n",
" <td>0.895016</td>\n",
" <td>0.197981</td>\n",
" <td>0.363598</td>\n",
" <td>0.999894</td>\n",
" <td>0.132035</td>\n",
" <td>4.259682</td>\n",
" <td>0.962989</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>0.5</td>\n",
" <td>3.704422</td>\n",
" <td>3.529229</td>\n",
" <td>0.314316</td>\n",
" <td>0.211411</td>\n",
" <td>0.206768</td>\n",
" <td>0.240986</td>\n",
" <td>0.237124</td>\n",
" <td>0.273416</td>\n",
" <td>0.378307</td>\n",
" <td>0.239297</td>\n",
" <td>0.622792</td>\n",
" <td>0.603327</td>\n",
" <td>0.903499</td>\n",
" <td>0.200572</td>\n",
" <td>0.369318</td>\n",
" <td>0.999046</td>\n",
" <td>0.168831</td>\n",
" <td>4.411281</td>\n",
" <td>0.956648</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>0.6</td>\n",
" <td>3.704488</td>\n",
" <td>3.529295</td>\n",
" <td>0.314634</td>\n",
" <td>0.206209</td>\n",
" <td>0.204818</td>\n",
" <td>0.240159</td>\n",
" <td>0.242489</td>\n",
" <td>0.273850</td>\n",
" <td>0.376438</td>\n",
" <td>0.238428</td>\n",
" <td>0.622042</td>\n",
" <td>0.600721</td>\n",
" <td>0.897137</td>\n",
" <td>0.197320</td>\n",
" <td>0.367854</td>\n",
" <td>0.996394</td>\n",
" <td>0.212843</td>\n",
" <td>4.621938</td>\n",
" <td>0.945932</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>0.7</td>\n",
" <td>3.704528</td>\n",
" <td>3.529335</td>\n",
" <td>0.304136</td>\n",
" <td>0.187298</td>\n",
" <td>0.191990</td>\n",
" <td>0.228749</td>\n",
" <td>0.238305</td>\n",
" <td>0.256201</td>\n",
" <td>0.358807</td>\n",
" <td>0.226808</td>\n",
" <td>0.593897</td>\n",
" <td>0.591207</td>\n",
" <td>0.868505</td>\n",
" <td>0.182056</td>\n",
" <td>0.352330</td>\n",
" <td>0.983033</td>\n",
" <td>0.256854</td>\n",
" <td>4.898568</td>\n",
" <td>0.928065</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>0.8</td>\n",
" <td>3.704552</td>\n",
" <td>3.529360</td>\n",
" <td>0.266384</td>\n",
" <td>0.147571</td>\n",
" <td>0.158660</td>\n",
" <td>0.194838</td>\n",
" <td>0.214485</td>\n",
" <td>0.209336</td>\n",
" <td>0.299850</td>\n",
" <td>0.184356</td>\n",
" <td>0.492852</td>\n",
" <td>0.571152</td>\n",
" <td>0.803818</td>\n",
" <td>0.146414</td>\n",
" <td>0.307476</td>\n",
" <td>0.936373</td>\n",
" <td>0.341270</td>\n",
" <td>5.257397</td>\n",
" <td>0.895882</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>0.9</td>\n",
" <td>3.704567</td>\n",
" <td>3.529375</td>\n",
" <td>0.162354</td>\n",
" <td>0.076967</td>\n",
" <td>0.089233</td>\n",
" <td>0.114583</td>\n",
" <td>0.134657</td>\n",
" <td>0.113253</td>\n",
" <td>0.160868</td>\n",
" <td>0.085486</td>\n",
" <td>0.243590</td>\n",
" <td>0.535405</td>\n",
" <td>0.580064</td>\n",
" <td>0.078906</td>\n",
" <td>0.197947</td>\n",
" <td>0.800106</td>\n",
" <td>0.415584</td>\n",
" <td>5.563910</td>\n",
" <td>0.857396</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"</div>"
],
"text/plain": [
" Beta RMSE MAE precision recall F_1 F_05 \\\n",
"0 0.0 3.702446 3.527273 0.282185 0.192092 0.186749 0.216980 \n",
"0 0.1 3.703312 3.528128 0.290138 0.197597 0.192259 0.223336 \n",
"0 0.2 3.703825 3.528636 0.297137 0.201202 0.196067 0.228169 \n",
"0 0.3 3.704130 3.528939 0.303499 0.204749 0.199901 0.232829 \n",
"0 0.4 3.704313 3.529120 0.308908 0.208811 0.203854 0.237241 \n",
"0 0.5 3.704422 3.529229 0.314316 0.211411 0.206768 0.240986 \n",
"0 0.6 3.704488 3.529295 0.314634 0.206209 0.204818 0.240159 \n",
"0 0.7 3.704528 3.529335 0.304136 0.187298 0.191990 0.228749 \n",
"0 0.8 3.704552 3.529360 0.266384 0.147571 0.158660 0.194838 \n",
"0 0.9 3.704567 3.529375 0.162354 0.076967 0.089233 0.114583 \n",
"\n",
" precision_super recall_super NDCG mAP MRR LAUC \\\n",
"0 0.204185 0.240096 0.339114 0.204905 0.572157 0.593544 \n",
"0 0.210944 0.246153 0.347768 0.212034 0.581038 0.596328 \n",
"0 0.218026 0.252767 0.355655 0.219909 0.588904 0.598160 \n",
"0 0.225107 0.260797 0.363757 0.226825 0.599969 0.599964 \n",
"0 0.229614 0.266918 0.370758 0.232673 0.609385 0.602014 \n",
"0 0.237124 0.273416 0.378307 0.239297 0.622792 0.603327 \n",
"0 0.242489 0.273850 0.376438 0.238428 0.622042 0.600721 \n",
"0 0.238305 0.256201 0.358807 0.226808 0.593897 0.591207 \n",
"0 0.214485 0.209336 0.299850 0.184356 0.492852 0.571152 \n",
"0 0.134657 0.113253 0.160868 0.085486 0.243590 0.535405 \n",
"\n",
" HR F_2 Whole_average Reco in test Test coverage Shannon \\\n",
"0 0.875928 0.181702 0.340803 1.000000 0.077201 3.875892 \n",
"0 0.884411 0.187030 0.347420 1.000000 0.085137 3.957416 \n",
"0 0.886532 0.190538 0.352756 1.000000 0.094517 4.053212 \n",
"0 0.888653 0.194073 0.358344 1.000000 0.105339 4.147779 \n",
"0 0.895016 0.197981 0.363598 0.999894 0.132035 4.259682 \n",
"0 0.903499 0.200572 0.369318 0.999046 0.168831 4.411281 \n",
"0 0.897137 0.197320 0.367854 0.996394 0.212843 4.621938 \n",
"0 0.868505 0.182056 0.352330 0.983033 0.256854 4.898568 \n",
"0 0.803818 0.146414 0.307476 0.936373 0.341270 5.257397 \n",
"0 0.580064 0.078906 0.197947 0.800106 0.415584 5.563910 \n",
"\n",
" Gini \n",
"0 0.974947 \n",
"0 0.972784 \n",
"0 0.969980 \n",
"0 0.966948 \n",
"0 0.962989 \n",
"0 0.956648 \n",
"0 0.945932 \n",
"0 0.928065 \n",
"0 0.895882 \n",
"0 0.857396 "
]
},
"execution_count": 9,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"from tqdm import tqdm\n",
"\n",
"# Grid search over beta (alpha fixed at 1): fit, recommend, evaluate for each\n",
"# value and collect one metrics row per setting\n",
"result=[]\n",
"for beta in tqdm([round(i,1) for i in np.arange(0,1,0.1)]):\n",
"    model=RP3Beta()\n",
"    model.fit(train_ui, alpha=1, beta=beta)\n",
"    reco=pd.DataFrame(model.recommend(user_code_id, item_code_id, topK=10))\n",
"    estimations_df=pd.DataFrame(model.estimate(user_code_id, item_code_id, test_ui))\n",
"    metrics_row=ev.evaluate(test=pd.read_csv('./Datasets/ml-100k/test.csv', sep='\\t', header=None),\n",
"                            estimations_df=estimations_df,\n",
"                            reco=np.array(reco),\n",
"                            super_reactions=[4,5])\n",
"    metrics_row.insert(0, \"Beta\", beta)\n",
"    result.append(metrics_row)\n",
"\n",
"result=pd.concat(result)\n",
"result"
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {},
"outputs": [
{
"ename": "IndexError",
"evalue": "list index out of range",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mIndexError\u001b[0m Traceback (most recent call last)",
"\u001b[0;32m<ipython-input-10-8f1dc184fb30>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[1;32m 12\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mi\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mrange\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmetrics\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 13\u001b[0m \u001b[0mdf\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mresult\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m'Beta'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmetrics\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 14\u001b[0;31m \u001b[0mdf\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mplot\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0max\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0maxes\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mto_iter\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtitle\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mmetrics\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mx\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0my\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m",
"\u001b[0;31mIndexError\u001b[0m: list index out of range"
]
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAABCQAAAkoCAYAAACzg26yAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAgAElEQVR4nOzdd3xX1f3H8dfJXiQhQAJJCHsFAgHDElFkiYpicYBiHbSlbuus29ba/mod2Fpqa1trrYigVXEzVByVLWFDWCEhCQkEQhKyk/P74/tNDDHISr434/18PL6PfO+5537zuYyTm88993OMtRYREREREREREU/ycjoAEREREREREWl9lJAQEREREREREY9TQkJEREREREREPE4JCRERERERERHxOCUkRERERERERMTjlJAQEREREREREY9TQkJERERERERqGGP+aox59CT6bTbGjPFASNJCKSEhLY4xJtUYU2yMKTTG7DfGvGKMCXHve8UYY40xU+ocM9vdfoN7288Y86wxZp/7c1KNMc8f53tUv/7s0RMVEWkm3GNmmTGmfZ32de6xt2uttl+524bX6XuDMaayzrhbaIyJ9sxZiIi0Htbam6y1vzmJfv2ttcs8EJK0UEpISEt1ibU2BEgEBgMP1tqXAlxXvWGM8QGuAnbV6vMgkAQMA9oAY4Bv6/setV63NfhZiIi0HHuAq6s3jDEJQFDtDsYYg2t8PkStcbqW5XXG3RBrbWZjBi0i0ly5r3FFmjQlJKRFs9buBxbhSkxUex84xxjT1r09CdgA7K/VZyjwjrU207qkWmtf9UjQIiIt0384NslwPVB3XB0NdALuAKYbY/w8FJuISLPhnnX2oDFmizHmsDHmX8aYAGPMGPfs3l8aY/YD/zLGeBljHjDG7DLG5BpjFhhjImp91jnGmG+MMXnGmPRas4VfMcY86X7f3hjzgbvPIWPMV8YYr1qxjHe/9zfGPG+MyXS/njfG+Lv3Vcd2jzEmxxiTZYy50dN/dtL0KCEhLZoxJha4ENhZq7kEWAhMd29fx/cvilcAdxtjbjHGJLjv2omIyOlbAYQaY/oZY7xxjcGv1elzPa6k8QL39iUejE9EpDmZAVwA9AB6A4+42zsCEUAXYBZwO3AZcB4QDRwG5gAYY7oAHwMvAB1w3cBLrud73QPsc/eJAh4CbD39HgZGuD9nEK6Zxo/U2t8RCANigJ8Ac2rdIJRWSgkJaaneNcYUAOlADvB4nf2vAtcZY8JxDdDv1tn/f8BTuAb7NUCGMeb6er5HXq3Xzxr8LEREWpbqWRITgK1ARvUOY0wQcCXwurW2HHiL7z+2MaLOuLsLEZHW6c/W2nRr7SHgt3z3SFwV8Li1ttRaWwzcBDxsrd1nrS0FfgVc4X6c4xpgqbV2nrW23Fqba62tLyFRjmv2Whd3v6+stfUlJGYAT1hrc6y1B4BfAz+u8zlPuD/jI6AQ6HOmfxDSvCkhIS3VZdba6toPfYFjCqlZa7/GleV9GPjAPWDX3l9prZ1jrR0FhOMa6F82xvSr8z3Ca73+3ojnIyLSEvwH1wXwDXx/ZtqPgArgI/f2XOBCY0yHWn1W1Bl3ezR2wCIiTVR6rfd7cc1+ADhgrS2pta8L8E51IhdXMrgS10yHzhxbQ+14nsY123ixMWa3MeaB4/SLdsdSX1wAudbailrbRUDISXx/acGUkJAWzVr7BfAK8Ew9u1/DNQXtB2tDWGuLrbVzcE1xi2/oGEVEWgtr7V5cxS0vAt6us/t6XBemae5nn98EfHElMERE5Fida72PA6oL/NaduZAOXFgnmRtgrc1w7zthYtdaW2Ctvcda2x24FNdjzePq6ZqJKwFSX1wi9VJCQlqD54EJxphBddr/hGva8Jd1DzDG/MJdfCfQGOPjflyjDbCu8cMVEWnRfgKMtdYerdUWA4wDJuN69rj6+eOnqH+1DRGR1u5WY0ysu0Dlw8
D84/T7K/Bbd70IjDEdjDFT3PvmAuONMVe5r3fbGWMS636AMWayMaanu6baEVwzLKrq+V7zgEfc36M98BjfrxUkcgwlJKTFcz/D9iquQbF2+yFr7afHeQauCHgW18obB4Fbgcuttbtr9XnfGFNY6/VOI52CiEiLYa3dZa1dU6d5NJBsrV1srd1f/cKVOB5ojBng7jeyzrhbaIwZ6tETEBFpGl4HFgO7cT128eRx+v0ReA/X4xYFuAoMDwew1qbhmrF2D67llpNxJYPr6gUsxVXzYTnwF2vt5/X0exJX7bUNwEbg2x+ISwQAU//vYiIiIiIiItLUGGNSgZ9aa5c6HYvImdIMCRERERERERHxOCUkRERERERERMTj9MiGiIiIiIiIiHicZkiIiIiIiIiIiMcpISEiIiIiIiIiHufjdAANoX379rZr165OhyEicoy1a9cetNZ2cDoOT9FYLCJNUWsaizUOi0hT9EPjcItISHTt2pU1a+ouaS4i4ixjzF6nY/AkjcUi0hQ5PRYbYyYBfwS8gX9Ya39fZ/9NwK1AJVAIzLLWbjHGTAB+D/gBZcB91trPfuh7aRwWkaboh8ZhPbIhIiIiItIIjDHewBzgQiAeuNoYE1+n2+vW2gRrbSLwB+A5d/tB4BJrbQJwPfAfD4UtIuIxSkiIiIiIiDSOYcBOa+1ua20Z8AYwpXYHa21+rc1gwLrb11lrM93tm4FAY4y/B2IWEfGYFvHIhoiIiIhIExQDpNfa3gcMr9vJGHMrcDeuxzPG1vM5lwPfWmtL6zl2FjALIC4urgFCFhHxnBabkCgvL2ffvn2UlJQ4HYrHBQQEEBsbi6+vr9OhiEgr19LHYo23ItIQrLVzgDnGmGuAR3A9ogGAMaY/8BQw8TjHvgS8BJCUlGQbP1qRlqulX7c0ttO5LmqxCYl9+/bRpk0bunbtijHG6XA8xlpLbm4u+/bto1u3bk6HIyKtXEseizXeishJyAA619qOdbcdzxvAi9UbxphY4B3gOmvtrkaJUERqtOTrlsZ2utdFLbaGRElJCe3atWt1/5CMMbRr105ZPRFpElryWKzxVkROwmqglzGmmzHGD5gOvFe7gzGmV63Ni4Ed7vZw4EPgAWvt/zwUr0ir1pKvWxrb6V4XtdiEBNBq/yG11vMWkaapJY9JLfncROTMWWsrgNuARcBWYIG1drMx5gljzKXubrcZYzYbY5Jx1ZGoflzjNqAn8JgxJtn9ivT0OYi0NvrZfvpO58+uRScknObt7U1iYiIDBgzgkksuIS8vD4DU1FSMMTzyyCM1fQ8ePIivry+33XYbANu3b2fMmDEkJibSr18/Zs2aBcCyZcsICwsjMTGx5rV06VLPn5yISDNhjOHaa6+t2a6oqKBDhw5Mnjz5mH6XXXYZI0aMOKbtV7/6FTExMceMudVjuYjIybDWfmSt7W2t7WGt/a277TFr7Xvu93daa/tbaxOttedbaze725+01ga726tfOU6ei4g0T2vWrOGOO+447v7MzEyuuOIKD0b0HSUkGlFgYCDJycls2rSJiIgI5syZU7OvW7dufPjhhzXbb775Jv3796/ZvuOOO7jrrrtITk5m69at3H777TX7Ro8eTXJycs1r/PjxnjkhEZFmKDg4mE2bNlFcXAzAkiVLiImJOaZPXl4ea9eu5ciRI+zevfuYfdVjcfUrPDzcY7GLiIiI1FVZWXlK/ZOSkvjTn/503P3R0dG89dZbZxrWaVFCwkNGjhxJRsZ3NYyCgoLo168fa9asAWD+/PlcddVVNfuzsrKIjY2t2U5ISPBcsCIiLcxFF11UkwSeN28eV1999TH73377bS655BKmT5/OG2+84USIIiIiIqSmptK3b19mzJhBv379uOKKKygqKqJr16788pe/ZMiQIbz55pssXryYkSNHMmTIEK688koKCwsBWL16NWeffTaDBg1i2LBhFBQUsGzZspqZoV988UXNrM/BgwdTUFBAamoqAwYMAF
x1NG688UYSEhIYPHgwn3/+OQCvvPIKU6dOZdKkSfTq1Yv777+/Qc63xa6yUduv39/Mlsz8Bv3M+OhQHr+k/4k74sp
"text/plain": [
"<Figure size 1296x3024 with 18 Axes>"
]
},
"metadata": {
"needs_background": "light"
},
"output_type": "display_data"
}
],
"source": [
    "# matplotlib.pyplot is already imported above as plt\n",
"\n",
"metrics=list(result.columns[[i not in ['Beta'] for i in result.columns]])\n",
"\n",
    "charts_per_row=7\n",
"charts_per_column=3\n",
"\n",
"fig, axes = plt.subplots(nrows=charts_per_row, ncols=charts_per_column,figsize=(18, 7*charts_per_row ))\n",
"import itertools\n",
"to_iter=[i for i in itertools.product(range(charts_per_row), range(charts_per_column))]\n",
"\n",
"for i in range(len(metrics)):\n",
" df=result[['Beta', metrics[i]]]\n",
" df.plot(ax=axes[to_iter[i]], title=metrics[i], x=0, y=1)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Check sample recommendations"
]
},
{
"cell_type": "code",
"execution_count": 11,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>user</th>\n",
" <th>rating</th>\n",
" <th>title</th>\n",
" <th>genres</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>57566</th>\n",
" <td>734</td>\n",
" <td>5</td>\n",
" <td>Emma (1996)</td>\n",
" <td>Drama, Romance</td>\n",
" </tr>\n",
" <tr>\n",
" <th>50942</th>\n",
" <td>734</td>\n",
" <td>5</td>\n",
" <td>It's a Wonderful Life (1946)</td>\n",
" <td>Drama</td>\n",
" </tr>\n",
" <tr>\n",
" <th>48429</th>\n",
" <td>734</td>\n",
" <td>5</td>\n",
" <td>Rebecca (1940)</td>\n",
" <td>Romance, Thriller</td>\n",
" </tr>\n",
" <tr>\n",
" <th>22622</th>\n",
" <td>734</td>\n",
" <td>5</td>\n",
" <td>My Fair Lady (1964)</td>\n",
" <td>Musical, Romance</td>\n",
" </tr>\n",
" <tr>\n",
" <th>22461</th>\n",
" <td>734</td>\n",
" <td>5</td>\n",
" <td>Sound of Music, The (1965)</td>\n",
" <td>Musical</td>\n",
" </tr>\n",
" <tr>\n",
" <th>35119</th>\n",
" <td>734</td>\n",
" <td>4</td>\n",
" <td>Much Ado About Nothing (1993)</td>\n",
" <td>Comedy, Romance</td>\n",
" </tr>\n",
" <tr>\n",
" <th>43403</th>\n",
" <td>734</td>\n",
" <td>4</td>\n",
" <td>Snow White and the Seven Dwarfs (1937)</td>\n",
" <td>Animation, Children's, Musical</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2447</th>\n",
" <td>734</td>\n",
" <td>4</td>\n",
" <td>Back to the Future (1985)</td>\n",
" <td>Comedy, Sci-Fi</td>\n",
" </tr>\n",
" <tr>\n",
" <th>36121</th>\n",
" <td>734</td>\n",
" <td>4</td>\n",
" <td>Silence of the Lambs, The (1991)</td>\n",
" <td>Drama, Thriller</td>\n",
" </tr>\n",
" <tr>\n",
" <th>37465</th>\n",
" <td>734</td>\n",
" <td>4</td>\n",
" <td>Aladdin (1992)</td>\n",
" <td>Animation, Children's, Comedy, Musical</td>\n",
" </tr>\n",
" <tr>\n",
" <th>38209</th>\n",
" <td>734</td>\n",
" <td>4</td>\n",
" <td>Mary Poppins (1964)</td>\n",
" <td>Children's, Comedy, Musical</td>\n",
" </tr>\n",
" <tr>\n",
" <th>39918</th>\n",
" <td>734</td>\n",
" <td>4</td>\n",
" <td>Rear Window (1954)</td>\n",
" <td>Mystery, Thriller</td>\n",
" </tr>\n",
" <tr>\n",
" <th>40514</th>\n",
" <td>734</td>\n",
" <td>4</td>\n",
" <td>Titanic (1997)</td>\n",
" <td>Action, Drama, Romance</td>\n",
" </tr>\n",
" <tr>\n",
" <th>42508</th>\n",
" <td>734</td>\n",
" <td>4</td>\n",
" <td>Jurassic Park (1993)</td>\n",
" <td>Action, Adventure, Sci-Fi</td>\n",
" </tr>\n",
" <tr>\n",
" <th>45160</th>\n",
" <td>734</td>\n",
" <td>4</td>\n",
" <td>Tomorrow Never Dies (1997)</td>\n",
" <td>Action, Romance, Thriller</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"</div>"
],
"text/plain": [
" user rating title \\\n",
"57566 734 5 Emma (1996) \n",
"50942 734 5 It's a Wonderful Life (1946) \n",
"48429 734 5 Rebecca (1940) \n",
"22622 734 5 My Fair Lady (1964) \n",
"22461 734 5 Sound of Music, The (1965) \n",
"35119 734 4 Much Ado About Nothing (1993) \n",
"43403 734 4 Snow White and the Seven Dwarfs (1937) \n",
"2447 734 4 Back to the Future (1985) \n",
"36121 734 4 Silence of the Lambs, The (1991) \n",
"37465 734 4 Aladdin (1992) \n",
"38209 734 4 Mary Poppins (1964) \n",
"39918 734 4 Rear Window (1954) \n",
"40514 734 4 Titanic (1997) \n",
"42508 734 4 Jurassic Park (1993) \n",
"45160 734 4 Tomorrow Never Dies (1997) \n",
"\n",
" genres \n",
"57566 Drama, Romance \n",
"50942 Drama \n",
"48429 Romance, Thriller \n",
"22622 Musical, Romance \n",
"22461 Musical \n",
"35119 Comedy, Romance \n",
"43403 Animation, Children's, Musical \n",
"2447 Comedy, Sci-Fi \n",
"36121 Drama, Thriller \n",
"37465 Animation, Children's, Comedy, Musical \n",
"38209 Children's, Comedy, Musical \n",
"39918 Mystery, Thriller \n",
"40514 Action, Drama, Romance \n",
"42508 Action, Adventure, Sci-Fi \n",
"45160 Action, Romance, Thriller "
]
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>user</th>\n",
" <th>rec_nb</th>\n",
" <th>title</th>\n",
" <th>genres</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>2936</th>\n",
" <td>734.0</td>\n",
" <td>1</td>\n",
" <td>Return of the Jedi (1983)</td>\n",
" <td>Action, Adventure, Romance, Sci-Fi, War</td>\n",
" </tr>\n",
" <tr>\n",
" <th>7231</th>\n",
" <td>734.0</td>\n",
" <td>2</td>\n",
" <td>Fargo (1996)</td>\n",
" <td>Crime, Drama, Thriller</td>\n",
" </tr>\n",
" <tr>\n",
" <th>3322</th>\n",
" <td>734.0</td>\n",
" <td>3</td>\n",
" <td>Toy Story (1995)</td>\n",
" <td>Animation, Children's, Comedy</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2519</th>\n",
" <td>734.0</td>\n",
" <td>4</td>\n",
" <td>Godfather, The (1972)</td>\n",
" <td>Action, Crime, Drama</td>\n",
" </tr>\n",
" <tr>\n",
" <th>5958</th>\n",
" <td>734.0</td>\n",
" <td>5</td>\n",
" <td>Contact (1997)</td>\n",
" <td>Drama, Sci-Fi</td>\n",
" </tr>\n",
" <tr>\n",
" <th>1621</th>\n",
" <td>734.0</td>\n",
" <td>6</td>\n",
" <td>Schindler's List (1993)</td>\n",
" <td>Drama, War</td>\n",
" </tr>\n",
" <tr>\n",
" <th>1248</th>\n",
" <td>734.0</td>\n",
" <td>7</td>\n",
" <td>English Patient, The (1996)</td>\n",
" <td>Drama, Romance, War</td>\n",
" </tr>\n",
" <tr>\n",
" <th>1455</th>\n",
" <td>734.0</td>\n",
" <td>8</td>\n",
" <td>Fugitive, The (1993)</td>\n",
" <td>Action, Thriller</td>\n",
" </tr>\n",
" <tr>\n",
" <th>3542</th>\n",
" <td>734.0</td>\n",
" <td>9</td>\n",
" <td>Jerry Maguire (1996)</td>\n",
" <td>Drama, Romance</td>\n",
" </tr>\n",
" <tr>\n",
" <th>8124</th>\n",
" <td>734.0</td>\n",
" <td>10</td>\n",
" <td>Monty Python and the Holy Grail (1974)</td>\n",
" <td>Comedy</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"</div>"
],
"text/plain": [
" user rec_nb title \\\n",
"2936 734.0 1 Return of the Jedi (1983) \n",
"7231 734.0 2 Fargo (1996) \n",
"3322 734.0 3 Toy Story (1995) \n",
"2519 734.0 4 Godfather, The (1972) \n",
"5958 734.0 5 Contact (1997) \n",
"1621 734.0 6 Schindler's List (1993) \n",
"1248 734.0 7 English Patient, The (1996) \n",
"1455 734.0 8 Fugitive, The (1993) \n",
"3542 734.0 9 Jerry Maguire (1996) \n",
"8124 734.0 10 Monty Python and the Holy Grail (1974) \n",
"\n",
" genres \n",
"2936 Action, Adventure, Romance, Sci-Fi, War \n",
"7231 Crime, Drama, Thriller \n",
"3322 Animation, Children's, Comedy \n",
"2519 Action, Crime, Drama \n",
"5958 Drama, Sci-Fi \n",
"1621 Drama, War \n",
"1248 Drama, Romance, War \n",
"1455 Action, Thriller \n",
"3542 Drama, Romance \n",
"8124 Comedy "
]
},
"execution_count": 11,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"train=pd.read_csv('./Datasets/ml-100k/train.csv', sep='\\t', header=None, names=['user', 'item', 'rating', 'timestamp'])\n",
"items=pd.read_csv('./Datasets/ml-100k/movies.csv')\n",
"\n",
"user=random.choice(list(set(train['user'])))\n",
"\n",
"train_content=pd.merge(train, items, left_on='item', right_on='id')\n",
"display(train_content[train_content['user']==user][['user', 'rating', 'title', 'genres']]\\\n",
" .sort_values(by='rating', ascending=False)[:15])\n",
"\n",
"reco = np.loadtxt('Recommendations generated/ml-100k/Self_P3_reco.csv', delimiter=',')\n",
"items=pd.read_csv('./Datasets/ml-100k/movies.csv')\n",
"\n",
"# Let's ignore scores - they are not used in evaluation: \n",
"reco_users=reco[:,:1]\n",
"reco_items=reco[:,1::2]\n",
"# Let's put them into one array\n",
"reco=np.concatenate((reco_users, reco_items), axis=1)\n",
"\n",
"# Let's rebuild it user-item dataframe\n",
"recommended=[]\n",
"for row in reco:\n",
" for rec_nb, entry in enumerate(row[1:]):\n",
" recommended.append((row[0], rec_nb+1, entry))\n",
"recommended=pd.DataFrame(recommended, columns=['user','rec_nb', 'item'])\n",
"\n",
"recommended_content=pd.merge(recommended, items, left_on='item', right_on='id')\n",
"recommended_content[recommended_content['user']==user][['user', 'rec_nb', 'title', 'genres']].sort_values(by='rec_nb')"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
    "# project task 6: generate recommendations of RP3Beta for hyperparameters found to optimize recall"
]
},
{
"cell_type": "code",
"execution_count": 12,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"943it [00:00, 4555.12it/s]\n"
]
},
{
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>RMSE</th>\n",
" <th>MAE</th>\n",
" <th>precision</th>\n",
" <th>recall</th>\n",
" <th>F_1</th>\n",
" <th>F_05</th>\n",
" <th>precision_super</th>\n",
" <th>recall_super</th>\n",
" <th>NDCG</th>\n",
" <th>mAP</th>\n",
" <th>MRR</th>\n",
" <th>LAUC</th>\n",
" <th>HR</th>\n",
" <th>F_2</th>\n",
" <th>Whole_average</th>\n",
" <th>Reco in test</th>\n",
" <th>Test coverage</th>\n",
" <th>Shannon</th>\n",
" <th>Gini</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>3.702928</td>\n",
" <td>3.527713</td>\n",
" <td>0.322694</td>\n",
" <td>0.216069</td>\n",
" <td>0.212152</td>\n",
" <td>0.247538</td>\n",
" <td>0.245279</td>\n",
" <td>0.284983</td>\n",
" <td>0.388271</td>\n",
" <td>0.248239</td>\n",
" <td>0.636318</td>\n",
" <td>0.605683</td>\n",
" <td>0.910923</td>\n",
" <td>0.20545</td>\n",
" <td>0.376967</td>\n",
" <td>0.999788</td>\n",
" <td>0.178932</td>\n",
" <td>4.549663</td>\n",
" <td>0.950182</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"</div>"
],
"text/plain": [
" RMSE MAE precision recall F_1 F_05 \\\n",
"0 3.702928 3.527713 0.322694 0.216069 0.212152 0.247538 \n",
"\n",
" precision_super recall_super NDCG mAP MRR LAUC \\\n",
"0 0.245279 0.284983 0.388271 0.248239 0.636318 0.605683 \n",
"\n",
" HR F_2 Whole_average Reco in test Test coverage Shannon \\\n",
"0 0.910923 0.20545 0.376967 0.999788 0.178932 4.549663 \n",
"\n",
" Gini \n",
"0 0.950182 "
]
},
"execution_count": 12,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# use better values than (1,0) for alpha and beta\n",
"# if you want you can also modify the model to consider different weights (we took as weights user ratings, maybe take ones or squares of ratings instead)\n",
    "# save the output in 'Recommendations generated/ml-100k/Self_RP3Beta_estimations.csv'\n",
"# and 'Recommendations generated/ml-100k/Self_RP3Beta_reco.csv'\n",
"\n",
"## SOLUTION TASK 6\n",
"\n",
"import evaluation_measures as ev\n",
"\n",
"model = RP3Beta()\n",
"model.fit(train_ui, alpha = 0.8, beta = 0.6)\n",
"\n",
"top_n = pd.DataFrame(model.recommend(user_code_id, item_code_id, topK = 10))\n",
"top_n.to_csv('Recommendations generated/ml-100k/Self_RP3Beta_reco.csv', index = False, header = False)\n",
"\n",
"estimations = pd.DataFrame(model.estimate(user_code_id, item_code_id, test_ui))\n",
"estimations.to_csv('Recommendations generated/ml-100k/Self_RP3Beta_estimations.csv', index = False, header = False)\n",
"estimations_df = pd.read_csv('Recommendations generated/ml-100k/Self_RP3Beta_estimations.csv', header = None)\n",
"\n",
"reco = np.loadtxt('Recommendations generated/ml-100k/Self_RP3Beta_reco.csv', delimiter = ',')\n",
"\n",
"ev.evaluate(test = pd.read_csv('./Datasets/ml-100k/test.csv', sep = '\\t', header = None),\n",
" estimations_df = estimations_df, \n",
" reco = reco,\n",
" super_reactions = [4, 5])"
]
},
{
"cell_type": "code",
"execution_count": 13,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>Beta</th>\n",
" <th>RMSE</th>\n",
" <th>MAE</th>\n",
" <th>precision</th>\n",
" <th>recall</th>\n",
" <th>F_1</th>\n",
" <th>F_05</th>\n",
" <th>precision_super</th>\n",
" <th>recall_super</th>\n",
" <th>NDCG</th>\n",
" <th>mAP</th>\n",
" <th>MRR</th>\n",
" <th>LAUC</th>\n",
" <th>HR</th>\n",
" <th>F_2</th>\n",
" <th>Whole_average</th>\n",
" <th>Reco in test</th>\n",
" <th>Test coverage</th>\n",
" <th>Shannon</th>\n",
" <th>Gini</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>0.9</td>\n",
" <td>3.704567</td>\n",
" <td>3.529375</td>\n",
" <td>0.162354</td>\n",
" <td>0.076967</td>\n",
" <td>0.089233</td>\n",
" <td>0.114583</td>\n",
" <td>0.134657</td>\n",
" <td>0.113253</td>\n",
" <td>0.160868</td>\n",
" <td>0.085486</td>\n",
" <td>0.243590</td>\n",
" <td>0.535405</td>\n",
" <td>0.580064</td>\n",
" <td>0.078906</td>\n",
" <td>0.197947</td>\n",
" <td>0.800106</td>\n",
" <td>0.415584</td>\n",
" <td>5.563910</td>\n",
" <td>0.857396</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>0.8</td>\n",
" <td>3.704552</td>\n",
" <td>3.529360</td>\n",
" <td>0.266384</td>\n",
" <td>0.147571</td>\n",
" <td>0.158660</td>\n",
" <td>0.194838</td>\n",
" <td>0.214485</td>\n",
" <td>0.209336</td>\n",
" <td>0.299850</td>\n",
" <td>0.184356</td>\n",
" <td>0.492852</td>\n",
" <td>0.571152</td>\n",
" <td>0.803818</td>\n",
" <td>0.146414</td>\n",
" <td>0.307476</td>\n",
" <td>0.936373</td>\n",
" <td>0.341270</td>\n",
" <td>5.257397</td>\n",
" <td>0.895882</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>0.7</td>\n",
" <td>3.704528</td>\n",
" <td>3.529335</td>\n",
" <td>0.304136</td>\n",
" <td>0.187298</td>\n",
" <td>0.191990</td>\n",
" <td>0.228749</td>\n",
" <td>0.238305</td>\n",
" <td>0.256201</td>\n",
" <td>0.358807</td>\n",
" <td>0.226808</td>\n",
" <td>0.593897</td>\n",
" <td>0.591207</td>\n",
" <td>0.868505</td>\n",
" <td>0.182056</td>\n",
" <td>0.352330</td>\n",
" <td>0.983033</td>\n",
" <td>0.256854</td>\n",
" <td>4.898568</td>\n",
" <td>0.928065</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>0.0</td>\n",
" <td>3.702446</td>\n",
" <td>3.527273</td>\n",
" <td>0.282185</td>\n",
" <td>0.192092</td>\n",
" <td>0.186749</td>\n",
" <td>0.216980</td>\n",
" <td>0.204185</td>\n",
" <td>0.240096</td>\n",
" <td>0.339114</td>\n",
" <td>0.204905</td>\n",
" <td>0.572157</td>\n",
" <td>0.593544</td>\n",
" <td>0.875928</td>\n",
" <td>0.181702</td>\n",
" <td>0.340803</td>\n",
" <td>1.000000</td>\n",
" <td>0.077201</td>\n",
" <td>3.875892</td>\n",
" <td>0.974947</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>0.1</td>\n",
" <td>3.703312</td>\n",
" <td>3.528128</td>\n",
" <td>0.290138</td>\n",
" <td>0.197597</td>\n",
" <td>0.192259</td>\n",
" <td>0.223336</td>\n",
" <td>0.210944</td>\n",
" <td>0.246153</td>\n",
" <td>0.347768</td>\n",
" <td>0.212034</td>\n",
" <td>0.581038</td>\n",
" <td>0.596328</td>\n",
" <td>0.884411</td>\n",
" <td>0.187030</td>\n",
" <td>0.347420</td>\n",
" <td>1.000000</td>\n",
" <td>0.085137</td>\n",
" <td>3.957416</td>\n",
" <td>0.972784</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>0.2</td>\n",
" <td>3.703825</td>\n",
" <td>3.528636</td>\n",
" <td>0.297137</td>\n",
" <td>0.201202</td>\n",
" <td>0.196067</td>\n",
" <td>0.228169</td>\n",
" <td>0.218026</td>\n",
" <td>0.252767</td>\n",
" <td>0.355655</td>\n",
" <td>0.219909</td>\n",
" <td>0.588904</td>\n",
" <td>0.598160</td>\n",
" <td>0.886532</td>\n",
" <td>0.190538</td>\n",
" <td>0.352756</td>\n",
" <td>1.000000</td>\n",
" <td>0.094517</td>\n",
" <td>4.053212</td>\n",
" <td>0.969980</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>0.3</td>\n",
" <td>3.704130</td>\n",
" <td>3.528939</td>\n",
" <td>0.303499</td>\n",
" <td>0.204749</td>\n",
" <td>0.199901</td>\n",
" <td>0.232829</td>\n",
" <td>0.225107</td>\n",
" <td>0.260797</td>\n",
" <td>0.363757</td>\n",
" <td>0.226825</td>\n",
" <td>0.599969</td>\n",
" <td>0.599964</td>\n",
" <td>0.888653</td>\n",
" <td>0.194073</td>\n",
" <td>0.358344</td>\n",
" <td>1.000000</td>\n",
" <td>0.105339</td>\n",
" <td>4.147779</td>\n",
" <td>0.966948</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>0.6</td>\n",
" <td>3.704488</td>\n",
" <td>3.529295</td>\n",
" <td>0.314634</td>\n",
" <td>0.206209</td>\n",
" <td>0.204818</td>\n",
" <td>0.240159</td>\n",
" <td>0.242489</td>\n",
" <td>0.273850</td>\n",
" <td>0.376438</td>\n",
" <td>0.238428</td>\n",
" <td>0.622042</td>\n",
" <td>0.600721</td>\n",
" <td>0.897137</td>\n",
" <td>0.197320</td>\n",
" <td>0.367854</td>\n",
" <td>0.996394</td>\n",
" <td>0.212843</td>\n",
" <td>4.621938</td>\n",
" <td>0.945932</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>0.4</td>\n",
" <td>3.704313</td>\n",
" <td>3.529120</td>\n",
" <td>0.308908</td>\n",
" <td>0.208811</td>\n",
" <td>0.203854</td>\n",
" <td>0.237241</td>\n",
" <td>0.229614</td>\n",
" <td>0.266918</td>\n",
" <td>0.370758</td>\n",
" <td>0.232673</td>\n",
" <td>0.609385</td>\n",
" <td>0.602014</td>\n",
" <td>0.895016</td>\n",
" <td>0.197981</td>\n",
" <td>0.363598</td>\n",
" <td>0.999894</td>\n",
" <td>0.132035</td>\n",
" <td>4.259682</td>\n",
" <td>0.962989</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>0.5</td>\n",
" <td>3.704422</td>\n",
" <td>3.529229</td>\n",
" <td>0.314316</td>\n",
" <td>0.211411</td>\n",
" <td>0.206768</td>\n",
" <td>0.240986</td>\n",
" <td>0.237124</td>\n",
" <td>0.273416</td>\n",
" <td>0.378307</td>\n",
" <td>0.239297</td>\n",
" <td>0.622792</td>\n",
" <td>0.603327</td>\n",
" <td>0.903499</td>\n",
" <td>0.200572</td>\n",
" <td>0.369318</td>\n",
" <td>0.999046</td>\n",
" <td>0.168831</td>\n",
" <td>4.411281</td>\n",
" <td>0.956648</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"</div>"
],
"text/plain": [
" Beta RMSE MAE precision recall F_1 F_05 \\\n",
"0 0.9 3.704567 3.529375 0.162354 0.076967 0.089233 0.114583 \n",
"0 0.8 3.704552 3.529360 0.266384 0.147571 0.158660 0.194838 \n",
"0 0.7 3.704528 3.529335 0.304136 0.187298 0.191990 0.228749 \n",
"0 0.0 3.702446 3.527273 0.282185 0.192092 0.186749 0.216980 \n",
"0 0.1 3.703312 3.528128 0.290138 0.197597 0.192259 0.223336 \n",
"0 0.2 3.703825 3.528636 0.297137 0.201202 0.196067 0.228169 \n",
"0 0.3 3.704130 3.528939 0.303499 0.204749 0.199901 0.232829 \n",
"0 0.6 3.704488 3.529295 0.314634 0.206209 0.204818 0.240159 \n",
"0 0.4 3.704313 3.529120 0.308908 0.208811 0.203854 0.237241 \n",
"0 0.5 3.704422 3.529229 0.314316 0.211411 0.206768 0.240986 \n",
"\n",
" precision_super recall_super NDCG mAP MRR LAUC \\\n",
"0 0.134657 0.113253 0.160868 0.085486 0.243590 0.535405 \n",
"0 0.214485 0.209336 0.299850 0.184356 0.492852 0.571152 \n",
"0 0.238305 0.256201 0.358807 0.226808 0.593897 0.591207 \n",
"0 0.204185 0.240096 0.339114 0.204905 0.572157 0.593544 \n",
"0 0.210944 0.246153 0.347768 0.212034 0.581038 0.596328 \n",
"0 0.218026 0.252767 0.355655 0.219909 0.588904 0.598160 \n",
"0 0.225107 0.260797 0.363757 0.226825 0.599969 0.599964 \n",
"0 0.242489 0.273850 0.376438 0.238428 0.622042 0.600721 \n",
"0 0.229614 0.266918 0.370758 0.232673 0.609385 0.602014 \n",
"0 0.237124 0.273416 0.378307 0.239297 0.622792 0.603327 \n",
"\n",
" HR F_2 Whole_average Reco in test Test coverage Shannon \\\n",
"0 0.580064 0.078906 0.197947 0.800106 0.415584 5.563910 \n",
"0 0.803818 0.146414 0.307476 0.936373 0.341270 5.257397 \n",
"0 0.868505 0.182056 0.352330 0.983033 0.256854 4.898568 \n",
"0 0.875928 0.181702 0.340803 1.000000 0.077201 3.875892 \n",
"0 0.884411 0.187030 0.347420 1.000000 0.085137 3.957416 \n",
"0 0.886532 0.190538 0.352756 1.000000 0.094517 4.053212 \n",
"0 0.888653 0.194073 0.358344 1.000000 0.105339 4.147779 \n",
"0 0.897137 0.197320 0.367854 0.996394 0.212843 4.621938 \n",
"0 0.895016 0.197981 0.363598 0.999894 0.132035 4.259682 \n",
"0 0.903499 0.200572 0.369318 0.999046 0.168831 4.411281 \n",
"\n",
" Gini \n",
"0 0.857396 \n",
"0 0.895882 \n",
"0 0.928065 \n",
"0 0.974947 \n",
"0 0.972784 \n",
"0 0.969980 \n",
"0 0.966948 \n",
"0 0.945932 \n",
"0 0.962989 \n",
"0 0.956648 "
]
},
"execution_count": 13,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"result.sort_values([\"recall\"])"
]
},
{
"cell_type": "raw",
"metadata": {},
"source": [
    "So beta 0.6 and alpha 0.8 seem to maximize recall"
]
},
{
"cell_type": "code",
"execution_count": 14,
"metadata": {},
"outputs": [],
"source": [
"model=RP3Beta()\n",
"model.fit(train_ui, alpha=0.8, beta=0.6)\n",
"reco=pd.DataFrame(model.recommend(user_code_id, item_code_id, topK=10))\n",
"estimations_df=pd.DataFrame(model.estimate(user_code_id, item_code_id, test_ui))\n",
"reco.to_csv('Recommendations generated/ml-100k/Self_RP3Beta_reco.csv', index=False, header=False)\n",
"estimations_df.to_csv('Recommendations generated/ml-100k/Self_RP3Beta_estimations.csv', index=False, header=False)"
]
},
{
"cell_type": "code",
"execution_count": 15,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"943it [00:00, 5460.13it/s]\n",
"943it [00:00, 4803.05it/s]\n",
"943it [00:00, 5055.06it/s]\n",
"943it [00:00, 5345.09it/s]\n",
"943it [00:00, 5630.17it/s]\n",
"943it [00:00, 5312.63it/s]\n",
"943it [00:00, 5254.57it/s]\n",
"943it [00:00, 5601.35it/s]\n",
"943it [00:00, 4720.28it/s]\n",
"943it [00:00, 5849.34it/s]\n",
"943it [00:00, 3628.59it/s]\n",
"943it [00:00, 4575.68it/s]\n",
"943it [00:00, 5025.63it/s]\n",
"943it [00:00, 4779.84it/s]\n",
"943it [00:00, 4822.16it/s]\n",
"943it [00:00, 4399.16it/s]\n",
"943it [00:00, 4856.83it/s]\n",
"943it [00:00, 4987.44it/s]\n"
]
},
{
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>Model</th>\n",
" <th>RMSE</th>\n",
" <th>MAE</th>\n",
" <th>precision</th>\n",
" <th>recall</th>\n",
" <th>F_1</th>\n",
" <th>F_05</th>\n",
" <th>precision_super</th>\n",
" <th>recall_super</th>\n",
" <th>NDCG</th>\n",
" <th>mAP</th>\n",
" <th>MRR</th>\n",
" <th>LAUC</th>\n",
" <th>HR</th>\n",
" <th>F_2</th>\n",
" <th>Whole_average</th>\n",
" <th>Reco in test</th>\n",
" <th>Test coverage</th>\n",
" <th>Shannon</th>\n",
" <th>Gini</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Self_RP3Beta</td>\n",
" <td>3.702928</td>\n",
" <td>3.527713</td>\n",
" <td>0.322694</td>\n",
" <td>0.216069</td>\n",
" <td>0.212152</td>\n",
" <td>0.247538</td>\n",
" <td>0.245279</td>\n",
" <td>0.284983</td>\n",
" <td>0.388271</td>\n",
" <td>0.248239</td>\n",
" <td>0.636318</td>\n",
" <td>0.605683</td>\n",
" <td>0.910923</td>\n",
" <td>0.205450</td>\n",
" <td>0.376967</td>\n",
" <td>0.999788</td>\n",
" <td>0.178932</td>\n",
" <td>4.549663</td>\n",
" <td>0.950182</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Self_P3</td>\n",
" <td>3.702446</td>\n",
" <td>3.527273</td>\n",
" <td>0.282185</td>\n",
" <td>0.192092</td>\n",
" <td>0.186749</td>\n",
" <td>0.216980</td>\n",
" <td>0.204185</td>\n",
" <td>0.240096</td>\n",
" <td>0.339114</td>\n",
" <td>0.204905</td>\n",
" <td>0.572157</td>\n",
" <td>0.593544</td>\n",
" <td>0.875928</td>\n",
" <td>0.181702</td>\n",
" <td>0.340803</td>\n",
" <td>1.000000</td>\n",
" <td>0.077201</td>\n",
" <td>3.875892</td>\n",
" <td>0.974947</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Self_TopPop</td>\n",
" <td>2.508258</td>\n",
" <td>2.217909</td>\n",
" <td>0.188865</td>\n",
" <td>0.116919</td>\n",
" <td>0.118732</td>\n",
" <td>0.141584</td>\n",
" <td>0.130472</td>\n",
" <td>0.137473</td>\n",
" <td>0.214651</td>\n",
" <td>0.111707</td>\n",
" <td>0.400939</td>\n",
" <td>0.555546</td>\n",
" <td>0.765642</td>\n",
" <td>0.112750</td>\n",
" <td>0.249607</td>\n",
" <td>1.000000</td>\n",
" <td>0.038961</td>\n",
" <td>3.159079</td>\n",
" <td>0.987317</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Self_SVDBaseline</td>\n",
" <td>3.642359</td>\n",
" <td>3.476956</td>\n",
" <td>0.135949</td>\n",
" <td>0.079751</td>\n",
" <td>0.082423</td>\n",
" <td>0.099673</td>\n",
" <td>0.106545</td>\n",
" <td>0.104164</td>\n",
" <td>0.160100</td>\n",
" <td>0.079313</td>\n",
" <td>0.328798</td>\n",
" <td>0.536764</td>\n",
" <td>0.629905</td>\n",
" <td>0.077617</td>\n",
" <td>0.201750</td>\n",
" <td>1.000000</td>\n",
" <td>0.282828</td>\n",
" <td>5.130008</td>\n",
" <td>0.909760</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Ready_SVD</td>\n",
" <td>0.951186</td>\n",
" <td>0.750553</td>\n",
" <td>0.094910</td>\n",
" <td>0.044564</td>\n",
" <td>0.051182</td>\n",
" <td>0.065639</td>\n",
" <td>0.084549</td>\n",
" <td>0.074410</td>\n",
" <td>0.106164</td>\n",
" <td>0.049263</td>\n",
" <td>0.228326</td>\n",
" <td>0.518988</td>\n",
" <td>0.477200</td>\n",
" <td>0.045601</td>\n",
" <td>0.153400</td>\n",
" <td>0.996925</td>\n",
" <td>0.219336</td>\n",
" <td>4.494800</td>\n",
" <td>0.949844</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Self_SVD</td>\n",
" <td>0.914024</td>\n",
" <td>0.717181</td>\n",
" <td>0.104454</td>\n",
" <td>0.043836</td>\n",
" <td>0.053331</td>\n",
" <td>0.070716</td>\n",
" <td>0.094528</td>\n",
" <td>0.076751</td>\n",
" <td>0.106711</td>\n",
" <td>0.050532</td>\n",
" <td>0.194366</td>\n",
" <td>0.518647</td>\n",
" <td>0.479321</td>\n",
" <td>0.045941</td>\n",
" <td>0.153261</td>\n",
" <td>0.853765</td>\n",
" <td>0.148629</td>\n",
" <td>3.836334</td>\n",
" <td>0.973007</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Ready_Baseline</td>\n",
" <td>0.949459</td>\n",
" <td>0.752487</td>\n",
" <td>0.091410</td>\n",
" <td>0.037652</td>\n",
" <td>0.046030</td>\n",
" <td>0.061286</td>\n",
" <td>0.079614</td>\n",
" <td>0.056463</td>\n",
" <td>0.095957</td>\n",
" <td>0.043178</td>\n",
" <td>0.198193</td>\n",
" <td>0.515501</td>\n",
" <td>0.437964</td>\n",
" <td>0.039549</td>\n",
" <td>0.141900</td>\n",
" <td>1.000000</td>\n",
" <td>0.033911</td>\n",
" <td>2.836513</td>\n",
" <td>0.991139</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Ready_SVDBiased</td>\n",
" <td>0.939359</td>\n",
" <td>0.740564</td>\n",
" <td>0.086850</td>\n",
" <td>0.036359</td>\n",
" <td>0.043933</td>\n",
" <td>0.058123</td>\n",
" <td>0.076395</td>\n",
" <td>0.056913</td>\n",
" <td>0.094528</td>\n",
" <td>0.043830</td>\n",
" <td>0.203204</td>\n",
" <td>0.514846</td>\n",
" <td>0.443266</td>\n",
" <td>0.038036</td>\n",
" <td>0.141357</td>\n",
" <td>0.994804</td>\n",
" <td>0.179654</td>\n",
" <td>4.199699</td>\n",
" <td>0.962848</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Self_KNNSurprisetask</td>\n",
" <td>0.946255</td>\n",
" <td>0.745209</td>\n",
" <td>0.083457</td>\n",
" <td>0.032848</td>\n",
" <td>0.041227</td>\n",
" <td>0.055493</td>\n",
" <td>0.074785</td>\n",
" <td>0.048890</td>\n",
" <td>0.089577</td>\n",
" <td>0.040902</td>\n",
" <td>0.189057</td>\n",
" <td>0.513076</td>\n",
" <td>0.417815</td>\n",
" <td>0.034996</td>\n",
" <td>0.135177</td>\n",
" <td>0.888547</td>\n",
" <td>0.130592</td>\n",
" <td>3.611806</td>\n",
" <td>0.978659</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Self_TopRated</td>\n",
" <td>2.508258</td>\n",
" <td>2.217909</td>\n",
" <td>0.079321</td>\n",
" <td>0.032667</td>\n",
" <td>0.039983</td>\n",
" <td>0.053170</td>\n",
" <td>0.068884</td>\n",
" <td>0.048582</td>\n",
" <td>0.070766</td>\n",
" <td>0.027602</td>\n",
" <td>0.114790</td>\n",
" <td>0.512943</td>\n",
" <td>0.411453</td>\n",
" <td>0.034385</td>\n",
" <td>0.124546</td>\n",
" <td>1.000000</td>\n",
" <td>0.024531</td>\n",
" <td>2.761238</td>\n",
" <td>0.991660</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Self_GlobalAvg</td>\n",
" <td>1.125760</td>\n",
" <td>0.943534</td>\n",
" <td>0.061188</td>\n",
" <td>0.025968</td>\n",
" <td>0.031383</td>\n",
" <td>0.041343</td>\n",
" <td>0.040558</td>\n",
" <td>0.032107</td>\n",
" <td>0.067695</td>\n",
" <td>0.027470</td>\n",
" <td>0.171187</td>\n",
" <td>0.509546</td>\n",
" <td>0.384942</td>\n",
" <td>0.027213</td>\n",
" <td>0.118383</td>\n",
" <td>1.000000</td>\n",
" <td>0.025974</td>\n",
" <td>2.711772</td>\n",
" <td>0.992003</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Ready_Random</td>\n",
" <td>1.525633</td>\n",
" <td>1.225714</td>\n",
" <td>0.047720</td>\n",
" <td>0.022049</td>\n",
" <td>0.025494</td>\n",
" <td>0.032845</td>\n",
" <td>0.029077</td>\n",
" <td>0.025015</td>\n",
" <td>0.051757</td>\n",
" <td>0.019242</td>\n",
" <td>0.128181</td>\n",
" <td>0.507543</td>\n",
" <td>0.327678</td>\n",
" <td>0.022628</td>\n",
" <td>0.103269</td>\n",
" <td>0.987275</td>\n",
" <td>0.184704</td>\n",
" <td>5.105122</td>\n",
" <td>0.906561</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Ready_I-KNN</td>\n",
" <td>1.030386</td>\n",
" <td>0.813067</td>\n",
" <td>0.026087</td>\n",
" <td>0.006908</td>\n",
" <td>0.010593</td>\n",
" <td>0.016046</td>\n",
" <td>0.021137</td>\n",
" <td>0.009522</td>\n",
" <td>0.024214</td>\n",
" <td>0.008958</td>\n",
" <td>0.048068</td>\n",
" <td>0.499885</td>\n",
" <td>0.154825</td>\n",
" <td>0.008007</td>\n",
" <td>0.069521</td>\n",
" <td>0.402333</td>\n",
" <td>0.434343</td>\n",
" <td>5.133650</td>\n",
" <td>0.877999</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Ready_I-KNNBaseline</td>\n",
" <td>0.935327</td>\n",
" <td>0.737424</td>\n",
" <td>0.002545</td>\n",
" <td>0.000755</td>\n",
" <td>0.001105</td>\n",
" <td>0.001602</td>\n",
" <td>0.002253</td>\n",
" <td>0.000930</td>\n",
" <td>0.003444</td>\n",
" <td>0.001362</td>\n",
" <td>0.011760</td>\n",
" <td>0.496724</td>\n",
" <td>0.021209</td>\n",
" <td>0.000862</td>\n",
" <td>0.045379</td>\n",
" <td>0.482821</td>\n",
" <td>0.059885</td>\n",
" <td>2.232578</td>\n",
" <td>0.994487</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Ready_U-KNN</td>\n",
" <td>1.023495</td>\n",
" <td>0.807913</td>\n",
" <td>0.000742</td>\n",
" <td>0.000205</td>\n",
" <td>0.000305</td>\n",
" <td>0.000449</td>\n",
" <td>0.000536</td>\n",
" <td>0.000198</td>\n",
" <td>0.000845</td>\n",
" <td>0.000274</td>\n",
" <td>0.002744</td>\n",
" <td>0.496441</td>\n",
" <td>0.007423</td>\n",
" <td>0.000235</td>\n",
" <td>0.042533</td>\n",
" <td>0.602121</td>\n",
" <td>0.010823</td>\n",
" <td>2.089186</td>\n",
" <td>0.995706</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Self_BaselineIU</td>\n",
" <td>0.958136</td>\n",
" <td>0.754051</td>\n",
" <td>0.000954</td>\n",
" <td>0.000188</td>\n",
" <td>0.000298</td>\n",
" <td>0.000481</td>\n",
" <td>0.000644</td>\n",
" <td>0.000223</td>\n",
" <td>0.001043</td>\n",
" <td>0.000335</td>\n",
" <td>0.003348</td>\n",
" <td>0.496433</td>\n",
" <td>0.009544</td>\n",
" <td>0.000220</td>\n",
" <td>0.042809</td>\n",
" <td>0.699046</td>\n",
" <td>0.005051</td>\n",
" <td>1.945910</td>\n",
" <td>0.995669</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Self_BaselineUI</td>\n",
" <td>0.967585</td>\n",
" <td>0.762740</td>\n",
" <td>0.000954</td>\n",
" <td>0.000170</td>\n",
" <td>0.000278</td>\n",
" <td>0.000463</td>\n",
" <td>0.000644</td>\n",
" <td>0.000189</td>\n",
" <td>0.000752</td>\n",
" <td>0.000168</td>\n",
" <td>0.001677</td>\n",
" <td>0.496424</td>\n",
" <td>0.009544</td>\n",
" <td>0.000201</td>\n",
" <td>0.042622</td>\n",
" <td>0.600530</td>\n",
" <td>0.005051</td>\n",
" <td>1.803126</td>\n",
" <td>0.996380</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Self_IKNN</td>\n",
" <td>1.018363</td>\n",
" <td>0.808793</td>\n",
" <td>0.000318</td>\n",
" <td>0.000108</td>\n",
" <td>0.000140</td>\n",
" <td>0.000189</td>\n",
" <td>0.000000</td>\n",
" <td>0.000000</td>\n",
" <td>0.000214</td>\n",
" <td>0.000037</td>\n",
" <td>0.000368</td>\n",
" <td>0.496391</td>\n",
" <td>0.003181</td>\n",
" <td>0.000118</td>\n",
" <td>0.041755</td>\n",
" <td>0.392153</td>\n",
" <td>0.115440</td>\n",
" <td>4.174741</td>\n",
" <td>0.965327</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"</div>"
],
"text/plain": [
" Model RMSE MAE precision recall F_1 \\\n",
"0 Self_RP3Beta 3.702928 3.527713 0.322694 0.216069 0.212152 \n",
"0 Self_P3 3.702446 3.527273 0.282185 0.192092 0.186749 \n",
"0 Self_TopPop 2.508258 2.217909 0.188865 0.116919 0.118732 \n",
"0 Self_SVDBaseline 3.642359 3.476956 0.135949 0.079751 0.082423 \n",
"0 Ready_SVD 0.951186 0.750553 0.094910 0.044564 0.051182 \n",
"0 Self_SVD 0.914024 0.717181 0.104454 0.043836 0.053331 \n",
"0 Ready_Baseline 0.949459 0.752487 0.091410 0.037652 0.046030 \n",
"0 Ready_SVDBiased 0.939359 0.740564 0.086850 0.036359 0.043933 \n",
"0 Self_KNNSurprisetask 0.946255 0.745209 0.083457 0.032848 0.041227 \n",
"0 Self_TopRated 2.508258 2.217909 0.079321 0.032667 0.039983 \n",
"0 Self_GlobalAvg 1.125760 0.943534 0.061188 0.025968 0.031383 \n",
"0 Ready_Random 1.525633 1.225714 0.047720 0.022049 0.025494 \n",
"0 Ready_I-KNN 1.030386 0.813067 0.026087 0.006908 0.010593 \n",
"0 Ready_I-KNNBaseline 0.935327 0.737424 0.002545 0.000755 0.001105 \n",
"0 Ready_U-KNN 1.023495 0.807913 0.000742 0.000205 0.000305 \n",
"0 Self_BaselineIU 0.958136 0.754051 0.000954 0.000188 0.000298 \n",
"0 Self_BaselineUI 0.967585 0.762740 0.000954 0.000170 0.000278 \n",
"0 Self_IKNN 1.018363 0.808793 0.000318 0.000108 0.000140 \n",
"\n",
" F_05 precision_super recall_super NDCG mAP MRR \\\n",
"0 0.247538 0.245279 0.284983 0.388271 0.248239 0.636318 \n",
"0 0.216980 0.204185 0.240096 0.339114 0.204905 0.572157 \n",
"0 0.141584 0.130472 0.137473 0.214651 0.111707 0.400939 \n",
"0 0.099673 0.106545 0.104164 0.160100 0.079313 0.328798 \n",
"0 0.065639 0.084549 0.074410 0.106164 0.049263 0.228326 \n",
"0 0.070716 0.094528 0.076751 0.106711 0.050532 0.194366 \n",
"0 0.061286 0.079614 0.056463 0.095957 0.043178 0.198193 \n",
"0 0.058123 0.076395 0.056913 0.094528 0.043830 0.203204 \n",
"0 0.055493 0.074785 0.048890 0.089577 0.040902 0.189057 \n",
"0 0.053170 0.068884 0.048582 0.070766 0.027602 0.114790 \n",
"0 0.041343 0.040558 0.032107 0.067695 0.027470 0.171187 \n",
"0 0.032845 0.029077 0.025015 0.051757 0.019242 0.128181 \n",
"0 0.016046 0.021137 0.009522 0.024214 0.008958 0.048068 \n",
"0 0.001602 0.002253 0.000930 0.003444 0.001362 0.011760 \n",
"0 0.000449 0.000536 0.000198 0.000845 0.000274 0.002744 \n",
"0 0.000481 0.000644 0.000223 0.001043 0.000335 0.003348 \n",
"0 0.000463 0.000644 0.000189 0.000752 0.000168 0.001677 \n",
"0 0.000189 0.000000 0.000000 0.000214 0.000037 0.000368 \n",
"\n",
" LAUC HR F_2 Whole_average Reco in test Test coverage \\\n",
"0 0.605683 0.910923 0.205450 0.376967 0.999788 0.178932 \n",
"0 0.593544 0.875928 0.181702 0.340803 1.000000 0.077201 \n",
"0 0.555546 0.765642 0.112750 0.249607 1.000000 0.038961 \n",
"0 0.536764 0.629905 0.077617 0.201750 1.000000 0.282828 \n",
"0 0.518988 0.477200 0.045601 0.153400 0.996925 0.219336 \n",
"0 0.518647 0.479321 0.045941 0.153261 0.853765 0.148629 \n",
"0 0.515501 0.437964 0.039549 0.141900 1.000000 0.033911 \n",
"0 0.514846 0.443266 0.038036 0.141357 0.994804 0.179654 \n",
"0 0.513076 0.417815 0.034996 0.135177 0.888547 0.130592 \n",
"0 0.512943 0.411453 0.034385 0.124546 1.000000 0.024531 \n",
"0 0.509546 0.384942 0.027213 0.118383 1.000000 0.025974 \n",
"0 0.507543 0.327678 0.022628 0.103269 0.987275 0.184704 \n",
"0 0.499885 0.154825 0.008007 0.069521 0.402333 0.434343 \n",
"0 0.496724 0.021209 0.000862 0.045379 0.482821 0.059885 \n",
"0 0.496441 0.007423 0.000235 0.042533 0.602121 0.010823 \n",
"0 0.496433 0.009544 0.000220 0.042809 0.699046 0.005051 \n",
"0 0.496424 0.009544 0.000201 0.042622 0.600530 0.005051 \n",
"0 0.496391 0.003181 0.000118 0.041755 0.392153 0.115440 \n",
"\n",
" Shannon Gini \n",
"0 4.549663 0.950182 \n",
"0 3.875892 0.974947 \n",
"0 3.159079 0.987317 \n",
"0 5.130008 0.909760 \n",
"0 4.494800 0.949844 \n",
"0 3.836334 0.973007 \n",
"0 2.836513 0.991139 \n",
"0 4.199699 0.962848 \n",
"0 3.611806 0.978659 \n",
"0 2.761238 0.991660 \n",
"0 2.711772 0.992003 \n",
"0 5.105122 0.906561 \n",
"0 5.133650 0.877999 \n",
"0 2.232578 0.994487 \n",
"0 2.089186 0.995706 \n",
"0 1.945910 0.995669 \n",
"0 1.803126 0.996380 \n",
"0 4.174741 0.965327 "
]
},
"execution_count": 15,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# Evaluate every generated recommendation file and rank models by precision.\n",
"# NOTE: import evaluation_measures BEFORE reloading it — the original order\n",
"# (reload first) raised NameError on a fresh kernel and only worked via\n",
"# leftover kernel state. importlib replaces the deprecated imp module.\n",
"import importlib\n",
"import evaluation_measures as ev\n",
"importlib.reload(ev)  # pick up local edits to evaluation_measures without restarting the kernel\n",
"\n",
"dir_path = \"Recommendations generated/ml-100k/\"\n",
"super_reactions = [4, 5]  # ratings treated as strong (\"super\") positive reactions\n",
"test = pd.read_csv('./Datasets/ml-100k/test.csv', sep='\\t', header=None)\n",
"\n",
"ev.evaluate_all(test, dir_path, super_reactions)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Project task 7 (optional): implement a graph-based model of your choice"
]
},
{
"cell_type": "code",
"execution_count": 16,
"metadata": {},
"outputs": [],
"source": [
"# for example change length of paths in RP3beta\n",
"# save the outptut in 'Recommendations generated/ml-100k/Self_GraphTask_estimations.csv'\n",
"# and 'Recommendations generated/ml-100k/Self_GraphTask_reco.csv'"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.9"
}
},
"nbformat": 4,
"nbformat_minor": 4
}