workshops_recommender_systems/P4. Matrix Factorization.ipynb
2020-06-13 22:14:04 +02:00

2169 lines
95 KiB
Plaintext

{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Self made SVD"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
"import helpers\n",
"import pandas as pd\n",
"import numpy as np\n",
"import scipy.sparse as sparse\n",
"from collections import defaultdict\n",
"from itertools import chain\n",
"import random\n",
"\n",
"train_read=pd.read_csv('./Datasets/ml-100k/train.csv', sep='\\t', header=None)\n",
"test_read=pd.read_csv('./Datasets/ml-100k/test.csv', sep='\\t', header=None)\n",
"train_ui, test_ui, user_code_id, user_id_code, item_code_id, item_id_code = helpers.data_to_csr(train_read, test_read)"
]
},
{
"cell_type": "code",
"execution_count": 13,
"metadata": {},
"outputs": [],
"source": [
"# Done similarly to https://github.com/albertauyeung/matrix-factorization-in-python\n",
"from tqdm import tqdm\n",
"\n",
"class SVD():\n",
" \n",
" def __init__(self, train_ui, learning_rate, regularization, nb_factors, iterations):\n",
" self.train_ui=train_ui\n",
" self.uir=list(zip(*[train_ui.nonzero()[0],train_ui.nonzero()[1], train_ui.data]))\n",
" \n",
" self.learning_rate=learning_rate\n",
" self.regularization=regularization\n",
" self.iterations=iterations\n",
" self.nb_users, self.nb_items=train_ui.shape\n",
" self.nb_ratings=train_ui.nnz\n",
" self.nb_factors=nb_factors\n",
" \n",
" self.Pu=np.random.normal(loc=0, scale=1./self.nb_factors, size=(self.nb_users, self.nb_factors))\n",
" self.Qi=np.random.normal(loc=0, scale=1./self.nb_factors, size=(self.nb_items, self.nb_factors))\n",
"\n",
" def train(self, test_ui=None):\n",
" if test_ui!=None:\n",
" self.test_uir=list(zip(*[test_ui.nonzero()[0],test_ui.nonzero()[1], test_ui.data]))\n",
" \n",
" self.learning_process=[]\n",
" pbar = tqdm(range(self.iterations))\n",
" for i in pbar:\n",
" pbar.set_description(f'Epoch {i} RMSE: {self.learning_process[-1][1] if i>0 else 0}. Training epoch {i+1}...')\n",
" np.random.shuffle(self.uir)\n",
" self.sgd(self.uir)\n",
" if test_ui==None:\n",
" self.learning_process.append([i+1, self.RMSE_total(self.uir)])\n",
" else:\n",
" self.learning_process.append([i+1, self.RMSE_total(self.uir), self.RMSE_total(self.test_uir)])\n",
" \n",
" def sgd(self, uir):\n",
" \n",
" for u, i, score in uir:\n",
" # Computer prediction and error\n",
" prediction = self.get_rating(u,i)\n",
" e = (score - prediction)\n",
" \n",
" # Update user and item latent feature matrices\n",
" Pu_update=self.learning_rate * (e * self.Qi[i] - self.regularization * self.Pu[u])\n",
" Qi_update=self.learning_rate * (e * self.Pu[u] - self.regularization * self.Qi[i])\n",
" \n",
" self.Pu[u] += Pu_update\n",
" self.Qi[i] += Qi_update\n",
" \n",
" def get_rating(self, u, i):\n",
" prediction = self.Pu[u].dot(self.Qi[i].T)\n",
" return prediction\n",
" \n",
" def RMSE_total(self, uir):\n",
" RMSE=0\n",
" for u,i, score in uir:\n",
" prediction = self.get_rating(u,i)\n",
" RMSE+=(score - prediction)**2\n",
" return np.sqrt(RMSE/len(uir))\n",
" \n",
" def estimations(self):\n",
" self.estimations=\\\n",
" np.dot(self.Pu,self.Qi.T)\n",
"\n",
" def recommend(self, user_code_id, item_code_id, topK=10):\n",
" \n",
" top_k = defaultdict(list)\n",
" for nb_user, user in enumerate(self.estimations):\n",
" \n",
" user_rated=self.train_ui.indices[self.train_ui.indptr[nb_user]:self.train_ui.indptr[nb_user+1]]\n",
" for item, score in enumerate(user):\n",
" if item not in user_rated and not np.isnan(score):\n",
" top_k[user_code_id[nb_user]].append((item_code_id[item], score))\n",
" result=[]\n",
" # Let's choose k best items in the format: (user, item1, score1, item2, score2, ...)\n",
" for uid, item_scores in top_k.items():\n",
" item_scores.sort(key=lambda x: x[1], reverse=True)\n",
" result.append([uid]+list(chain(*item_scores[:topK])))\n",
" return result\n",
" \n",
" def estimate(self, user_code_id, item_code_id, test_ui):\n",
" result=[]\n",
" for user, item in zip(*test_ui.nonzero()):\n",
" result.append([user_code_id[user], item_code_id[item], \n",
" self.estimations[user,item] if not np.isnan(self.estimations[user,item]) else 1])\n",
" return result"
]
},
{
"cell_type": "code",
"execution_count": 14,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"Epoch 39 RMSE: 0.7471068280984748. Training epoch 40...: 100%|█████████████████████████| 40/40 [02:29<00:00, 3.74s/it]\n"
]
}
],
"source": [
"model=SVD(train_ui, learning_rate=0.005, regularization=0.02, nb_factors=100, iterations=40)\n",
"model.train(test_ui)"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [
{
"ename": "ModuleNotFoundError",
"evalue": "No module named 'matplotlib'",
"output_type": "error",
"traceback": [
"\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[1;31mModuleNotFoundError\u001b[0m Traceback (most recent call last)",
"\u001b[1;32m<ipython-input-9-5fc9eff4d893>\u001b[0m in \u001b[0;36m<module>\u001b[1;34m\u001b[0m\n\u001b[1;32m----> 1\u001b[1;33m \u001b[1;32mimport\u001b[0m \u001b[0mmatplotlib\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mpyplot\u001b[0m \u001b[1;32mas\u001b[0m \u001b[0mplt\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 2\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 3\u001b[0m \u001b[0mdf\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0mpd\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mDataFrame\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mmodel\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mlearning_process\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0miloc\u001b[0m\u001b[1;33m[\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;36m2\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 4\u001b[0m \u001b[0mdf\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mcolumns\u001b[0m\u001b[1;33m=\u001b[0m\u001b[1;33m[\u001b[0m\u001b[1;34m'epoch'\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;34m'train_RMSE'\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 5\u001b[0m \u001b[0mplt\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mplot\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;34m'epoch'\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;34m'train_RMSE'\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mdata\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0mdf\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mcolor\u001b[0m\u001b[1;33m=\u001b[0m\u001b[1;34m'blue'\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
"\u001b[1;31mModuleNotFoundError\u001b[0m: No module named 'matplotlib'"
]
}
],
"source": [
"import matplotlib.pyplot as plt\n",
"\n",
"df=pd.DataFrame(model.learning_process).iloc[:,:2]\n",
"df.columns=['epoch', 'train_RMSE']\n",
"plt.plot('epoch', 'train_RMSE', data=df, color='blue')\n",
"plt.legend()"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"<matplotlib.legend.Legend at 0x7f963ce5ddd8>"
]
},
"execution_count": 5,
"metadata": {},
"output_type": "execute_result"
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAXoAAAD4CAYAAADiry33AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAgAElEQVR4nO3deXhU5fXA8e+BAGFxYYkVCEtkadlDDSAFFKTIKlLcQFERLFqhYBUqIG4oFaUK0iJuP6CgJeCKW0GEWKwbhFX2sEoABdkUBRF4f3+cOzCEQCZhkjszOZ/nmSeZe2cm5zJ65s57z3tecc5hjDEmdhXxOwBjjDH5yxK9McbEOEv0xhgT4yzRG2NMjLNEb4wxMS7O7wCyqlChgqtevbrfYRhjTFRZvHjxd865hOz2RVyir169Ounp6X6HYYwxUUVEtp5pX45DNyIySUR2icjKM+wXERkvIhtEZIWI/DZo320ikuHdbstb+MYYY85FKGP0U4AOZ9nfEajl3foBEwFEpBzwMNAMaAo8LCJlzyVYY4wxuZdjonfOLQD2nuUh1wBTnfoCuFBEKgLtgbnOub3OuX3AXM7+gWGMMSYfhGOMvjKwLeh+prftTNtPIyL90G8DVK1aNQwhGWMK0i+//EJmZiaHDx/2O5SYFx8fT2JiIsWKFQv5ORFxMdY59yLwIkBKSoo13zEmymRmZnLeeedRvXp1RMTvcGKWc449e/aQmZlJUlJSyM8LRx39dqBK0P1Eb9uZthtjYszhw4cpX768Jfl8JiKUL18+19+cwpHo3wFu9apvLgMOOOd2AnOAq0SkrHcR9ipvmzEmBlmSLxh5+XfOcehGRKYDrYEKIpKJVtIUA3DOPQ98AHQCNgA/Abd7+/aKyGPAIu+lRjrnznZRNwwOAhuBRvn7Z4wxJorkmOidcz1z2O+A/mfYNwmYlLfQ8mIi8FfgOvTzqH7B/WljjIlQMdbr5g7gQXSEqCHQA1jja0TGmPy3f/9+nnvuuVw/r1OnTuzfvz/Xz+vduzdJSUkkJyfTqFEj5s2bd2Jf69atqVq1KsGLOnXr1o0yZcoAcPz4cQYOHEj9+vVp0KABTZo0YfPmzYB2BmjQoAHJyckkJyczcODAXMeWnYiougmfssBIYBDwDPAssBuYd7YnGWOiXCDR33333adsP3r0KHFxZ05zH3zwQZ7/5pgxY7juuutIS0ujX79+ZGRknNh34YUX8umnn9KyZUv279/Pzp07T+ybMWMGO3bsYMWKFRQpUoTMzExKly59Yn9aWhoVKlTIc1zZibFEH1AeGAXcAwQ+rbcDDwHDgRo+xWVM7LvnHli2LLyvmZwM48adef/QoUPZuHEjycnJFCtWjPj4eMqWLcvatWtZv3493bp1Y9u2bRw+fJhBgwbRr18/4GRvrYMHD9KxY0datmzJZ599RuXKlZk1axYlS5bMMbbmzZuzffupBYU9evQgNTWVli1b8uabb9K9e3dWrVoFwM6dO6lYsSJFiuiASmJiYh7/VUIXY0M3WSWgnRkAFgL/Bn6NDvFs9isoY0yYjR49mho1arBs2TLGjBnDkiVLePbZZ1m/fj0AkyZNYvHixaSnpzN+/Hj27Nlz2mtkZGTQv39/Vq1axYUXXsgbb7wR0t+ePXs23bp1O2Vb27ZtWbBgAceOHSM1NZUbb7zxxL4bbriBd999l+TkZO677z6WLl16ynPbtGlzYuhm7Nixuf2nyFaMntFn5w/AJuBJ4Hn0GnEXYBZgZWHGhMvZzrwLStOmTU+ZUDR+/HjeeustALZt20ZGRgbly5c/5TmBMXeASy+9lC1btpz1bwwZMoThw4eTmZnJ559/fsq+okWL0rJlS1JTUzl06BDBrdcTExNZt24d8+fPZ/78+bRt25bXXnuNtm3bAvkzdBPjZ/RZVQTGoSWYD6FDOIEkPw5Y5VNcxphwCh7z/vjjj/noo4/4/PPPWb
58OY0bN852wlGJEiVO/F60aFGOHj161r8xZswY1q9fz5NPPkmfPn1O29+jRw8GDhzIDTfckO3f6tixI2PGjGH48OG8/fbbuTm8XCtkiT6gMvAIEPhatAMty6wPXAa8BPzgS2TGmNw777zz+OGH7P+fPXDgAGXLlqVUqVKsXbuWL774Iqx/e8CAARw/fpw5c06dD9qqVSuGDRtGz56nVqgvWbKEHTt2AFqBs2LFCqpVqxbWmLIqpIk+q0roxdqn0QTfDz37n+9nUMaYEJUvX54WLVpQv359hgwZcsq+Dh06cPToUerUqcPQoUO57LLLwvq3RYQRI0bw1FNPnbZ98ODBpw3D7Nq1i6uvvpr69evTsGFD4uLiGDBgwIn9wWP0t956a3hiDK71jAQpKSnO3xWmHPAlMBkdz78QeBNtxHkLUM6/0IyJUGvWrKFOnTp+h1FoZPfvLSKLnXMp2T3ezuhPI+jwzQtokgd4Hy3VrIQm+0/QDwRjjIl8luhD8n/AMrQs8x3gcuBmXyMyxuS//v37nxhGCdwmT57sd1i5VojKK89VI+Cf6HDOa8BF3vY96Nn+HegHgJVqGhMrJkyY4HcIYWGJPtdKA72D7q8A3gVeAaqjqyW29245z6ozxpj8ZkM356wNWp45BS3PnIZOzgrMvFsCpAPH/QjOGGMs0YdHKeA29Mx+L/A5uqAWwGNAE3Sopyf6gbCj4EM0xhRalujDrjhatRPwAjqs0wlIQ9dl6Ry0/2usgscYk58s0ee7i9AKnanomfxSYLy37xBQB7gEvaD7MXD2adfGmNPltR89wLhx4/jpp5/O+phAn/iGDRtyxRVXsHXr1hP7RIRevXqduH/06FESEhLo0qULAN9++y1dunShUaNG1K1bl06dOgGwZcsWSpYseUpFz9SpU/N0DDmxRF+gigDJQCvvvkN75tdHG621AX4FhNY1zxij8jvRgzYbW7FiBa1bt+bxxx8/sb106dKsXLmSQ4cOATB37lwqV658Yv9DDz1Eu3btWL58OatXr2b06NEn9gU6bgZu4ZoJm5Ulel+VQssy3wW+QxN8Z072y5+LDvk8i66UZUM8Jlq0zuYWSMQ/nWH/FG//d9nsO7vgfvRDhgxhzJgxNGnShIYNG/Lwww8D8OOPP9K5c2caNWpE/fr1mTFjBuPHj2fHjh20adOGNm3ahHRk2fWf79SpE++//z4A06dPP6W/zc6dO0/pOd+wYcOQ/k44WaKPGGWA7ugQT7K3bT+65vo9QF2gGvrBYA3XjAkW3I++Xbt2ZGRksHDhQpYtW8bixYtZsGABs2fPplKlSixfvpyVK1fSoUMHBg4cSKVKlUhLSyMtLS2kv5Vd//nAQiOHDx9mxYoVNGvW7MS+/v3707dvX9q0acOoUaNONDQDTnw4BW6ffPJJeP5BsrA6+oh2vXfbjJ7dzwH+h9byAzyFJv2r0AvAxXyI0ZjsfHyWfaVy2F8hh/1n9+GHH/Lhhx/SuHFjAA4ePEhGRgatWrXivvvu4/7776dLly60atUqh1c6VZs2bdi7dy9lypThscceO2Vfw4YN2bJlC9OnTz8xBh/Qvn17Nm3axOzZs/nPf/5D48aNWblyJXBy6Ca/2Rl9VEhCO2q+gQ7hBN62xcDf0Bm55dH6/Rl+BGhMxHDOMWzYsBPj3hs2bKBv377Url2bJUuW0KBBA0aMGMHIkSNz9bppaWls3bqV5OTkE8NBwbp27crgwYNPa0sMUK5cOW666SamTZtGkyZNWLBgQZ6PLy8s0Ued4BYLM9CJWW+gNfpLgI+8fQ540Lv/c0EGaEyBC+5H3759eyZNmsTBgwcB2L59O7t27WLHjh2UKlWKXr16MWTIEJYsWXLac3MSFxfHuHHjmDp1Knv37j1lX58+fXj44Ydp0KDBKdvnz59/4mLvDz/8wMaNG6lateo5HW9u2dBN1LsQHdvvjib3Q972zejQzuPoV+Ur0fYM3dFe+8bEjuB+9B07duSmm2
6iefPmAJQpU4ZXXnmFDRs2MGTIEIoUKUKxYsWYOHEiAP369aNDhw4nxupzUrFiRXr27MmECRN48MEHT2xPTExk4MCBpz1+8eLFDBgwgLi4OI4fP84dd9xBkyZN2LJly4kx+oA+ffpk+xrnyvrRx7Qf0Ulas4H/oGvmvgNcDWz17rfExvbNubJ+9AUrt/3o7Yw+ppVGF0Dvgp7tb0CXUQT4F/AwcD56pt8F6IheCDPGxBJL9IWGALWC7t8LNATeQxdWmYl+MOwBSqC1zOWxtsumMGnWrBk//3zqNa1p06adNu4ebSzRF1plgG7e7TjammENmuRBz/J3oaWbV6Kzdm1s35yZcw6R6D4x+PLLL/0OIUd5GW63qhuD/mdwKRDo1+GAu4EUtKLnZnQZxT8HPedAQQZoIlx8fDx79uzJUxIyoXPOsWfPHuLj43P1vJDO6EWkAzoPvyjwsnNudJb91YBJQALap7eXcy7T23cM+Mp76NfOua65itD4QIA+3u0YuoziPLQBG8B2oCrQGGiLnvG35ORELlPYJCYmkpmZye7du/0OJebFx8ef0lIhFDlW3YhIUWA90A7IBBYBPZ1zq4Me8xrwnnPuXyJyJXC7c+4Wb99B51yZUAOyqpto8C3ahG0+2nv/F/Qc4G30ou5e9Iy/OjbGb0zBOFvVTShDN02BDc65Tc65I0AqcE2Wx9RF/68HrefLut/ElF+hFTv/Bfah5ZtD0XV1QS/sXoIO93QHxgCfoh8IxpiCFkqirwxsC7qfyckavYDl6P/RoPPwzxOR8t79eBFJF5EvRKQb2RCRft5j0u2rX7Qpja6P+zhQxdvWHu1U+Ht0Td2/okM7B739n6DnAzZj15iCEK6LsYOBK0RkKXAFOoh7zNtXzfs6cRMwTkRqZH2yc+5F51yKcy4lISEhTCEZ/yQBf0LXz90AfAN8CJT19v8NHdcvh7ZhHgusPv1ljDFhEUqi387JUzXQxVBPacbsnNvhnOvunGsMPOBt2+/93O793IS2pGt87mGf7tgx6NMHPv00P17dnJtfoZd4AmYAs9CLvRvRmv67gvbPQa8DGGPCIZREvwioJSJJIlIc6IHOoz9BRCqISOC1hqEVOIhIWREpEXgM0IJ8OnXbvBlmz4aWLeHaayEjIz/+igmP84GuwD+AdcAWtKgLtO1yF+Bi9JzgfrTix4Z5jMmrHBO9c+4oMAA9zVoDzHTOrRKRkSISKJVsDawTkfXo6dsob3sdIF1ElqODsqODq3XCqWZNTe4jR8KcOVC3LgwcCN99lx9/zYRXNU5+0SuNVvKMQhu2jUXH+v/h7f8BPVewem1jQhWTTc2++QYeeQReegnKlIHhwzXplywZnhhNQfoBre5piNbuvwbcgNYDXIVe+G2L9egxhd3ZyitjMtEHrF4N998P770HVarAqFFw881QxOYDR7Fv0P48H6K99vd523eiwz0z0A+GRO9WxftZE6vpN7HsXOvoo1bduvDuu5CWBhddBLfeCikpMH9+zs81kepidN3cmcBu4EtgNHCRt38tmuwfAG5Dq3vqcXKoZyy6WtdzwGecLPk0JnbFdKIPaN0aFi6EV16BPXugbVvo2BE++8zvyMy5KYrO57ufk/8pP4x24PwRndA9D/h30P5taP+e/mhtwPloRXDAEmAHdg3AxJJCkehBh2tuvhnWrYMnn4RFi6BFC/0QmDsXImwEy5yzUmhb5iuB64K2P4O2YP4aLR57FB3nD7gRHf+/GK0MehJN/sZEr0KT6APi4+Gvf4WtW2HsWK3UueoqaNYM3n4bjh/3O0KT/wQdu78aXVd3eNC+SWipZ0e09HMoMNHbdxytHn4TvVZgTHSI6Yuxofj5Z5g6FUaPhk2bdFx/2DDo0QPirFu/YTdwGP1g2AL8hpM1/Zegwz8D0CEkY/xTaC/GhqJECfjjH3VI59VXQQRuuQVq14YXXtAPAlOYJXByYnh1tCvn58DfgWS0+ment/+/QG20/PNvwA
fYeL+JBIU+0QfExcFNN8GKFTBrFiQkwF13wSWXaFnmtzYj3wC6AtdlwH3oRd2d6ExegHi03n8JWvXTGR3vD3xDXQq8gi7PYJ08TcGxRJ9FkSLQtSt88QV89JEO5YwYoXX4PXvCggV24dYEE7T6B6AZ8DrayO0AsAAYj5Z3gpZ93oJ+GJRBZwP3Bg55+48WSMSm8Cn0Y/ShWLcOnn8eJk+GAwegXj24+27o1QvOP9/v6Ez0+AW9wLs86LYNWIV+YNyC9v1rhC7xkIQOBbX1IVYTbQrtzNhw++knSE2FCRNgyRJtr9Crlyb9KF8k3kSEqeiY/zIgAziCnv0v9/b/AR0qSkKvFySh3xZaFHSgJgJZog8z57QO/7nnNPH//LN2zbzrLu2cmct1e43JxnG0hPMAJ9fqHYqO928BtqJDPZ3RlhCga/+cj34bCNyqYyO0hYMl+ny0Zw9MmQITJ8LGjVC2rJ7l33EHNGzod3Qmdh1DK3p+Qcs8j6O1/yu97QF3ofMAjqEdQQP9fwI3W9A9VliiLwDHj2tPnZdfhjffhCNHoEkTTfg9ethYvilI+9GO4qvR2cGXo98CkrJ57NPowi/foO0jqqBdQmui1wfKY83gooMl+gK2Z4/21XnpJVi1CkqVghtv1KTfvLnW6htT8I6gi8Nt826ZaIuIJuh1gXZoe4hgr6KrgG4AUtHkXxv9ALFvA5HEEr1PnNNmai+/DNOnw48/Qp06mvBvvRUqWAt1E3EOoR8CG9CmcFcDNdCy0euzPLYyen0gGe0auhL9JlADOK+A4jUBlugjwA8/wMyZmvS/+AKKF9cLt3feCZdfbmf5Jhr8xMkPgPVoqeiTaAO40WgfoIBfoUn/bXRRmLXALqCidytTYFEXFpboI8zKlfDii9pj58AB+M1voF8/uO02KFfO7+iMyYuD6IdA4LbRu80BiqFtoZ8LenwZ9HrASrQq6A1gM1AJvS5QGq0gClQ0/AjEAcWxawbZs0QfoX76Sc/yX3hBz/JLlIDrr9ez/BYt7CzfxJIt6NyAnd5tB/oN4SVv/3Vosg9WBW0nDdAB/dAogragLo2WjwZWEZrEyeZzgQvKZSlMHwqW6KPA8uV6lj9tmg7z1KunZ/m33KIlm8bENgd8j34I7EXP4AVdGB70GkGGt/1H9EOiElopBDqbeEWW12yHTkAD+Av6QXCedysD1PceA9qoroS3vaR3K4P2L4oOluijyI8/6iSsF17QSVnx8XDdddC3L1xxhZ3lG5O948C36DeAbd7Pi4Be3v4UdJLZQTThA9yMNpkD/ZYQ6DkUcCfwvPfaZdGkH49+CMQDtwOD0LbVw7zHlPNuZdFvHFXRD7HjnOyJlD8s0UepJUv04u2//61j+TVrQp8+0Ls3VKzod3TGRKujnFwr+ELv53zgB+92GE36ddE+Q0eBIUHbD3m/d0fXJd6FVhplXX/4CXQ282Z0Utv5nPwQKIt+y+iCfkBN9rb1Rr9Z5J4l+ij300/wxhua9BcsgKJFoXNnPcvv1MkWSDEmMvwC7EOHnvah5adV0cVrJnjbgm9DgGvQReoD/Yp+Ri84554l+hiyfj1MmqRtF779Vs/sb7tNk37Nmn5HZ4zJm0No8q+U51ewFaZiSO3auuzhtm26xu2ll8JTT0GtWtChA8yebf3yjYk+JTmXJJ8TS/RRqlgxuOYaePdd+PprGDlSV8fq2FErdl54QYd8jDHGEn0MqFwZHnwQtmzR8sySJbVlcpUqMHw4bN/ud4TGGD9Zoo8hxYtri+T0dL1o27o1PPkkVK+u6+EuWuR3hMYYP1iij0Ei0KqVVups2AB//jO89x40baozbmfM0MVSjDGFgyX6GJeUBM88A5mZMG4cfPON9sevXBnuvVfbKBtjYltIiV5EOojIOhHZICJDs9lfTUTmicgKEflYRBKD9t0mIhne7bZwBm9Cd/75MGiQlmfOng1t2sA//wn168PvfqclmwezzvcwxsSEHBO9iBRFq/
07olPFeopI3SwP+zsw1TnXEBiJTglDRMqhzSiaAU2Bh0XEOrf4qGhRaN8eXntNz/L//nfYt0/r8CtW1P46CxdaiaYxsSSUM/qmwAbn3Cbn3BF0mZlrsjwmuI1cWtD+9sBc59xe59w+YC7ahs5EgIsugvvug9Wr4X//0546r74KzZpBo0bw7LP6IWCMiW6hJPrKaJeggExvW7DlaOMHgD8A54lI+RCfi4j0E5F0EUnfvXt3qLGbMBHRi7STJ8OOHfD889pM7Z57dCy/b19YvNjvKI0xeRWui7GDgStEZClwBbow5bFQn+yce9E5l+KcS0lISAhTSCYvLrhA++EvXAhLl2qb5NRUSEnRqp0pU+BQ1iZ/xpiIFkqi34528g9I9Lad4Jzb4Zzr7pxrDDzgbdsfynNN5EpO1hm2O3bA+PHaJ//22/Usf/BgLd00xkS+UBL9IqCWiCSJSHGgB/BO8ANEpIKIBF5rGLrcC+iSMFeJSFnvIuxV3jYTRS64QGvxV6+GtDT4/e91/D7QX2fWLDh61O8ojTFnkmOid84dBQagCXoNMNM5t0pERopIV+9hrYF1IrIeXRV4lPfcvcBj6IfFImCkt81EIRGdbTtzJmzdCo8+Cl99Bd26QY0aOgt3zx6/ozTGZGVtis05OXoU3nkHJkyA+fP1Iu7NN+s3gEaN/I7OmMLD2hSbfBMXB927w7x5enZ/6626IlZysi59+MYbNqxjjN8s0ZuwqV9fL95mZsKYMdo++brr4JJLtIf+d9/5HaExhZMlehN25cqdrMqZNUsXSxk2DBITtSZ/+XK/IzSmcLFEb/JN0aLQtSt89BGsXKmlmampOqzTpo1+CBwLebaFMSavLNGbAlGvHkyceHJYZ9MmrdapXVu7an7/vd8RGhO7LNGbAlW2rA7rbNwIr78OlSrBX/6iwzqDBtkkLGPygyV644u4OLj2WvjkE10Rq1s3PeOvXVuHe+bPtw6axoSLJXrju0svhalTdRLWgw/CF19A27Zahz9pEhw+7HeExkQ3S/QmYlSsqLNtv/5aE7yIVulUrQoPPQQ7d/odoTHRyRK9iTjx8Vqhs2yZDuE0bw6PPw7VqumErCVL/I7QmOhiid5ELJGTZZjr18Of/gRvvaVDPZdfDm++aeWZxoTCEr2JCjVrasfMzEx4+mkd3rn2Wt3+zDOwf7/fERoTuSzRm6hywQVw771ahvnGG1Clii6HWKkS/PGPOtxjjDmVJXoTlQLN1BYs0GUOb7pJ17tt3Bh+9zt45RX4+We/ozQmMliiN1Hvt7+Fl1+G7dth7FhtnnbLLToJa9gw2LLF7wiN8ZclehMzypbVBc3XroUPP4SWLeGpp7R7ZteuMHs2HD/ud5TGFDxL9CbmFCkC7dpphc7mzTB8OHz5JXTsCHXq6ALnv/zid5TGFBxL9CamVa2qNfhff61j+KVKaY1+rVracsFm3ZrCwBK9KRRKlNALtkuWwHvv6Szcu+/WYZ2nn4aDB/2O0Jj8Y4neFCoi0LkzfPaZLn9Yp45206xeXc/8rR7fxCJL9KZQEoErr9Rk/9lncNll2lCtWjV44AHYvdvvCI0JH0v0ptBr3lyHc5Ysgauugiee0DP8e+/Vkk1jop0lemM8jRvDa6/BqlXaXmH8eEhK0hm3GRl+R2dM3lmiNyaLOnW0P35Ghib5adPgN7+BHj1sYXMTnSzRG3MGSUkwYYLOrB0yBD74QBc279IFPv3U7+iMCZ0lemNycPHFMHq0roD12GM6+aplS7jiCpgzx5Y8NJHPEr0xISpbFkaM0DP8ceNg0ybo0AGaNNGe+ZbwTaSyRG9MLpUuDYMGwcaN2kxt/35d3LxxY10MxfrpmEhjid6YPCpeXNe0XbsW/vUv+OknrdZJTtbqHUv4JlJYojfmHMXF6Vq2q1drH/wjR+CGG6BBA0hNteUOjf9CSvQi0kFE1onIBhEZms3+qiKSJiJLRWSFiHTytlcXkUMissy7PR/uAzAmUs
TFwc03ax3+9Om6rWdPqF9fG6pZwjd+yTHRi0hRYALQEagL9BSRulkeNgKY6ZxrDPQAngvat9E5l+zd7gpT3MZErKJFteb+q69gxgz9AOjVC+rWhZkz7aKtKXihnNE3BTY45zY5544AqcA1WR7jgPO93y8AdoQvRGOiU5EiOoSzfLmub1u8ONx4o7Zc+N///I7OFCahJPrKwLag+5netmCPAL1EJBP4APhz0L4kb0jnvyLSKrs/ICL9RCRdRNJ3WzcpE2OKFNH1bZctg//7P9i2DVq1gj/8Adat8zs6UxiE62JsT2CKcy4R6ARME5EiwE6gqjekcy/wbxE5P+uTnXMvOudSnHMpCQkJYQrJmMhStCj06QPr12tL5I8+gnr1oH9/2LXL7+hMLAsl0W8HqgTdT/S2BesLzARwzn0OxAMVnHM/O+f2eNsXAxuB2ucatDHRrHRpbYW8cSPceSe88ALUrAmjRmmJpjHhFkqiXwTUEpEkESmOXmx9J8tjvgbaAohIHTTR7xaRBO9iLiJyCVAL2BSu4I2JZhddpL10Vq2Ctm111m3t2jB5slXomPDKMdE7544CA4A5wBq0umaViIwUka7ew+4D/igiy4HpQG/nnAMuB1aIyDLgdeAu59ze/DgQY6LVr3+tC5kvWACJiTq807ix9tExJhzERVitV0pKiktPT/c7DGN84Ry8/joMHaq9dNq1gzFjoFEjvyMzkU5EFjvnUrLbZzNjjYkgInD99bBmjTZOW7xYz+5794bMTL+jM9HKEr0xEah48ZON0wYP1lYKtWrB8OHw/fd+R2eijSV6YyLYhRfCU09pvf211+p6tjVq6EXcX37xOzoTLSzRGxMFqlXThmmLFmnvnAED9Odbb1lLBZMzS/TGRJGUFJg/H957Tydgde+uK11Z/YI5G0v0xkQZEejcGVasgOef12GdJk20VbJdsDXZsURvTJSKi9OZtRkZWo45c6ZOuHroITh40O/oTCSxRG9MlDv/fL1Iu3YtXHONLmBeqxZMmmQzbI2yRG9MjKheXRc8+ewz/b1vX7j0Uh3TN4WbJXpjYkzz5prsU1N14fK2baFrV2uJXJhZojcmBonoIidr18Lo0fDxx1qO+Ze/wL59fkdnCgC5cRcAAA2nSURBVJolemNiWHw83H8/bNigzdKefVbH7597Do4e9Ts6U1As0RtTCFx0kfa9X7oUGjTQxU6Sk2HuXL8jMwXBEr0xhUijRnpx9s034dAhuOoqHb9fv97vyEx+skRvTCEjouvVrl6t4/dpaTp+f999evHWxB5L9MYUUiVK6Ph9RobOqh07Vsfvn3/e6u9jjSV6Ywq5iy+Gl1/Wfjl16sCf/qQlmsuW+R2ZCRdL9MYYAH77W/jvf+Hf/4atW7WB2v3324LlscASvTHmBBHo2VNXuOrdW3vh168PH37od2TmXFiiN8acplw5Hc5JS4NixaB9e+jVC3bt8jsykxeW6I0xZ9S6NSxfDg8+qN0x69SBKVNssZNoY4neGHNW8fEwcqRenK1TB26/XfvnZGT4HZkJlSV6Y0xI6taFBQu0/HLxYp1hO2oUHDnid2QmJ5bojTEhK1JEFztZswauvhpGjNBqnc8/9zsyczaW6I0xuVapErz2GrzzDnz/PbRoAXffDQcO+B2ZyY4lemNMnl19NaxaBQMH6pBOnTraR8cu1kYWS/TGmHNy3nkwbhx8+aV2ybz2WujWDbZt8zsyE2CJ3hgTFk2awKJFOslq7ly9ePuPf1jfnEhgid4YEzbFisGQITqc06KFDun87ndai2/8E1KiF5EOIrJORDaIyNBs9lcVkTQRWSoiK0SkU9C+Yd7z1olI+3AGb4yJTElJ8J//aN+czZt1kfLhw+HwYb8jK5xyTPQiUhSYAHQE6gI9RaRuloeNAGY65xoDPYDnvOfW9e7XAzoAz3mvZ4yJcYG+OWvXwi23wBNP6KpWn37qd2SFTyhn9E2BDc65Tc65I0AqcE2WxzjgfO
/3C4Ad3u/XAKnOuZ+dc5uBDd7rGWMKiXLlYPJkmDNHz+hbtdIhnYMH/Y6s8Agl0VcGgq+fZ3rbgj0C9BKRTOAD4M+5eK4xphC46ipYuRIGDIB//tO6YhakcF2M7QlMcc4lAp2AaSIS8muLSD8RSReR9N27d4cpJGNMpClTBsaP11YK8fHaFfP222HfPr8ji22hJOPtQJWg+4netmB9gZkAzrnPgXigQojPxTn3onMuxTmXkpCQEHr0xpio1LKlNkkbNgymTdNSzLfe8juq2BVKol8E1BKRJBEpjl5cfSfLY74G2gKISB000e/2HtdDREqISBJQC1gYruCNMdErPh7+9jdYuFCXM+zeHW64Ab791u/IYk+Oid45dxQYAMwB1qDVNatEZKSIdPUedh/wRxFZDkwHeju1Cj3TXw3MBvo752z6hDHmhN/+VpP9qFEwaxbUqwczZvgdVWwRF2FNKVJSUlx6errfYRhjfBBYwnDhQrj+epgwAWw0NzQistg5l5LdPpsZa4yJGHXqaJ39E0+cPLu3sftzZ4neGBNR4uJg6FBd3CQxUcfue/WCvXv9jix6WaI3xkSk+vW1I+ajj+qYff368P77fkcVnSzRG2MiVrFi8NBDOmZfoQJ06QJ9+tgCJ7llid4YE/EaN4b0dHjgAZg61WbV5pYlemNMVCheHB5/XNenPe88nVV7xx2wf7/fkUU+S/TGmKjSpAksWQL33w9Tpuis2lmz/I4qslmiN8ZEnfh4GD1aL9YmJOjShT16wK5dfkcWmSzRG2Oi1qWX6tj9Y49pvX3duvDqq7Y4eVaW6I0xUa1YMRgxApYuhVq1tOb+6qttcfJgluiNMTGhbl343/9g3DhIS9NZtS+8AMeP+x2Z/yzRG2NiRtGiMGgQfPUVNG0Kd90FV14JGzb4HZm/LNEbY2LOJZfA3Lnw0ks6pNOoEUycWHjH7i3RG2NikojW2a9erQud3H03dOoEO3bk/NxYY4neGBPTKleG2bN1ndr//hcaNIDXXvM7qoJlid4YE/NEoH9/HcapUUNXsurVq/DMqrVEb4wpNH79a+13/8gjkJqqZ/fz5vkdVf6zRG+MKVSKFYOHH9aeOaVLw+9/D/fcA4cO+R1Z/rFEb4wplAI9cwYMgGef1Vm2ixf7HVX+sERvjCm0SpWCf/xDWx4fOACXXQZPPhl7k6ws0RtjCr127WDlSm2ONnQodOgA33zjd1ThY4neGGOAsmVh5kxtm/DJJzrJas4cv6MKD0v0xhjjEYF+/bQjZkKCntn/9a9w5IjfkZ0bS/TGGJNFvXqwaJH2yhkzRmfWbtrkd1R5Z4neGGOyUbKk9sd5/XXIyIDkZJg+3e+o8sYSvTHGnMW118KyZTq56qaboG9f+PFHv6PKHUv0xhiTg2rVtE/OAw/A5MmQkgLLl/sdVegs0RtjTAji4uDxx+Gjj7TmvlkzeO656Gh9bIneGGNy4cor9Wy+bVttlHbddbBvn99RnZ0lemOMyaWEBHj3XXj6af2ZnAyffeZ3VGcWUqIXkQ4isk5ENojI0Gz2jxWRZd5tvYjsD9p3LGjfO+EM3hhj/FKkCNx7r3bDjIuDyy+HJ56IzPYJcTk9QESKAhOAdkAmsEhE3nHOrQ48xjn3l6DH/xloHPQSh5xzyeEL2RhjIkegOdqdd8Lw4TB/PkybBhdf7HdkJ4VyRt8U2OCc2+ScOwKkAtec5fE9gSitNjXGmNy74AKtsX/pJT3Db9RIG6VFilASfWVgW9D9TG/baUSkGpAEzA/aHC8i6SLyhYh0O8Pz+nmPSd+9e3eIoRtjTOQIrFG7aJGO4bdvD8OGwS+/+B1Z+C/G9gBed84dC9pWzTmXAtwEjBORGlmf5Jx70TmX4pxLSUhICHNIxhhTcOrVg4ULtWfO6NE6dr9li78xhZLotwNVgu4netuy04MswzbOue3ez03Ax5w6fm+MMTGnVCntgpmaCqtXQ+PG8Oab/sUTSqJfBNQSkSQRKY
4m89OqZ0TkN0BZ4POgbWVFpIT3ewWgBbA663ONMSYW3XijLkhes6a2UhgwAA4fLvg4ckz0zrmjwABgDrAGmOmcWyUiI0Wka9BDewCpzp0yT6wOkC4iy4E0YHRwtY4xxsS6Sy7RC7T33gsTJugqVuvWFWwM4iJs/m5KSopLT0/3OwxjjAm799+H227Ts/qJE+GWW8L32iKy2LseehqbGWuMMQWkc2fthHnppXDrrdC7Nxw8mP9/1xK9McYUoMREmDcPHnoIpk4tmE6YluiNMaaAxcXBo49qwv/++/zvhGmJ3hhjfNKmjQ7ltGmjnTBvuCF/euXk2OvGGGNM/rnoIr1I+8wzsH+/NksLN0v0xhjjsyJFYPDgfHz9/HtpY4wxkcASvTHGxDhL9MYYE+Ms0RtjTIyzRG+MMTHOEr0xxsQ4S/TGGBPjLNEbY0yMi7g2xSKyG9iaZXMF4DsfwslPsXZMsXY8EHvHFGvHA7F3TOdyPNWcc9muxRpxiT47IpJ+pj7L0SrWjinWjgdi75hi7Xgg9o4pv47Hhm6MMSbGWaI3xpgYFy2J/kW/A8gHsXZMsXY8EHvHFGvHA7F3TPlyPFExRm+MMSbvouWM3hhjTB5ZojfGmBgXcYleRCaJyC4RWRm0rZyIzBWRDO9nWT9jzI0zHM8jIrJdRJZ5t05+xphbIlJFRNJEZLWIrBKRQd72qHyfznI8Ufs+iUi8iCwUkeXeMT3qbU8SkS9FZIOIzBCR4n7HGoqzHM8UEdkc9B4l+x1rbohIURFZKiLveffz5f2JuEQPTAE6ZNk2FJjnnKsFzPPuR4spnH48AGOdc8ne7YMCjulcHQXuc87VBS4D+otIXaL3fTrT8UD0vk8/A1c65xoByUAHEbkMeBI9pprAPqCvjzHmxpmOB2BI0Hu0zL8Q82QQsCbofr68PxGX6J1zC4C9WTZfA/zL+/1fQLcCDeocnOF4oppzbqdzbon3+w/of6iVidL36SzHE7WcOujdLebdHHAl8Lq3PZreozMdT9QSkUSgM/Cyd1/Ip/cn4hL9GfzKObfT+/0b4Fd+BhMmA0RkhTe0ExVDHNkRkepAY+BLYuB9ynI8EMXvkzcssAzYBcwFNgL7nXNHvYdkEkUfaFmPxzkXeI9Gee/RWBEp4WOIuTUO+Ctw3Ltfnnx6f6Il0Z/gtB40qj/JgYlADfQr6E7gaX/DyRsRKQO8AdzjnPs+eF80vk/ZHE9Uv0/OuWPOuWQgEWgK/MbnkM5J1uMRkfrAMPS4mgDlgPt9DDFkItIF2OWcW1wQfy9aEv23IlIRwPu5y+d4zolz7lvvP9rjwEvo/4RRRUSKoUnxVefcm97mqH2fsjueWHifAJxz+4E0oDlwoYjEebsSge2+BZZHQcfTwRt2c865n4HJRM971ALoKiJbgFR0yOZZ8un9iZZE/w5wm/f7bcAsH2M5Z4Fk6PkDsPJMj41E3lji/wFrnHPPBO2KyvfpTMcTze+TiCSIyIXe7yWBdui1hzTgOu9h0fQeZXc8a4NOLAQdz46K98g5N8w5l+icqw70AOY7524mn96fiJsZKyLTgdZou85vgYeBt4GZQFW0hfENzrmouMB5huNpjQ4HOGALcGfQ2HbEE5GWwCfAV5wcXxyOjmtH3ft0luPpSZS+TyLSEL2YVxQ9oZvpnBspIpegZ5DlgKVAL+9sOKKd5XjmAwmAAMuAu4Iu2kYFEWkNDHbOdcmv9yfiEr0xxpjwipahG2OMMXlkid4YY2KcJXpjjIlxluiNMSbGWaI3xpgYZ4neGGNinCV6Y4yJcf8PWGIUcmNiHUIAAAAASUVORK5CYII=\n",
"text/plain": [
"<Figure size 432x288 with 1 Axes>"
]
},
"metadata": {
"needs_background": "light"
},
"output_type": "display_data"
}
],
"source": [
"import matplotlib.pyplot as plt\n",
"\n",
"df=pd.DataFrame(model.learning_process[10:], columns=['epoch', 'train_RMSE', 'test_RMSE'])\n",
"plt.plot('epoch', 'train_RMSE', data=df, color='blue')\n",
"plt.plot('epoch', 'test_RMSE', data=df, color='yellow', linestyle='dashed')\n",
"plt.legend()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"#### Saving and evaluating recommendations"
]
},
{
"cell_type": "code",
"execution_count": 15,
"metadata": {},
"outputs": [],
"source": [
"model.estimations()\n",
"\n",
"top_n=pd.DataFrame(model.recommend(user_code_id, item_code_id, topK=10))\n",
"\n",
"top_n.to_csv('Recommendations generated/ml-100k/Self_SVD_reco.csv', index=False, header=False)\n",
"\n",
"estimations=pd.DataFrame(model.estimate(user_code_id, item_code_id, test_ui))\n",
"estimations.to_csv('Recommendations generated/ml-100k/Self_SVD_estimations.csv', index=False, header=False)"
]
},
{
"cell_type": "code",
"execution_count": 16,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"943it [00:00, 4261.36it/s]\n"
]
},
{
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>RMSE</th>\n",
" <th>MAE</th>\n",
" <th>precision</th>\n",
" <th>recall</th>\n",
" <th>F_1</th>\n",
" <th>F_05</th>\n",
" <th>precision_super</th>\n",
" <th>recall_super</th>\n",
" <th>NDCG</th>\n",
" <th>mAP</th>\n",
" <th>MRR</th>\n",
" <th>LAUC</th>\n",
" <th>HR</th>\n",
" <th>HR2</th>\n",
" <th>Reco in test</th>\n",
" <th>Test coverage</th>\n",
" <th>Shannon</th>\n",
" <th>Gini</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>0.914521</td>\n",
" <td>0.71768</td>\n",
" <td>0.102757</td>\n",
" <td>0.043043</td>\n",
" <td>0.052432</td>\n",
" <td>0.069515</td>\n",
" <td>0.094528</td>\n",
" <td>0.075122</td>\n",
" <td>0.106751</td>\n",
" <td>0.051431</td>\n",
" <td>0.198701</td>\n",
" <td>0.518248</td>\n",
" <td>0.462354</td>\n",
" <td>0.255567</td>\n",
" <td>0.854931</td>\n",
" <td>0.147186</td>\n",
" <td>3.888926</td>\n",
" <td>0.972044</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"</div>"
],
"text/plain": [
" RMSE MAE precision recall F_1 F_05 \\\n",
"0 0.914521 0.71768 0.102757 0.043043 0.052432 0.069515 \n",
"\n",
" precision_super recall_super NDCG mAP MRR LAUC \\\n",
"0 0.094528 0.075122 0.106751 0.051431 0.198701 0.518248 \n",
"\n",
" HR HR2 Reco in test Test coverage Shannon Gini \n",
"0 0.462354 0.255567 0.854931 0.147186 3.888926 0.972044 "
]
},
"execution_count": 16,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"import evaluation_measures as ev\n",
"\n",
"estimations_df=pd.read_csv('Recommendations generated/ml-100k/Self_SVD_estimations.csv', header=None)\n",
"reco=np.loadtxt('Recommendations generated/ml-100k/Self_SVD_reco.csv', delimiter=',')\n",
"\n",
"ev.evaluate(test=pd.read_csv('./Datasets/ml-100k/test.csv', sep='\\t', header=None),\n",
" estimations_df=estimations_df, \n",
" reco=reco,\n",
" super_reactions=[4,5])"
]
},
{
"cell_type": "code",
"execution_count": 17,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"943it [00:00, 5504.80it/s]\n",
"943it [00:00, 4588.89it/s]\n",
"943it [00:00, 3546.71it/s]\n",
"943it [00:00, 3802.69it/s]\n",
"943it [00:00, 3533.79it/s]\n",
"943it [00:00, 3587.29it/s]\n",
"943it [00:00, 3825.53it/s]\n",
"943it [00:00, 3495.58it/s]\n",
"943it [00:00, 3725.91it/s]\n",
"943it [00:00, 3820.07it/s]\n",
"943it [00:00, 3632.69it/s]\n",
"943it [00:00, 3564.35it/s]\n",
"943it [00:00, 3651.79it/s]\n",
"943it [00:00, 3835.91it/s]\n",
"943it [00:00, 4391.98it/s]\n",
"943it [00:00, 3026.85it/s]\n",
"943it [00:00, 2492.44it/s]\n"
]
},
{
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>Model</th>\n",
" <th>RMSE</th>\n",
" <th>MAE</th>\n",
" <th>precision</th>\n",
" <th>recall</th>\n",
" <th>F_1</th>\n",
" <th>F_05</th>\n",
" <th>precision_super</th>\n",
" <th>recall_super</th>\n",
" <th>NDCG</th>\n",
" <th>mAP</th>\n",
" <th>MRR</th>\n",
" <th>LAUC</th>\n",
" <th>HR</th>\n",
" <th>HR2</th>\n",
" <th>Reco in test</th>\n",
" <th>Test coverage</th>\n",
" <th>Shannon</th>\n",
" <th>Gini</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Self_TopPop</td>\n",
" <td>2.508258</td>\n",
" <td>2.217909</td>\n",
" <td>0.188865</td>\n",
" <td>0.116919</td>\n",
" <td>0.118732</td>\n",
" <td>0.141584</td>\n",
" <td>0.130472</td>\n",
" <td>0.137473</td>\n",
" <td>0.214651</td>\n",
" <td>0.111707</td>\n",
" <td>0.400939</td>\n",
" <td>0.555546</td>\n",
" <td>0.765642</td>\n",
" <td>0.492047</td>\n",
" <td>1.000000</td>\n",
" <td>0.038961</td>\n",
" <td>3.159079</td>\n",
" <td>0.987317</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Self_SVDBaseline</td>\n",
" <td>3.642710</td>\n",
" <td>3.477031</td>\n",
" <td>0.137858</td>\n",
" <td>0.083447</td>\n",
" <td>0.084155</td>\n",
" <td>0.101113</td>\n",
" <td>0.108476</td>\n",
" <td>0.109680</td>\n",
" <td>0.164872</td>\n",
" <td>0.083459</td>\n",
" <td>0.338033</td>\n",
" <td>0.538614</td>\n",
" <td>0.634146</td>\n",
" <td>0.359491</td>\n",
" <td>0.999788</td>\n",
" <td>0.275613</td>\n",
" <td>5.134751</td>\n",
" <td>0.909655</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Self_SVD</td>\n",
" <td>0.914521</td>\n",
" <td>0.717680</td>\n",
" <td>0.102757</td>\n",
" <td>0.043043</td>\n",
" <td>0.052432</td>\n",
" <td>0.069515</td>\n",
" <td>0.094528</td>\n",
" <td>0.075122</td>\n",
" <td>0.106751</td>\n",
" <td>0.051431</td>\n",
" <td>0.198701</td>\n",
" <td>0.518248</td>\n",
" <td>0.462354</td>\n",
" <td>0.255567</td>\n",
" <td>0.854931</td>\n",
" <td>0.147186</td>\n",
" <td>3.888926</td>\n",
" <td>0.972044</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Ready_Baseline</td>\n",
" <td>0.949459</td>\n",
" <td>0.752487</td>\n",
" <td>0.091410</td>\n",
" <td>0.037652</td>\n",
" <td>0.046030</td>\n",
" <td>0.061286</td>\n",
" <td>0.079614</td>\n",
" <td>0.056463</td>\n",
" <td>0.095957</td>\n",
" <td>0.043178</td>\n",
" <td>0.198193</td>\n",
" <td>0.515501</td>\n",
" <td>0.437964</td>\n",
" <td>0.239661</td>\n",
" <td>1.000000</td>\n",
" <td>0.033911</td>\n",
" <td>2.836513</td>\n",
" <td>0.991139</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Self_GlobalAvg</td>\n",
" <td>1.125760</td>\n",
" <td>0.943534</td>\n",
" <td>0.061188</td>\n",
" <td>0.025968</td>\n",
" <td>0.031383</td>\n",
" <td>0.041343</td>\n",
" <td>0.040558</td>\n",
" <td>0.032107</td>\n",
" <td>0.067695</td>\n",
" <td>0.027470</td>\n",
" <td>0.171187</td>\n",
" <td>0.509546</td>\n",
" <td>0.384942</td>\n",
" <td>0.142100</td>\n",
" <td>1.000000</td>\n",
" <td>0.025974</td>\n",
" <td>2.711772</td>\n",
" <td>0.992003</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Ready_Random</td>\n",
" <td>1.517593</td>\n",
" <td>1.220181</td>\n",
" <td>0.046023</td>\n",
" <td>0.019038</td>\n",
" <td>0.023118</td>\n",
" <td>0.030734</td>\n",
" <td>0.029292</td>\n",
" <td>0.021639</td>\n",
" <td>0.050818</td>\n",
" <td>0.019958</td>\n",
" <td>0.126646</td>\n",
" <td>0.506031</td>\n",
" <td>0.305408</td>\n",
" <td>0.111347</td>\n",
" <td>0.988547</td>\n",
" <td>0.174603</td>\n",
" <td>5.082383</td>\n",
" <td>0.908434</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Ready_I-KNN</td>\n",
" <td>1.030386</td>\n",
" <td>0.813067</td>\n",
" <td>0.026087</td>\n",
" <td>0.006908</td>\n",
" <td>0.010593</td>\n",
" <td>0.016046</td>\n",
" <td>0.021137</td>\n",
" <td>0.009522</td>\n",
" <td>0.024214</td>\n",
" <td>0.008958</td>\n",
" <td>0.048068</td>\n",
" <td>0.499885</td>\n",
" <td>0.154825</td>\n",
" <td>0.072110</td>\n",
" <td>0.402333</td>\n",
" <td>0.434343</td>\n",
" <td>5.133650</td>\n",
" <td>0.877999</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Ready_I-KNNWithMeans</td>\n",
" <td>0.955921</td>\n",
" <td>0.754037</td>\n",
" <td>0.004984</td>\n",
" <td>0.003225</td>\n",
" <td>0.003406</td>\n",
" <td>0.003956</td>\n",
" <td>0.004506</td>\n",
" <td>0.003861</td>\n",
" <td>0.006815</td>\n",
" <td>0.002906</td>\n",
" <td>0.020332</td>\n",
" <td>0.497969</td>\n",
" <td>0.039236</td>\n",
" <td>0.007423</td>\n",
" <td>0.587699</td>\n",
" <td>0.071429</td>\n",
" <td>2.699278</td>\n",
" <td>0.991353</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Ready_I-KNNWithZScore</td>\n",
" <td>0.957701</td>\n",
" <td>0.752387</td>\n",
" <td>0.003712</td>\n",
" <td>0.001994</td>\n",
" <td>0.002380</td>\n",
" <td>0.002919</td>\n",
" <td>0.003433</td>\n",
" <td>0.002401</td>\n",
" <td>0.005137</td>\n",
" <td>0.002158</td>\n",
" <td>0.016458</td>\n",
" <td>0.497349</td>\n",
" <td>0.027572</td>\n",
" <td>0.007423</td>\n",
" <td>0.389926</td>\n",
" <td>0.067821</td>\n",
" <td>2.475747</td>\n",
" <td>0.992793</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Self_I-KNNBaseline39</td>\n",
" <td>0.935520</td>\n",
" <td>0.737631</td>\n",
" <td>0.002757</td>\n",
" <td>0.000856</td>\n",
" <td>0.001230</td>\n",
" <td>0.001758</td>\n",
" <td>0.002468</td>\n",
" <td>0.001048</td>\n",
" <td>0.003899</td>\n",
" <td>0.001620</td>\n",
" <td>0.013296</td>\n",
" <td>0.496775</td>\n",
" <td>0.022269</td>\n",
" <td>0.005302</td>\n",
" <td>0.483351</td>\n",
" <td>0.059885</td>\n",
" <td>2.235102</td>\n",
" <td>0.994479</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Self_I-KNNBaseline38</td>\n",
" <td>0.935685</td>\n",
" <td>0.737828</td>\n",
" <td>0.002651</td>\n",
" <td>0.000837</td>\n",
" <td>0.001197</td>\n",
" <td>0.001702</td>\n",
" <td>0.002361</td>\n",
" <td>0.001020</td>\n",
" <td>0.003635</td>\n",
" <td>0.001443</td>\n",
" <td>0.012589</td>\n",
" <td>0.496765</td>\n",
" <td>0.022269</td>\n",
" <td>0.004242</td>\n",
" <td>0.483245</td>\n",
" <td>0.059163</td>\n",
" <td>2.235851</td>\n",
" <td>0.994507</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Ready_I-KNNBaseline</td>\n",
" <td>0.935327</td>\n",
" <td>0.737424</td>\n",
" <td>0.002545</td>\n",
" <td>0.000755</td>\n",
" <td>0.001105</td>\n",
" <td>0.001602</td>\n",
" <td>0.002253</td>\n",
" <td>0.000930</td>\n",
" <td>0.003444</td>\n",
" <td>0.001362</td>\n",
" <td>0.011760</td>\n",
" <td>0.496724</td>\n",
" <td>0.021209</td>\n",
" <td>0.004242</td>\n",
" <td>0.482821</td>\n",
" <td>0.059885</td>\n",
" <td>2.232578</td>\n",
" <td>0.994487</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Ready_U-KNN</td>\n",
" <td>1.023495</td>\n",
" <td>0.807913</td>\n",
" <td>0.000742</td>\n",
" <td>0.000205</td>\n",
" <td>0.000305</td>\n",
" <td>0.000449</td>\n",
" <td>0.000536</td>\n",
" <td>0.000198</td>\n",
" <td>0.000845</td>\n",
" <td>0.000274</td>\n",
" <td>0.002744</td>\n",
" <td>0.496441</td>\n",
" <td>0.007423</td>\n",
" <td>0.000000</td>\n",
" <td>0.602121</td>\n",
" <td>0.010823</td>\n",
" <td>2.089186</td>\n",
" <td>0.995706</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Self_TopRated</td>\n",
" <td>2.508258</td>\n",
" <td>2.217909</td>\n",
" <td>0.000954</td>\n",
" <td>0.000188</td>\n",
" <td>0.000298</td>\n",
" <td>0.000481</td>\n",
" <td>0.000644</td>\n",
" <td>0.000223</td>\n",
" <td>0.001043</td>\n",
" <td>0.000335</td>\n",
" <td>0.003348</td>\n",
" <td>0.496433</td>\n",
" <td>0.009544</td>\n",
" <td>0.000000</td>\n",
" <td>0.699046</td>\n",
" <td>0.005051</td>\n",
" <td>1.945910</td>\n",
" <td>0.995669</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Self_BaselineIU</td>\n",
" <td>0.958136</td>\n",
" <td>0.754051</td>\n",
" <td>0.000954</td>\n",
" <td>0.000188</td>\n",
" <td>0.000298</td>\n",
" <td>0.000481</td>\n",
" <td>0.000644</td>\n",
" <td>0.000223</td>\n",
" <td>0.001043</td>\n",
" <td>0.000335</td>\n",
" <td>0.003348</td>\n",
" <td>0.496433</td>\n",
" <td>0.009544</td>\n",
" <td>0.000000</td>\n",
" <td>0.699046</td>\n",
" <td>0.005051</td>\n",
" <td>1.945910</td>\n",
" <td>0.995669</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Self_BaselineUI</td>\n",
" <td>0.967585</td>\n",
" <td>0.762740</td>\n",
" <td>0.000954</td>\n",
" <td>0.000170</td>\n",
" <td>0.000278</td>\n",
" <td>0.000463</td>\n",
" <td>0.000644</td>\n",
" <td>0.000189</td>\n",
" <td>0.000752</td>\n",
" <td>0.000168</td>\n",
" <td>0.001677</td>\n",
" <td>0.496424</td>\n",
" <td>0.009544</td>\n",
" <td>0.000000</td>\n",
" <td>0.600530</td>\n",
" <td>0.005051</td>\n",
" <td>1.803126</td>\n",
" <td>0.996380</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Self_IKNN</td>\n",
" <td>1.018363</td>\n",
" <td>0.808793</td>\n",
" <td>0.000318</td>\n",
" <td>0.000108</td>\n",
" <td>0.000140</td>\n",
" <td>0.000189</td>\n",
" <td>0.000000</td>\n",
" <td>0.000000</td>\n",
" <td>0.000214</td>\n",
" <td>0.000037</td>\n",
" <td>0.000368</td>\n",
" <td>0.496391</td>\n",
" <td>0.003181</td>\n",
" <td>0.000000</td>\n",
" <td>0.392153</td>\n",
" <td>0.115440</td>\n",
" <td>4.174741</td>\n",
" <td>0.965327</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"</div>"
],
"text/plain": [
" Model RMSE MAE precision recall F_1 \\\n",
"0 Self_TopPop 2.508258 2.217909 0.188865 0.116919 0.118732 \n",
"0 Self_SVDBaseline 3.642710 3.477031 0.137858 0.083447 0.084155 \n",
"0 Self_SVD 0.914521 0.717680 0.102757 0.043043 0.052432 \n",
"0 Ready_Baseline 0.949459 0.752487 0.091410 0.037652 0.046030 \n",
"0 Self_GlobalAvg 1.125760 0.943534 0.061188 0.025968 0.031383 \n",
"0 Ready_Random 1.517593 1.220181 0.046023 0.019038 0.023118 \n",
"0 Ready_I-KNN 1.030386 0.813067 0.026087 0.006908 0.010593 \n",
"0 Ready_I-KNNWithMeans 0.955921 0.754037 0.004984 0.003225 0.003406 \n",
"0 Ready_I-KNNWithZScore 0.957701 0.752387 0.003712 0.001994 0.002380 \n",
"0 Self_I-KNNBaseline39 0.935520 0.737631 0.002757 0.000856 0.001230 \n",
"0 Self_I-KNNBaseline38 0.935685 0.737828 0.002651 0.000837 0.001197 \n",
"0 Ready_I-KNNBaseline 0.935327 0.737424 0.002545 0.000755 0.001105 \n",
"0 Ready_U-KNN 1.023495 0.807913 0.000742 0.000205 0.000305 \n",
"0 Self_TopRated 2.508258 2.217909 0.000954 0.000188 0.000298 \n",
"0 Self_BaselineIU 0.958136 0.754051 0.000954 0.000188 0.000298 \n",
"0 Self_BaselineUI 0.967585 0.762740 0.000954 0.000170 0.000278 \n",
"0 Self_IKNN 1.018363 0.808793 0.000318 0.000108 0.000140 \n",
"\n",
" F_05 precision_super recall_super NDCG mAP MRR \\\n",
"0 0.141584 0.130472 0.137473 0.214651 0.111707 0.400939 \n",
"0 0.101113 0.108476 0.109680 0.164872 0.083459 0.338033 \n",
"0 0.069515 0.094528 0.075122 0.106751 0.051431 0.198701 \n",
"0 0.061286 0.079614 0.056463 0.095957 0.043178 0.198193 \n",
"0 0.041343 0.040558 0.032107 0.067695 0.027470 0.171187 \n",
"0 0.030734 0.029292 0.021639 0.050818 0.019958 0.126646 \n",
"0 0.016046 0.021137 0.009522 0.024214 0.008958 0.048068 \n",
"0 0.003956 0.004506 0.003861 0.006815 0.002906 0.020332 \n",
"0 0.002919 0.003433 0.002401 0.005137 0.002158 0.016458 \n",
"0 0.001758 0.002468 0.001048 0.003899 0.001620 0.013296 \n",
"0 0.001702 0.002361 0.001020 0.003635 0.001443 0.012589 \n",
"0 0.001602 0.002253 0.000930 0.003444 0.001362 0.011760 \n",
"0 0.000449 0.000536 0.000198 0.000845 0.000274 0.002744 \n",
"0 0.000481 0.000644 0.000223 0.001043 0.000335 0.003348 \n",
"0 0.000481 0.000644 0.000223 0.001043 0.000335 0.003348 \n",
"0 0.000463 0.000644 0.000189 0.000752 0.000168 0.001677 \n",
"0 0.000189 0.000000 0.000000 0.000214 0.000037 0.000368 \n",
"\n",
" LAUC HR HR2 Reco in test Test coverage Shannon \\\n",
"0 0.555546 0.765642 0.492047 1.000000 0.038961 3.159079 \n",
"0 0.538614 0.634146 0.359491 0.999788 0.275613 5.134751 \n",
"0 0.518248 0.462354 0.255567 0.854931 0.147186 3.888926 \n",
"0 0.515501 0.437964 0.239661 1.000000 0.033911 2.836513 \n",
"0 0.509546 0.384942 0.142100 1.000000 0.025974 2.711772 \n",
"0 0.506031 0.305408 0.111347 0.988547 0.174603 5.082383 \n",
"0 0.499885 0.154825 0.072110 0.402333 0.434343 5.133650 \n",
"0 0.497969 0.039236 0.007423 0.587699 0.071429 2.699278 \n",
"0 0.497349 0.027572 0.007423 0.389926 0.067821 2.475747 \n",
"0 0.496775 0.022269 0.005302 0.483351 0.059885 2.235102 \n",
"0 0.496765 0.022269 0.004242 0.483245 0.059163 2.235851 \n",
"0 0.496724 0.021209 0.004242 0.482821 0.059885 2.232578 \n",
"0 0.496441 0.007423 0.000000 0.602121 0.010823 2.089186 \n",
"0 0.496433 0.009544 0.000000 0.699046 0.005051 1.945910 \n",
"0 0.496433 0.009544 0.000000 0.699046 0.005051 1.945910 \n",
"0 0.496424 0.009544 0.000000 0.600530 0.005051 1.803126 \n",
"0 0.496391 0.003181 0.000000 0.392153 0.115440 4.174741 \n",
"\n",
" Gini \n",
"0 0.987317 \n",
"0 0.909655 \n",
"0 0.972044 \n",
"0 0.991139 \n",
"0 0.992003 \n",
"0 0.908434 \n",
"0 0.877999 \n",
"0 0.991353 \n",
"0 0.992793 \n",
"0 0.994479 \n",
"0 0.994507 \n",
"0 0.994487 \n",
"0 0.995706 \n",
"0 0.995669 \n",
"0 0.995669 \n",
"0 0.996380 \n",
"0 0.965327 "
]
},
"execution_count": 17,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
    "import evaluation_measures as ev\n",
    "import importlib\n",
    "importlib.reload(ev)  # pick up local edits to the module on re-run\n",
    "\n",
"dir_path=\"Recommendations generated/ml-100k/\"\n",
"super_reactions=[4,5]\n",
"test=pd.read_csv('./Datasets/ml-100k/test.csv', sep='\\t', header=None)\n",
"\n",
"ev.evaluate_all(test, dir_path, super_reactions)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Embeddings"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"array([[1, 2],\n",
" [3, 4]])"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"text/plain": [
"array([[0.4472136 , 0.89442719],\n",
" [0.6 , 0.8 ]])"
]
},
"execution_count": 9,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"x=np.array([[1,2],[3,4]])\n",
"display(x)\n",
"x/np.linalg.norm(x, axis=1)[:,None]"
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>code</th>\n",
" <th>score</th>\n",
" <th>item_id</th>\n",
" <th>id</th>\n",
" <th>title</th>\n",
" <th>genres</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>1051</td>\n",
" <td>1.000000</td>\n",
" <td>1052</td>\n",
" <td>1052</td>\n",
" <td>Dracula: Dead and Loving It (1995)</td>\n",
" <td>Comedy, Horror</td>\n",
" </tr>\n",
" <tr>\n",
" <th>1</th>\n",
" <td>1177</td>\n",
" <td>0.951303</td>\n",
" <td>1178</td>\n",
" <td>1178</td>\n",
" <td>Major Payne (1994)</td>\n",
" <td>Comedy</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2</th>\n",
" <td>1290</td>\n",
" <td>0.950489</td>\n",
" <td>1291</td>\n",
" <td>1291</td>\n",
" <td>Celtic Pride (1996)</td>\n",
" <td>Comedy</td>\n",
" </tr>\n",
" <tr>\n",
" <th>3</th>\n",
" <td>1375</td>\n",
" <td>0.949864</td>\n",
" <td>1376</td>\n",
" <td>1376</td>\n",
" <td>Meet Wally Sparks (1997)</td>\n",
" <td>Comedy</td>\n",
" </tr>\n",
" <tr>\n",
" <th>4</th>\n",
" <td>1489</td>\n",
" <td>0.947375</td>\n",
" <td>1490</td>\n",
" <td>1490</td>\n",
" <td>Fausto (1993)</td>\n",
" <td>Comedy</td>\n",
" </tr>\n",
" <tr>\n",
" <th>5</th>\n",
" <td>1495</td>\n",
" <td>0.947368</td>\n",
" <td>1496</td>\n",
" <td>1496</td>\n",
" <td>Carpool (1996)</td>\n",
" <td>Comedy, Crime</td>\n",
" </tr>\n",
" <tr>\n",
" <th>6</th>\n",
" <td>1497</td>\n",
" <td>0.947347</td>\n",
" <td>1498</td>\n",
" <td>1498</td>\n",
" <td>Farmer &amp; Chase (1995)</td>\n",
" <td>Comedy</td>\n",
" </tr>\n",
" <tr>\n",
" <th>7</th>\n",
" <td>1490</td>\n",
" <td>0.946829</td>\n",
" <td>1491</td>\n",
" <td>1491</td>\n",
" <td>Tough and Deadly (1995)</td>\n",
" <td>Action, Drama, Thriller</td>\n",
" </tr>\n",
" <tr>\n",
" <th>8</th>\n",
" <td>1320</td>\n",
" <td>0.946152</td>\n",
" <td>1321</td>\n",
" <td>1321</td>\n",
" <td>Open Season (1996)</td>\n",
" <td>Comedy</td>\n",
" </tr>\n",
" <tr>\n",
" <th>9</th>\n",
" <td>1487</td>\n",
" <td>0.945425</td>\n",
" <td>1488</td>\n",
" <td>1488</td>\n",
" <td>Germinal (1993)</td>\n",
" <td>Drama</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"</div>"
],
"text/plain": [
" code score item_id id title \\\n",
"0 1051 1.000000 1052 1052 Dracula: Dead and Loving It (1995) \n",
"1 1177 0.951303 1178 1178 Major Payne (1994) \n",
"2 1290 0.950489 1291 1291 Celtic Pride (1996) \n",
"3 1375 0.949864 1376 1376 Meet Wally Sparks (1997) \n",
"4 1489 0.947375 1490 1490 Fausto (1993) \n",
"5 1495 0.947368 1496 1496 Carpool (1996) \n",
"6 1497 0.947347 1498 1498 Farmer & Chase (1995) \n",
"7 1490 0.946829 1491 1491 Tough and Deadly (1995) \n",
"8 1320 0.946152 1321 1321 Open Season (1996) \n",
"9 1487 0.945425 1488 1488 Germinal (1993) \n",
"\n",
" genres \n",
"0 Comedy, Horror \n",
"1 Comedy \n",
"2 Comedy \n",
"3 Comedy \n",
"4 Comedy \n",
"5 Comedy, Crime \n",
"6 Comedy \n",
"7 Action, Drama, Thriller \n",
"8 Comedy \n",
"9 Drama "
]
},
"execution_count": 10,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"item=random.choice(list(set(train_ui.indices)))\n",
"\n",
"embeddings_norm=model.Qi/np.linalg.norm(model.Qi, axis=1)[:,None] # we do not mean-center here\n",
"# omitting normalization also makes sense, but items with a greater magnitude will be recommended more often\n",
"\n",
"similarity_scores=np.dot(embeddings_norm,embeddings_norm[item].T)\n",
"top_similar_items=pd.DataFrame(enumerate(similarity_scores), columns=['code', 'score'])\\\n",
".sort_values(by=['score'], ascending=[False])[:10]\n",
"\n",
"top_similar_items['item_id']=top_similar_items['code'].apply(lambda x: item_code_id[x])\n",
"\n",
"items=pd.read_csv('./Datasets/ml-100k/movies.csv')\n",
"\n",
"result=pd.merge(top_similar_items, items, left_on='item_id', right_on='id')\n",
"\n",
"result"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# project task 5: implement SVD on top baseline (as it is in Surprise library)"
]
},
{
"cell_type": "code",
"execution_count": 11,
"metadata": {},
"outputs": [],
"source": [
"# making changes to our implementation by considering additional parameters in the gradient descent procedure \n",
"# seems to be the fastest option\n",
"# please save the output in 'Recommendations generated/ml-100k/Self_SVDBaseline_reco.csv' and\n",
"# 'Recommendations generated/ml-100k/Self_SVDBaseline_estimations.csv'"
]
},
{
"cell_type": "code",
"execution_count": 24,
"metadata": {},
"outputs": [],
"source": [
"# Done similarly to https://github.com/albertauyeung/matrix-factorization-in-python\n",
"from tqdm import tqdm\n",
"\n",
"class SVDBaseline():\n",
" \n",
" def __init__(self, train_ui, learning_rate, regularization, nb_factors, iterations):\n",
" self.train_ui=train_ui\n",
" self.uir=list(zip(*[train_ui.nonzero()[0],train_ui.nonzero()[1], train_ui.data]))\n",
" \n",
" self.learning_rate=learning_rate\n",
" self.regularization=regularization\n",
" self.iterations=iterations\n",
" self.nb_users, self.nb_items=train_ui.shape\n",
" self.nb_ratings=train_ui.nnz\n",
" self.nb_factors=nb_factors\n",
" \n",
" self.Pu=np.random.normal(loc=0, scale=1./self.nb_factors, size=(self.nb_users, self.nb_factors))\n",
" self.Qi=np.random.normal(loc=0, scale=1./self.nb_factors, size=(self.nb_items, self.nb_factors))\n",
"\n",
" self.b_u = np.zeros(self.nb_users)\n",
" self.b_i = np.zeros(self.nb_items)\n",
" \n",
" def train(self, test_ui=None):\n",
    "        if test_ui is not None:\n",
" self.test_uir=list(zip(*[test_ui.nonzero()[0],test_ui.nonzero()[1], test_ui.data]))\n",
" \n",
" self.learning_process=[]\n",
" pbar = tqdm(range(self.iterations))\n",
" for i in pbar:\n",
" pbar.set_description(f'Epoch {i} RMSE: {self.learning_process[-1][1] if i>0 else 0}. Training epoch {i+1}...')\n",
" np.random.shuffle(self.uir)\n",
" self.sgd(self.uir)\n",
    "            if test_ui is None:\n",
" self.learning_process.append([i+1, self.RMSE_total(self.uir)])\n",
" else:\n",
" self.learning_process.append([i+1, self.RMSE_total(self.uir), self.RMSE_total(self.test_uir)])\n",
" \n",
" def sgd(self, uir):\n",
" \n",
" for u, i, score in uir:\n",
    "            # Compute prediction and error\n",
" prediction = self.get_rating(u,i)\n",
" e = (score - prediction)\n",
" \n",
" \n",
" b_u_update = self.learning_rate * (e - self.regularization * self.b_u[u])\n",
" b_i_update = self.learning_rate * (e - self.regularization * self.b_i[i])\n",
" \n",
" self.b_u[u] += b_u_update\n",
" self.b_i[i] += b_i_update\n",
" # Update user and item latent feature matrices\n",
" Pu_update=self.learning_rate * (e * self.Qi[i] - self.regularization * self.Pu[u])\n",
" Qi_update=self.learning_rate * (e * self.Pu[u] - self.regularization * self.Qi[i])\n",
" \n",
" self.Pu[u] += Pu_update\n",
" self.Qi[i] += Qi_update\n",
" \n",
" def get_rating(self, u, i):\n",
" prediction = self.b_u[u] + self.b_i[i] + self.Pu[u].dot(self.Qi[i].T)\n",
" return prediction\n",
" \n",
" def RMSE_total(self, uir):\n",
" RMSE=0\n",
" for u,i, score in uir:\n",
" prediction = self.get_rating(u,i)\n",
" RMSE+=(score - prediction)**2\n",
" return np.sqrt(RMSE/len(uir))\n",
" \n",
" def estimations(self):\n",
" self.estimations=\\\n",
    "        self.b_u[:,np.newaxis] + self.b_i[np.newaxis,:] + np.dot(self.Pu,self.Qi.T)\n",
"\n",
" def recommend(self, user_code_id, item_code_id, topK=10):\n",
" \n",
" top_k = defaultdict(list)\n",
" for nb_user, user in enumerate(self.estimations):\n",
" \n",
" user_rated=self.train_ui.indices[self.train_ui.indptr[nb_user]:self.train_ui.indptr[nb_user+1]]\n",
" for item, score in enumerate(user):\n",
" if item not in user_rated and not np.isnan(score):\n",
" top_k[user_code_id[nb_user]].append((item_code_id[item], score))\n",
" result=[]\n",
" # Let's choose k best items in the format: (user, item1, score1, item2, score2, ...)\n",
" for uid, item_scores in top_k.items():\n",
" item_scores.sort(key=lambda x: x[1], reverse=True)\n",
" result.append([uid]+list(chain(*item_scores[:topK])))\n",
" return result\n",
" \n",
" def estimate(self, user_code_id, item_code_id, test_ui):\n",
" result=[]\n",
" for user, item in zip(*test_ui.nonzero()):\n",
" result.append([user_code_id[user], item_code_id[item], \n",
" self.estimations[user,item] if not np.isnan(self.estimations[user,item]) else 1])\n",
" return result"
]
},
{
"cell_type": "code",
"execution_count": 25,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"Epoch 39 RMSE: 0.7820631219900416. Training epoch 40...: 100%|█████████████████████████| 40/40 [03:33<00:00, 5.34s/it]\n"
]
}
],
"source": [
"model=SVDBaseline(train_ui, learning_rate=0.005, regularization=0.02, nb_factors=100, iterations=40)\n",
"model.train(test_ui)"
]
},
{
"cell_type": "code",
"execution_count": 26,
"metadata": {},
"outputs": [],
"source": [
"model.estimations()\n",
"\n",
"top_n=pd.DataFrame(model.recommend(user_code_id, item_code_id, topK=10))\n",
"\n",
"top_n.to_csv('Recommendations generated/ml-100k/Self_SVDBaseline_reco.csv', index=False, header=False)\n",
"\n",
"estimations=pd.DataFrame(model.estimate(user_code_id, item_code_id, test_ui))\n",
"estimations.to_csv('Recommendations generated/ml-100k/Self_SVDBaseline_estimations.csv', index=False, header=False)"
]
},
{
"cell_type": "code",
"execution_count": 27,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"943it [00:00, 3891.04it/s]\n"
]
},
{
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>RMSE</th>\n",
" <th>MAE</th>\n",
" <th>precision</th>\n",
" <th>recall</th>\n",
" <th>F_1</th>\n",
" <th>F_05</th>\n",
" <th>precision_super</th>\n",
" <th>recall_super</th>\n",
" <th>NDCG</th>\n",
" <th>mAP</th>\n",
" <th>MRR</th>\n",
" <th>LAUC</th>\n",
" <th>HR</th>\n",
" <th>HR2</th>\n",
" <th>Reco in test</th>\n",
" <th>Test coverage</th>\n",
" <th>Shannon</th>\n",
" <th>Gini</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>0.913253</td>\n",
" <td>0.719475</td>\n",
" <td>0.10509</td>\n",
" <td>0.043952</td>\n",
" <td>0.053454</td>\n",
" <td>0.070803</td>\n",
" <td>0.095279</td>\n",
" <td>0.073469</td>\n",
" <td>0.118152</td>\n",
" <td>0.058739</td>\n",
" <td>0.244096</td>\n",
" <td>0.518714</td>\n",
" <td>0.471898</td>\n",
" <td>0.279958</td>\n",
" <td>0.999682</td>\n",
" <td>0.111111</td>\n",
" <td>3.572421</td>\n",
" <td>0.980655</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"</div>"
],
"text/plain": [
" RMSE MAE precision recall F_1 F_05 \\\n",
"0 0.913253 0.719475 0.10509 0.043952 0.053454 0.070803 \n",
"\n",
" precision_super recall_super NDCG mAP MRR LAUC \\\n",
"0 0.095279 0.073469 0.118152 0.058739 0.244096 0.518714 \n",
"\n",
" HR HR2 Reco in test Test coverage Shannon Gini \n",
"0 0.471898 0.279958 0.999682 0.111111 3.572421 0.980655 "
]
},
"execution_count": 27,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"import evaluation_measures as ev\n",
"\n",
"estimations_df=pd.read_csv('Recommendations generated/ml-100k/Self_SVDBaseline_estimations.csv', header=None)\n",
"reco=np.loadtxt('Recommendations generated/ml-100k/Self_SVDBaseline_reco.csv', delimiter=',')\n",
"\n",
"ev.evaluate(test=pd.read_csv('./Datasets/ml-100k/test.csv', sep='\\t', header=None),\n",
" estimations_df=estimations_df, \n",
" reco=reco,\n",
" super_reactions=[4,5])"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Ready-made SVD - Surprise implementation"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### SVD"
]
},
{
"cell_type": "code",
"execution_count": 12,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Generating predictions...\n",
"Generating top N recommendations...\n",
"Generating predictions...\n"
]
}
],
"source": [
"import helpers\n",
"import surprise as sp\n",
    "import importlib\n",
    "importlib.reload(helpers)\n",
"\n",
"algo = sp.SVD(biased=False) # to use unbiased version\n",
"\n",
"helpers.ready_made(algo, reco_path='Recommendations generated/ml-100k/Ready_SVD_reco.csv',\n",
" estimations_path='Recommendations generated/ml-100k/Ready_SVD_estimations.csv')"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### SVD biased - on top baseline"
]
},
{
"cell_type": "code",
"execution_count": 13,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Generating predictions...\n",
"Generating top N recommendations...\n",
"Generating predictions...\n"
]
}
],
"source": [
"import helpers\n",
"import surprise as sp\n",
    "import importlib\n",
    "importlib.reload(helpers)\n",
"\n",
"algo = sp.SVD() # default is biased=True\n",
"\n",
"helpers.ready_made(algo, reco_path='Recommendations generated/ml-100k/Ready_SVDBiased_reco.csv',\n",
" estimations_path='Recommendations generated/ml-100k/Ready_SVDBiased_estimations.csv')"
]
},
{
"cell_type": "code",
"execution_count": 28,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"943it [00:00, 3972.80it/s]\n",
"943it [00:00, 3608.86it/s]\n",
"943it [00:00, 3514.94it/s]\n",
"943it [00:00, 3447.85it/s]\n",
"943it [00:00, 3615.55it/s]\n",
"943it [00:00, 3364.78it/s]\n",
"943it [00:00, 3508.24it/s]\n",
"943it [00:00, 3394.08it/s]\n",
"943it [00:00, 3294.51it/s]\n",
"943it [00:00, 3636.65it/s]\n",
"943it [00:00, 3356.18it/s]\n",
"943it [00:00, 3364.83it/s]\n",
"943it [00:00, 3438.26it/s]\n",
"943it [00:00, 3642.63it/s]\n",
"943it [00:00, 3294.49it/s]\n",
"943it [00:00, 3205.15it/s]\n",
"943it [00:00, 3737.24it/s]\n",
"943it [00:00, 3456.46it/s]\n",
"943it [00:00, 3528.07it/s]\n",
"943it [00:00, 3495.27it/s]\n",
"943it [00:00, 3321.11it/s]\n",
"943it [00:00, 2405.91it/s]\n",
"943it [00:00, 2676.16it/s]\n"
]
},
{
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>Model</th>\n",
" <th>RMSE</th>\n",
" <th>MAE</th>\n",
" <th>precision</th>\n",
" <th>recall</th>\n",
" <th>F_1</th>\n",
" <th>F_05</th>\n",
" <th>precision_super</th>\n",
" <th>recall_super</th>\n",
" <th>NDCG</th>\n",
" <th>mAP</th>\n",
" <th>MRR</th>\n",
" <th>LAUC</th>\n",
" <th>HR</th>\n",
" <th>HR2</th>\n",
" <th>Reco in test</th>\n",
" <th>Test coverage</th>\n",
" <th>Shannon</th>\n",
" <th>Gini</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Self_TopPop</td>\n",
" <td>2.508258</td>\n",
" <td>2.217909</td>\n",
" <td>0.188865</td>\n",
" <td>0.116919</td>\n",
" <td>0.118732</td>\n",
" <td>0.141584</td>\n",
" <td>0.130472</td>\n",
" <td>0.137473</td>\n",
" <td>0.214651</td>\n",
" <td>0.111707</td>\n",
" <td>0.400939</td>\n",
" <td>0.555546</td>\n",
" <td>0.765642</td>\n",
" <td>0.492047</td>\n",
" <td>1.000000</td>\n",
" <td>0.038961</td>\n",
" <td>3.159079</td>\n",
" <td>0.987317</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Self_SVDBaseline</td>\n",
" <td>0.913253</td>\n",
" <td>0.719475</td>\n",
" <td>0.105090</td>\n",
" <td>0.043952</td>\n",
" <td>0.053454</td>\n",
" <td>0.070803</td>\n",
" <td>0.095279</td>\n",
" <td>0.073469</td>\n",
" <td>0.118152</td>\n",
" <td>0.058739</td>\n",
" <td>0.244096</td>\n",
" <td>0.518714</td>\n",
" <td>0.471898</td>\n",
" <td>0.279958</td>\n",
" <td>0.999682</td>\n",
" <td>0.111111</td>\n",
" <td>3.572421</td>\n",
" <td>0.980655</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Self_SVD</td>\n",
" <td>0.914521</td>\n",
" <td>0.717680</td>\n",
" <td>0.102757</td>\n",
" <td>0.043043</td>\n",
" <td>0.052432</td>\n",
" <td>0.069515</td>\n",
" <td>0.094528</td>\n",
" <td>0.075122</td>\n",
" <td>0.106751</td>\n",
" <td>0.051431</td>\n",
" <td>0.198701</td>\n",
" <td>0.518248</td>\n",
" <td>0.462354</td>\n",
" <td>0.255567</td>\n",
" <td>0.854931</td>\n",
" <td>0.147186</td>\n",
" <td>3.888926</td>\n",
" <td>0.972044</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Ready_Baseline</td>\n",
" <td>0.949459</td>\n",
" <td>0.752487</td>\n",
" <td>0.091410</td>\n",
" <td>0.037652</td>\n",
" <td>0.046030</td>\n",
" <td>0.061286</td>\n",
" <td>0.079614</td>\n",
" <td>0.056463</td>\n",
" <td>0.095957</td>\n",
" <td>0.043178</td>\n",
" <td>0.198193</td>\n",
" <td>0.515501</td>\n",
" <td>0.437964</td>\n",
" <td>0.239661</td>\n",
" <td>1.000000</td>\n",
" <td>0.033911</td>\n",
" <td>2.836513</td>\n",
" <td>0.991139</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Self_GlobalAvg</td>\n",
" <td>1.125760</td>\n",
" <td>0.943534</td>\n",
" <td>0.061188</td>\n",
" <td>0.025968</td>\n",
" <td>0.031383</td>\n",
" <td>0.041343</td>\n",
" <td>0.040558</td>\n",
" <td>0.032107</td>\n",
" <td>0.067695</td>\n",
" <td>0.027470</td>\n",
" <td>0.171187</td>\n",
" <td>0.509546</td>\n",
" <td>0.384942</td>\n",
" <td>0.142100</td>\n",
" <td>1.000000</td>\n",
" <td>0.025974</td>\n",
" <td>2.711772</td>\n",
" <td>0.992003</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Ready_Random</td>\n",
" <td>1.517593</td>\n",
" <td>1.220181</td>\n",
" <td>0.046023</td>\n",
" <td>0.019038</td>\n",
" <td>0.023118</td>\n",
" <td>0.030734</td>\n",
" <td>0.029292</td>\n",
" <td>0.021639</td>\n",
" <td>0.050818</td>\n",
" <td>0.019958</td>\n",
" <td>0.126646</td>\n",
" <td>0.506031</td>\n",
" <td>0.305408</td>\n",
" <td>0.111347</td>\n",
" <td>0.988547</td>\n",
" <td>0.174603</td>\n",
" <td>5.082383</td>\n",
" <td>0.908434</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Ready_I-KNN</td>\n",
" <td>1.030386</td>\n",
" <td>0.813067</td>\n",
" <td>0.026087</td>\n",
" <td>0.006908</td>\n",
" <td>0.010593</td>\n",
" <td>0.016046</td>\n",
" <td>0.021137</td>\n",
" <td>0.009522</td>\n",
" <td>0.024214</td>\n",
" <td>0.008958</td>\n",
" <td>0.048068</td>\n",
" <td>0.499885</td>\n",
" <td>0.154825</td>\n",
" <td>0.072110</td>\n",
" <td>0.402333</td>\n",
" <td>0.434343</td>\n",
" <td>5.133650</td>\n",
" <td>0.877999</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Ready_I-KNNWithMeans</td>\n",
" <td>0.955921</td>\n",
" <td>0.754037</td>\n",
" <td>0.004984</td>\n",
" <td>0.003225</td>\n",
" <td>0.003406</td>\n",
" <td>0.003956</td>\n",
" <td>0.004506</td>\n",
" <td>0.003861</td>\n",
" <td>0.006815</td>\n",
" <td>0.002906</td>\n",
" <td>0.020332</td>\n",
" <td>0.497969</td>\n",
" <td>0.039236</td>\n",
" <td>0.007423</td>\n",
" <td>0.587699</td>\n",
" <td>0.071429</td>\n",
" <td>2.699278</td>\n",
" <td>0.991353</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Ready_I-KNNWithZScore</td>\n",
" <td>0.957701</td>\n",
" <td>0.752387</td>\n",
" <td>0.003712</td>\n",
" <td>0.001994</td>\n",
" <td>0.002380</td>\n",
" <td>0.002919</td>\n",
" <td>0.003433</td>\n",
" <td>0.002401</td>\n",
" <td>0.005137</td>\n",
" <td>0.002158</td>\n",
" <td>0.016458</td>\n",
" <td>0.497349</td>\n",
" <td>0.027572</td>\n",
" <td>0.007423</td>\n",
" <td>0.389926</td>\n",
" <td>0.067821</td>\n",
" <td>2.475747</td>\n",
" <td>0.992793</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Self_I-KNNBaseline45</td>\n",
" <td>0.935268</td>\n",
" <td>0.737543</td>\n",
" <td>0.003075</td>\n",
" <td>0.001044</td>\n",
" <td>0.001450</td>\n",
" <td>0.002016</td>\n",
" <td>0.002790</td>\n",
" <td>0.001317</td>\n",
" <td>0.004287</td>\n",
" <td>0.001812</td>\n",
" <td>0.014189</td>\n",
" <td>0.496871</td>\n",
" <td>0.024390</td>\n",
" <td>0.005302</td>\n",
" <td>0.482609</td>\n",
" <td>0.058442</td>\n",
" <td>2.225340</td>\n",
" <td>0.994599</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Self_I-KNNBaseline42</td>\n",
" <td>0.935028</td>\n",
" <td>0.737210</td>\n",
" <td>0.002969</td>\n",
" <td>0.000980</td>\n",
" <td>0.001374</td>\n",
" <td>0.001929</td>\n",
" <td>0.002682</td>\n",
" <td>0.001217</td>\n",
" <td>0.004069</td>\n",
" <td>0.001677</td>\n",
" <td>0.013349</td>\n",
" <td>0.496838</td>\n",
" <td>0.023330</td>\n",
" <td>0.006363</td>\n",
" <td>0.481972</td>\n",
" <td>0.059163</td>\n",
" <td>2.227849</td>\n",
" <td>0.994531</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Self_I-KNNBaseline43</td>\n",
" <td>0.935241</td>\n",
" <td>0.737463</td>\n",
" <td>0.002863</td>\n",
" <td>0.000952</td>\n",
" <td>0.001331</td>\n",
" <td>0.001862</td>\n",
" <td>0.002575</td>\n",
" <td>0.001186</td>\n",
" <td>0.004014</td>\n",
" <td>0.001663</td>\n",
" <td>0.013467</td>\n",
" <td>0.496824</td>\n",
" <td>0.023330</td>\n",
" <td>0.005302</td>\n",
" <td>0.482609</td>\n",
" <td>0.055556</td>\n",
" <td>2.225996</td>\n",
" <td>0.994623</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Self_I-KNNBaseline44</td>\n",
" <td>0.935259</td>\n",
" <td>0.737530</td>\n",
" <td>0.002969</td>\n",
" <td>0.000902</td>\n",
" <td>0.001305</td>\n",
" <td>0.001880</td>\n",
" <td>0.002682</td>\n",
" <td>0.001129</td>\n",
" <td>0.004215</td>\n",
" <td>0.001823</td>\n",
" <td>0.013977</td>\n",
" <td>0.496799</td>\n",
" <td>0.023330</td>\n",
" <td>0.005302</td>\n",
" <td>0.482397</td>\n",
" <td>0.057720</td>\n",
" <td>2.225495</td>\n",
" <td>0.994598</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Self_I-KNNBaseline39</td>\n",
" <td>0.935520</td>\n",
" <td>0.737631</td>\n",
" <td>0.002757</td>\n",
" <td>0.000856</td>\n",
" <td>0.001230</td>\n",
" <td>0.001758</td>\n",
" <td>0.002468</td>\n",
" <td>0.001048</td>\n",
" <td>0.003899</td>\n",
" <td>0.001620</td>\n",
" <td>0.013296</td>\n",
" <td>0.496775</td>\n",
" <td>0.022269</td>\n",
" <td>0.005302</td>\n",
" <td>0.483351</td>\n",
" <td>0.059885</td>\n",
" <td>2.235102</td>\n",
" <td>0.994479</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Self_I-KNNBaseline38</td>\n",
" <td>0.935685</td>\n",
" <td>0.737828</td>\n",
" <td>0.002651</td>\n",
" <td>0.000837</td>\n",
" <td>0.001197</td>\n",
" <td>0.001702</td>\n",
" <td>0.002361</td>\n",
" <td>0.001020</td>\n",
" <td>0.003635</td>\n",
" <td>0.001443</td>\n",
" <td>0.012589</td>\n",
" <td>0.496765</td>\n",
" <td>0.022269</td>\n",
" <td>0.004242</td>\n",
" <td>0.483245</td>\n",
" <td>0.059163</td>\n",
" <td>2.235851</td>\n",
" <td>0.994507</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Self_I-KNNBaseline41</td>\n",
" <td>0.935205</td>\n",
" <td>0.737439</td>\n",
" <td>0.002651</td>\n",
" <td>0.000774</td>\n",
" <td>0.001138</td>\n",
" <td>0.001658</td>\n",
" <td>0.002361</td>\n",
" <td>0.000959</td>\n",
" <td>0.003537</td>\n",
" <td>0.001435</td>\n",
" <td>0.011494</td>\n",
" <td>0.496734</td>\n",
" <td>0.021209</td>\n",
" <td>0.005302</td>\n",
" <td>0.482503</td>\n",
" <td>0.057720</td>\n",
" <td>2.228123</td>\n",
" <td>0.994555</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Ready_I-KNNBaseline</td>\n",
" <td>0.935327</td>\n",
" <td>0.737424</td>\n",
" <td>0.002545</td>\n",
" <td>0.000755</td>\n",
" <td>0.001105</td>\n",
" <td>0.001602</td>\n",
" <td>0.002253</td>\n",
" <td>0.000930</td>\n",
" <td>0.003444</td>\n",
" <td>0.001362</td>\n",
" <td>0.011760</td>\n",
" <td>0.496724</td>\n",
" <td>0.021209</td>\n",
" <td>0.004242</td>\n",
" <td>0.482821</td>\n",
" <td>0.059885</td>\n",
" <td>2.232578</td>\n",
" <td>0.994487</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Self_I-KNNBaseline40</td>\n",
" <td>0.935327</td>\n",
" <td>0.737424</td>\n",
" <td>0.002545</td>\n",
" <td>0.000755</td>\n",
" <td>0.001105</td>\n",
" <td>0.001602</td>\n",
" <td>0.002253</td>\n",
" <td>0.000930</td>\n",
" <td>0.003444</td>\n",
" <td>0.001362</td>\n",
" <td>0.011760</td>\n",
" <td>0.496724</td>\n",
" <td>0.021209</td>\n",
" <td>0.004242</td>\n",
" <td>0.482821</td>\n",
" <td>0.059885</td>\n",
" <td>2.232578</td>\n",
" <td>0.994487</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Ready_U-KNN</td>\n",
" <td>1.023495</td>\n",
" <td>0.807913</td>\n",
" <td>0.000742</td>\n",
" <td>0.000205</td>\n",
" <td>0.000305</td>\n",
" <td>0.000449</td>\n",
" <td>0.000536</td>\n",
" <td>0.000198</td>\n",
" <td>0.000845</td>\n",
" <td>0.000274</td>\n",
" <td>0.002744</td>\n",
" <td>0.496441</td>\n",
" <td>0.007423</td>\n",
" <td>0.000000</td>\n",
" <td>0.602121</td>\n",
" <td>0.010823</td>\n",
" <td>2.089186</td>\n",
" <td>0.995706</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Self_TopRated</td>\n",
" <td>2.508258</td>\n",
" <td>2.217909</td>\n",
" <td>0.000954</td>\n",
" <td>0.000188</td>\n",
" <td>0.000298</td>\n",
" <td>0.000481</td>\n",
" <td>0.000644</td>\n",
" <td>0.000223</td>\n",
" <td>0.001043</td>\n",
" <td>0.000335</td>\n",
" <td>0.003348</td>\n",
" <td>0.496433</td>\n",
" <td>0.009544</td>\n",
" <td>0.000000</td>\n",
" <td>0.699046</td>\n",
" <td>0.005051</td>\n",
" <td>1.945910</td>\n",
" <td>0.995669</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Self_BaselineIU</td>\n",
" <td>0.958136</td>\n",
" <td>0.754051</td>\n",
" <td>0.000954</td>\n",
" <td>0.000188</td>\n",
" <td>0.000298</td>\n",
" <td>0.000481</td>\n",
" <td>0.000644</td>\n",
" <td>0.000223</td>\n",
" <td>0.001043</td>\n",
" <td>0.000335</td>\n",
" <td>0.003348</td>\n",
" <td>0.496433</td>\n",
" <td>0.009544</td>\n",
" <td>0.000000</td>\n",
" <td>0.699046</td>\n",
" <td>0.005051</td>\n",
" <td>1.945910</td>\n",
" <td>0.995669</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Self_BaselineUI</td>\n",
" <td>0.967585</td>\n",
" <td>0.762740</td>\n",
" <td>0.000954</td>\n",
" <td>0.000170</td>\n",
" <td>0.000278</td>\n",
" <td>0.000463</td>\n",
" <td>0.000644</td>\n",
" <td>0.000189</td>\n",
" <td>0.000752</td>\n",
" <td>0.000168</td>\n",
" <td>0.001677</td>\n",
" <td>0.496424</td>\n",
" <td>0.009544</td>\n",
" <td>0.000000</td>\n",
" <td>0.600530</td>\n",
" <td>0.005051</td>\n",
" <td>1.803126</td>\n",
" <td>0.996380</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Self_IKNN</td>\n",
" <td>1.018363</td>\n",
" <td>0.808793</td>\n",
" <td>0.000318</td>\n",
" <td>0.000108</td>\n",
" <td>0.000140</td>\n",
" <td>0.000189</td>\n",
" <td>0.000000</td>\n",
" <td>0.000000</td>\n",
" <td>0.000214</td>\n",
" <td>0.000037</td>\n",
" <td>0.000368</td>\n",
" <td>0.496391</td>\n",
" <td>0.003181</td>\n",
" <td>0.000000</td>\n",
" <td>0.392153</td>\n",
" <td>0.115440</td>\n",
" <td>4.174741</td>\n",
" <td>0.965327</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"</div>"
],
"text/plain": [
" Model RMSE MAE precision recall F_1 \\\n",
"0 Self_TopPop 2.508258 2.217909 0.188865 0.116919 0.118732 \n",
"0 Self_SVDBaseline 0.913253 0.719475 0.105090 0.043952 0.053454 \n",
"0 Self_SVD 0.914521 0.717680 0.102757 0.043043 0.052432 \n",
"0 Ready_Baseline 0.949459 0.752487 0.091410 0.037652 0.046030 \n",
"0 Self_GlobalAvg 1.125760 0.943534 0.061188 0.025968 0.031383 \n",
"0 Ready_Random 1.517593 1.220181 0.046023 0.019038 0.023118 \n",
"0 Ready_I-KNN 1.030386 0.813067 0.026087 0.006908 0.010593 \n",
"0 Ready_I-KNNWithMeans 0.955921 0.754037 0.004984 0.003225 0.003406 \n",
"0 Ready_I-KNNWithZScore 0.957701 0.752387 0.003712 0.001994 0.002380 \n",
"0 Self_I-KNNBaseline45 0.935268 0.737543 0.003075 0.001044 0.001450 \n",
"0 Self_I-KNNBaseline42 0.935028 0.737210 0.002969 0.000980 0.001374 \n",
"0 Self_I-KNNBaseline43 0.935241 0.737463 0.002863 0.000952 0.001331 \n",
"0 Self_I-KNNBaseline44 0.935259 0.737530 0.002969 0.000902 0.001305 \n",
"0 Self_I-KNNBaseline39 0.935520 0.737631 0.002757 0.000856 0.001230 \n",
"0 Self_I-KNNBaseline38 0.935685 0.737828 0.002651 0.000837 0.001197 \n",
"0 Self_I-KNNBaseline41 0.935205 0.737439 0.002651 0.000774 0.001138 \n",
"0 Ready_I-KNNBaseline 0.935327 0.737424 0.002545 0.000755 0.001105 \n",
"0 Self_I-KNNBaseline40 0.935327 0.737424 0.002545 0.000755 0.001105 \n",
"0 Ready_U-KNN 1.023495 0.807913 0.000742 0.000205 0.000305 \n",
"0 Self_TopRated 2.508258 2.217909 0.000954 0.000188 0.000298 \n",
"0 Self_BaselineIU 0.958136 0.754051 0.000954 0.000188 0.000298 \n",
"0 Self_BaselineUI 0.967585 0.762740 0.000954 0.000170 0.000278 \n",
"0 Self_IKNN 1.018363 0.808793 0.000318 0.000108 0.000140 \n",
"\n",
" F_05 precision_super recall_super NDCG mAP MRR \\\n",
"0 0.141584 0.130472 0.137473 0.214651 0.111707 0.400939 \n",
"0 0.070803 0.095279 0.073469 0.118152 0.058739 0.244096 \n",
"0 0.069515 0.094528 0.075122 0.106751 0.051431 0.198701 \n",
"0 0.061286 0.079614 0.056463 0.095957 0.043178 0.198193 \n",
"0 0.041343 0.040558 0.032107 0.067695 0.027470 0.171187 \n",
"0 0.030734 0.029292 0.021639 0.050818 0.019958 0.126646 \n",
"0 0.016046 0.021137 0.009522 0.024214 0.008958 0.048068 \n",
"0 0.003956 0.004506 0.003861 0.006815 0.002906 0.020332 \n",
"0 0.002919 0.003433 0.002401 0.005137 0.002158 0.016458 \n",
"0 0.002016 0.002790 0.001317 0.004287 0.001812 0.014189 \n",
"0 0.001929 0.002682 0.001217 0.004069 0.001677 0.013349 \n",
"0 0.001862 0.002575 0.001186 0.004014 0.001663 0.013467 \n",
"0 0.001880 0.002682 0.001129 0.004215 0.001823 0.013977 \n",
"0 0.001758 0.002468 0.001048 0.003899 0.001620 0.013296 \n",
"0 0.001702 0.002361 0.001020 0.003635 0.001443 0.012589 \n",
"0 0.001658 0.002361 0.000959 0.003537 0.001435 0.011494 \n",
"0 0.001602 0.002253 0.000930 0.003444 0.001362 0.011760 \n",
"0 0.001602 0.002253 0.000930 0.003444 0.001362 0.011760 \n",
"0 0.000449 0.000536 0.000198 0.000845 0.000274 0.002744 \n",
"0 0.000481 0.000644 0.000223 0.001043 0.000335 0.003348 \n",
"0 0.000481 0.000644 0.000223 0.001043 0.000335 0.003348 \n",
"0 0.000463 0.000644 0.000189 0.000752 0.000168 0.001677 \n",
"0 0.000189 0.000000 0.000000 0.000214 0.000037 0.000368 \n",
"\n",
" LAUC HR HR2 Reco in test Test coverage Shannon \\\n",
"0 0.555546 0.765642 0.492047 1.000000 0.038961 3.159079 \n",
"0 0.518714 0.471898 0.279958 0.999682 0.111111 3.572421 \n",
"0 0.518248 0.462354 0.255567 0.854931 0.147186 3.888926 \n",
"0 0.515501 0.437964 0.239661 1.000000 0.033911 2.836513 \n",
"0 0.509546 0.384942 0.142100 1.000000 0.025974 2.711772 \n",
"0 0.506031 0.305408 0.111347 0.988547 0.174603 5.082383 \n",
"0 0.499885 0.154825 0.072110 0.402333 0.434343 5.133650 \n",
"0 0.497969 0.039236 0.007423 0.587699 0.071429 2.699278 \n",
"0 0.497349 0.027572 0.007423 0.389926 0.067821 2.475747 \n",
"0 0.496871 0.024390 0.005302 0.482609 0.058442 2.225340 \n",
"0 0.496838 0.023330 0.006363 0.481972 0.059163 2.227849 \n",
"0 0.496824 0.023330 0.005302 0.482609 0.055556 2.225996 \n",
"0 0.496799 0.023330 0.005302 0.482397 0.057720 2.225495 \n",
"0 0.496775 0.022269 0.005302 0.483351 0.059885 2.235102 \n",
"0 0.496765 0.022269 0.004242 0.483245 0.059163 2.235851 \n",
"0 0.496734 0.021209 0.005302 0.482503 0.057720 2.228123 \n",
"0 0.496724 0.021209 0.004242 0.482821 0.059885 2.232578 \n",
"0 0.496724 0.021209 0.004242 0.482821 0.059885 2.232578 \n",
"0 0.496441 0.007423 0.000000 0.602121 0.010823 2.089186 \n",
"0 0.496433 0.009544 0.000000 0.699046 0.005051 1.945910 \n",
"0 0.496433 0.009544 0.000000 0.699046 0.005051 1.945910 \n",
"0 0.496424 0.009544 0.000000 0.600530 0.005051 1.803126 \n",
"0 0.496391 0.003181 0.000000 0.392153 0.115440 4.174741 \n",
"\n",
" Gini \n",
"0 0.987317 \n",
"0 0.980655 \n",
"0 0.972044 \n",
"0 0.991139 \n",
"0 0.992003 \n",
"0 0.908434 \n",
"0 0.877999 \n",
"0 0.991353 \n",
"0 0.992793 \n",
"0 0.994599 \n",
"0 0.994531 \n",
"0 0.994623 \n",
"0 0.994598 \n",
"0 0.994479 \n",
"0 0.994507 \n",
"0 0.994555 \n",
"0 0.994487 \n",
"0 0.994487 \n",
"0 0.995706 \n",
"0 0.995669 \n",
"0 0.995669 \n",
"0 0.996380 \n",
"0 0.965327 "
]
},
"execution_count": 28,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
 "# Evaluate every recommender whose output files are in `dir_path`.\n",
 "# NOTE: the original cell ran `imp.reload(ev)` BEFORE `import evaluation_measures as ev`,\n",
 "# which raises NameError on a fresh kernel and only worked via hidden kernel state.\n",
 "# `imp` is also deprecated since Python 3.4 -- use `importlib` instead.\n",
 "import importlib\n",
 "\n",
 "import evaluation_measures as ev\n",
 "importlib.reload(ev)  # pick up edits to the module without restarting the kernel\n",
 "\n",
 "dir_path = \"Recommendations generated/ml-100k/\"  # directory with generated recommendation files\n",
 "super_reactions = [4, 5]  # ratings treated as strong positive ('super') reactions\n",
 "test = pd.read_csv('./Datasets/ml-100k/test.csv', sep='\\t', header=None)\n",
 "\n",
 "ev.evaluate_all(test, dir_path, super_reactions)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.5"
}
},
"nbformat": 4,
"nbformat_minor": 4
}