{ "cells": [ { "cell_type": "code", "execution_count": 76, "metadata": {}, "outputs": [], "source": [ "import torch\n", "import lzma\n", "from itertools import islice\n", "import re\n", "import sys\n", "from torchtext.vocab import build_vocab_from_iterator\n", "from torch import nn\n", "from torch.utils.data import IterableDataset, DataLoader\n", "import itertools\n", "import matplotlib.pyplot as plt" ] }, { "cell_type": "code", "execution_count": 77, "metadata": {}, "outputs": [], "source": [ "VOCAB_SIZE = 10_000\n", "EMBED_SIZE = 400" ] }, { "cell_type": "code", "execution_count": 78, "metadata": {}, "outputs": [ { "ename": "KeyboardInterrupt", "evalue": "", "output_type": "error", "traceback": [ "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[1;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", "\u001b[1;32md:\\studia\\challenging-america-word-gap-prediction\\nn_trigram.ipynb Cell 3\u001b[0m in \u001b[0;36m\u001b[1;34m()\u001b[0m\n\u001b[0;32m 13\u001b[0m \u001b[39myield\u001b[39;00m get_words_from_line(line)\n\u001b[0;32m 15\u001b[0m vocab_size \u001b[39m=\u001b[39m \u001b[39m1_000\u001b[39m\n\u001b[1;32m---> 17\u001b[0m vocab \u001b[39m=\u001b[39m build_vocab_from_iterator(\n\u001b[0;32m 18\u001b[0m get_word_lines_from_file(\u001b[39m\"\u001b[39;49m\u001b[39mtrain/in.tsv.xz\u001b[39;49m\u001b[39m\"\u001b[39;49m),\n\u001b[0;32m 19\u001b[0m max_tokens \u001b[39m=\u001b[39;49m VOCAB_SIZE,\n\u001b[0;32m 20\u001b[0m specials \u001b[39m=\u001b[39;49m [\u001b[39m'\u001b[39;49m\u001b[39m\u001b[39;49m\u001b[39m'\u001b[39;49m])\n", "File \u001b[1;32mc:\\PROGRAMY\\Anaconda3\\envs\\modelowanie-jezyka\\lib\\site-packages\\torchtext\\vocab\\vocab_factory.py:98\u001b[0m, in \u001b[0;36mbuild_vocab_from_iterator\u001b[1;34m(iterator, min_freq, specials, special_first, max_tokens)\u001b[0m\n\u001b[0;32m 72\u001b[0m \u001b[39m\u001b[39m\u001b[39m\"\"\"\u001b[39;00m\n\u001b[0;32m 73\u001b[0m 
\u001b[39mBuild a Vocab from an iterator.\u001b[39;00m\n\u001b[0;32m 74\u001b[0m \n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 94\u001b[0m \u001b[39m >>> vocab = build_vocab_from_iterator(yield_tokens(file_path), specials=[\"\"])\u001b[39;00m\n\u001b[0;32m 95\u001b[0m \u001b[39m\"\"\"\u001b[39;00m\n\u001b[0;32m 97\u001b[0m counter \u001b[39m=\u001b[39m Counter()\n\u001b[1;32m---> 98\u001b[0m \u001b[39mfor\u001b[39;00m tokens \u001b[39min\u001b[39;00m iterator:\n\u001b[0;32m 99\u001b[0m counter\u001b[39m.\u001b[39mupdate(tokens)\n\u001b[0;32m 101\u001b[0m specials \u001b[39m=\u001b[39m specials \u001b[39mor\u001b[39;00m []\n", "\u001b[1;32md:\\studia\\challenging-america-word-gap-prediction\\nn_trigram.ipynb Cell 3\u001b[0m in \u001b[0;36mget_word_lines_from_file\u001b[1;34m(file_name)\u001b[0m\n\u001b[0;32m 10\u001b[0m \u001b[39mdef\u001b[39;00m \u001b[39mget_word_lines_from_file\u001b[39m(file_name):\n\u001b[0;32m 11\u001b[0m \u001b[39mwith\u001b[39;00m lzma\u001b[39m.\u001b[39mopen(file_name, encoding\u001b[39m=\u001b[39m\u001b[39m'\u001b[39m\u001b[39mutf8\u001b[39m\u001b[39m'\u001b[39m, mode\u001b[39m=\u001b[39m\u001b[39m\"\u001b[39m\u001b[39mrt\u001b[39m\u001b[39m\"\u001b[39m) \u001b[39mas\u001b[39;00m fh:\n\u001b[1;32m---> 12\u001b[0m \u001b[39mfor\u001b[39;00m line \u001b[39min\u001b[39;00m fh:\n\u001b[0;32m 13\u001b[0m \u001b[39myield\u001b[39;00m get_words_from_line(line)\n", "File \u001b[1;32mc:\\PROGRAMY\\Anaconda3\\envs\\modelowanie-jezyka\\lib\\lzma.py:212\u001b[0m, in \u001b[0;36mLZMAFile.read1\u001b[1;34m(self, size)\u001b[0m\n\u001b[0;32m 210\u001b[0m \u001b[39mif\u001b[39;00m size \u001b[39m<\u001b[39m \u001b[39m0\u001b[39m:\n\u001b[0;32m 211\u001b[0m size \u001b[39m=\u001b[39m io\u001b[39m.\u001b[39mDEFAULT_BUFFER_SIZE\n\u001b[1;32m--> 212\u001b[0m \u001b[39mreturn\u001b[39;00m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49m_buffer\u001b[39m.\u001b[39;49mread1(size)\n", "File 
def get_words_from_line(line):
    """Tokenize one TSV line into a stream of words.

    The challenge TSV keeps the left and right text context in the last
    two tab-separated columns; they are joined into one span, escaped
    ``\\n`` markers are replaced by spaces, and everything except ASCII
    letters and spaces is dropped.

    Yields each remaining whitespace-separated token.
    """
    line = line.rstrip()
    fields = line.split("\t")
    text = fields[-2] + " " + fields[-1]
    text = re.sub(r"\\+n", " ", text)       # literal backslash-n sequences -> space
    text = re.sub('[^A-Za-z ]+', '', text)  # keep ASCII letters and spaces only
    yield from text.split()


def get_word_lines_from_file(file_name):
    """Yield one word generator per line of an xz-compressed TSV file."""
    with lzma.open(file_name, encoding='utf8', mode="rt") as fh:
        for line in fh:
            yield get_words_from_line(line)


# Build the vocabulary from the training corpus; '' doubles as the
# special/unknown token (set_default_index below maps OOV words to it).
vocab = build_vocab_from_iterator(
    get_word_lines_from_file("train/in.tsv.xz"),
    max_tokens=VOCAB_SIZE,
    specials=[''])


def look_ahead_iterator(gen):
    """Turn a flat token stream into ((left, right), middle) pairs.

    For every consecutive trigram (w1, w2, w3) this yields ((w1, w3), w2):
    the outer words are the input, the gap word is the target.
    """
    first = None
    second = None
    for item in gen:
        if first is not None and second is not None:
            yield ((first, item), second)
        first = second
        second = item


class Trigrams(IterableDataset):
    """Streams ((left_id, right_id), middle_id) examples from a corpus file."""

    def __init__(self, text_file, vocabulary_size):
        # Reuses the module-level vocab built above rather than rebuilding it.
        self.vocab = vocab
        self.vocab.set_default_index(self.vocab[''])
        # Fixed: the original ignored the argument and hard-coded the global
        # VOCAB_SIZE; honoring the parameter is equivalent for the existing
        # call site (which passes VOCAB_SIZE) and correct for any other.
        self.vocabulary_size = vocabulary_size
        self.text_file = text_file

    def __iter__(self):
        return look_ahead_iterator(
            (self.vocab[t]
             for t in itertools.chain.from_iterable(
                 get_word_lines_from_file(self.text_file))))


train_dataset = Trigrams("train/in.tsv.xz", VOCAB_SIZE)


class TrigramNNModel(nn.Module):
    """Predicts the middle word of a trigram from its two neighbors.

    forward() takes x = (left_ids, right_ids), embeds both sides,
    concatenates the embeddings and returns a (batch, VOCAB_SIZE)
    probability distribution over the vocabulary.
    """

    def __init__(self, VOCAB_SIZE, EMBED_SIZE):
        super(TrigramNNModel, self).__init__()
        self.embeddings = nn.Embedding(VOCAB_SIZE, EMBED_SIZE)
        self.hidden_layer = nn.Linear(EMBED_SIZE * 2, 1200)
        self.output_layer = nn.Linear(1200, VOCAB_SIZE)
        # Fixed: explicit dim=1 (softmax over the vocabulary axis of the
        # (batch, vocab) logits). The bare nn.Softmax() produced the
        # "Implicit dimension choice for softmax has been deprecated"
        # warning visible in this notebook's output.
        self.softmax = nn.Softmax(dim=1)

    def forward(self, x):
        emb_2 = self.embeddings(x[0])  # left-context word embeddings
        emb_1 = self.embeddings(x[1])  # right-context word embeddings
        x = torch.cat([emb_2, emb_1], dim=1)
        x = self.hidden_layer(x)
        x = self.output_layer(x)
        x = self.softmax(x)
        return x


# Fixed NameError: the original called TrigramNNModel(vocab_size, embed_size),
# but only the upper-case constants VOCAB_SIZE / EMBED_SIZE are defined in
# this notebook (lower-case vocab_size existed only in an earlier, deleted
# version of the vocab cell — see the stale traceback above).
model = TrigramNNModel(VOCAB_SIZE, EMBED_SIZE)

vocab.set_default_index(vocab[''])
Implicit dimension choice for softmax has been deprecated. Change the call to include dim=X as an argument.\n", " x = self.softmax(x)\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "0 tensor(9.2713, grad_fn=)\n", "1 LOSS DIFF: tensor(8.2370, grad_fn=) tensor(8.2154, grad_fn=)\n", "2 LOSS DIFF: tensor(8.0085, grad_fn=) tensor(7.9711, grad_fn=)\n", "3 LOSS DIFF: tensor(8.0149, grad_fn=) tensor(8.0085, grad_fn=)\n", "4 LOSS DIFF: tensor(7.5328, grad_fn=) tensor(7.4404, grad_fn=)\n", "5 LOSS DIFF: tensor(7.5367, grad_fn=) tensor(7.5328, grad_fn=)\n", "6 LOSS DIFF: tensor(7.6733, grad_fn=) tensor(7.5367, grad_fn=)\n", "7 LOSS DIFF: tensor(7.4703, grad_fn=) tensor(7.3663, grad_fn=)\n", "8 LOSS DIFF: tensor(7.2923, grad_fn=) tensor(7.1224, grad_fn=)\n", "9 LOSS DIFF: tensor(7.2912, grad_fn=) tensor(7.0721, grad_fn=)\n", "10 LOSS DIFF: tensor(7.4529, grad_fn=) tensor(7.0255, grad_fn=)\n", "11 LOSS DIFF: tensor(7.2017, grad_fn=) tensor(7.0108, grad_fn=)\n", "12 LOSS DIFF: tensor(7.0689, grad_fn=) tensor(6.7964, grad_fn=)\n", "13 LOSS DIFF: tensor(7.1870, grad_fn=) tensor(6.7505, grad_fn=)\n", "14 LOSS DIFF: tensor(7.0149, grad_fn=) tensor(6.7360, grad_fn=)\n", "15 LOSS DIFF: tensor(7.0185, grad_fn=) tensor(6.5064, grad_fn=)\n", "16 LOSS DIFF: tensor(6.6809, grad_fn=) tensor(6.6315, grad_fn=)\n", "17 LOSS DIFF: tensor(6.6161, grad_fn=) tensor(6.5363, grad_fn=)\n", "18 LOSS DIFF: tensor(6.6186, grad_fn=) tensor(6.4474, grad_fn=)\n", "19 LOSS DIFF: tensor(6.7242, grad_fn=) tensor(6.6186, grad_fn=)\n", "20 LOSS DIFF: tensor(6.8363, grad_fn=) tensor(6.4740, grad_fn=)\n", "21 LOSS DIFF: tensor(6.4746, grad_fn=) tensor(6.3583, grad_fn=)\n", "22 LOSS DIFF: tensor(6.2821, grad_fn=) tensor(6.2621, grad_fn=)\n", "23 LOSS DIFF: tensor(6.5530, grad_fn=) tensor(6.2821, grad_fn=)\n", "24 LOSS DIFF: tensor(6.3082, grad_fn=) tensor(6.1749, grad_fn=)\n", "25 LOSS DIFF: tensor(6.3215, grad_fn=) tensor(6.0069, grad_fn=)\n", "26 LOSS DIFF: tensor(6.3455, grad_fn=) tensor(6.1887, 
grad_fn=)\n", "27 LOSS DIFF: tensor(6.0695, grad_fn=) tensor(6.0053, grad_fn=)\n", "28 LOSS DIFF: tensor(6.2298, grad_fn=) tensor(6.0553, grad_fn=)\n", "29 LOSS DIFF: tensor(6.2879, grad_fn=) tensor(6.2298, grad_fn=)\n", "30 LOSS DIFF: tensor(5.8552, grad_fn=) tensor(5.7972, grad_fn=)\n", "31 LOSS DIFF: tensor(5.8884, grad_fn=) tensor(5.8552, grad_fn=)\n", "32 LOSS DIFF: tensor(6.0852, grad_fn=) tensor(5.8884, grad_fn=)\n", "33 LOSS DIFF: tensor(6.2040, grad_fn=) tensor(6.0852, grad_fn=)\n", "34 LOSS DIFF: tensor(6.1036, grad_fn=) tensor(5.9439, grad_fn=)\n", "35 LOSS DIFF: tensor(6.0782, grad_fn=) tensor(5.9413, grad_fn=)\n", "36 LOSS DIFF: tensor(5.9607, grad_fn=) tensor(5.7949, grad_fn=)\n", "37 LOSS DIFF: tensor(6.0354, grad_fn=) tensor(5.9607, grad_fn=)\n", "38 LOSS DIFF: tensor(6.2669, grad_fn=) tensor(6.0243, grad_fn=)\n", "39 LOSS DIFF: tensor(5.8678, grad_fn=) tensor(5.6556, grad_fn=)\n", "40 LOSS DIFF: tensor(6.0265, grad_fn=) tensor(5.8678, grad_fn=)\n", "41 LOSS DIFF: tensor(6.1147, grad_fn=) tensor(5.8050, grad_fn=)\n", "100 tensor(5.8244, grad_fn=)\n", "42 LOSS DIFF: tensor(5.8244, grad_fn=) tensor(5.7412, grad_fn=)\n", "43 LOSS DIFF: tensor(5.9226, grad_fn=) tensor(5.8244, grad_fn=)\n", "44 LOSS DIFF: tensor(5.9487, grad_fn=) tensor(5.9226, grad_fn=)\n", "45 LOSS DIFF: tensor(5.8844, grad_fn=) tensor(5.3183, grad_fn=)\n", "46 LOSS DIFF: tensor(6.0141, grad_fn=) tensor(5.8844, grad_fn=)\n", "47 LOSS DIFF: tensor(6.1782, grad_fn=) tensor(5.8340, grad_fn=)\n", "48 LOSS DIFF: tensor(5.8840, grad_fn=) tensor(5.7920, grad_fn=)\n", "49 LOSS DIFF: tensor(5.7265, grad_fn=) tensor(5.6177, grad_fn=)\n", "50 LOSS DIFF: tensor(5.9389, grad_fn=) tensor(5.7265, grad_fn=)\n", "51 LOSS DIFF: tensor(5.6946, grad_fn=) tensor(5.6487, grad_fn=)\n", "52 LOSS DIFF: tensor(5.8837, grad_fn=) tensor(5.6946, grad_fn=)\n", "53 LOSS DIFF: tensor(5.9090, grad_fn=) tensor(5.8837, grad_fn=)\n", "54 LOSS DIFF: tensor(5.9914, grad_fn=) tensor(5.9090, grad_fn=)\n", "55 LOSS DIFF: 
tensor(5.8042, grad_fn=) tensor(5.7994, grad_fn=)\n", "56 LOSS DIFF: tensor(5.9282, grad_fn=) tensor(5.8042, grad_fn=)\n", "57 LOSS DIFF: tensor(5.9366, grad_fn=) tensor(5.7254, grad_fn=)\n", "58 LOSS DIFF: tensor(5.7995, grad_fn=) tensor(5.7486, grad_fn=)\n", "59 LOSS DIFF: tensor(5.6361, grad_fn=) tensor(5.5307, grad_fn=)\n", "60 LOSS DIFF: tensor(5.7078, grad_fn=) tensor(5.6361, grad_fn=)\n", "61 LOSS DIFF: tensor(5.7592, grad_fn=) tensor(5.7078, grad_fn=)\n", "62 LOSS DIFF: tensor(5.7625, grad_fn=) tensor(5.5981, grad_fn=)\n", "63 LOSS DIFF: tensor(5.8389, grad_fn=) tensor(5.7625, grad_fn=)\n", "64 LOSS DIFF: tensor(5.7739, grad_fn=) tensor(5.7312, grad_fn=)\n", "65 LOSS DIFF: tensor(5.9031, grad_fn=) tensor(5.6170, grad_fn=)\n", "66 LOSS DIFF: tensor(5.7173, grad_fn=) tensor(5.5232, grad_fn=)\n", "67 LOSS DIFF: tensor(5.7408, grad_fn=) tensor(5.7173, grad_fn=)\n", "68 LOSS DIFF: tensor(5.8191, grad_fn=) tensor(5.7408, grad_fn=)\n", "69 LOSS DIFF: tensor(6.0318, grad_fn=) tensor(5.8191, grad_fn=)\n", "70 LOSS DIFF: tensor(5.6656, grad_fn=) tensor(5.5086, grad_fn=)\n", "71 LOSS DIFF: tensor(5.7288, grad_fn=) tensor(5.6656, grad_fn=)\n", "72 LOSS DIFF: tensor(6.0700, grad_fn=) tensor(5.7288, grad_fn=)\n", "73 LOSS DIFF: tensor(5.8114, grad_fn=) tensor(5.5442, grad_fn=)\n", "74 LOSS DIFF: tensor(5.8363, grad_fn=) tensor(5.5099, grad_fn=)\n", "75 LOSS DIFF: tensor(5.8545, grad_fn=) tensor(5.8363, grad_fn=)\n", "76 LOSS DIFF: tensor(5.9820, grad_fn=) tensor(5.8545, grad_fn=)\n", "77 LOSS DIFF: tensor(5.8431, grad_fn=) tensor(5.7144, grad_fn=)\n", "78 LOSS DIFF: tensor(5.9114, grad_fn=) tensor(5.8431, grad_fn=)\n", "79 LOSS DIFF: tensor(5.8020, grad_fn=) tensor(5.4449, grad_fn=)\n", "80 LOSS DIFF: tensor(5.8973, grad_fn=) tensor(5.5983, grad_fn=)\n", "81 LOSS DIFF: tensor(5.6962, grad_fn=) tensor(5.6396, grad_fn=)\n", "82 LOSS DIFF: tensor(5.6928, grad_fn=) tensor(5.5821, grad_fn=)\n", "83 LOSS DIFF: tensor(5.7957, grad_fn=) tensor(5.6928, grad_fn=)\n", "84 LOSS 
DIFF: tensor(5.5650, grad_fn=) tensor(5.5055, grad_fn=)\n", "85 LOSS DIFF: tensor(5.6884, grad_fn=) tensor(5.5650, grad_fn=)\n", "86 LOSS DIFF: tensor(5.7350, grad_fn=) tensor(5.6884, grad_fn=)\n", "87 LOSS DIFF: tensor(5.6654, grad_fn=) tensor(5.5815, grad_fn=)\n", "88 LOSS DIFF: tensor(5.7693, grad_fn=) tensor(5.3977, grad_fn=)\n", "89 LOSS DIFF: tensor(5.5829, grad_fn=) tensor(5.5628, grad_fn=)\n", "90 LOSS DIFF: tensor(5.8661, grad_fn=) tensor(5.5829, grad_fn=)\n", "91 LOSS DIFF: tensor(5.4884, grad_fn=) tensor(5.4546, grad_fn=)\n", "92 LOSS DIFF: tensor(5.6575, grad_fn=) tensor(5.4884, grad_fn=)\n", "93 LOSS DIFF: tensor(5.8113, grad_fn=) tensor(5.6575, grad_fn=)\n", "94 LOSS DIFF: tensor(5.6923, grad_fn=) tensor(5.5077, grad_fn=)\n", "95 LOSS DIFF: tensor(5.7196, grad_fn=) tensor(5.6923, grad_fn=)\n", "96 LOSS DIFF: tensor(5.6317, grad_fn=) tensor(5.6262, grad_fn=)\n", "97 LOSS DIFF: tensor(5.7707, grad_fn=) tensor(5.6099, grad_fn=)\n", "200 tensor(5.4212, grad_fn=)\n", "98 LOSS DIFF: tensor(5.5956, grad_fn=) tensor(5.4212, grad_fn=)\n", "99 LOSS DIFF: tensor(5.7422, grad_fn=) tensor(5.5956, grad_fn=)\n", "100 LOSS DIFF: tensor(5.8166, grad_fn=) tensor(5.7422, grad_fn=)\n", "101 LOSS DIFF: tensor(5.8615, grad_fn=) tensor(5.8166, grad_fn=)\n", "102 LOSS DIFF: tensor(5.9617, grad_fn=) tensor(5.8615, grad_fn=)\n", "103 LOSS DIFF: tensor(5.9847, grad_fn=) tensor(5.9617, grad_fn=)\n", "104 LOSS DIFF: tensor(5.8443, grad_fn=) tensor(5.6014, grad_fn=)\n", "105 LOSS DIFF: tensor(5.7755, grad_fn=) tensor(5.7413, grad_fn=)\n", "106 LOSS DIFF: tensor(6.0574, grad_fn=) tensor(5.6690, grad_fn=)\n", "107 LOSS DIFF: tensor(5.4708, grad_fn=) tensor(5.4460, grad_fn=)\n", "108 LOSS DIFF: tensor(5.6402, grad_fn=) tensor(5.4708, grad_fn=)\n", "109 LOSS DIFF: tensor(5.7016, grad_fn=) tensor(5.6402, grad_fn=)\n", "110 LOSS DIFF: tensor(5.5643, grad_fn=) tensor(5.4158, grad_fn=)\n", "111 LOSS DIFF: tensor(5.6958, grad_fn=) tensor(5.3094, grad_fn=)\n", "112 LOSS DIFF: tensor(5.8296, 
grad_fn=) tensor(5.4617, grad_fn=)\n", "113 LOSS DIFF: tensor(5.6992, grad_fn=) tensor(5.5483, grad_fn=)\n", "114 LOSS DIFF: tensor(5.4980, grad_fn=) tensor(5.4310, grad_fn=)\n", "115 LOSS DIFF: tensor(5.4942, grad_fn=) tensor(5.3832, grad_fn=)\n", "116 LOSS DIFF: tensor(5.6928, grad_fn=) tensor(5.4942, grad_fn=)\n", "117 LOSS DIFF: tensor(5.6334, grad_fn=) tensor(5.5606, grad_fn=)\n", "118 LOSS DIFF: tensor(5.7307, grad_fn=) tensor(5.5210, grad_fn=)\n", "119 LOSS DIFF: tensor(5.5673, grad_fn=) tensor(5.5488, grad_fn=)\n", "120 LOSS DIFF: tensor(6.0060, grad_fn=) tensor(5.4800, grad_fn=)\n", "121 LOSS DIFF: tensor(5.5278, grad_fn=) tensor(5.1856, grad_fn=)\n", "122 LOSS DIFF: tensor(5.5388, grad_fn=) tensor(5.5278, grad_fn=)\n", "123 LOSS DIFF: tensor(5.6835, grad_fn=) tensor(5.5388, grad_fn=)\n", "124 LOSS DIFF: tensor(5.6808, grad_fn=) tensor(5.5417, grad_fn=)\n", "125 LOSS DIFF: tensor(5.8665, grad_fn=) tensor(5.5828, grad_fn=)\n", "126 LOSS DIFF: tensor(5.7710, grad_fn=) tensor(5.5468, grad_fn=)\n", "127 LOSS DIFF: tensor(5.6604, grad_fn=) tensor(5.6368, grad_fn=)\n", "128 LOSS DIFF: tensor(5.5983, grad_fn=) tensor(5.5213, grad_fn=)\n", "129 LOSS DIFF: tensor(5.6943, grad_fn=) tensor(5.4842, grad_fn=)\n", "130 LOSS DIFF: tensor(5.5073, grad_fn=) tensor(5.4259, grad_fn=)\n", "131 LOSS DIFF: tensor(5.5320, grad_fn=) tensor(5.5073, grad_fn=)\n", "132 LOSS DIFF: tensor(5.6082, grad_fn=) tensor(5.4292, grad_fn=)\n", "133 LOSS DIFF: tensor(5.6768, grad_fn=) tensor(5.4724, grad_fn=)\n", "134 LOSS DIFF: tensor(5.5272, grad_fn=) tensor(5.5222, grad_fn=)\n", "135 LOSS DIFF: tensor(5.5190, grad_fn=) tensor(5.5016, grad_fn=)\n", "136 LOSS DIFF: tensor(5.6560, grad_fn=) tensor(5.5190, grad_fn=)\n", "137 LOSS DIFF: tensor(5.6775, grad_fn=) tensor(5.6560, grad_fn=)\n", "138 LOSS DIFF: tensor(5.6694, grad_fn=) tensor(5.6686, grad_fn=)\n", "139 LOSS DIFF: tensor(5.5788, grad_fn=) tensor(5.2768, grad_fn=)\n", "140 LOSS DIFF: tensor(5.3935, grad_fn=) tensor(5.3774, grad_fn=)\n", 
"141 LOSS DIFF: tensor(5.6068, grad_fn=) tensor(5.3935, grad_fn=)\n", "142 LOSS DIFF: tensor(5.6336, grad_fn=) tensor(5.6068, grad_fn=)\n", "143 LOSS DIFF: tensor(5.7687, grad_fn=) tensor(5.5630, grad_fn=)\n", "144 LOSS DIFF: tensor(5.7539, grad_fn=) tensor(5.6827, grad_fn=)\n", "145 LOSS DIFF: tensor(5.7485, grad_fn=) tensor(5.6277, grad_fn=)\n", "300 tensor(5.8304, grad_fn=)\n", "146 LOSS DIFF: tensor(5.8304, grad_fn=) tensor(5.5549, grad_fn=)\n", "147 LOSS DIFF: tensor(5.5819, grad_fn=) tensor(5.4616, grad_fn=)\n", "148 LOSS DIFF: tensor(5.6154, grad_fn=) tensor(5.5819, grad_fn=)\n", "149 LOSS DIFF: tensor(5.7859, grad_fn=) tensor(5.3329, grad_fn=)\n", "150 LOSS DIFF: tensor(5.5458, grad_fn=) tensor(5.5438, grad_fn=)\n", "151 LOSS DIFF: tensor(5.7121, grad_fn=) tensor(5.5458, grad_fn=)\n", "152 LOSS DIFF: tensor(5.6329, grad_fn=) tensor(5.2700, grad_fn=)\n", "153 LOSS DIFF: tensor(5.6739, grad_fn=) tensor(5.3680, grad_fn=)\n", "154 LOSS DIFF: tensor(5.7045, grad_fn=) tensor(5.6739, grad_fn=)\n", "155 LOSS DIFF: tensor(5.5067, grad_fn=) tensor(5.2978, grad_fn=)\n", "156 LOSS DIFF: tensor(5.5102, grad_fn=) tensor(5.5067, grad_fn=)\n", "157 LOSS DIFF: tensor(5.5956, grad_fn=) tensor(5.4116, grad_fn=)\n", "158 LOSS DIFF: tensor(5.5993, grad_fn=) tensor(5.4012, grad_fn=)\n", "159 LOSS DIFF: tensor(5.6150, grad_fn=) tensor(5.3476, grad_fn=)\n", "160 LOSS DIFF: tensor(5.4375, grad_fn=) tensor(5.4351, grad_fn=)\n", "161 LOSS DIFF: tensor(5.7052, grad_fn=) tensor(5.4375, grad_fn=)\n", "162 LOSS DIFF: tensor(5.7059, grad_fn=) tensor(5.5050, grad_fn=)\n", "163 LOSS DIFF: tensor(5.7356, grad_fn=) tensor(5.5716, grad_fn=)\n", "164 LOSS DIFF: tensor(5.7517, grad_fn=) tensor(5.5423, grad_fn=)\n", "165 LOSS DIFF: tensor(5.7358, grad_fn=) tensor(5.4403, grad_fn=)\n", "166 LOSS DIFF: tensor(5.6180, grad_fn=) tensor(5.4437, grad_fn=)\n", "167 LOSS DIFF: tensor(5.5725, grad_fn=) tensor(5.2734, grad_fn=)\n", "168 LOSS DIFF: tensor(5.8849, grad_fn=) tensor(5.3810, grad_fn=)\n", "169 
LOSS DIFF: tensor(5.5414, grad_fn=) tensor(5.5272, grad_fn=)\n", "170 LOSS DIFF: tensor(5.5738, grad_fn=) tensor(5.3898, grad_fn=)\n", "171 LOSS DIFF: tensor(5.7096, grad_fn=) tensor(5.2583, grad_fn=)\n", "172 LOSS DIFF: tensor(5.7039, grad_fn=) tensor(5.6133, grad_fn=)\n", "173 LOSS DIFF: tensor(5.5324, grad_fn=) tensor(5.5068, grad_fn=)\n", "174 LOSS DIFF: tensor(5.5902, grad_fn=) tensor(5.4034, grad_fn=)\n", "175 LOSS DIFF: tensor(5.5912, grad_fn=) tensor(5.5902, grad_fn=)\n", "176 LOSS DIFF: tensor(5.7047, grad_fn=) tensor(5.5912, grad_fn=)\n", "177 LOSS DIFF: tensor(5.6506, grad_fn=) tensor(5.4474, grad_fn=)\n", "178 LOSS DIFF: tensor(5.5547, grad_fn=) tensor(5.5172, grad_fn=)\n", "179 LOSS DIFF: tensor(5.5271, grad_fn=) tensor(5.2485, grad_fn=)\n", "180 LOSS DIFF: tensor(5.5400, grad_fn=) tensor(5.4519, grad_fn=)\n", "181 LOSS DIFF: tensor(5.6702, grad_fn=) tensor(5.5037, grad_fn=)\n", "182 LOSS DIFF: tensor(5.5462, grad_fn=) tensor(5.4319, grad_fn=)\n", "183 LOSS DIFF: tensor(5.5346, grad_fn=) tensor(5.4046, grad_fn=)\n", "184 LOSS DIFF: tensor(5.5779, grad_fn=) tensor(5.5096, grad_fn=)\n", "185 LOSS DIFF: tensor(5.5979, grad_fn=) tensor(5.4310, grad_fn=)\n", "186 LOSS DIFF: tensor(5.4231, grad_fn=) tensor(5.2371, grad_fn=)\n", "187 LOSS DIFF: tensor(5.6120, grad_fn=) tensor(5.4231, grad_fn=)\n", "188 LOSS DIFF: tensor(5.4934, grad_fn=) tensor(5.1333, grad_fn=)\n", "189 LOSS DIFF: tensor(5.5445, grad_fn=) tensor(5.2967, grad_fn=)\n", "190 LOSS DIFF: tensor(5.5506, grad_fn=) tensor(5.5445, grad_fn=)\n", "191 LOSS DIFF: tensor(5.6374, grad_fn=) tensor(5.5506, grad_fn=)\n", "400 tensor(5.5743, grad_fn=)\n", "192 LOSS DIFF: tensor(5.6050, grad_fn=) tensor(5.5743, grad_fn=)\n", "193 LOSS DIFF: tensor(5.5826, grad_fn=) tensor(5.3787, grad_fn=)\n", "194 LOSS DIFF: tensor(5.5223, grad_fn=) tensor(5.3267, grad_fn=)\n", "195 LOSS DIFF: tensor(5.4600, grad_fn=) tensor(5.4485, grad_fn=)\n", "196 LOSS DIFF: tensor(5.5178, grad_fn=) tensor(5.4600, grad_fn=)\n", "197 LOSS 
DIFF: tensor(5.5514, grad_fn=) tensor(5.2249, grad_fn=)\n", "198 LOSS DIFF: tensor(5.5651, grad_fn=) tensor(5.4807, grad_fn=)\n", "199 LOSS DIFF: tensor(5.4252, grad_fn=) tensor(5.1542, grad_fn=)\n", "200 LOSS DIFF: tensor(5.6503, grad_fn=) tensor(5.4252, grad_fn=)\n", "201 LOSS DIFF: tensor(5.5460, grad_fn=) tensor(5.3643, grad_fn=)\n", "202 LOSS DIFF: tensor(5.7145, grad_fn=) tensor(5.4959, grad_fn=)\n", "203 LOSS DIFF: tensor(5.4506, grad_fn=) tensor(5.4382, grad_fn=)\n", "204 LOSS DIFF: tensor(5.5514, grad_fn=) tensor(5.4506, grad_fn=)\n", "205 LOSS DIFF: tensor(5.5680, grad_fn=) tensor(5.5468, grad_fn=)\n", "206 LOSS DIFF: tensor(5.5970, grad_fn=) tensor(5.5680, grad_fn=)\n", "207 LOSS DIFF: tensor(5.6742, grad_fn=) tensor(5.5970, grad_fn=)\n", "208 LOSS DIFF: tensor(5.5306, grad_fn=) tensor(5.2061, grad_fn=)\n", "209 LOSS DIFF: tensor(5.7571, grad_fn=) tensor(5.5306, grad_fn=)\n", "210 LOSS DIFF: tensor(5.6525, grad_fn=) tensor(5.3833, grad_fn=)\n", "211 LOSS DIFF: tensor(5.5354, grad_fn=) tensor(5.3948, grad_fn=)\n", "212 LOSS DIFF: tensor(5.5960, grad_fn=) tensor(5.5354, grad_fn=)\n", "213 LOSS DIFF: tensor(5.7113, grad_fn=) tensor(5.5470, grad_fn=)\n", "214 LOSS DIFF: tensor(5.4059, grad_fn=) tensor(5.3649, grad_fn=)\n", "215 LOSS DIFF: tensor(5.4863, grad_fn=) tensor(5.4004, grad_fn=)\n", "216 LOSS DIFF: tensor(5.5381, grad_fn=) tensor(5.4863, grad_fn=)\n", "217 LOSS DIFF: tensor(5.3652, grad_fn=) tensor(5.3540, grad_fn=)\n", "218 LOSS DIFF: tensor(5.3894, grad_fn=) tensor(5.1646, grad_fn=)\n", "219 LOSS DIFF: tensor(5.6803, grad_fn=) tensor(5.3894, grad_fn=)\n", "220 LOSS DIFF: tensor(5.6113, grad_fn=) tensor(5.4769, grad_fn=)\n", "221 LOSS DIFF: tensor(5.6813, grad_fn=) tensor(5.2015, grad_fn=)\n", "222 LOSS DIFF: tensor(5.3458, grad_fn=) tensor(5.2679, grad_fn=)\n", "223 LOSS DIFF: tensor(5.2445, grad_fn=) tensor(5.1445, grad_fn=)\n", "224 LOSS DIFF: tensor(5.6649, grad_fn=) tensor(5.2441, grad_fn=)\n", "225 LOSS DIFF: tensor(5.8539, grad_fn=) 
tensor(5.6026, grad_fn=)\n", "226 LOSS DIFF: tensor(5.4560, grad_fn=) tensor(5.4208, grad_fn=)\n", "227 LOSS DIFF: tensor(5.5729, grad_fn=) tensor(5.4560, grad_fn=)\n", "228 LOSS DIFF: tensor(5.5996, grad_fn=) tensor(5.3175, grad_fn=)\n", "229 LOSS DIFF: tensor(5.6685, grad_fn=) tensor(5.2451, grad_fn=)\n", "230 LOSS DIFF: tensor(5.5938, grad_fn=) tensor(5.4874, grad_fn=)\n", "231 LOSS DIFF: tensor(5.6228, grad_fn=) tensor(5.2840, grad_fn=)\n", "232 LOSS DIFF: tensor(5.3415, grad_fn=) tensor(5.3339, grad_fn=)\n", "233 LOSS DIFF: tensor(5.3861, grad_fn=) tensor(5.1807, grad_fn=)\n", "234 LOSS DIFF: tensor(5.4093, grad_fn=) tensor(5.3861, grad_fn=)\n", "235 LOSS DIFF: tensor(5.6085, grad_fn=) tensor(5.4093, grad_fn=)\n", "236 LOSS DIFF: tensor(5.3475, grad_fn=) tensor(5.1380, grad_fn=)\n", "237 LOSS DIFF: tensor(5.6542, grad_fn=) tensor(5.3475, grad_fn=)\n", "238 LOSS DIFF: tensor(5.6034, grad_fn=) tensor(5.2396, grad_fn=)\n", "239 LOSS DIFF: tensor(5.5599, grad_fn=) tensor(5.2510, grad_fn=)\n", "240 LOSS DIFF: tensor(5.4534, grad_fn=) tensor(5.3629, grad_fn=)\n", "500 tensor(5.5447, grad_fn=)\n", "241 LOSS DIFF: tensor(5.5447, grad_fn=) tensor(5.4534, grad_fn=)\n", "242 LOSS DIFF: tensor(5.4929, grad_fn=) tensor(5.3445, grad_fn=)\n", "243 LOSS DIFF: tensor(5.4963, grad_fn=) tensor(5.3411, grad_fn=)\n", "244 LOSS DIFF: tensor(5.3306, grad_fn=) tensor(5.1341, grad_fn=)\n", "245 LOSS DIFF: tensor(5.3853, grad_fn=) tensor(5.3306, grad_fn=)\n", "246 LOSS DIFF: tensor(5.5949, grad_fn=) tensor(5.3853, grad_fn=)\n", "247 LOSS DIFF: tensor(5.5202, grad_fn=) tensor(5.2283, grad_fn=)\n", "248 LOSS DIFF: tensor(5.5862, grad_fn=) tensor(5.5202, grad_fn=)\n", "249 LOSS DIFF: tensor(5.5425, grad_fn=) tensor(5.2707, grad_fn=)\n", "250 LOSS DIFF: tensor(5.6233, grad_fn=) tensor(5.2300, grad_fn=)\n", "251 LOSS DIFF: tensor(5.4803, grad_fn=) tensor(5.3777, grad_fn=)\n", "252 LOSS DIFF: tensor(5.6414, grad_fn=) tensor(5.3601, grad_fn=)\n", "253 LOSS DIFF: tensor(5.2371, grad_fn=) 
tensor(5.2364, grad_fn=)\n", "254 LOSS DIFF: tensor(5.3186, grad_fn=) tensor(5.2371, grad_fn=)\n", "255 LOSS DIFF: tensor(5.6731, grad_fn=) tensor(5.3186, grad_fn=)\n", "256 LOSS DIFF: tensor(5.5774, grad_fn=) tensor(5.5003, grad_fn=)\n", "257 LOSS DIFF: tensor(5.6139, grad_fn=) tensor(5.0909, grad_fn=)\n", "258 LOSS DIFF: tensor(5.4975, grad_fn=) tensor(5.3252, grad_fn=)\n", "259 LOSS DIFF: tensor(5.1695, grad_fn=) tensor(5.1682, grad_fn=)\n", "260 LOSS DIFF: tensor(5.4441, grad_fn=) tensor(5.1695, grad_fn=)\n", "261 LOSS DIFF: tensor(5.5408, grad_fn=) tensor(5.4441, grad_fn=)\n", "262 LOSS DIFF: tensor(5.5618, grad_fn=) tensor(5.5408, grad_fn=)\n", "263 LOSS DIFF: tensor(5.5545, grad_fn=) tensor(5.5457, grad_fn=)\n", "264 LOSS DIFF: tensor(5.6082, grad_fn=) tensor(5.5545, grad_fn=)\n", "265 LOSS DIFF: tensor(5.3351, grad_fn=) tensor(5.3258, grad_fn=)\n", "266 LOSS DIFF: tensor(5.5028, grad_fn=) tensor(5.3351, grad_fn=)\n", "267 LOSS DIFF: tensor(5.4873, grad_fn=) tensor(5.3415, grad_fn=)\n", "268 LOSS DIFF: tensor(5.5458, grad_fn=) tensor(5.4873, grad_fn=)\n", "269 LOSS DIFF: tensor(5.3706, grad_fn=) tensor(5.3371, grad_fn=)\n", "270 LOSS DIFF: tensor(5.5207, grad_fn=) tensor(5.3706, grad_fn=)\n", "271 LOSS DIFF: tensor(5.4275, grad_fn=) tensor(5.3686, grad_fn=)\n", "272 LOSS DIFF: tensor(5.5256, grad_fn=) tensor(5.4275, grad_fn=)\n", "273 LOSS DIFF: tensor(5.3044, grad_fn=) tensor(5.1722, grad_fn=)\n", "274 LOSS DIFF: tensor(5.1798, grad_fn=) tensor(5.0866, grad_fn=)\n", "275 LOSS DIFF: tensor(5.5159, grad_fn=) tensor(5.1798, grad_fn=)\n", "276 LOSS DIFF: tensor(5.3755, grad_fn=) tensor(5.3404, grad_fn=)\n", "277 LOSS DIFF: tensor(5.3817, grad_fn=) tensor(5.3755, grad_fn=)\n", "278 LOSS DIFF: tensor(5.5214, grad_fn=) tensor(5.3817, grad_fn=)\n", "279 LOSS DIFF: tensor(5.4231, grad_fn=) tensor(5.4104, grad_fn=)\n", "280 LOSS DIFF: tensor(5.7068, grad_fn=) tensor(5.4231, grad_fn=)\n", "281 LOSS DIFF: tensor(5.6217, grad_fn=) tensor(5.3672, grad_fn=)\n", "282 LOSS 
DIFF: tensor(5.5297, grad_fn=) tensor(5.2592, grad_fn=)\n", "283 LOSS DIFF: tensor(5.4354, grad_fn=) tensor(5.1583, grad_fn=)\n", "284 LOSS DIFF: tensor(5.3529, grad_fn=) tensor(5.3227, grad_fn=)\n", "285 LOSS DIFF: tensor(5.5201, grad_fn=) tensor(5.3529, grad_fn=)\n", "286 LOSS DIFF: tensor(5.3654, grad_fn=) tensor(5.3083, grad_fn=)\n", "287 LOSS DIFF: tensor(5.3719, grad_fn=) tensor(5.3654, grad_fn=)\n", "288 LOSS DIFF: tensor(5.7598, grad_fn=) tensor(5.3256, grad_fn=)\n", "289 LOSS DIFF: tensor(5.4723, grad_fn=) tensor(5.3773, grad_fn=)\n", "600 tensor(5.1854, grad_fn=)\n", "290 LOSS DIFF: tensor(5.2626, grad_fn=) tensor(5.1854, grad_fn=)\n", "291 LOSS DIFF: tensor(5.3265, grad_fn=) tensor(5.2626, grad_fn=)\n", "292 LOSS DIFF: tensor(5.3546, grad_fn=) tensor(5.3265, grad_fn=)\n", "293 LOSS DIFF: tensor(5.4134, grad_fn=) tensor(5.3546, grad_fn=)\n", "294 LOSS DIFF: tensor(5.3317, grad_fn=) tensor(5.3061, grad_fn=)\n", "295 LOSS DIFF: tensor(5.5886, grad_fn=) tensor(5.3317, grad_fn=)\n", "296 LOSS DIFF: tensor(5.2714, grad_fn=) tensor(5.2538, grad_fn=)\n", "297 LOSS DIFF: tensor(5.4437, grad_fn=) tensor(5.2699, grad_fn=)\n", "298 LOSS DIFF: tensor(5.4026, grad_fn=) tensor(5.3539, grad_fn=)\n", "299 LOSS DIFF: tensor(5.5344, grad_fn=) tensor(5.4026, grad_fn=)\n", "300 LOSS DIFF: tensor(5.2724, grad_fn=) tensor(5.1554, grad_fn=)\n", "301 LOSS DIFF: tensor(5.4204, grad_fn=) tensor(5.2614, grad_fn=)\n", "302 LOSS DIFF: tensor(5.5588, grad_fn=) tensor(5.4204, grad_fn=)\n", "303 LOSS DIFF: tensor(5.4821, grad_fn=) tensor(5.2939, grad_fn=)\n", "304 LOSS DIFF: tensor(5.5529, grad_fn=) tensor(5.4821, grad_fn=)\n", "305 LOSS DIFF: tensor(5.5659, grad_fn=) tensor(5.5529, grad_fn=)\n", "306 LOSS DIFF: tensor(5.3128, grad_fn=) tensor(5.1975, grad_fn=)\n", "307 LOSS DIFF: tensor(5.4044, grad_fn=) tensor(5.2514, grad_fn=)\n", "308 LOSS DIFF: tensor(5.5461, grad_fn=) tensor(5.4044, grad_fn=)\n", "309 LOSS DIFF: tensor(5.4835, grad_fn=) tensor(5.4153, grad_fn=)\n", "310 LOSS DIFF: 
tensor(5.4990, grad_fn=) tensor(5.3391, grad_fn=)\n", "311 LOSS DIFF: tensor(5.5111, grad_fn=) tensor(5.4990, grad_fn=)\n", "312 LOSS DIFF: tensor(5.4828, grad_fn=) tensor(5.3784, grad_fn=)\n", "313 LOSS DIFF: tensor(5.4165, grad_fn=) tensor(5.0706, grad_fn=)\n", "314 LOSS DIFF: tensor(5.5142, grad_fn=) tensor(5.4165, grad_fn=)\n", "315 LOSS DIFF: tensor(5.3397, grad_fn=) tensor(5.1207, grad_fn=)\n", "316 LOSS DIFF: tensor(5.6205, grad_fn=) tensor(5.3397, grad_fn=)\n", "317 LOSS DIFF: tensor(5.4190, grad_fn=) tensor(5.3573, grad_fn=)\n", "318 LOSS DIFF: tensor(5.2788, grad_fn=) tensor(5.2728, grad_fn=)\n", "319 LOSS DIFF: tensor(5.3070, grad_fn=) tensor(5.2788, grad_fn=)\n", "320 LOSS DIFF: tensor(5.5223, grad_fn=) tensor(5.3070, grad_fn=)\n", "321 LOSS DIFF: tensor(5.3895, grad_fn=) tensor(5.2946, grad_fn=)\n", "322 LOSS DIFF: tensor(5.6954, grad_fn=) tensor(5.2766, grad_fn=)\n", "323 LOSS DIFF: tensor(5.3206, grad_fn=) tensor(5.2566, grad_fn=)\n", "324 LOSS DIFF: tensor(5.4333, grad_fn=) tensor(5.1247, grad_fn=)\n", "325 LOSS DIFF: tensor(5.5108, grad_fn=) tensor(5.2871, grad_fn=)\n", "326 LOSS DIFF: tensor(5.3659, grad_fn=) tensor(5.2939, grad_fn=)\n", "327 LOSS DIFF: tensor(5.4602, grad_fn=) tensor(5.2214, grad_fn=)\n", "328 LOSS DIFF: tensor(5.1405, grad_fn=) tensor(4.9549, grad_fn=)\n", "329 LOSS DIFF: tensor(5.4136, grad_fn=) tensor(4.9053, grad_fn=)\n", "330 LOSS DIFF: tensor(5.7120, grad_fn=) tensor(5.2294, grad_fn=)\n", "331 LOSS DIFF: tensor(5.4775, grad_fn=) tensor(5.3224, grad_fn=)\n", "332 LOSS DIFF: tensor(5.2917, grad_fn=) tensor(5.1672, grad_fn=)\n", "333 LOSS DIFF: tensor(5.3209, grad_fn=) tensor(5.2917, grad_fn=)\n", "334 LOSS DIFF: tensor(5.3745, grad_fn=) tensor(5.3209, grad_fn=)\n", "335 LOSS DIFF: tensor(5.4889, grad_fn=) tensor(5.3172, grad_fn=)\n", "336 LOSS DIFF: tensor(5.3614, grad_fn=) tensor(5.2868, grad_fn=)\n", "337 LOSS DIFF: tensor(5.4456, grad_fn=) tensor(5.3614, grad_fn=)\n", "338 LOSS DIFF: tensor(5.3012, grad_fn=) tensor(5.2641, 
grad_fn=)\n", "339 LOSS DIFF: tensor(5.5309, grad_fn=) tensor(5.3012, grad_fn=)\n", "340 LOSS DIFF: tensor(5.2953, grad_fn=) tensor(5.1931, grad_fn=)\n", "341 LOSS DIFF: tensor(5.3908, grad_fn=) tensor(5.2953, grad_fn=)\n", "342 LOSS DIFF: tensor(5.5060, grad_fn=) tensor(5.1682, grad_fn=)\n", "700 tensor(5.1404, grad_fn=)\n", "343 LOSS DIFF: tensor(5.3184, grad_fn=) tensor(4.8281, grad_fn=)\n", "344 LOSS DIFF: tensor(5.4549, grad_fn=) tensor(5.3184, grad_fn=)\n", "345 LOSS DIFF: tensor(5.4196, grad_fn=) tensor(5.4127, grad_fn=)\n", "346 LOSS DIFF: tensor(5.4480, grad_fn=) tensor(5.4196, grad_fn=)\n", "347 LOSS DIFF: tensor(5.5778, grad_fn=) tensor(5.3616, grad_fn=)\n", "348 LOSS DIFF: tensor(5.2266, grad_fn=) tensor(5.1052, grad_fn=)\n", "349 LOSS DIFF: tensor(5.4058, grad_fn=) tensor(5.2266, grad_fn=)\n", "350 LOSS DIFF: tensor(5.2772, grad_fn=) tensor(5.1653, grad_fn=)\n", "351 LOSS DIFF: tensor(5.3236, grad_fn=) tensor(5.2772, grad_fn=)\n", "352 LOSS DIFF: tensor(5.3818, grad_fn=) tensor(5.3236, grad_fn=)\n", "353 LOSS DIFF: tensor(5.1957, grad_fn=) tensor(5.1122, grad_fn=)\n", "354 LOSS DIFF: tensor(5.2754, grad_fn=) tensor(5.1957, grad_fn=)\n", "355 LOSS DIFF: tensor(5.4069, grad_fn=) tensor(5.2754, grad_fn=)\n", "356 LOSS DIFF: tensor(5.3361, grad_fn=) tensor(5.1708, grad_fn=)\n", "357 LOSS DIFF: tensor(5.5310, grad_fn=) tensor(5.2320, grad_fn=)\n", "358 LOSS DIFF: tensor(5.5582, grad_fn=) tensor(5.3281, grad_fn=)\n", "359 LOSS DIFF: tensor(5.4403, grad_fn=) tensor(5.0958, grad_fn=)\n", "360 LOSS DIFF: tensor(5.3855, grad_fn=) tensor(5.3547, grad_fn=)\n", "361 LOSS DIFF: tensor(5.4341, grad_fn=) tensor(5.3628, grad_fn=)\n", "362 LOSS DIFF: tensor(5.4064, grad_fn=) tensor(5.3641, grad_fn=)\n", "363 LOSS DIFF: tensor(5.4232, grad_fn=) tensor(5.4064, grad_fn=)\n", "364 LOSS DIFF: tensor(5.4929, grad_fn=) tensor(5.2922, grad_fn=)\n", "365 LOSS DIFF: tensor(5.2788, grad_fn=) tensor(5.1483, grad_fn=)\n", "366 LOSS DIFF: tensor(5.3894, grad_fn=) tensor(5.1464, 
grad_fn=)\n", "367 LOSS DIFF: tensor(5.5410, grad_fn=) tensor(5.3032, grad_fn=)\n", "368 LOSS DIFF: tensor(5.4745, grad_fn=) tensor(5.3954, grad_fn=)\n", "369 LOSS DIFF: tensor(5.4002, grad_fn=) tensor(5.2852, grad_fn=)\n", "370 LOSS DIFF: tensor(5.5121, grad_fn=) tensor(5.1010, grad_fn=)\n", "371 LOSS DIFF: tensor(5.1770, grad_fn=) tensor(4.9924, grad_fn=)\n", "372 LOSS DIFF: tensor(5.2602, grad_fn=) tensor(5.0630, grad_fn=)\n", "373 LOSS DIFF: tensor(5.1854, grad_fn=) tensor(5.1847, grad_fn=)\n", "374 LOSS DIFF: tensor(5.4752, grad_fn=) tensor(5.1854, grad_fn=)\n", "375 LOSS DIFF: tensor(5.3940, grad_fn=) tensor(4.9471, grad_fn=)\n", "376 LOSS DIFF: tensor(5.4444, grad_fn=) tensor(5.3940, grad_fn=)\n", "377 LOSS DIFF: tensor(5.2639, grad_fn=) tensor(5.2434, grad_fn=)\n", "378 LOSS DIFF: tensor(5.5010, grad_fn=) tensor(5.2639, grad_fn=)\n", "379 LOSS DIFF: tensor(5.3871, grad_fn=) tensor(5.2697, grad_fn=)\n", "380 LOSS DIFF: tensor(5.5319, grad_fn=) tensor(5.2951, grad_fn=)\n", "381 LOSS DIFF: tensor(5.2672, grad_fn=) tensor(5.0885, grad_fn=)\n", "382 LOSS DIFF: tensor(5.3262, grad_fn=) tensor(5.2672, grad_fn=)\n", "383 LOSS DIFF: tensor(5.4015, grad_fn=) tensor(5.3262, grad_fn=)\n", "384 LOSS DIFF: tensor(5.2618, grad_fn=) tensor(5.2335, grad_fn=)\n", "385 LOSS DIFF: tensor(5.3040, grad_fn=) tensor(5.2618, grad_fn=)\n", "386 LOSS DIFF: tensor(5.2459, grad_fn=) tensor(5.0806, grad_fn=)\n", "387 LOSS DIFF: tensor(5.3756, grad_fn=) tensor(5.2459, grad_fn=)\n", "388 LOSS DIFF: tensor(5.3504, grad_fn=) tensor(5.1054, grad_fn=)\n", "389 LOSS DIFF: tensor(5.2258, grad_fn=) tensor(5.1519, grad_fn=)\n", "390 LOSS DIFF: tensor(5.2802, grad_fn=) tensor(5.2258, grad_fn=)\n", "391 LOSS DIFF: tensor(5.3461, grad_fn=) tensor(5.2802, grad_fn=)\n", "392 LOSS DIFF: tensor(5.3227, grad_fn=) tensor(5.2572, grad_fn=)\n", "800 tensor(5.1938, grad_fn=)\n", "393 LOSS DIFF: tensor(5.4509, grad_fn=) tensor(5.1938, grad_fn=)\n", "394 LOSS DIFF: tensor(5.1965, grad_fn=) tensor(5.1726, 
grad_fn=)\n", "395 LOSS DIFF: tensor(5.3317, grad_fn=) tensor(5.1965, grad_fn=)\n", "396 LOSS DIFF: tensor(5.2442, grad_fn=) tensor(5.0167, grad_fn=)\n", "397 LOSS DIFF: tensor(5.2592, grad_fn=) tensor(5.2442, grad_fn=)\n", "398 LOSS DIFF: tensor(5.2272, grad_fn=) tensor(5.1738, grad_fn=)\n", "399 LOSS DIFF: tensor(5.2863, grad_fn=) tensor(5.2272, grad_fn=)\n", "400 LOSS DIFF: tensor(5.3143, grad_fn=) tensor(5.2863, grad_fn=)\n", "401 LOSS DIFF: tensor(5.0616, grad_fn=) tensor(5.0013, grad_fn=)\n", "402 LOSS DIFF: tensor(5.4039, grad_fn=) tensor(5.0616, grad_fn=)\n", "403 LOSS DIFF: tensor(5.3913, grad_fn=) tensor(4.9984, grad_fn=)\n", "404 LOSS DIFF: tensor(5.2658, grad_fn=) tensor(5.2179, grad_fn=)\n", "405 LOSS DIFF: tensor(5.2846, grad_fn=) tensor(5.2658, grad_fn=)\n", "406 LOSS DIFF: tensor(5.3590, grad_fn=) tensor(5.2846, grad_fn=)\n", "407 LOSS DIFF: tensor(5.4706, grad_fn=) tensor(5.0496, grad_fn=)\n", "408 LOSS DIFF: tensor(5.6955, grad_fn=) tensor(5.4706, grad_fn=)\n", "409 LOSS DIFF: tensor(5.4540, grad_fn=) tensor(4.9054, grad_fn=)\n", "410 LOSS DIFF: tensor(5.1788, grad_fn=) tensor(5.0048, grad_fn=)\n", "411 LOSS DIFF: tensor(5.2213, grad_fn=) tensor(5.1788, grad_fn=)\n", "412 LOSS DIFF: tensor(5.2282, grad_fn=) tensor(5.2213, grad_fn=)\n", "413 LOSS DIFF: tensor(5.4138, grad_fn=) tensor(5.1972, grad_fn=)\n", "414 LOSS DIFF: tensor(5.3300, grad_fn=) tensor(4.9654, grad_fn=)\n", "415 LOSS DIFF: tensor(5.0692, grad_fn=) tensor(4.9775, grad_fn=)\n", "416 LOSS DIFF: tensor(5.1780, grad_fn=) tensor(5.0692, grad_fn=)\n", "417 LOSS DIFF: tensor(5.4131, grad_fn=) tensor(5.1780, grad_fn=)\n", "418 LOSS DIFF: tensor(5.5625, grad_fn=) tensor(5.4131, grad_fn=)\n", "419 LOSS DIFF: tensor(5.1862, grad_fn=) tensor(5.1502, grad_fn=)\n", "420 LOSS DIFF: tensor(5.2858, grad_fn=) tensor(5.1862, grad_fn=)\n", "421 LOSS DIFF: tensor(5.2607, grad_fn=) tensor(5.2394, grad_fn=)\n", "422 LOSS DIFF: tensor(5.4085, grad_fn=) tensor(5.2607, grad_fn=)\n", "423 LOSS DIFF: 
tensor(5.3268, grad_fn=) tensor(5.3040, grad_fn=)\n", "424 LOSS DIFF: tensor(5.4477, grad_fn=) tensor(5.3268, grad_fn=)\n", "425 LOSS DIFF: tensor(5.3032, grad_fn=) tensor(5.2228, grad_fn=)\n", "426 LOSS DIFF: tensor(5.4339, grad_fn=) tensor(5.2517, grad_fn=)\n", "427 LOSS DIFF: tensor(5.3693, grad_fn=) tensor(5.0677, grad_fn=)\n", "428 LOSS DIFF: tensor(5.2379, grad_fn=) tensor(5.2100, grad_fn=)\n", "429 LOSS DIFF: tensor(5.2541, grad_fn=) tensor(5.2379, grad_fn=)\n", "430 LOSS DIFF: tensor(5.2259, grad_fn=) tensor(5.1291, grad_fn=)\n", "431 LOSS DIFF: tensor(5.2455, grad_fn=) tensor(5.1523, grad_fn=)\n", "432 LOSS DIFF: tensor(5.3854, grad_fn=) tensor(5.2147, grad_fn=)\n", "433 LOSS DIFF: tensor(5.2580, grad_fn=) tensor(5.1674, grad_fn=)\n", "434 LOSS DIFF: tensor(5.3666, grad_fn=) tensor(5.2580, grad_fn=)\n", "435 LOSS DIFF: tensor(5.3990, grad_fn=) tensor(5.2895, grad_fn=)\n", "436 LOSS DIFF: tensor(5.4095, grad_fn=) tensor(5.2050, grad_fn=)\n", "437 LOSS DIFF: tensor(5.3580, grad_fn=) tensor(5.1551, grad_fn=)\n", "438 LOSS DIFF: tensor(5.5038, grad_fn=) tensor(5.2894, grad_fn=)\n", "439 LOSS DIFF: tensor(5.3097, grad_fn=) tensor(5.1047, grad_fn=)\n", "440 LOSS DIFF: tensor(5.4076, grad_fn=) tensor(5.3097, grad_fn=)\n", "441 LOSS DIFF: tensor(5.3938, grad_fn=) tensor(5.2490, grad_fn=)\n", "442 LOSS DIFF: tensor(5.6185, grad_fn=) tensor(5.3873, grad_fn=)\n", "900 tensor(5.2894, grad_fn=)\n", "443 LOSS DIFF: tensor(5.2605, grad_fn=) tensor(5.0513, grad_fn=)\n", "444 LOSS DIFF: tensor(5.5549, grad_fn=) tensor(5.2605, grad_fn=)\n", "445 LOSS DIFF: tensor(5.1775, grad_fn=) tensor(5.1379, grad_fn=)\n", "446 LOSS DIFF: tensor(5.3998, grad_fn=) tensor(5.1775, grad_fn=)\n", "447 LOSS DIFF: tensor(5.4069, grad_fn=) tensor(5.3169, grad_fn=)\n", "448 LOSS DIFF: tensor(5.2558, grad_fn=) tensor(4.9919, grad_fn=)\n", "449 LOSS DIFF: tensor(5.4139, grad_fn=) tensor(5.2558, grad_fn=)\n", "450 LOSS DIFF: tensor(5.4725, grad_fn=) tensor(5.4139, grad_fn=)\n", "451 LOSS DIFF: 
tensor(5.3004, grad_fn=) tensor(5.1489, grad_fn=)\n", "452 LOSS DIFF: tensor(5.3943, grad_fn=) tensor(5.3004, grad_fn=)\n", "453 LOSS DIFF: tensor(5.2652, grad_fn=) tensor(5.0230, grad_fn=)\n", "454 LOSS DIFF: tensor(5.3982, grad_fn=) tensor(5.2229, grad_fn=)\n", "455 LOSS DIFF: tensor(5.4184, grad_fn=) tensor(5.2137, grad_fn=)\n", "456 LOSS DIFF: tensor(5.6858, grad_fn=) tensor(5.1474, grad_fn=)\n", "457 LOSS DIFF: tensor(5.3886, grad_fn=) tensor(5.1649, grad_fn=)\n", "458 LOSS DIFF: tensor(5.3129, grad_fn=) tensor(5.2705, grad_fn=)\n", "459 LOSS DIFF: tensor(5.4430, grad_fn=) tensor(5.0307, grad_fn=)\n", "460 LOSS DIFF: tensor(5.4555, grad_fn=) tensor(5.3132, grad_fn=)\n", "461 LOSS DIFF: tensor(5.2490, grad_fn=) tensor(4.9971, grad_fn=)\n", "462 LOSS DIFF: tensor(5.4743, grad_fn=) tensor(5.1878, grad_fn=)\n", "463 LOSS DIFF: tensor(5.2897, grad_fn=) tensor(4.9685, grad_fn=)\n", "464 LOSS DIFF: tensor(5.3322, grad_fn=) tensor(5.1790, grad_fn=)\n", "465 LOSS DIFF: tensor(5.2013, grad_fn=) tensor(5.0778, grad_fn=)\n", "466 LOSS DIFF: tensor(5.2347, grad_fn=) tensor(5.0395, grad_fn=)\n", "467 LOSS DIFF: tensor(5.2472, grad_fn=) tensor(5.2347, grad_fn=)\n", "468 LOSS DIFF: tensor(5.3672, grad_fn=) tensor(5.1695, grad_fn=)\n", "469 LOSS DIFF: tensor(5.3892, grad_fn=) tensor(5.3672, grad_fn=)\n", "470 LOSS DIFF: tensor(5.1295, grad_fn=) tensor(5.1241, grad_fn=)\n", "471 LOSS DIFF: tensor(5.2935, grad_fn=) tensor(5.1295, grad_fn=)\n", "472 LOSS DIFF: tensor(5.4916, grad_fn=) tensor(5.2935, grad_fn=)\n", "473 LOSS DIFF: tensor(5.2570, grad_fn=) tensor(5.0166, grad_fn=)\n", "474 LOSS DIFF: tensor(5.3124, grad_fn=) tensor(5.1387, grad_fn=)\n", "475 LOSS DIFF: tensor(5.2445, grad_fn=) tensor(5.1581, grad_fn=)\n", "476 LOSS DIFF: tensor(5.4986, grad_fn=) tensor(5.2445, grad_fn=)\n", "477 LOSS DIFF: tensor(5.2073, grad_fn=) tensor(5.1772, grad_fn=)\n", "478 LOSS DIFF: tensor(5.2213, grad_fn=) tensor(5.0682, grad_fn=)\n", "479 LOSS DIFF: tensor(5.2317, grad_fn=) tensor(5.2213, 
grad_fn=)\n", "480 LOSS DIFF: tensor(5.2169, grad_fn=) tensor(4.8229, grad_fn=)\n", "481 LOSS DIFF: tensor(5.4192, grad_fn=) tensor(5.2169, grad_fn=)\n", "482 LOSS DIFF: tensor(5.3481, grad_fn=) tensor(5.1884, grad_fn=)\n", "483 LOSS DIFF: tensor(5.4329, grad_fn=) tensor(5.3481, grad_fn=)\n", "484 LOSS DIFF: tensor(5.1482, grad_fn=) tensor(4.8979, grad_fn=)\n", "485 LOSS DIFF: tensor(5.3562, grad_fn=) tensor(5.1482, grad_fn=)\n", "486 LOSS DIFF: tensor(5.5739, grad_fn=) tensor(5.3562, grad_fn=)\n", "487 LOSS DIFF: tensor(5.0749, grad_fn=) tensor(4.9742, grad_fn=)\n", "488 LOSS DIFF: tensor(5.2301, grad_fn=) tensor(5.0749, grad_fn=)\n", "489 LOSS DIFF: tensor(5.4543, grad_fn=) tensor(5.2301, grad_fn=)\n", "490 LOSS DIFF: tensor(5.2210, grad_fn=) tensor(4.9663, grad_fn=)\n", "491 LOSS DIFF: tensor(5.3469, grad_fn=) tensor(5.2210, grad_fn=)\n", "1000 tensor(5.4116, grad_fn=)\n", "492 LOSS DIFF: tensor(5.4116, grad_fn=) tensor(5.2156, grad_fn=)\n", "493 LOSS DIFF: tensor(5.1600, grad_fn=) tensor(4.9976, grad_fn=)\n", "494 LOSS DIFF: tensor(5.2190, grad_fn=) tensor(5.1102, grad_fn=)\n", "495 LOSS DIFF: tensor(5.1974, grad_fn=) tensor(5.0123, grad_fn=)\n", "496 LOSS DIFF: tensor(5.3085, grad_fn=) tensor(5.1974, grad_fn=)\n", "497 LOSS DIFF: tensor(5.3090, grad_fn=) tensor(5.3085, grad_fn=)\n", "498 LOSS DIFF: tensor(5.3978, grad_fn=) tensor(5.0467, grad_fn=)\n", "499 LOSS DIFF: tensor(5.3369, grad_fn=) tensor(5.0919, grad_fn=)\n", "500 LOSS DIFF: tensor(5.3036, grad_fn=) tensor(5.2151, grad_fn=)\n" ] } ], "source": [ "device = 'cpu'\n", "model = TrigramNNModel(VOCAB_SIZE, EMBED_SIZE).to(device)\n", "data = DataLoader(train_dataset, batch_size=2_000)\n", "optimizer = torch.optim.Adam(model.parameters())\n", "criterion = torch.nn.NLLLoss()\n", "\n", "loss_track = []\n", "last_loss = 1_000\n", "trigger_count = 0\n", "\n", "model.train()\n", "step = 0\n", "for x, y in data:\n", " x[0] = x[0].to(device)\n", " x[1] = x[1].to(device)\n", " y = y.to(device)\n", " 
optimizer.zero_grad()\n", " ypredicted = model(x)\n", " loss = criterion(torch.log(ypredicted), y)\n", " if step % 100 == 0:\n", " print(step, loss)\n", " step += 1\n", " loss.backward()\n", " optimizer.step()\n", "\n", " if loss > last_loss:\n", " trigger_count += 1 \n", " print(trigger_count, 'LOSS DIFF:', loss, last_loss)\n", "\n", " if trigger_count >= 500:\n", " break\n", "\n", " loss_track.append(loss)\n", " last_loss = loss" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [ { "data": { "image/png": "iVBORw0KGgoAAAANSUhEUgAAAhYAAAGdCAYAAABO2DpVAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAABdaElEQVR4nO3dd3hTZfsH8G+SpumgAygUCmXvDbKXoggiL+JWRAVxi+t1veLe4PwpvoobF+rr3ooIyJC9KSjDsvfqnknO74826XNOzknOSU+SNv1+rstLmvnkNM25cz/3cz8WSZIkEBEREZnAGukBEBERUfRgYEFERESmYWBBREREpmFgQURERKZhYEFERESmYWBBREREpmFgQURERKZhYEFERESmiQn3E7rdbhw8eBBJSUmwWCzhfnoiIiIKgiRJyM/PR0ZGBqxW7bxE2AOLgwcPIjMzM9xPS0RERCbYt28fmjdvrnl92AOLpKQkABUDS05ODvfTExERURDy8vKQmZnpPY9rCXtg4Zn+SE5OZmBBRERUywQqY2DxJhEREZmGgQURERGZhoEFERERmYaBBREREZmGgQURERGZhoEFERERmYaBBREREZmGgQURERGZhoEFERERmYaBBREREZmGgQURERGZhoEFERERmSbsm5CFyku/bUNOcTluHdEOjZPjIj0cIiKiOilqMhafrt6HD5fvwbGC0kgPhYiIqM6KmsAi1lbxUsqc7giPhIiIqO6KmsDCEcPAgoiIKNKiJrCI9QQWLgYWREREkRJ9gQUzFkRERBETPYEFayyIiIgiLnoCC06FEBERRVzUBRalzFgQERFFTPQEFpVTIeXMWBAREUVM9AQWLN4kIiKKOAYWREREZJqoCSzYIIuIiCjyoiaw8C43ZY0FERFRxERPYMGMBRERUcRFXWDB5aZERESREz2Bhc0GgFMhREREkRQ9gQWnQoiIiCLOcGCRn5+PO++8Ey1btkR8fDwGDx6M1atXh2JshjCwICIiijzDgcV1112HefPm4aOPPsLmzZsxatQojBw5EgcOHAjF+HRjYEFERBR5hgKL4uJifPXVV3juuecwfPhwtGvXDo899hjatWuHWbNmhWqMuji43JSIiCjiYozc2Ol0wuVyIS4uTnZ5fHw8li5dqnqf0tJSlJaWen/Oy8sLYpiBMWNBREQUeYYyFklJSRg0aBCefPJJHDx4EC6XCx9//DGWL1+OQ4cOqd5n+vTpSElJ8f6XmZlpysCVGFgQERFFnuEai48++giSJKFZs2ZwOByYOXMmJkyYAKtV/aGmTZuG3Nxc73/79u2r9qDVeDpvlnIqhIiIKGIMTYUAQNu2bbFo0SIUFhYiLy8PTZs2xWWXXYY2bdqo3t7hcMDhcFR7oIEwY0FERBR5QfexSExMRNOmTXHq1CnMnTsX48ePN3NchlUFFq6IjoOIiKguM5yxmDt3
LiRJQseOHbFz507ce++96NSpE6655ppQjE83b2DBqRAiIqKIMZyxyM3NxdSpU9GpUydcffXVGDp0KObOnQu73R6K8enm3d2UUyFEREQRYzhjcemll+LSSy8NxViqxcEaCyIioojjXiFERERkmugLLFhjQUREFDHRE1hU1liUuyS43VKER0NERFQ3RU9gEVP1Upi1ICIiigwGFkRERGSa6AksbEJgwQJOIiKiiIiawMJisbCXBRERUYRFTWABcMkpERFRpEVnYMEaCyIiooiIrsCCUyFEREQRFV2BRWXGopSBBRERUUREZWDBjAUREVFkRFdgYWONBRERUSRFV2DBjAUREVFEMbAgIiIi00RVYOHwLjd1RXgkREREdVNUBRZcbkpERBRZ0RVYcCqEiIgooqIysGAfCyIiosiIqsDCzuWmREREERVVgQWnQoiIiCIrugKLyoxFOTMWREREERFVgYXdZgEAOF1ShEdCRERUN0VVYGGzVrwcp5uBBRERUSREVWBRlbHgVAgREVEkRFVgEVOZsShnxoKIiCgioiuwqMxYuFhjQUREFBHRFVhYKwKLcjenQoiIiCIhugKLyuWmXBVCREQUGdEVWFRmLFyssSAiIoqI6AosKmss2CCLiIgoMqIqsLCzjwUREVFERVVgYaucCmFgQUREFBlRFVjEsEEWERFRREVVYGHnqhAiIqKIiqrAgn0siIiIIiu6Agsbl5sSERFFUnQFFp69QjgVQkREFBHRFViweJOIiCiioiuwqMxYcCqEiIgoMqIrsLCxeJOIiCiSoiqw8HbeZI0FERFRRERVYMHOm0RERJEVVYGFncWbREREERVVgUUMO28SERFFVHQFFpVTIWXMWBAREUVEVAUWSXExAIBSpxvlDC6IiIjCLsoCC7v337nF5REcCRERUd0UVYGFzWrxZi1yihhYEBERhVtUBRYAkJpQkbXYuC8nsgMhIiKqg6IusNh3shgAcPcXGyM8EiIioron6gILIiIiipyoCyz6tEiN9BCIiIjqrKgLLJ4Y3w0A0DAxNsIjISIiqnuiLrCIj7UBAPtYEBERRUDUBRaxlW29y9nWm4iIKOyiLrCwe/YLcTNjQUREFG5RF1jEVO5wWu6SIEnMWhAREYVT1AUWnowFwOkQIiKicIu6wCJWCCw4HUJERBReURdYeKZCAKDcyYwFERFROEVfYGGtCizKuOSUiIgorKIusLBYLN7pEE6FEBERhVfUBRaAsDKEUyFERERhZSiwcLlcePjhh9G6dWvEx8ejbdu2ePLJJ2vcsk7PyhBOhRAREYVXjJEbP/vss5g1axY++OADdO3aFWvWrME111yDlJQU3H777aEao2FskkVERBQZhjIWy5Ytw/jx4zF27Fi0atUKF198MUaNGoVVq1aFanxBsVdOhbz42/Yal00hIiKKZoYCi8GDB2P+/PnYvn07AGDjxo1YunQpxowZE5LBBetQbgkAYN7WI9h5tCDCoyEiIqo7DE2F3H///cjLy0OnTp1gs9ngcrnw9NNPY+LEiZr3KS0tRWlpqffnvLy84EcbhMIyV1ifj4iIqC4zlLH4/PPPMWfOHHzyySdYt24dPvjgA7zwwgv44IMPNO8zffp0pKSkeP/LzMys9qCN4PbpRERE4WORDBQhZGZm4v7778fUqVO9lz311FP4+OOP8ffff6veRy1jkZmZidzcXCQnJ1dj6Npa3f+T999zrhuAIe3SQvI8REREdUVeXh5SUlICnr8NZSyKiopgtcrvYrPZ4Paz+sLhcCA5OVn2X6i9O6mv999cckpERBQ+hmosxo0bh6effhotWrRA165dsX79erz00kuYMmVKqMYXlLM6p6N3i1Ss35uDMicDCyIionAxFFi8+uqrePjhh3HLLbfg6NGjyMjIwI033ohHHnkkVOMLmqetN2ssiIiIwsdQYJGUlISXX34ZL7/8coiGY57YmMrum8xYEBERhU1U7hUCMGNBREQUCVEbWFTt
F8LOm0REROEStYEFp0KIiIjCL2oDCzunQoiIiMIuagMLZiyIiIjCL3oDi8odTpmxICIiCp+oDSyqijcZWBAREYVL1AYWnAohIiIKv6gNLFi8SUREFH5RG1gwY0FERBR+URtY2CuLN51skEVERBQ2URtYxFRu7+50M7AgIiIKl+gNLDwZCzenQoiIiMIlegMLT8aCUyFERERhE8WBhSdjwcCCiIgoXKI3sLAxsCAiIgq3qA0sbJ6MBftYEBERhU3UBhaeGotl/5zApW8ux76TRREeERERUfSL3sCicioEAFbtOolHv98SwdEQERHVDdEbWFgtsp9zisoiNBIiIqK6I3oDC5v8pdltUftSiYiIaoyoPdsqMxaevUOIiIgodKL2bGtTBhbMWBAREYVc1J5t7TaL4ueofalEREQ1RtSebW1WRY0Fp0KIiIhCLmrPtsoaC2UGg4iIiMwXvYGFjTUWRERE4Ra1Z9sYxVSIspiTiIiIzBfFgYU8kHBL3IyMiIgo1KI2sFBmKMpdDCyIiIhCLWoDC+XyUu5ySkREFHpRG1j4ZCzczFgQERGFWtQGFsrlpT9tOgQ3gwsiIqKQitrAQm0VyMwFOwAAx/JLUebk1AgREZHZojawUGvh/fbibGQfK0C/p3/Hv15dEoFRERERRbeoDSzUMhY2qwW/ZB0GAGw/UhDuIREREUW9qA0s7DYrbjmjLRonObyXxcfa2CiLiIgohKI2sACA+87phEfHdfX+HG+3wWZhYEFERBQqUR1YAEBJucv77zg7MxZEREShFPWBRbEQWEgS9wwhIiIKpagPLMSMRbnbDasQWEjcP4SIiMhUUR9Y9MpM9f7b5ZZkNRbcP4SIiMhcUR9Y9G3VAPeO7ggAcLok2a6nZdw/hIiIyFRRH1gAwOkdGgEAnIqpEHbfJCIiMledCCxiKvcNcbklWV0FAwsiIiJz1Y3AwlrxMkvL3XAzsCAiIgqZOhJYVGQs8kud+M9Xm72Xs8aCiIjIXHUjsLCp965gxoKIiMhcdSOwsKq/TGYsiIiIzFU3AgtmLIiIiMKibgQWGm28GVgQERGZq24EFjb1l1nOqRAiIiJT1Y3AQiNjsf9UUZhHQkREFN3qdGDx8HdbcCi3GIWlzjCPiIiIKDrVicDC31bpF72+DF0fnYtl/xwP44iIiIiiU50ILCwW7cDiYG4JAGD6z3+HazhERERRq04EFnqIrb6JiIgoOAwsKrkZVxAREVUbA4tKEjMWRERE1cbAohKnQoiIiKqvzgQWD5zbye/1jCuIiIiqr84EFjcMb+v3emYsiIiIqq/OBBaBMKwgIiKqvjoVWLRokKB5HRMWRERE1VenAos51w3QvI5TIURERNVnKLBo1aoVLBaLz39Tp04N1fhMleknY8HAgoiIqPpijNx49erVcLlc3p+zsrJw9tln45JLLjF9YOHmdgNFZU4kxMbgz53H0SotEc1S4yM9LCIiolrFUGDRqFEj2c8zZsxA27Ztcfrpp5s6qFCaPLgV3l+22+fyAznF6PLIXJzRsRH+2HYMALB7xtgwj46IiKh2C7rGoqysDB9//DGmTJnid5Ov0tJS5OXlyf6LpEfHdcErl/fSvN4TVBAREZFxQQcW3377LXJycjB58mS/t5s+fTpSUlK8/2VmZgb7lKawWCwY36sZHDF1qm6ViIgoLII+u7777rsYM2YMMjIy/N5u2rRpyM3N9f63b9++YJ/SVC0bahdyEhERUXAM1Vh47NmzB7///ju+/vrrgLd1OBxwOBzBPE1IcREIERGR+YLKWMyePRuNGzfG2LG1t7iRy0uJiIjMZziwcLvdmD17NiZNmoSYmKASHjUC4woiIiLzGQ4sfv/9d+zduxdTpkwJxXjChhkLIiIi8xlOOYwaNQpSFJyU3bX/JRAREdU4dXbNJTMWRERE5quzgQXjCiIiIvPV2cBCT8bCzfkSIiIiQxhY+OFiWoOIiMiQOhtY6IkZXG4JLreE/5u3HUt3
HA/9oIiIiGq52tuIopr0zHI43RLmbjmIV+bvAMDdTomIiAKpwxkLHVMhLgn7TxWHYTRERETRoc4GFnpqLJxuN6x+toQnIiIiuTobWIzr6X9XVqCixsJWZ48QERGRcXX2tPnAuZ1xTtcmfm/jdEvMWBARERlQZwOLOLsNl/Zr7vc2LgYWREREhtTZwAIA4u3+F8VUTIVUBRZsmEVERORfnQ4s+rdugOEdGmHy4Faq1zvdEkrKXd6fy1zuMI2MiIiodqqzfSwAwGa14MMp/QEAhaVOfLF2v+z6nKIyTP/lb+/PpeVuxNltYR0jERFRbVKnMxai5y/piQfO7SS7bOWuk7KfS10uEBERkTYGFoLGSXGynwtLnbKfy5ycCiEiIvKHgYWgUZJD9vOHy/fIfmZgQURE5B8DC4EysChQZixYvElEROQXAwtBUpz/WlZmLIiIiPxjYCGIi/G/4oOBBRERkX8MLASBlpIysCAiIvKPgYXAEeP/cJQysCAiIvKLgYXAavW/L0hhmdPv9URERHUdAwsDlH0tiIiISI6BhQEFpey8SURE5A8DCwOKKjMWpU4X9p8qivBoiIiIah4GFgYUVNZYXPLGcgx9diE27suJ7ICIiIhqGAYWBnhqLDbtzwUAfLF2XySHQ0REVOMwsDCgUFFjUe6UIjQSIiKimomBhQa1lafcO4SIiMg/BhYK3ZolAwAm9G/hc12Roo9FqZOrRIiIiET+d92qgz69fiCyDuQhPdmBOSv3yq77c+cJlJRXBRNs8U1ERCTHjIVCUpwdg9o2REKsesx11+cbvP9mi28iIiI5BhYa4uzqh+bnzYe9/2ZgQUREJMfAQoO40+m7k/qq3oZTIURERHKssdAQZ7fhptPboqTchVZpiaq3EestiIiIiIGFX/eP6QQAOJhTrHr98YKycA6HiIioxuNUiA7xwrSI6HhBKSSJTbKIiIg8GFjoEB+rHlgAwMlCZi2IiIg8GFjo4IjRPkzHCkrDOBIiIqKajYGFDhaLSn/vSqXlXBlCRETkwcCimkqdbry3dBfW7T0V6aEQERFFHAMLnT6/cRDGdm/qc/l3Gw7giR+34sLXl0VgVERERDULl5vq1L91A7jcEn7afEh2uXI/ESIiorqMGQsDEvysDiEiIiIGFoYECiwkScIBjWZaREREdQEDCwPqJ8b6vX7Gr39jyIwFmLNyT5hGREREVLMwsDCgYYDA4s1F2QCAZ376S3Z5UZkTz8/9G5v354ZsbERERDUBAwsD/PWzEKUlOWQ/z5y/E68t/Afj/rs0FMMiIiKqMRhYGPThlP4Y1SUdSQ7tBTVp9eSBxV+H8kI9LCIiohqBgYVBwzs0wltX9/Vbb5FWT36dVV+ig4iIqNZjYBEkl1t7V9PUeHlgYWNkQUREdQQDiyA53dp7hJwsKsNFs5bhuw0HAABWnbUZREREtR07bwbJX8Zi3tYjAIC1e05hfK9mzFgQEVGdwYxFkJx+Agslq47A4qkft+LSN5ej3MXdUomIqPZiYBGky/pl6r6tnqmQd5buwqpdJ/HHtmPVGRYREVFEMbAI0t1nd9R9W5sQV7gDZDqczFgQEVEtxsAiSLEx+g+dmLEodfoPHFjnSUREtRkDizAQO3aWlLsgSRIKS50RHBEREVFoMLAIsXKXG5JUNf1R4nTh1k/Wo+ujc7HreCEAyK4nIiKqzRhYhNh/vtyEMqFuoqTcjZ82HwIAfLh8NwDl0lXOhRARUe3FwCLEvl5/QDbtUep0ef/tdFUEFOLSVdZYEBFRbcbAIgwWCktIS8qrshee7p2ywKIazyNJEh79LgsfVWZCiIiIwo2BRZiVlFdlLMorMxYulzk1FiuyT+KD5Xvw8HdbTHk8PYrKnLjxozX4et3+sD0nERHVXIYDiwMHDuDKK69Ew4YNER8fj+7du2PNmjWhGFtUEgMLT8+Kcj/7jhiRW1xuyuMYMfvP3Zi75Qju+nxj2J+biIhqHkN7hZw6dQpDhgzB
iBEj8Msvv6BRo0bYsWMH6tevH6rxRZ1fNh/2/tszBSIWb7qDXCHy0m/bMHPBzuoNLgg5RWVhf04iIqq5DAUWzz77LDIzMzF79mzvZa1btzZ9UNHsf2v2ef/tdEmQJAkfLd9TdZmBPUhEkQgqAHmPDiIiIkNTId9//z369u2LSy65BI0bN0bv3r3x9ttvh2psUc/plvBr1mH8d2FVUOBv19SaKJiwYs+JQox5ZYl3W3kiIooehgKL7OxszJo1C+3bt8fcuXNx88034/bbb8cHH3ygeZ/S0lLk5eXJ/osWz1/cAwBw3dDgsjZOtxtbDsqPhxhYPPHDVjz2/ZaADbS0rne63CHZe0SSJKzbewrFZa6gIosHv8nCX4fycMdnG0wfGxERRZahqRC3242+ffvimWeeAQD07t0bWVlZeOONNzBp0iTV+0yfPh2PP/549UdaA13SNxPjezVDbIwV1w1rgw37TuGmj9fpvr9adsIzFZJXUo73/twFALhmSCu0bJio+TjlKqtKXG4JI178AzaLBQvuPkPX1u16fbJqLx78JgsX9G6G9OQ4w/cvYDtzIqKoZShj0bRpU3Tp0kV2WefOnbF3717N+0ybNg25ubne//bt26d529rIsxlZk5Q4xFiNLbIpV8kmeIKN3KKqFR7bjxT4fZwylcc5kleCfSeLsftEkemrRR7+NgsA8M36A2zoRUREMoYyFkOGDMG2bdtkl23fvh0tW7bUvI/D4YDD4QhudLWMzU9WIM5ulTXHAoA9J4qwIvuk7DJPxuKUsNpi+5F8nN0lXfOxy1R2TBWXtZpdtSEmWoKJKxiMEBFFL0Nfsf/9739jxYoVeOaZZ7Bz50588skneOuttzB16tRQja9W8Tfd0DDRN7g6lFvic9nPmw7h8reW4+9D+cLtivHod1lYu+eU6mOrBRZFZb79MsyWGGsLKkhgXEFEFL0MZSz69euHb775BtOmTcMTTzyB1q1b4+WXX8bEiRNDNb5axebnLJscb8eBnOKAj7E8+wQAyDIZH6+omGr6YPke7J4x1uc+aoFFXknV9IfaVIkZ4mNtQd2PS1SJiKKX4c6b//rXv7B582aUlJTgr7/+wvXXXx+KcdVKYolFWr1YzJzQ2/tzcpyhGE63wlInisp9iyHzS6ouu/7DtTheUGrK84nZjzi7DVYhSHDrXCrLsIKIKHpxrxATiRkLR4wNsbaqn5Pi7KY/X25xObo+OhfnvLzE5zoxsPjrUB5emrcdfx/Ow31fbtSVOdEiPm6c3SYLEvQ292LCgogoeoXma3QdJRZvxsZYYbdVxW2hyFis2nVS87r8EvlKkNziclzw2jIUl7uw7UgBvps6JKjnFAMLSZJkUYLT7UZsHYpVn/n5LyTHxeDWM9tHeihERDVG3TkLhIFYvJlWLxYxQmBRLwSBhb/GWQUl8umR+gl2FFeuFNm0P0fzfpv35/rNaIi1G063JMtYqPXTUGNRmQzZdbwQG/Zpj6um2XuiCG8tzsYLv22vdd1SiYhCiYGFicSpkI5NkmC3ilMh5gcW/s5nyiZU9RNivf/Wikf2nijCuP8uxZAZCzQfV8xYOF2SbFpD9+oTlamQES/8gfNf+xMHqzFNE06lzqpVN7U5sFi47Sh2HS+M9DCIKIowsDCROBXSMT0J9hghY+Ewr8ZCkiRs2p+DojLtDpYHFUtZU+IDP/+Wg7kBb3M0v+pxD+QU463F2d6fg91ATVRbTnJiQFVbA4u1e07imtmrMeKFPyI9FCKKIqyxMJEYWDSrHy/7Ym5mxuKXrMO4ZY7/1uG/bz1i+HH1nB/3n5JnFMR+GWIn0fl/HcGh3BJcOdC3eZqJ3cX92nEkH5+u2odbRrRFWj2zm7TJa0uA4JbeRtL6vTmRHgIRRSEGFiYSA4vGSXEoFKYjzAwsPl2l3ULdo1jovAkA7gAbmQGAJPToPJhTjPTkOJ9uontPFGne3ynUWFz7wRoAQJ8W9dElI1l2O2WNRai+8Z87
cwnKXRL2nCjEu5P7mfrY0ZCx0POeICIyilMhJhI/p9OT42QnnHoOcwKLknIXHDHGf22frvLdo0VZ/CmeHwfPWIDbP13vc5+Dudo1EBXf3OWO5Pt2F1VS2zPFDJ5i0o37A0/xGBVM0WpNU0vjISKq4RhYmEiseWiYGAuXcOKODSIYUPOfrzbBEWM87a6sXZj6yTqc9eIi+Z4iikDjp82HfB7nREGZz2UeqidYlYuUfSxCFViIz+dyS9i8P9e09ubit31mLIiIqjCwMFGP5qkY0q4hJg9uBavVgr4tG6BBYixOa1lftvPplCGtfe7bs3mKz2VJjhhcPUheo/DdhoOyKYtg/bTpELKPF8p6Yfg7QeYWl0OSJNnmaEqeqRAxQFEbqzKwEKdQ/D2+XsoAyQLgxd+2Ydx/l+KR77dU+/EBQIxP1DI1tQHjCiIKBQYWJrJZLZhz3UA8dl5XABV7aayYdha+uHEQYmOqzqYD2zTwue/UEe18LnPYbXhifDefy5f/c8K0MXsyKbd9uh53fb5R9TZr95xEz8d/w+M/bMXJQj8Zi8oTrBif6Dnnlgs3uvWT9fh2/QEdI1f3zpJsDJw+H3tOyDM0r//xDwDgk5WB61P0qEkZC7dbQnGZK/ANFSI97nDbeTQfh1U2/iMiczGwCLHYGCusVossY6E2LWK3WX1WLmjVUpwqKle9PBie5/hh40HN27z423YAwPvLdqNUZcMzD0/mQfwGr3bqUhZvKqdQHv42y++Y/Xnqp79wJK8Upz//R9XzmbAK5ZOVe/HSvO3en8WTshnLbKvjyndXovMjv+JYvrH9YOrSVMjRvBKMfGkxBk6fH+mhEEU9rgoJE7G9t1pgUXHyk3/QO+yhj/sWbjsWsP5D2QPDalEv/PMEFOJJVzktUVTmVJkKkQcrZp+o1Tp9GrH1YB4e+GYzgIoOptcMaV2jMhbLKjNYv2YdwlWDWum+X11KWOw4WhDpIRDVGcxYhIld2JDMEWND92bymgqb1eIz5x1MkaZRM+fvwNiZSzWv37gvx2epbIPEWNXbVmUsql6IePJ6e3E2ujwyF0t2HJfdT5mxMLtmwWKpXtbi3JlVm7w9/sNWuN2SLJgIdfGpXlaDDUJktTBRnr3gvndE4cPAIkzEjIUjxoqvbxmMe0d39F5ms1h8vql7pikSYyPXfGn8a3/i8zX7ZZfF2dXH481YaCy/fPrnv3wukyTJ58RsfsYCsu3dv1q73/vcuX6mlQ7mFMtad3uUOF2yjEV+iROz/9yFf45VfCt2uSXkFps3XSU6XlCK8a/9iTkr9/hcZ7VYsGrXSTz901bZah8tLo0AsDbad7IIu3V2bY10EPX5mn2Y/vNfER8HUagwsAgTsb23Z+dTMRNgsVh8TmKe5lSL7xuhupIkUuI1AovrP1yL2z5dj78O5Xkv++/CHZAkCTuO5Kvexy3JV4UAFasVSp0u/LHtaFBFiUoWi0W2j8vdX1QUqb78+w70fOI3LPz7qM99Pl+zD4NnLEDHh371ua64zCVbFfLib9vw+A9bcdaLi/Dt+gO4+I1l6Pn4bzjkp+dHsP5v3nZs3JeDB7/xrUOxWSy49M3leHvJLryzJFvl3nJiMFFbV7YAFcWrw55biDNe+EPWlE7z9iE+n289mKf6nvK478tNeHNxNpbuPK55m7qkoNSJL9fu9xvkU+3CwCJMxA3JYiuzFxbhZGezWlCmKIz01B40rOfAAJWVJHpMHNAiqPv5o5WxcLkl/LDxIK54Z6X3sqwDefhi7X6c/X+LVe/z8Yo9+Grdfp/Ln/npL0yevRp3f7HBe5nbLXm/5UmSpJpN0GJVeae/Mn8HAOCF37b5XPfsL39rPlZxuUv2bX/17lPef9/5vw3eVtlzsw7rHp9e+Ypda8VvveJUyK7j8g6pB3KKkX2sAMfyS7F4+zFIkiRbChzpOpHqKBOivOMFGgWsYeyUeu7MJbjm/dXYdlg9mPbYe1K7i21dMu3rzbjni424
6eO1kR6Kbi/N264reK+rWLwZJuJUSExlvYU4JW6z+n6TShS6dcbagosBGyWZvUeGdsZCy9uLtf8AH9XoK/HB8opU/8+bK07ObreEcf9diji7DV/eNAgXv7Eca/ecwusT++Dc7k39Pr/FIt95FpAXjGbWT/C5T+emyZrfKEvK5VMhFot6T4jYyhqZwlInPlqxB2O6NUHLhol+xxqI+DSlTheufX+N92fx/aQst1DuWPvqhN4oEHeqrXzzud0Sftt6BD0zU9A0Jb5aY/Vn68E8vDJ/O+4Z1RHt05Oq9Vji1JmeQt1wBVHZxwrQsYn2a8vhN3QAVSvSlmebt4w+lPadLMLMyi8l1w5tLfuCSBWYsQgTm1C86ZnvF092am/O1ISq1RgxtuDevDEh2PHL6GoVlwlzyccLSrHlYB7W7jmFVbtOYu2eiizBXZ9vkJ0o1JboWiy+hY2HhH4GTVPjfO6ToXKZR1GZK+BzAlXH/vm52zDjl79x7itLVG+nl0vI2ADAV2sPyIKfPKGuQ9zjxa1yIv11y2HMEXp6eG7z1br9uOnjtTj9uT987lNY6sSK7BOmnJgve3M55m45gqveXVXtxyoXMn1an/EWn03jIi/HhGZwFH7iPky1ONEXUgwswkTMOCRUFmOKBYXKb9QAkBJftfrCHmTGIhTRtNZUiBYzatTE4ORnodV4SblbdqJQO+lZYPHZTC2vpOokrLaqQ3l7UXGZSzYerdU79321CaVOl7ehWaGOehGtFSZztxxGt0fnyl774Tx5s6fHftiqOv5ylROpcjM5z7f+PysDlTKVcUx6bxUuf2sFZv+5K9DL0OR2S1i8/RjyK2shlK8hGOLr09Obo6ZM+zBjUfvVlPdSTcPAIkzi7Da8dGlPPHdRD6QmVAQMFtlUiPxEZrUANwxv4/3ZHmTGIhSUUyHndG3i9/ZmNGIqLa86efytmLv2fJN/7PstqitK1KZCxCmEknLfk2iZU3vMxeUuWRbA36ZwWQdydWebZv+5C10fnYuVKinhGz9aW/G8wrDy/Kw8Ed9PyuJYtTF7PiBj/ASwayqzRJ+t9t3QLhBPpuXbDQdw9XvVz1KIxNentaJIrCeJdEMzj5wQrRyi8KlLTeaMYGARRhf2aY5L+2V6fxY//JWJhU2PjUbrtKr5+GAzFqFY0hanmAoJdOI0449P/Ab9zzH5skKnW8KOowV4f9lu1fvuPVmEE4pW5OI3ZeUW8wdyirF05zHNsSz4+yheW7jT+7O/DI7TJfn93eWVlHvrPR7/YSvKnG7c+b8NPrdTa2ImZl2UxGyYWhZEOT3lDSyE9+TFs5ap7nBrNMRdv/cU+j71O75Ysw+/qhS0bjlYvd1nxdf3mEbNjpi0MfNb5tfr9uPGj9bINiD0R/x71HsfCq3vNhwIuhCTGQt1DCwiyKpYFdK/VcXKj/N6Zvhss652cmrfuF7A5wjF+16ZsQhUx2HGlPYTQppfWfnvcknYpNgavVlqVeFhoLimtDKwkCQJh3KLMWTGAhzJ026P/eHyPVhXufID8P/6XW5JM9t0JK8EfZ6Yh4vfWC67/FBuiU8fivoJ8u6ngP6Mhdqus8oPRM/P4l4wa/acwvcbD2LHkXz869Wq+pAdRwsMfaBO+3ozThSW4d4vN6m+H8fOXIolO6oCuVd+34Gr31vls0pKi/j6lM3XPMRAysyMxV2fb8TcLUfwzpKK6SG1ehaReLUZS6mp+u74bAOe+ukv7DzqfxWPGjPqx6IRA4sIEgsKbRYL3rjqNDx7UXc8c2F3n9uqnZzmXDcgpOMDgMZJDlzat7nssji7DV2aJgOoOOH5S58D5mRN/K35d7ol7Doub9k868o+uh+7uNyFR7/LQutpP2PQ9AWB76CgVo8gjk0tKPxuwwEMeGY+nG4JG/bl+Byjz9fIpxvqJ/h2O/XXhEs2FaIS2fl2O5Ww50Qhftt6xOe2Uz9Zh6wDebLL/qcxHXK8oBSXvrEcX63dj/2niiBJkiKj
pf5e+HZ91V41//f7dizefgx/bNPuBSEfu/z1qb3fXGIdjvDal+08joM51e834smABTrRiGMtVpmCo/AS3yvB1LwECiTrKgYWESR+0bVYLGiQGIvL+rXwyVYAvhmLO85qj8bJ2isXPMTPubO7pGN013SDY7TguYt7YuaE3t7LHHYb3rzqNFxyWnP878ZBAes/qvu3Fygw+SXrkKwGA4Bs07dASsrd3uWtwdh/SvvEVFzukgVeLreEglIn7vhsg+x2yqmaR77bIvtGq9yvBfDtVSESfyNqNRbKD8RDOcWYpxJUAFDdEXTN7pOqt33xt21Ytfsk7v5iI4Y+uxCz/9yNVKEIWeu94AnO8v1M72gpV9TDqAV6atvcr8g+gSveWYnBM4wHk0qeDJOYyVGrmxZjID3dUZWKy1x4beFObNdoOEfGiAF2MHXuoZ4KyS0uxy6dHWVrEgYWEaScCvFHPDn974aB+PfZHXQ9h1i09vbVffHmVX0xrH2agTFW/D9OmOOPt9uQ2SABz1/SEx3SkwKexKubLvS3oypQcRJWnkyMLM/V060xWMVlLllztIJSJ54UpnU8DqgEJ99Ubh+/ZMcxrNzleyLXbAYF+clVrcZC+S3/indW4qmffFuuA+rH/1RRGU4V+i6XPFUoDwye+HGrLCjSqrfxTEf9ItRg6P3MVq56URuvS2Xl0AqNvgkut4TCUidm/fEPvvez66/acwY60Yh/C8FMhcxcsAPPz92GURoN5zycLrfP5n612do9p7B5f/VqcdTI/zb0fWbINiAM8VTIoOnzMeKFP7Czlm2ix8AiggItNxWJWYF6cfr7mqm979+/pj/O6NhI9bGVPPPRYktyo8Wb1Z1L3nwg8AeK8hu/1cDXj1Dt6wEAD3+XJfsgOpZfgj+2+6b496h0YSx3uZF1IDeoXg/lssDC901gpM5A7US9cNsx9H5yHgpLnX43YUuItSFFqA/Rum1Z5Wu978tN3svE7MWpwjLNzJUyI6PMXgHKjIWne6v6mM9/7U90fXQunv31b2/x6rH8Ur+ZM89zBjquYuChLBrWw9O/JdBznPniIpz10qKoSNXnlZTjolnLMO6/S00PlsQ6nh82HsQVb6/ACT8BO6DYYyfEsVtR5Wfn8n9qV/t3BhYRJJ8K8X9beR8MI4GF7weLzWqRfRv01w3Ss/LAbpVnLESBVqwUVDMj8Nyv2u21PbIrV4o0SnLgnav7onl9/V0jQxlY5Jc4sXBbVWHiyJcWqxaGfrnWt6251QL861XtnWf9KXdK2HW8EK/O34FTKo2Y1KZHgnHN+6vR+4l53r4YkqKGoqjMhU+ERlxa89hlTrdPdsDzvvlx00H0fnKebCWOSBmsqLV6V+t1ojwCu48X4lh+qU8g++Xa/ej39O94fq5v63flcwYs3tQZWGgGMTp+bScKS7H3ZBH2nCjSXNK6ds8pXDN7FbKPGfsmXFLuCvvmaTlCFqyw1NyCV/G98/6y3Vj2zwm8OG+73/vIVhiF6VgY3bk40hhYRJA4/RFoKkRsdOWvb4JeJwurTm7iUyvrOzw9HsSshnJ5pRndPfu1qo8J/dX3NRH34tDi2Vl06hltMbJLOuLsNrx11Wm6nruoBlTnr1aZ6lAWTBpR7nLj4lnL8OK87ZiusqusWXPDq3adREGpE28u/gdA4BU4Wse61On2Bocenn1R7v68YtO4F37bjt+2HMaLv23zftOUJAnrFN/iH/t+Cy54/U+UOl04klfi04Ldm1UQLjuaV4IzXvgD/Z7+3Wds91RuWvf6H/9ovq7ScjdyisowX9h8TO1YiCeiMqf6dMVtn67HmS8uUq3B0LN0e+qcdcIY1G9/0axlWLjtGK7/cI3q9Wr2nypCnyfneTfxCxfxmOWXmvslQC0bF+iLhvg7CFdGKFBGu6bhXiERZKTGIjkuBr0yU+GWJDQJULTZp0Wqdzmk1ttenAsX2x1rrXAQazx8Aosge2x4/HDrUHRskoTYGCtapyXg
mZ8DZyiUPCeaWKELZprKPimJsTZdHTDDTe3b6//WGG9E5VHmcnunhzaqzE2b3STK03000KNq1bOUOl3e7Fiz1HgcyCn2ZizED/8bPqrYqCr7WCF2HS/EsA5peHORvAfB739VnNzfXpyNF37bjo7pSbhuWNXuwJ56C/EQ6JluAyqyF52aJEGSgFcX7JCN/+I3lsvmwl2ShL8P52HD3hxc2jcTVqvF50RUVO5CsuLvx7N3xqLtxzC6axPkFpcjOS4GFotFT8JCFoirTYOJPD1hvly7H1ZLRa8dLT9sPISiMhe+XncAL17SM2x7ZIhTqdXNfiqpTc0FelVioBOuBlm1LWPBwCKCLAamQiwWC76+eTAA+Zvs59uH4ZesQ5i39Qj+PpwPiwXomSkEFhrve7HeUnxu5TcoT/vxWJt2jYVd55u+Y3oSzurc2OebX7dmyd4PqasGtsJrC/8JenpCbCSllkmJr6GBhdn81T0A8mJGM+QUl+HnzYcC1tNoXZ91IA8NEitWj3gCi0O5JTj/tT9Vb/9TZWvzrYe0szqeQtBtR/LlGQuXZypEnj3Q454vNqJZajwO5hbL/rZKnW6fAjuXW8I5L1f0/4iPtWF8r2Y+qXN/z+tyS1iRfQKXv7UCE/pnYvqFPQxPQyjfB7lF5T41WrnF5d6szOiuTWSbHwIVm6nN+OVvtGhQtVnfsfxSXavSzCAG3QUl1Q8s3G4JE95egeR4O+4e5VsEH6g+SwwOg8n8ee7v+Rw/kleCOSv34or+LdAkJc7ndnrGVNNwKiSCjBRvAhVvRGXk2iUjGXeP6oi3r+6LC/s0w0+3DcM9ozpi6oi2+PG2oZoR9cuX9UbLhgl4++q+snGIfycjOzfGJ9cPBCAv0Aw2Y/HIuC6475xOPpeL33ziY2348bahfh/HZrVgZOfGqteJgYVat0qj+5wAwBUh2Ho+1AJN75idsfh63QHcMmed334jALx7hKjxNOfqklHRI+WHjQexYV9O0GNKFGqRxG/unpOBWxEY6HUgp9gnYFftbio8QVZlRkRZ21LmdGPb4XxcPGsZlv1zXBY4uNwS/q9yvv/TVft8xqyH+Lp2Hi1Azyd+w+TZqxS3qXqvqE2/XP/hGvy29QjeWbrLe1k4l0CKY/L3/tEr+3ghVu46iXlbj6gGuoE+imXFm0FkLCa+sxIjX6qa6rr+wzWYOX8Hrvtwtex2Yva4mknhsKtlw41egaZCAslskICXLu2FLhnJSHTE4N7RndCtWYpm6vS0lvWx6N4ROLtLuuYf0juT+qFXZioAeYGmb/GmvrErvwlpyUj1X3i56N4zZH01RGJmRW07dKOBxY+3DcUzF/g2LFOj1msilNQCJ49ATZ9qcivisT2amvI4CY6q37W4k+iSyuBHPCfozVhoUQvU1C5TnoimfrIO13+4Bmv2nMIVb6+U3UftpBXot6acahEDnk9XVRTRKruT3vW/qpoJl1vCtsP5uOLtFd5eJcoW+hWPG5r3j1rNiXjyN2NpuLxJmUpgIfx7yY5j+O+CHfKATxb8+T7+24uz8fRPvkvKgYqal+XZJ5B9vBALKutxPF2DlTVVYmDBjAUFJVzzlWrU3rT+godgvvUDQL3KD/qHxnb2ezub1YIPp/THcxf38LluWPs0NK+foLkyRixsVQtklK8rECN7tJhRVGuEWiM1j6P5/pfM1ZSNuNS0aphoyqZ74nn5mHA8ZlVOxYlTIWorSYwI1IRs59ECPPDNZp9mauv35uBQbtVlYiCgDCw27ssJuNxS2dNDDJjEYDIxturvQMwylbncuOGjNVj2zwmfVvMiM1dDiF+qdh0v9AnyzJ4KEY+DWoZG/Dy86t1VeOG37fhDWNklvnTPY+08mu9dGfX0z3/h7SW7sO2wbxMzMYP0l59pPED+u7NYLDiQU1xr9pdhYBFB4p9mdTMWms+h4+9fLaYZ3Lah7Gd/GYsdR/QtWavnqPhGf92wNt7aDS3DOzTCpX0zfS7/7wT/
rbqV3+IHtG4g+1lZHxKIkUZb/jIIoeDvGOYH+ACuyRmLpLgYwwGgGvGEdEytN4FwCKpTKAuot00XT74Ltx3DJyv34qbK4lPZMIRxzN1S1SBMGUOMf+1PbDno/2SkDHDEzJUYqNRP9G0RD1Sc+A4JnVa1Tn7+anQkScKEt1Zg8uxVumpCxGngs/9vMS6atUx2vZixEN/Xf2w7ijs/W2+4HkvWS6RM5XVUDkccu7hkWzkVsnbPKYx8aTGGP79QFkyqBS1ij5WTKg3mRGJgkX2sAENmLMDw5/7we5+agoFFDRGq5USdmyYFvI2YLXl3Ul+c2akxnlVkC+Q1FvK3Te8WqaqPe17PDNnPiY7qnyxSVDbjEilP7u9f0x/3CAVaRrMtnqmVQIGQ2nMHw0h8Ke5+a1RNDSzsNgvi7DZDvVq0rBKW8B7M8W1LLp5oq7O0F1DPWKwXNqrzUKsREO/5b2FaotzlNtxmWjmOm+eswy1z1uKZn/+S/c4bagQWZU43HMKXiDGvLFG9nb8+KIfzSrA8+wT+2HZM1yoOZfZDuUJHDBALhW/sk2evxrcbDmLQ9Pl46TftHiNKYrZOLQPgyViIPVfEvXpcijoYMRASgxy1TLCYGfN3DLcfyZdtvLiwctrEX7fdmoSBRQ0RqpmQ83pm4Knzu/ktiBRPZmd1Tsd7k/shrZ5iqabwN6CcHrjotOaYOaE37h3d0ee5RWIxXahWaSmnI+JjbejePFW4Xn9gkZpgR6PKJaufXj8QvTJT8WllMasasb5DvF3DxFiMEDqd+qO3ELZFgwQ8f3FPXbetTTy/n3gdgZwRe1U6m54KYtMpLcrOr4B60zM1WgWAwewlopwKAYCfNx/GW4uzMUdoVKZVI1HmdMu67GrxV7QonlADFcVKkqQa5EqShKOVG7uJgYVaUXJRmQszF+wMOLXgESir4Bn9kfwS3wsBLN5eNS2iDIpOCpkNtZ0OxONRojH99mvWIYz6v8X4Vcheqf2+wt2ozAgGFjVEqKZCLBYLrhzYEt2apWjfRsfjJAuFicraBbvNivN6ZqC74jlGdklHsrC0TVzRouzQqOWxcV2Q5IjBwDYN8OVNgwLeXi1rIGYbjJywFt83wpvh6JmZim+nDsGgtg3RtXLVgpIY1AwSppIsFuClS3vhqoEtA49fR2CRnuzAonvPkC1Nq67+iimjSPHUVpgxFSJSpp3fWZKt+8Qfalrnh5IAu5++NG+7bL+W1xbuxOMq+9Co0epXU+Zy66pvOZhTIqtbETllUw2+J8+tB/PwZ2Vdh1bi7IXftqH/M/Px1dr9qsWbT/3o+zoD1R+cKCjF5W8tx4fChoMPf7fF53Y7jhbgke+yZE3rxOzC7D93e/+tDIrE95laxkIMZLSWXt/08Tqfy5SB3Ker9qLXE/Owfm/g5oGRwD4WNUQkO6vpqTiOs9uw6N4zYIFFM+WvtrnZRac1l/0heugNticPaY3JQ1oHvmEltROzeJKKN1BjkRynPu2iNfaM1HjVZlRAxZz2k+d3w0crKj7Uzu+VgW83+G5wpaemw2qx+C32tVktuqc6RnRshDtGdkCrhgno9cQ8XfcJJU/GxuyMhZLWhms1yd6ThViRrb6LLADMnL8Dy/85ji9uGozdxwv9thxX0trUqrTcratg+Ykft+KJH7fin2fO9flSVC58K1fLMJw7s2J65bmLe+D8Xs1UH/+1hRUFtg99m4VJg1t5Ly8sdeJQbrFs6auHTSVF4HZLsFotmLNyDx78Jivg6wKADftyfJY5vzRvO/adLMLuE/IVMscVwZW/wCLrQC6OClkQtRUpWsW5yr/naV9vBgDc+sl6/Hn/mQCANxf9gzkr9+LzGweZ+qUjGMxYRJCYyorkaqKH/tUFAHDbme383q5lw0S0aOi7hNPDYrHg8n6+BZdqQpXES1IJBsSTlFaNxbD2aZjQX9/YtZzeoRFuO7Md3rhSWWBa9cud9+/heGhsZ4zX+EDV86HuLxC8elBL
Q/UTNqsVvTJTkZqgPucebkZqWqKdp3eFP6t3n8LsP3dh6ie+33KDUeZyGaoVKipz4mh+iWzPEXFli78swn1fbtKcDvAoLnfJHq+wzIVyp/r7W9kQ79X5O9DnqXnIPlagO6jQ8tehPDzx41ZZtgOoqGERiYGFmGXYvD8X/3p1Kaa8X9VCXW0aRiuTJGaBxL9vsSh5+i9/Y+/JIm/vk0hiYFFDRHK5aa/MVGx76hzcPapj4BsHoHwdl/eraC6lXGUSKo1U2njLpkI0Agub1YLpF/bQNSUlfqyl1YuVXX73qI44p5t2H4b26Um4blgbzQ/v/q2Cn5Lo1iwZD/+ri6xmJrOB/54gNa3xTkyIpkKi2eM/bA24WkSLsiapzOnWNR3nUVLuRv+n5+PMFxd5ayLEk2OgTqx6lo+WOeWBSplL/TGfUeyJ8+K87cgpKseZLy4K+BxmEQMLMQCYs3KPz23VsjlqO/MC8lVH4vFQ678SqOtuONSwjxWKFCNFjf4o46OOTZKw5qGR+HBKf/kVJqYsrBbg1hHt8Ptdp6ten2CvmvHTChw8l7ZsoJ2R8RAzTYvuHSFcrmOwAcZx9aDAdRintazvc1mP5in44dahsNusuKwya9SucT3sO+m/UVaMWoVZiOip47AHMRVisQBnd0kPelx1Wfv0erKfS51uQxkL8Vv3qwt2otTpkgUTynS/suBw8IwFAZ9DlrEodakvEQWw7J8TAbc8D7UTBeoZi32nfIuH1aZCtDIWYk1uoGZuNaGkk4EFmUrtfJlWz+Gz2kFv8aYe8XYb7hndEe0a11O9Pi626rnFZ50knMQ9mZY+Kidtf8RsiFalvFoySmtH2PhYm+brAIAbT2+DJ8Z39bk8rZ7D+xoeHdcV08Z0wvvX9Au482w4NzeaPLiVz8ohJc94jUyFNE2Ow8uX9dLdIbW60pN9s2Lhcu1Q/fVGorM6qbfAtyhKt0ud+mosPMTlpB+t2IOOD/0qa6yl/FYeTGO2MsXUir/ple06e+qESn5J1Uoj8bWqHdMSAxkL8Rh8rJL9EK1S2Sk53BhYRFBNiCzNprf1rJkrpQI9p5jaFc+jg9pWFZt6Ln7g3M7o0yIVT57fTddzi1M/Wi9JbXRaGQurxeJ3Kd+0MZ1V6yHEh4uz23Dj6W3RvH4Cnr/Et3upqLpb3v965zDdt7VZLbj59LZ+b+P5tlxfo+ZDbdnulYNaItERo7u+pza7blhrwyvIXp/YB+9O7qd6nfJPp8zpNvT4YuMoNbd9uh4v/14156+3/kfslfP1ugPefxeWulS/6XvsOOrb7TKcxLFd+Poy3Otni3n1jIX6axOzQIGKdA/kFGNjNfbYMQMDCzJVRHra69gZ1rMc9vQOVd/cxJOqZ9gNEmPx9S1D/C4N1TzvG4iW/E1BjO7aRPfjeGjV6DRLrZraUUtxV/f31alJMv564hy8cEngnho2i+8mekqe38m53avqVJ69qDvuHd0RD43tjA7p8oZvE/q3wPXD2gAIbfYl3m7DqxN64383DAxZDxY9Euwxhtudp/vZhVT5vilzunUtP/fI0dEL5OXfK7aXX737JB76Vl8RpVb8UVjm9Nvf41Sheb1JgqEc2xdr90OSJNVMRHG5y2e5qNYS40BLaT31LR6efUgihctNI2hA6wZIibejQ7p26ru2aawzTWzmZ7OeD0LPpmV7hOViNtkHdPVPSlofhqpTIRonB7ck4Y6z2qN943rIPlaI/y7cqeu5tUYvFuc5bFaf+VkxuLp+WGu8vcR3GV8g8bE2dGmq3ttDpOd37kkZd2uWgteu6IOUeDuGCsuYn/v1b9ntx/fK8Ju679+6gTc13CszFYdzS3A4z7cLZyDF5S6Mq2z4pnwdKfF2w22lgxUbY4XdakUJ9BfoGQlEjOzyCgC3zNG/GuUSP3uPKCk3U/MoCpCxiPReGmpjO1ZQqlo7UVLuxgWvy9uXa9ZY+PnjKS5zYdhzC2WXictaI4GBRQQlxMZgzUMj
q52OrkmmDGmN7YfzcXYX/9+6zewaZ2RFTUOho6h4ku3YRH9wp1UfYuQ1af3OE2JjEGe34cI+zfHBst26H0/rEPhsI6+obRO/5T9wbuegAouKxw78O/C3v4SHGCSo7XKqPG7Kk2ac3Sr71nfT6W0wvH0aNu7PxesT++CyN5cHFVj4k5oQvsDCalX/Pfrjb2rDrrjO6XKHZIM6o7vHak0HlrnceHWBdrBdGOnAQqVuYu+JIt2b3GnVWPhzJK/EJyA8nBvZwIJTIRFmt1kjutTUbHF2G16+vLdpW1/rYeTwibs65haV4+tbBuPG09vg1hHtdT+GVvxg5ONY7cO+foLdb+GmP1pTGrKMhcpUiHw6KPj3YawtcLGlnlVwgb5dK4uAlU2RGibKM2Z2mxW3ntkeb1/dF3abVdY19sVLeqKNn/1WTu+g3oZd+ftPNGFfE71sFu0GdVo8027injkAcO/ojj4rjMrdUsAdVIORfdxYUaW/2CZbZRt3j6LSihO4VsYj1IpVAoMjeaW6AwatjIU/avuxqLWYDycGFhQRZvzZXzGgokfGPQb6b4gnT4fdij4t6mPamM6Gljee2bmiTkPZM0NzKkRlokItEPjPOZ10j8HnOTTOxw6hH4TaCUnr22z/Vg1w8WnNdT+/nl1j9Wy1HWivFOVolRmMhvViFdfLH08MAhrWi0X2cfWT1MwJvTG2u1ZwLH8dCbE277FV7o8DAIPa+O/hYrVU7C3jbx8aD5vVohog+uOZdrv1THnw3DjJgdvPkl+2IvuEZvdYoGLqKRhGsm/V4SkmDeYEbQbVHU2dLt3jMZrZAdQDCz2bv4USp0IoIsyYCXlqfDfcNLyt326gal6+rBeW/3NCViBoxL9HdkCbtERZISgANEvV30ZXz0nWSAJBK9sgrohRq0XQCixaNEzA8xf30L2fRpxGYHbd0Nbe9st6vkUGas6k/IBW1qqM6NgYm4QTozIDkiDssKvMbojapCXq3tQqxmbBm1eehlKnC+d0a4o1u0/ioJCKHto+DcuzT2jePzbG6t1b5pyuTWSbTylZgspYaLw3Yqw++/74W6o4eXCroKcw9XQRNcPCbcfwyHdZPg35wlUHs0slUD2UW4I9J3z7WKjRO2UiUmsyVhjhwIIZC6q1rFaL4aACAM7v3QzPXtzD0Hp9UZzdhsv6tfD24/9wSn/cdXYHn9UcnoZQl6u0Cldbdqe8yMjEhOZUiJBJULuNVmDRrnG9gFMjNwxv4/23VqdMT7t4QN9Sw0B7pfgWn8p/h1NHtJNtfKf8HYsZiwb1YvHAuZ3QNSMZKx84S3a7bs1SZDtKTqzMjqmRJGBEp8bejquf3SDfLK95ff/dT8Vg6tUremP1gyP9395gYKH1Ozb6/n/svK41atpW67h+uHyPz0ZeafW0W9Y3DfG+Gkb2cPk1Szuo1JJf6hsw6eloGkoMLIiqaXiHRrj9rPY+H7rvTe6Hj67tj1tH+O7Bovah6FMUauBDXOuWgTIAF/SW71ny1c2DcduZ7XDNkFaa95nQvwXWPDQS08ZUTd3oOUklVZ7we7dI1bxNoMdRFqkpv43Hxlhxq7DnjTJQEY9xw8RY3DC8LX66fZhsSabnsIvfHp8S+poE+tLeomECOgurZJqmBAgshK63dptVtS29SG1qzR+tY2oksPjo2orOuf56rITbsPbqNTBq/B3TX+8Y7nOZkbbmZvpx0yHD93lHpeC6sMwVsToTgIEFUcjUc8RgWPtGqnUDCbExWPvQSGx8dJT3MuVn9mgDbao1ayyEb7dqq1k6NZH3hTitZX3cPaqjZov31mmJmH5hd1mnT6XUBDu6NUvGO1f3BQDMuLA7JvTPxMjOFa/n/Wv646ubB6l24Qy0y2+gqZCK56/6dqo8QYgrRrQ2pIurfO1idsRfIzS1j+9YYVxqQaQYqBitmdCT+RFbw2tnLPQFKBf0buY9iYsrRs5QaVYWSuLrePai7obqPZr46eWRkuC7cWG9uNpT
JaC1T0x+BLMWDCyIIqRhPQdS4qs+1JSni8bJcch6fDTa61gp0r1ZiurlsmJVlWDBaGpbT3FZm7RE/HjbMIysDIwu798C0y/s4V3amhJvx2ktG6hOzQQajrK6Xq3RWAMhsFAGdf56IHh4ClF1F9KpnOfFk2C9uBifDNCorlVBo9GpDaeOZbtdM6oyJnaNZmx6v5WLr0VcMXLLGf53QzZbg8Sq32tSnB0D2zTET7cP1XXfHs1TDT1XoqN2b4KX2SAeBRFcesvAgqimUEkz13P4/+b08+3D8MC5nTBpcKuAD39+72aqG5gZ4a/roUd1MrCBAgtlxkJ12W5iVbCm/FauZ9mfJwAbU1ncq2xgpyxgVMsEieOyW63oqTix2WQBn+/HsDKTJHK69BT+Vj2+TSMzYdcZ0IhjFZ9baz8XPYFwMJqlVmV+PNNqXTPUA2o1300d4nOZ1vstnEuIQ2HJfWfKjle4MbCgiHi1shOm2oZadZXW6cLfvHaXjGTcMLytrvny2Bgrvrp5sGZ2Qw893/irNw8fqHhT/vxq37rFfUaUx+XOkRXLKyf52UXWk7Fo17geVkw7Cz/cJv9W7DMVEiBjYbP6tjIXr1fLWLx/TX88cK768uNyHRkLMRuktSpE7xy8GJiUC/fRWqL91tV9cX6Qy1I9Mhv4nhTFKaXGScYKLl1uCT0zU336xGgdG62APop6GYYUAwuKiHE9M/DXE+fg6kGtIj2UGkPrg96serlGlZXxRpa0LbznDDx/cdVGZnoyFno3mhJ5vv22DLDKR9nITC1lrbZJm0e3ZinIenw0HjtPO6AVay+apMRp1pt4qL1acYrGbrP41I5YrRYkVZ68zlBpxNUkJQ43DFffsC1QxqJDej3ZCVDr5Omvw6bYol2esagKarQyFpn14/Hk+d18MjGJBnrFtGjg+z4QT/at0oytBvMU0yqPhNZqKq3VZkY3gKurGFhQxBhpSlUXaH3Mn1m55XWwy+L+e0VvTBnSGqMq26wb2Q+idVoiLulbtVxWT8wQTGDxxU2D8PrEPrguwLbg3ZunoGdmqvdntcLY1Hg7HDFWxNqsqruk1nPEqNaWdKzc4OzCPs18rhPpCfSsio6mymHaLBb8fMcwPHV+N0w9U7tWYcl9I3wuE5fB3nKGPPj48/4z8f2tQ2UnTK2TYbmfpk1NhPeaeH/xubWWGMfYrEiKs2OVsGz2miGt8MVNgzWfT0ktA5cpBBuBgj2gIpj45Y5hePmyXhjSrqKvhbKBmVbQpRbYAL4rcoyUKE0d4X9n32jCwIKohtA6Yd0zuiOevag7vlWZI9bjXz0y8Mi4Lt6T3V1nV7R2vrSv/s6aaZV7rOhpOR6oF4WH+KHcNSMF53ZvGrDzJgAEenir1YINj4zCukfONlQY+b8bB+K9yX0xZYj/4EZJrWlUPUUmRfnN2Ga1ILNBAq4c2NLvSTJT5QQnFm/ed04n2RRBs9R4xNltsmMrBlGett71HDEY0Fq7I6gYNIiBhbjfi9oXg8eFTJCYobAabOylLMq1WIBJg1vhnK5N8PJlvXQ9xltXnYbOTZNxfu9m3mNw0xlt8caVfby30Qq6xN/JMGETPIulom9NWr1YvDupLxoHWBos8ve38+C5nWU/G93Btqap3RUqRFHggt7NsGj7Mc1vyp6GXGYZ36sZ+rSob6i46383DsSsP/5R7cnhMf3C7njxt22YcWEPzduYQWtraVEw2bDUhFic2SnwEl9lIKF2chrdtQl+3lzV7EgZWFRnu3rlVIhal0WtE+atZ7bH1BHt4HRLfutyxBObbFWIkI1Sq285v1fVe1i8n9VibFmtT8dUuw31HDF446rTdN3/q5sHqQZldpvV28hMOUaP9yb3le1HMuOiHhgyYwGAit/b8A6NsPrBkbBYLLB8o28beKBiibmaz28chKwD8jbqdpsV5S7jXThrCgYWRBH2f5f1gtPl1vVt3SxqH7r+tG1UDy9c0tPvbSb0b4HL+2XqXsIa
7Km1JIi2x2YST+sNEmPxpNCTwmNcjwxsPZSH5vUrjrPyBFaduXrlFEa3ZilYsuO4t2YD8B+4WCyWgN+IxaBDvty06tWr/Z7FQk/xeovF2B4nyr+FeJ2rNP68/0zsOVGI01o20HV7303sKoLLA6d2ey8Tj5XnUHhem5Ff45B2aTivZwaGtGuI/3y1GUBFJrB/6wbYctA3sAB83+cxVktIdp81G6dCiGqAcAYVoRSOls8lKltTR8rah0aiU5Nkn8utVgumjemMqwa29P4su74ah+nJ8RWBjGfO/oVLemLy4Fb49taqqTI9m8J5qG22JmZ8ZMWbflakxNmtmnUXFot6XYRWjYNyO/f4WH2vp1lqPAa3TQt4uwmVbfbvVuz46nmp4u9L7AOi/D0aeb/Xc8Rg5oTesuyj5+7KR9GaNtI6vjUNMxZEFBEjOjXG9F/+RqpK50N/+rVugO82HDR8v1DQe2JRrgqpTgB2ab9MnNGxkbdNdXpynM8ql+HtG2F4h0ay1R1apl/UHUVlTizcdsx7mRhYiCfTEZ0aY/XuU96aG48bh7fBnSM7aGZirBaLbN8aj4RYG/JUOkQqp2kS7Oaeqp4+vztuOaOdSubOd/xivw9lJqi6cbTn7sr3g1bzsvhYG/IDbDB2usoqo3BjYEFEEdEhPQmL7j3D5yQVyOPndUXrtESffU7CpXn9BN07n3poNL8MWmM/LaqBigzYh1P663qs5Dg7Jg1uJQssxAZRYlbh+mFt0Cw1HgMrt4K/amBL/JJ1CNcPb+O3rsVqUT9ZJjpiVAMLZQGw2SvIrJXFs1rE1R9qUyFVP1cvsvDcXRlIaU0b6elX897kftUakxmiI/9KRLVSy4aJPlt3B5KaEIs7R3ZAy4aJIRqVf69P7IMzOzXGlzcNCnzjSoH2QIk05SoMsUeFrIuozYrxvZp5N2178vxuWPnAyIDBodXi2yRM+Twin4yFn8Diwyn9kZpgxxtX6ivs9Ect4SKbCvEpwq3u81U8wPheGbLpj2B3XgZqRq8NQ6N/7LHHKiphhf86dVLvDkdEFI1apyXivcn90LeVvgJBoGZ82Psjdkudf/fp8qmQAEGRntemdYvL+mWqXq4sLvVXWzC8QyOsf/hsnNOtScBxaPFkvzw744ovWQyIjNZYtArQ8M1z70RHDBbec4b3cnuM+uPW8PjUy/BUSNeuXfH7779XPUAMZ1OIiPxR+7Zek7iEwCKzfgI2xeZ4f9YqsDRC6wR87dA2aN84CScLy3D3FxurntNnVYj/qZDqFg0/f3EPTB3RFm0bVfSa0Ho05aFomhKHXccL1W8M4OF/dcG1H6zBDcPbqF4v29NF+Hcw27aP65mBy/qqB2rhZnj0MTExaNKkife/tLTAFbhERHVZTZ8KcQnLSO02C+KFYkkzgiKtvTdsVgtGdGqMBvXkHVKNTIWYIcZmRbvGSZoBSp8WqQCAi0+TN5V77uIeOL1DI3ygUc9yVud0bHxkFKaNUc/sizu2Bto/JpCbTm+Doe1rxvnYcLphx44dyMjIQFxcHAYNGoTp06ejRQvt5j2lpaUoLS31/pyXZ6zoiYiotqvpnQfEjIXFYpHtwVKdjMXD/+qC37cewcSB/hu8KZ+jkaKjZaSXWc6+pj9W7zqJ4YoVF83rJ2gGFR4pKquX3p3UFzMX7MRLl1b1hhGPgbLmRY/WaZGpOVJjaPQDBgzA+++/j19//RWzZs3Crl27MGzYMOTn52veZ/r06UhJSfH+l5lZM1I1RETh4vSzL0dNkBIvP/klaCw3Neraoa3x6Q0DvV0nxV1lJw9u5f23MqOjTOnrbZBlFuVGdinxdozsku43k5BhYC+fszqn47upQ7xTL4D8OGvtECweJk8gMXNCb2x45GzNzp6RYGgkY8aM8f67R48eGDBgAFq2bInPP/8c1157rep9pk2bhrvuusv7c15eHoMLIqpTymp4YDGgdQPcOLyNdz8LcSrEzGmcx8d3w6PjuiL7eAHa
pFWdVMVpgCfHd/U5gRvZk8MMo7qk44oBLdCrearu+/x0+zBsP5KPy95aEdRzihkLPd01v791CHYdL0T3ZilhaUxnRLVCnNTUVHTo0AE7d+7UvI3D4YDDEd43BRFRTRJoq/NIs1gsmCZshJWRWvXtW20vkuqwWi1o1zhJdpnNz8oLALhigHl75ehhtVrwzAXdDd2nfmIsBrRpiGHt07Bkx3H0N7BqCFBu9lb1fpk5oTfu+t8GvHhpT/yy+TD2nSwGACTF2dHDQOATTtUKLAoKCvDPP//gqquuMms8RERRp0mQW95HijgVkH28IOTPJwYTngzJB1P64+t1+/HEed0QV0taWQPAzMt74/uNBzFOsUV7IFqBxXk9M3BO1yaIjbFiSLs0NEpyaC7TrSkM1Vjcc889WLRoEXbv3o1ly5bhggsugM1mw4QJE0I1PiKiWq9DepJsBUBtMHFAC8RYLZjQP/TZghiVjMXpHRrhlct7qxY/1mT1E2MxaXArw79vccrJpZgK8UwNpdVz4Mnzu6Fbs5TqDzSEDGUs9u/fjwkTJuDEiRNo1KgRhg4dihUrVqBRo8j3JiciqskuOa053lycHelh6PbU+d1w/5hOSIoL/YldbMJV3TbZtZVVI2NRGxkKLD777LNQjYOIKKrV9O6bShaLJSxBBSBflZJfUh6W56zJantgwb1CiIjCYFTXipbTRjddqwsyGySga0bFTqw9M1MjO5gaIC7EDcFCreYsfCUiimK9MlMx79/Da10hZ7h8fctg7DlRhA7pSYFvHOXG9WiKtMRY7y6ytQ0DCyKiMGkfxEnTYgE0+iVFFUeMjUFFJbvNindrwPbnweJUCBFRDVa7KjOIGFgQEdVodXWVRF1W23/lDCyIiGqw2n6SobqHgQURUQ32yuW9AQAPje0c4JYULTx7ttRWFkkKb1lQXl4eUlJSkJubi+Tk5HA+NRFRrVRc5kJ8LV+CSIFt3p+LHUfzcWGf5pEeiiq952+uCiEiquEYVNQN3ZunoHvzmt2uWw9OhRAREZFpGFgQERGRaRhYEBERkWkYWBAREZFpGFgQERGRaRhYEBERkWkYWBAREZFpGFgQERGRaRhYEBERkWkYWBAREZFpGFgQERGRaRhYEBERkWkYWBAREZFpwr67qWeX9ry8vHA/NREREQXJc972nMe1hD2wyM/PBwBkZmaG+6mJiIiomvLz85GSor29u0UKFHqYzO124+DBg0hKSoLFYjHtcfPy8pCZmYl9+/YhOTnZtMelKjzGocdjHHo8xqHHYxxakTq+kiQhPz8fGRkZsFq1KynCnrGwWq1o3rx5yB4/OTmZb+QQ4zEOPR7j0OMxDj0e49CKxPH1l6nwYPEmERERmYaBBREREZkmagILh8OBRx99FA6HI9JDiVo8xqHHYxx6PMahx2McWjX9+Ia9eJOIiIiiV9RkLIiIiCjyGFgQERGRaRhYEBERkWkYWBAREZFpoiaweO2119CqVSvExcVhwIABWLVqVaSHVCtMnz4d/fr1Q1JSEho3bozzzz8f27Ztk92mpKQEU6dORcOGDVGvXj1cdNFFOHLkiOw2e/fuxdixY5GQkIDGjRvj3nvvhdPpDOdLqRVmzJgBi8WCO++803sZj685Dhw4gCuvvBINGzZEfHw8unfvjjVr1nivlyQJjzzyCJo2bYr4+HiMHDkSO3bskD3GyZMnMXHiRCQnJyM1NRXXXnstCgoKwv1SahyXy4WHH34YrVu3Rnx8PNq2bYsnn3xStmcEj68xixcvxrhx45CRkQGLxYJvv/1Wdr1Zx3PTpk0YNmwY4uLikJmZieeeey7ULw2QosBnn30mxcbGSu+99560ZcsW6frrr5dSU1OlI0eORHpoNd7o0aOl2bNnS1lZWdKGDRukc889V2rRooVUUFDgvc1NN90kZWZmSvPnz5fWrFkjDRw4UBo8eLD3eqfTKXXr1k0aOXKktH79eunnn3+W0tLSpGnTpkXiJdVYq1at
klq1aiX16NFDuuOOO7yX8/hW38mTJ6WWLVtKkydPllauXCllZ2dLc+fOlXbu3Om9zYwZM6SUlBTp22+/lTZu3Cidd955UuvWraXi4mLvbc455xypZ8+e0ooVK6QlS5ZI7dq1kyZMmBCJl1SjPP3001LDhg2lH3/8Udq1a5f0xRdfSPXq1ZNeeeUV7214fI35+eefpQcffFD6+uuvJQDSN998I7vejOOZm5srpaenSxMnTpSysrKkTz/9VIqPj5fefPPNkL62qAgs+vfvL02dOtX7s8vlkjIyMqTp06dHcFS109GjRyUA0qJFiyRJkqScnBzJbrdLX3zxhfc2f/31lwRAWr58uSRJFX8gVqtVOnz4sPc2s2bNkpKTk6XS0tLwvoAaKj8/X2rfvr00b9486fTTT/cGFjy+5vjPf/4jDR06VPN6t9stNWnSRHr++ee9l+Xk5EgOh0P69NNPJUmSpK1bt0oApNWrV3tv88svv0gWi0U6cOBA6AZfC4wdO1aaMmWK7LILL7xQmjhxoiRJPL7VpQwszDqer7/+ulS/fn3Z58R//vMfqWPHjiF9PbV+KqSsrAxr167FyJEjvZdZrVaMHDkSy5cvj+DIaqfc3FwAQIMGDQAAa9euRXl5uez4durUCS1atPAe3+XLl6N79+5IT0/33mb06NHIy8vDli1bwjj6mmvq1KkYO3as7DgCPL5m+f7779G3b19ccsklaNy4MXr37o23337be/2uXbtw+PBh2XFOSUnBgAEDZMc5NTUVffv29d5m5MiRsFqtWLlyZfheTA00ePBgzJ8/H9u3bwcAbNy4EUuXLsWYMWMA8PiazazjuXz5cgwfPhyxsbHe24wePRrbtm3DqVOnQjb+sG9CZrbjx4/D5XLJPnQBID09HX///XeERlU7ud1u3HnnnRgyZAi6desGADh8+DBiY2ORmpoqu216ejoOHz7svY3a8fdcV9d99tlnWLduHVavXu1zHY+vObKzszFr1izcddddeOCBB7B69WrcfvvtiI2NxaRJk7zHSe04ise5cePGsutjYmLQoEGDOn+c77//fuTl5aFTp06w2WxwuVx4+umnMXHiRADg8TWZWcfz8OHDaN26tc9jeK6rX79+SMZf6wMLMs/UqVORlZWFpUuXRnooUWPfvn244447MG/ePMTFxUV6OFHL7Xajb9++eOaZZwAAvXv3RlZWFt544w1MmjQpwqOr/T7//HPMmTMHn3zyCbp27YoNGzbgzjvvREZGBo8v+aj1UyFpaWmw2Ww+VfRHjhxBkyZNIjSq2ufWW2/Fjz/+iIULF8q2tW/SpAnKysqQk5Mju714fJs0aaJ6/D3X1WVr167F0aNH0adPH8TExCAmJgaLFi3CzJkzERMTg/T0dB5fEzRt2hRdunSRXda5c2fs3bsXQNVx8vc50aRJExw9elR2vdPpxMmTJ+v8cb733ntx//334/LLL0f37t1x1VVX4d///jemT58OgMfXbGYdz0h9dtT6wCI2NhannXYa5s+f773M7XZj/vz5GDRoUARHVjtIkoRbb70V33zzDRYsWOCTNjvttNNgt9tlx3fbtm3Yu3ev9/gOGjQImzdvlr3J582bh+TkZJ8P+7rmrLPOwubNm7Fhwwbvf3379sXEiRO9/+bxrb4hQ4b4LJPevn07WrZsCQBo3bo1mjRpIjvOeXl5WLlypew45+TkYO3atd7bLFiwAG63GwMGDAjDq6i5ioqKYLXKTxc2mw1utxsAj6/ZzDqegwYNwuLFi1FeXu69zbx589CxY8eQTYMAiJ7lpg6HQ3r//felrVu3SjfccIOUmpoqq6IndTfffLOUkpIi/fHHH9KhQ4e8/xUVFXlvc9NNN0ktWrSQFixYIK1Zs0YaNGiQNGjQIO/1nuWQo0aNkjZs2CD9+uuvUqNGjbgcUoO4KkSSeHzNsGrVKikmJkZ6+umnpR07dkhz5syREhISpI8//th7mxkzZkipqanSd999J23a
tEkaP3686vK93r17SytXrpSWLl0qtW/fvs4uhxRNmjRJatasmXe56ddffy2lpaVJ9913n/c2PL7G5OfnS+vXr5fWr18vAZBeeuklaf369dKePXskSTLneObk5Ejp6enSVVddJWVlZUmfffaZlJCQwOWmer366qtSixYtpNjYWKl///7SihUrIj2kWgGA6n+zZ8/23qa4uFi65ZZbpPr160sJCQnSBRdcIB06dEj2OLt375bGjBkjxcfHS2lpadLdd98tlZeXh/nV1A7KwILH1xw//PCD1K1bN8nhcEidOnWS3nrrLdn1brdbevjhh6X09HTJ4XBIZ511lrRt2zbZbU6cOCFNmDBBqlevnpScnCxdc801Un5+fjhfRo2Ul5cn3XHHHVKLFi2kuLg4qU2bNtKDDz4oW8bI42vMwoULVT97J02aJEmSecdz48aN0tChQyWHwyE1a9ZMmjFjRshfG7dNJyIiItPU+hoLIiIiqjkYWBAREZFpGFgQERGRaRhYEBERkWkYWBAREZFpGFgQERGRaRhYEBERkWkYWBAREZFpGFgQERGRaRhYEBERkWkYWBAREZFpGFgQERGRaf4fwV2nXfquU98AAAAASUVORK5CYII=", "text/plain": [ "
" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "plt.plot([t.detach().numpy() for t in loss_track])\n", "plt.show()" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "torch.save(model.state_dict(), f'model_trigram-EMBED_SIZE={EMBED_SIZE}.bin')" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "vocab_unique = set(vocab.get_stoi().keys())" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "C:\\Users\\micha\\AppData\\Local\\Temp\\ipykernel_14016\\2809838665.py:15: UserWarning: Implicit dimension choice for softmax has been deprecated. Change the call to include dim=X as an argument.\n", " x = self.softmax(x)\n" ] } ], "source": [ "output = []\n", "with lzma.open(\"dev-0/in.tsv.xz\", encoding='utf8', mode=\"rt\") as file:\n", " for line in file:\n", " line = line.split(\"\\t\")\n", "\n", " first_word = re.sub(r\"\\\\+n\", \" \", line[-2]).split()[-1]\n", " first_word = re.sub('[^A-Za-z]+', '', first_word)\n", "\n", " second_word = re.sub(r\"\\\\+n\", \" \", line[-1]).split()[0]\n", " second_word = re.sub('[^A-Za-z]+', '', second_word)\n", "\n", " if first_word not in vocab_unique:\n", " word = \"\"\n", " if second_word not in vocab_unique:\n", " word = \"\"\n", "\n", " input_tokens = torch.tensor([vocab.forward([first_word]), vocab.forward([second_word])]).to(device)\n", " out = model(input_tokens)\n", "\n", " top = torch.topk(out[0], 10)\n", " top_indices = top.indices.tolist()\n", " top_probs = top.values.tolist()\n", " unk_bonus = 1 - sum(top_probs)\n", " top_words = vocab.lookup_tokens(top_indices)\n", " top_zipped = list(zip(top_words, top_probs))\n", "\n", " res = \"\"\n", " for w, p in top_zipped:\n", " if w == \"\":\n", " res += f\":{(p + unk_bonus):.4f} \"\n", " else:\n", " res += f\"{w}:{p:.4f} \"\n", " \n", " res = res[:-1]\n", " res += \"\\n\"\n", " 
output.append(res)\n", "\n", "with open(f\"dev-0/out-EMBED_SIZE={EMBED_SIZE}.tsv\", mode=\"w\") as file:\n", " file.writelines(output)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "C:\\Users\\micha\\AppData\\Local\\Temp\\ipykernel_14016\\2809838665.py:15: UserWarning: Implicit dimension choice for softmax has been deprecated. Change the call to include dim=X as an argument.\n", " x = self.softmax(x)\n" ] } ], "source": [ "model.eval()\n", "\n", "output = []\n", "with lzma.open(\"test-A/in.tsv.xz\", encoding='utf8', mode=\"rt\") as file:\n", " for line in file:\n", " line = line.split(\"\\t\")\n", "\n", " first_word = re.sub(r\"\\\\+n\", \" \", line[-2]).split()[-1]\n", " first_word = re.sub('[^A-Za-z]+', '', first_word)\n", "\n", " second_word = re.sub(r\"\\\\+n\", \" \", line[-1]).split()[0]\n", " second_word = re.sub('[^A-Za-z]+', '', second_word)\n", "\n", " if first_word not in vocab_unique:\n", " word = \"\"\n", " if second_word not in vocab_unique:\n", " word = \"\"\n", "\n", " input_tokens = torch.tensor([vocab.forward([first_word]), vocab.forward([second_word])]).to(device)\n", " out = model(input_tokens)\n", "\n", " top = torch.topk(out[0], 10)\n", " top_indices = top.indices.tolist()\n", " top_probs = top.values.tolist()\n", " unk_bonus = 1 - sum(top_probs)\n", " top_words = vocab.lookup_tokens(top_indices)\n", " top_zipped = list(zip(top_words, top_probs))\n", "\n", " res = \"\"\n", " for w, p in top_zipped:\n", " if w == \"\":\n", " res += f\":{(p + unk_bonus):.4f} \"\n", " else:\n", " res += f\"{w}:{p:.4f} \"\n", " \n", " res = res[:-1]\n", " res += \"\\n\"\n", " output.append(res)\n", "\n", "with open(f\"test-A/out-EMBED_SIZE={EMBED_SIZE}.tsv\", mode=\"w\") as file:\n", " file.writelines(output)" ] } ], "metadata": { "kernelspec": { "display_name": "modelowanie-jezyka", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": 
"ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.8.16" }, "orig_nbformat": 4 }, "nbformat": 4, "nbformat_minor": 2 }