{ "cells": [ { "cell_type": "code", "execution_count": 74, "metadata": {}, "outputs": [], "source": [ "import torch\n", "import lzma\n", "from itertools import islice\n", "import regex as re\n", "import sys\n", "from torchtext.vocab import build_vocab_from_iterator\n", "from torch import nn\n", "from torch.utils.data import IterableDataset\n", "import itertools\n", "from torch.utils.data import DataLoader" ] }, { "cell_type": "code", "execution_count": 75, "metadata": {}, "outputs": [], "source": [ "# torch.cuda.is_available()" ] }, { "cell_type": "code", "execution_count": 76, "metadata": {}, "outputs": [], "source": [ "# torch.cuda.device_count()" ] }, { "cell_type": "code", "execution_count": 77, "metadata": {}, "outputs": [], "source": [ "# torch.cuda.current_device()" ] }, { "cell_type": "code", "execution_count": 78, "metadata": {}, "outputs": [], "source": [ "# torch.cuda.device(0)" ] }, { "cell_type": "code", "execution_count": 79, "metadata": {}, "outputs": [], "source": [ "# torch.cuda.get_device_name(0)" ] }, { "cell_type": "code", "execution_count": 80, "metadata": {}, "outputs": [ { "ename": "KeyboardInterrupt", "evalue": "", "output_type": "error", "traceback": [ "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[1;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", "Cell \u001b[1;32mIn[80], line 18\u001b[0m\n\u001b[0;32m 14\u001b[0m \u001b[39myield\u001b[39;00m get_words_from_line(line)\n\u001b[0;32m 16\u001b[0m vocab_size \u001b[39m=\u001b[39m \u001b[39m10_000\u001b[39m\n\u001b[1;32m---> 18\u001b[0m vocab \u001b[39m=\u001b[39m build_vocab_from_iterator(\n\u001b[0;32m 19\u001b[0m get_word_lines_from_file(\u001b[39m\"\u001b[39;49m\u001b[39mtrain/in.tsv.xz\u001b[39;49m\u001b[39m\"\u001b[39;49m),\n\u001b[0;32m 20\u001b[0m max_tokens \u001b[39m=\u001b[39;49m vocab_size,\n\u001b[0;32m 21\u001b[0m specials \u001b[39m=\u001b[39;49m [\u001b[39m'\u001b[39;49m\u001b[39m\u001b[39;49m\u001b[39m'\u001b[39;49m])\n\u001b[0;32m 23\u001b[0m vocab\u001b[39m.\u001b[39mset_default_index(vocab[\u001b[39m'\u001b[39m\u001b[39m\u001b[39m\u001b[39m'\u001b[39m])\n", "File \u001b[1;32mc:\\PROGRAMY\\Anaconda3\\envs\\scweet\\lib\\site-packages\\torchtext\\vocab\\vocab_factory.py:99\u001b[0m, in \u001b[0;36mbuild_vocab_from_iterator\u001b[1;34m(iterator, min_freq, specials, special_first, max_tokens)\u001b[0m\n\u001b[0;32m 97\u001b[0m counter \u001b[39m=\u001b[39m Counter()\n\u001b[0;32m 98\u001b[0m \u001b[39mfor\u001b[39;00m tokens \u001b[39min\u001b[39;00m iterator:\n\u001b[1;32m---> 99\u001b[0m counter\u001b[39m.\u001b[39;49mupdate(tokens)\n\u001b[0;32m 101\u001b[0m specials \u001b[39m=\u001b[39m specials \u001b[39mor\u001b[39;00m []\n\u001b[0;32m 103\u001b[0m \u001b[39m# First sort by descending frequency, then lexicographically\u001b[39;00m\n", "File \u001b[1;32mc:\\PROGRAMY\\Anaconda3\\envs\\scweet\\lib\\collections\\__init__.py:637\u001b[0m, in \u001b[0;36mCounter.update\u001b[1;34m(self, iterable, **kwds)\u001b[0m\n\u001b[0;32m 635\u001b[0m \u001b[39msuper\u001b[39m(Counter, \u001b[39mself\u001b[39m)\u001b[39m.\u001b[39mupdate(iterable) \u001b[39m# fast path when counter is empty\u001b[39;00m\n\u001b[0;32m 636\u001b[0m \u001b[39melse\u001b[39;00m:\n\u001b[1;32m--> 637\u001b[0m _count_elements(\u001b[39mself\u001b[39;49m, iterable)\n\u001b[0;32m 638\u001b[0m \u001b[39mif\u001b[39;00m kwds:\n\u001b[0;32m 639\u001b[0m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39mupdate(kwds)\n", "Cell \u001b[1;32mIn[80], line 4\u001b[0m, in \u001b[0;36mget_words_from_line\u001b[1;34m(line)\u001b[0m\n\u001b[0;32m 2\u001b[0m line \u001b[39m=\u001b[39m line\u001b[39m.\u001b[39mrstrip()\n\u001b[0;32m 3\u001b[0m line \u001b[39m=\u001b[39m re\u001b[39m.\u001b[39msub(\u001b[39mr\u001b[39m\u001b[39m'\u001b[39m\u001b[39m\\\\\u001b[39;00m\u001b[39mn\u001b[39m\u001b[39m'\u001b[39m, \u001b[39m'\u001b[39m\u001b[39m \u001b[39m\u001b[39m'\u001b[39m, line)\n\u001b[1;32m----> 4\u001b[0m line \u001b[39m=\u001b[39m re\u001b[39m.\u001b[39;49msub(\u001b[39mr\u001b[39;49m\u001b[39m'\u001b[39;49m\u001b[39m[^a-zA-Z] \u001b[39;49m\u001b[39m'\u001b[39;49m, \u001b[39m'\u001b[39;49m\u001b[39m \u001b[39;49m\u001b[39m'\u001b[39;49m, line)\n\u001b[0;32m 5\u001b[0m line \u001b[39m=\u001b[39m line\u001b[39m.\u001b[39mlower()\n\u001b[0;32m 6\u001b[0m \u001b[39myield\u001b[39;00m \u001b[39m'\u001b[39m\u001b[39m\u001b[39m\u001b[39m'\u001b[39m\n", "File \u001b[1;32mc:\\PROGRAMY\\Anaconda3\\envs\\scweet\\lib\\site-packages\\regex\\regex.py:278\u001b[0m, in \u001b[0;36msub\u001b[1;34m(pattern, repl, string, count, flags, pos, endpos, concurrent, timeout, ignore_unused, **kwargs)\u001b[0m\n\u001b[0;32m 272\u001b[0m \u001b[39m\u001b[39m\u001b[39m\"\"\"Return the string obtained by replacing the leftmost (or rightmost with a\u001b[39;00m\n\u001b[0;32m 273\u001b[0m \u001b[39mreverse pattern) non-overlapping occurrences of the pattern in string by the\u001b[39;00m\n\u001b[0;32m 274\u001b[0m \u001b[39mreplacement repl. repl can be either a string or a callable; if a string,\u001b[39;00m\n\u001b[0;32m 275\u001b[0m \u001b[39mbackslash escapes in it are processed; if a callable, it's passed the match\u001b[39;00m\n\u001b[0;32m 276\u001b[0m \u001b[39mobject and must return a replacement string to be used.\"\"\"\u001b[39;00m\n\u001b[0;32m 277\u001b[0m pat \u001b[39m=\u001b[39m _compile(pattern, flags, ignore_unused, kwargs, \u001b[39mTrue\u001b[39;00m)\n\u001b[1;32m--> 278\u001b[0m \u001b[39mreturn\u001b[39;00m pat\u001b[39m.\u001b[39;49msub(repl, string, count, pos, endpos, concurrent, timeout)\n", "\u001b[1;31mKeyboardInterrupt\u001b[0m: " ] } ], "source": [ "def get_words_from_line(line):\n", " line = line.rstrip()\n", " line = re.sub(r'\\\\n', ' ', line)\n", " line = re.sub(r'[^a-zA-Z] ', ' ', line)\n", " line = line.lower()\n", " yield ''\n", " for t in line.split():\n", " yield t\n", " yield ''\n", "\n", "def get_word_lines_from_file(file_name):\n", " with lzma.open(file_name, encoding='utf8', mode=\"rt\") as fh:\n", " for line in fh:\n", " yield get_words_from_line(line)\n", "\n", "vocab_size = 10_000\n", "\n", "vocab = build_vocab_from_iterator(\n", " get_word_lines_from_file(\"train/in.tsv.xz\"),\n", " max_tokens = vocab_size,\n", " specials = [''])\n", "\n", "vocab.set_default_index(vocab[''])" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "def look_ahead_iterator(gen):\n", " prev = None\n", " for item in gen:\n", " if prev is not None:\n", " yield (prev, item)\n", " prev = item\n", "\n", "class Bigrams(IterableDataset):\n", " def __init__(self, text_file, vocabulary_size):\n", " self.vocab = vocab\n", " self.vocab.set_default_index(self.vocab[''])\n", " self.vocabulary_size = vocabulary_size\n", " self.text_file = text_file\n", "\n", " def __iter__(self):\n", " return look_ahead_iterator(\n", " (self.vocab[t] for t in itertools.chain.from_iterable(get_word_lines_from_file(self.text_file))))\n", "\n", "train_dataset = Bigrams(\"train/in.tsv.xz\", vocab_size)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "[tensor([ 33, 0, 226, 35, 0, 6421, 6420, 219, 5781, 1]),\n", " tensor([ 0, 226, 35, 0, 6421, 6420, 219, 5781, 1, 113])]" ] }, "execution_count": 67, "metadata": {}, "output_type": "execute_result" } ], "source": [ "next(iter(DataLoader(train_dataset, batch_size=10)))" ] }, { "cell_type": "code", "execution_count": 85, "metadata": {}, "outputs": [], "source": [ "embed_size = 100\n", "\n", "class SimpleBigramNeuralLanguageModel(nn.Module):\n", " def __init__(self, vocabulary_size, embedding_size):\n", " super(SimpleBigramNeuralLanguageModel, self).__init__()\n", " self.model = nn.Sequential(\n", " nn.Embedding(vocabulary_size, embedding_size),\n", " nn.Linear(embedding_size, 1000),\n", " nn.ReLU(),\n", " nn.Linear(1000, 500),\n", " nn.ReLU(),\n", " nn.Linear(500, vocabulary_size),\n", " nn.Softmax()\n", " )\n", "\n", " def forward(self, x):\n", " return self.model(x)\n", "\n", "model = SimpleBigramNeuralLanguageModel(vocab_size, embed_size)\n", "\n", "vocab.set_default_index(vocab[''])\n", "ixs = torch.tensor(vocab.forward(['is']))" ] }, { "cell_type": "code", "execution_count": 86, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "tensor(8.4503e-05, grad_fn=)" ] }, "execution_count": 86, "metadata": {}, "output_type": "execute_result" } ], "source": [ "out = model(ixs)\n", "out[0][vocab['is']]" ] }, { "cell_type": "code", "execution_count": 87, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "0 tensor(9.2249, grad_fn=)\n", "1 LOSS DIFF: tensor(6.9568, grad_fn=) tensor(6.9539, grad_fn=)\n", "2 LOSS DIFF: tensor(6.5283, grad_fn=) tensor(6.3437, grad_fn=)\n", "3 LOSS DIFF: tensor(6.4010, grad_fn=) tensor(6.3773, grad_fn=)\n", "4 LOSS DIFF: tensor(6.4818, grad_fn=) tensor(6.4010, grad_fn=)\n", "5 LOSS DIFF: tensor(6.4520, grad_fn=) tensor(6.3898, grad_fn=)\n", "6 LOSS DIFF: tensor(6.2989, grad_fn=) tensor(6.2184, grad_fn=)\n", "7 LOSS DIFF: tensor(6.3109, grad_fn=) tensor(6.2989, grad_fn=)\n", "8 LOSS DIFF: tensor(6.3028, grad_fn=) tensor(6.2805, grad_fn=)\n", "9 LOSS DIFF: tensor(6.3590, grad_fn=) tensor(6.3028, grad_fn=)\n", "10 LOSS DIFF: tensor(6.1484, grad_fn=) tensor(6.1278, grad_fn=)\n", "11 LOSS DIFF: tensor(6.2458, grad_fn=) tensor(6.0779, grad_fn=)\n", "12 LOSS DIFF: tensor(6.3209, grad_fn=) tensor(6.2458, grad_fn=)\n", "13 LOSS DIFF: tensor(6.2801, grad_fn=) tensor(6.1436, grad_fn=)\n", "14 LOSS DIFF: tensor(6.1245, grad_fn=) tensor(6.0657, grad_fn=)\n", "15 LOSS DIFF: tensor(6.2682, grad_fn=) tensor(6.0906, grad_fn=)\n", "16 LOSS DIFF: tensor(6.0394, grad_fn=) tensor(6.0062, grad_fn=)\n", "17 LOSS DIFF: tensor(6.1070, grad_fn=) tensor(6.0394, grad_fn=)\n", "18 LOSS DIFF: tensor(6.2271, grad_fn=) tensor(6.1070, grad_fn=)\n", "19 LOSS DIFF: tensor(6.0964, grad_fn=) tensor(6.0577, grad_fn=)\n", "20 LOSS DIFF: tensor(6.0909, grad_fn=) tensor(6.0436, grad_fn=)\n", "21 LOSS DIFF: tensor(6.0210, grad_fn=) tensor(6.0016, grad_fn=)\n", "22 LOSS DIFF: tensor(6.0296, grad_fn=) tensor(6.0210, grad_fn=)\n", "23 LOSS DIFF: tensor(6.1812, grad_fn=) tensor(6.0296, grad_fn=)\n", "24 LOSS DIFF: tensor(6.1665, grad_fn=) tensor(6.0736, grad_fn=)\n", "25 LOSS DIFF: tensor(6.0107, grad_fn=) tensor(5.9340, grad_fn=)\n", "26 LOSS DIFF: tensor(5.9806, grad_fn=) tensor(5.9473, grad_fn=)\n", "27 LOSS DIFF: tensor(5.9364, grad_fn=) tensor(5.8515, grad_fn=)\n", "28 LOSS DIFF: tensor(5.9202, grad_fn=) tensor(5.9180, grad_fn=)\n", "29 LOSS DIFF: tensor(6.0357, grad_fn=) tensor(5.8964, grad_fn=)\n", "30 LOSS DIFF: tensor(6.1189, grad_fn=) tensor(5.9309, grad_fn=)\n", "31 LOSS DIFF: tensor(6.0280, grad_fn=) tensor(5.8488, grad_fn=)\n", "32 LOSS DIFF: tensor(6.1555, grad_fn=) tensor(6.0280, grad_fn=)\n", "33 LOSS DIFF: tensor(6.0389, grad_fn=) tensor(5.9000, grad_fn=)\n", "34 LOSS DIFF: tensor(5.8367, grad_fn=) tensor(5.7437, grad_fn=)\n", "35 LOSS DIFF: tensor(5.9835, grad_fn=) tensor(5.8367, grad_fn=)\n", "36 LOSS DIFF: tensor(5.9613, grad_fn=) tensor(5.7643, grad_fn=)\n", "37 LOSS DIFF: tensor(6.0189, grad_fn=) tensor(5.9613, grad_fn=)\n", "38 LOSS DIFF: tensor(5.9064, grad_fn=) tensor(5.8300, grad_fn=)\n", "39 LOSS DIFF: tensor(5.9395, grad_fn=) tensor(5.8984, grad_fn=)\n", "40 LOSS DIFF: tensor(5.9919, grad_fn=) tensor(5.9395, grad_fn=)\n", "41 LOSS DIFF: tensor(5.8834, grad_fn=) tensor(5.8792, grad_fn=)\n", "42 LOSS DIFF: tensor(5.7971, grad_fn=) tensor(5.7641, grad_fn=)\n", "43 LOSS DIFF: tensor(5.8632, grad_fn=) tensor(5.7971, grad_fn=)\n", "44 LOSS DIFF: tensor(5.8988, grad_fn=) tensor(5.8632, grad_fn=)\n", "45 LOSS DIFF: tensor(5.9258, grad_fn=) tensor(5.8670, grad_fn=)\n", "100 tensor(5.8536, grad_fn=)\n", "46 LOSS DIFF: tensor(5.8536, grad_fn=) tensor(5.8226, grad_fn=)\n", "47 LOSS DIFF: tensor(5.8648, grad_fn=) tensor(5.8536, grad_fn=)\n", "48 LOSS DIFF: tensor(6.0083, grad_fn=) tensor(5.8648, grad_fn=)\n", "49 LOSS DIFF: tensor(5.8324, grad_fn=) tensor(5.7953, grad_fn=)\n", "50 LOSS DIFF: tensor(5.9055, grad_fn=) tensor(5.8324, grad_fn=)\n", "51 LOSS DIFF: tensor(5.9507, grad_fn=) tensor(5.7720, grad_fn=)\n", "52 LOSS DIFF: tensor(5.8892, grad_fn=) tensor(5.7376, grad_fn=)\n", "53 LOSS DIFF: tensor(5.8218, grad_fn=) tensor(5.6474, grad_fn=)\n", "54 LOSS DIFF: tensor(5.8381, grad_fn=) tensor(5.8218, grad_fn=)\n", "55 LOSS DIFF: tensor(5.9608, grad_fn=) tensor(5.8381, grad_fn=)\n", "56 LOSS DIFF: tensor(5.9855, grad_fn=) tensor(5.9496, grad_fn=)\n", "57 LOSS DIFF: tensor(5.9235, grad_fn=) tensor(5.7299, grad_fn=)\n", "58 LOSS DIFF: tensor(5.9411, grad_fn=) tensor(5.7029, grad_fn=)\n", "59 LOSS DIFF: tensor(5.8516, grad_fn=) tensor(5.7566, grad_fn=)\n", "60 LOSS DIFF: tensor(5.8243, grad_fn=) tensor(5.6658, grad_fn=)\n", "61 LOSS DIFF: tensor(5.8496, grad_fn=) tensor(5.7968, grad_fn=)\n", "62 LOSS DIFF: tensor(5.7651, grad_fn=) tensor(5.6680, grad_fn=)\n", "63 LOSS DIFF: tensor(5.8133, grad_fn=) tensor(5.7651, grad_fn=)\n", "64 LOSS DIFF: tensor(5.8699, grad_fn=) tensor(5.4926, grad_fn=)\n", "65 LOSS DIFF: tensor(5.7983, grad_fn=) tensor(5.7203, grad_fn=)\n", "66 LOSS DIFF: tensor(5.8621, grad_fn=) tensor(5.4968, grad_fn=)\n", "67 LOSS DIFF: tensor(5.8183, grad_fn=) tensor(5.6879, grad_fn=)\n", "68 LOSS DIFF: tensor(5.7855, grad_fn=) tensor(5.7245, grad_fn=)\n", "69 LOSS DIFF: tensor(5.7728, grad_fn=) tensor(5.6484, grad_fn=)\n", "70 LOSS DIFF: tensor(5.7415, grad_fn=) tensor(5.5859, grad_fn=)\n", "71 LOSS DIFF: tensor(5.7307, grad_fn=) tensor(5.6239, grad_fn=)\n", "72 LOSS DIFF: tensor(5.7754, grad_fn=) tensor(5.6253, grad_fn=)\n", "73 LOSS DIFF: tensor(5.8733, grad_fn=) tensor(5.5662, grad_fn=)\n", "74 LOSS DIFF: tensor(5.7932, grad_fn=) tensor(5.7448, grad_fn=)\n", "75 LOSS DIFF: tensor(5.7643, grad_fn=) tensor(5.6964, grad_fn=)\n", "76 LOSS DIFF: tensor(5.6395, grad_fn=) tensor(5.6045, grad_fn=)\n", "77 LOSS DIFF: tensor(5.7189, grad_fn=) tensor(5.6395, grad_fn=)\n", "78 LOSS DIFF: tensor(5.7524, grad_fn=) tensor(5.5841, grad_fn=)\n", "79 LOSS DIFF: tensor(5.7829, grad_fn=) tensor(5.5593, grad_fn=)\n", "80 LOSS DIFF: tensor(5.8024, grad_fn=) tensor(5.7829, grad_fn=)\n", "81 LOSS DIFF: tensor(5.8275, grad_fn=) tensor(5.7907, grad_fn=)\n", "82 LOSS DIFF: tensor(5.6191, grad_fn=) tensor(5.5317, grad_fn=)\n", "83 LOSS DIFF: tensor(5.7328, grad_fn=) tensor(5.6191, grad_fn=)\n", "84 LOSS DIFF: tensor(5.7513, grad_fn=) tensor(5.6999, grad_fn=)\n", "85 LOSS DIFF: tensor(5.7847, grad_fn=) tensor(5.7513, grad_fn=)\n", "86 LOSS DIFF: tensor(5.7548, grad_fn=) tensor(5.6437, grad_fn=)\n", "87 LOSS DIFF: tensor(5.7529, grad_fn=) tensor(5.7198, grad_fn=)\n", "88 LOSS DIFF: tensor(5.7664, grad_fn=) tensor(5.5831, grad_fn=)\n", "89 LOSS DIFF: tensor(5.7668, grad_fn=) tensor(5.6415, grad_fn=)\n", "90 LOSS DIFF: tensor(5.7174, grad_fn=) tensor(5.6232, grad_fn=)\n", "91 LOSS DIFF: tensor(5.7451, grad_fn=) tensor(5.6730, grad_fn=)\n", "92 LOSS DIFF: tensor(5.7578, grad_fn=) tensor(5.7451, grad_fn=)\n", "93 LOSS DIFF: tensor(5.6858, grad_fn=) tensor(5.4322, grad_fn=)\n", "94 LOSS DIFF: tensor(5.7738, grad_fn=) tensor(5.6858, grad_fn=)\n", "200 tensor(5.7337, grad_fn=)\n", "95 LOSS DIFF: tensor(5.7337, grad_fn=) tensor(5.6356, grad_fn=)\n", "96 LOSS DIFF: tensor(5.6635, grad_fn=) tensor(5.5954, grad_fn=)\n", "97 LOSS DIFF: tensor(5.6635, grad_fn=) tensor(5.6516, grad_fn=)\n", "98 LOSS DIFF: tensor(5.8410, grad_fn=) tensor(5.6141, grad_fn=)\n", "99 LOSS DIFF: tensor(5.7671, grad_fn=) tensor(5.6264, grad_fn=)\n", "100 LOSS DIFF: tensor(5.6642, grad_fn=) tensor(5.6263, grad_fn=)\n", "101 LOSS DIFF: tensor(5.7031, grad_fn=) tensor(5.6022, grad_fn=)\n", "102 LOSS DIFF: tensor(5.7371, grad_fn=) tensor(5.7031, grad_fn=)\n", "103 LOSS DIFF: tensor(5.6638, grad_fn=) tensor(5.6220, grad_fn=)\n", "104 LOSS DIFF: tensor(5.6687, grad_fn=) tensor(5.6638, grad_fn=)\n", "105 LOSS DIFF: tensor(5.7376, grad_fn=) tensor(5.6687, grad_fn=)\n", "106 LOSS DIFF: tensor(5.7511, grad_fn=) tensor(5.7249, grad_fn=)\n", "107 LOSS DIFF: tensor(5.6811, grad_fn=) tensor(5.6714, grad_fn=)\n", "108 LOSS DIFF: tensor(5.7101, grad_fn=) tensor(5.5892, grad_fn=)\n", "109 LOSS DIFF: tensor(5.6188, grad_fn=) tensor(5.5320, grad_fn=)\n", "110 LOSS DIFF: tensor(5.6656, grad_fn=) tensor(5.6188, grad_fn=)\n", "111 LOSS DIFF: tensor(5.6711, grad_fn=) tensor(5.5220, grad_fn=)\n", "112 LOSS DIFF: tensor(5.7719, grad_fn=) tensor(5.6711, grad_fn=)\n", "113 LOSS DIFF: tensor(5.7275, grad_fn=) tensor(5.6023, grad_fn=)\n", "114 LOSS DIFF: tensor(5.7216, grad_fn=) tensor(5.6046, grad_fn=)\n", "115 LOSS DIFF: tensor(5.6189, grad_fn=) tensor(5.5715, grad_fn=)\n", "116 LOSS DIFF: tensor(5.6879, grad_fn=) tensor(5.6189, grad_fn=)\n", "117 LOSS DIFF: tensor(5.7076, grad_fn=) tensor(5.6879, grad_fn=)\n", "118 LOSS DIFF: tensor(5.6123, grad_fn=) tensor(5.5496, grad_fn=)\n", "119 LOSS DIFF: tensor(5.6219, grad_fn=) tensor(5.6123, grad_fn=)\n", "120 LOSS DIFF: tensor(5.6567, grad_fn=) tensor(5.4889, grad_fn=)\n", "121 LOSS DIFF: tensor(5.7262, grad_fn=) tensor(5.6334, grad_fn=)\n", "122 LOSS DIFF: tensor(5.7325, grad_fn=) tensor(5.6450, grad_fn=)\n", "123 LOSS DIFF: tensor(5.7161, grad_fn=) tensor(5.5794, grad_fn=)\n", "124 LOSS DIFF: tensor(5.5623, grad_fn=) tensor(5.5361, grad_fn=)\n", "125 LOSS DIFF: tensor(5.5797, grad_fn=) tensor(5.5623, grad_fn=)\n", "126 LOSS DIFF: tensor(5.6225, grad_fn=) tensor(5.5797, grad_fn=)\n", "127 LOSS DIFF: tensor(5.5912, grad_fn=) tensor(5.5347, grad_fn=)\n", "128 LOSS DIFF: tensor(5.6655, grad_fn=) tensor(5.5912, grad_fn=)\n", "129 LOSS DIFF: tensor(5.6695, grad_fn=) tensor(5.6655, grad_fn=)\n", "130 LOSS DIFF: tensor(5.7027, grad_fn=) tensor(5.6695, grad_fn=)\n", "131 LOSS DIFF: tensor(5.6836, grad_fn=) tensor(5.5821, grad_fn=)\n", "132 LOSS DIFF: tensor(5.5875, grad_fn=) tensor(5.5289, grad_fn=)\n", "133 LOSS DIFF: tensor(5.6111, grad_fn=) tensor(5.4911, grad_fn=)\n", "134 LOSS DIFF: tensor(5.6462, grad_fn=) tensor(5.6111, grad_fn=)\n", "135 LOSS DIFF: tensor(5.4761, grad_fn=) tensor(5.3862, grad_fn=)\n", "136 LOSS DIFF: tensor(5.5751, grad_fn=) tensor(5.4761, grad_fn=)\n", "137 LOSS DIFF: tensor(5.5107, grad_fn=) tensor(5.3580, grad_fn=)\n", "138 LOSS DIFF: tensor(5.5294, grad_fn=) tensor(5.5032, grad_fn=)\n", "139 LOSS DIFF: tensor(5.8044, grad_fn=) tensor(5.5294, grad_fn=)\n", "140 LOSS DIFF: tensor(5.5610, grad_fn=) tensor(5.4624, grad_fn=)\n", "141 LOSS DIFF: tensor(5.6199, grad_fn=) tensor(5.5610, grad_fn=)\n", "142 LOSS DIFF: tensor(5.6073, grad_fn=) tensor(5.5645, grad_fn=)\n", "143 LOSS DIFF: tensor(5.8155, grad_fn=) tensor(5.6073, grad_fn=)\n", "144 LOSS DIFF: tensor(5.6119, grad_fn=) tensor(5.5148, grad_fn=)\n", "145 LOSS DIFF: tensor(5.6557, grad_fn=) tensor(5.5193, grad_fn=)\n", "300 tensor(5.5923, grad_fn=)\n", "146 LOSS DIFF: tensor(5.6352, grad_fn=) tensor(5.5923, grad_fn=)\n", "147 LOSS DIFF: tensor(5.6034, grad_fn=) tensor(5.4999, grad_fn=)\n", "148 LOSS DIFF: tensor(5.6058, grad_fn=) tensor(5.6034, grad_fn=)\n", "149 LOSS DIFF: tensor(5.6262, grad_fn=) tensor(5.5992, grad_fn=)\n", "150 LOSS DIFF: tensor(5.6428, grad_fn=) tensor(5.5092, grad_fn=)\n", "151 LOSS DIFF: tensor(5.6501, grad_fn=) tensor(5.5660, grad_fn=)\n", "152 LOSS DIFF: tensor(5.6203, grad_fn=) tensor(5.5295, grad_fn=)\n", "153 LOSS DIFF: tensor(5.6420, grad_fn=) tensor(5.6203, grad_fn=)\n", "154 LOSS DIFF: tensor(5.7322, grad_fn=) tensor(5.4864, grad_fn=)\n", "155 LOSS DIFF: tensor(5.6117, grad_fn=) tensor(5.4803, grad_fn=)\n", "156 LOSS DIFF: tensor(5.5395, grad_fn=) tensor(5.4970, grad_fn=)\n", "157 LOSS DIFF: tensor(5.6619, grad_fn=) tensor(5.5060, grad_fn=)\n", "158 LOSS DIFF: tensor(5.6368, grad_fn=) tensor(5.5258, grad_fn=)\n", "159 LOSS DIFF: tensor(5.5889, grad_fn=) tensor(5.5490, grad_fn=)\n", "160 LOSS DIFF: tensor(5.6312, grad_fn=) tensor(5.5038, grad_fn=)\n", "161 LOSS DIFF: tensor(5.5349, grad_fn=) tensor(5.5015, grad_fn=)\n", "162 LOSS DIFF: tensor(5.6371, grad_fn=) tensor(5.5349, grad_fn=)\n", "163 LOSS DIFF: tensor(5.6482, grad_fn=) tensor(5.6371, grad_fn=)\n", "164 LOSS DIFF: tensor(5.6638, grad_fn=) tensor(5.6482, grad_fn=)\n", "165 LOSS DIFF: tensor(5.6737, grad_fn=) tensor(5.4801, grad_fn=)\n", "166 LOSS DIFF: tensor(5.4878, grad_fn=) tensor(5.4866, grad_fn=)\n", "167 LOSS DIFF: tensor(5.6624, grad_fn=) tensor(5.4878, grad_fn=)\n", "168 LOSS DIFF: tensor(5.5738, grad_fn=) tensor(5.5648, grad_fn=)\n", "169 LOSS DIFF: tensor(5.5267, grad_fn=) tensor(5.4309, grad_fn=)\n", "170 LOSS DIFF: tensor(5.6041, grad_fn=) tensor(5.3970, grad_fn=)\n", "171 LOSS DIFF: tensor(5.6640, grad_fn=) tensor(5.4885, grad_fn=)\n", "172 LOSS DIFF: tensor(5.6136, grad_fn=) tensor(5.4977, grad_fn=)\n", "173 LOSS DIFF: tensor(5.6567, grad_fn=) tensor(5.5459, grad_fn=)\n", "174 LOSS DIFF: tensor(5.5721, grad_fn=) tensor(5.4921, grad_fn=)\n", "175 LOSS DIFF: tensor(5.5685, grad_fn=) tensor(5.5363, grad_fn=)\n", "176 LOSS DIFF: tensor(5.5438, grad_fn=) tensor(5.4754, grad_fn=)\n", "177 LOSS DIFF: tensor(5.6087, grad_fn=) tensor(5.5345, grad_fn=)\n", "178 LOSS DIFF: tensor(5.5624, grad_fn=) tensor(5.3589, grad_fn=)\n", "179 LOSS DIFF: tensor(5.6284, grad_fn=) tensor(5.4887, grad_fn=)\n", "180 LOSS DIFF: tensor(5.4859, grad_fn=) tensor(5.4453, grad_fn=)\n", "181 LOSS DIFF: tensor(5.4949, grad_fn=) tensor(5.4859, grad_fn=)\n", "182 LOSS DIFF: tensor(5.5938, grad_fn=) tensor(5.4949, grad_fn=)\n", "183 LOSS DIFF: tensor(5.5222, grad_fn=) tensor(5.4890, grad_fn=)\n", "184 LOSS DIFF: tensor(5.6673, grad_fn=) tensor(5.5222, grad_fn=)\n", "185 LOSS DIFF: tensor(5.6337, grad_fn=) tensor(5.5833, grad_fn=)\n", "186 LOSS DIFF: tensor(5.7171, grad_fn=) tensor(5.6337, grad_fn=)\n", "187 LOSS DIFF: tensor(5.5721, grad_fn=) tensor(5.4927, grad_fn=)\n", "188 LOSS DIFF: tensor(5.5771, grad_fn=) tensor(5.5721, grad_fn=)\n", "189 LOSS DIFF: tensor(5.6379, grad_fn=) tensor(5.5771, grad_fn=)\n", "190 LOSS DIFF: tensor(5.6032, grad_fn=) tensor(5.4434, grad_fn=)\n", "191 LOSS DIFF: tensor(5.5389, grad_fn=) tensor(5.3454, grad_fn=)\n", "192 LOSS DIFF: tensor(5.6966, grad_fn=) tensor(5.4275, grad_fn=)\n", "193 LOSS DIFF: tensor(5.3675, grad_fn=) tensor(5.3163, grad_fn=)\n", "194 LOSS DIFF: tensor(5.4924, grad_fn=) tensor(5.3675, grad_fn=)\n", "195 LOSS DIFF: tensor(5.5475, grad_fn=) tensor(5.4881, grad_fn=)\n", "196 LOSS DIFF: tensor(5.6223, grad_fn=) tensor(5.3634, grad_fn=)\n", "400 tensor(5.5316, grad_fn=)\n", "197 LOSS DIFF: tensor(5.5377, grad_fn=) tensor(5.4920, grad_fn=)\n", "198 LOSS DIFF: tensor(5.6185, grad_fn=) tensor(5.4576, grad_fn=)\n", "199 LOSS DIFF: tensor(5.4915, grad_fn=) tensor(5.4151, grad_fn=)\n", "200 LOSS DIFF: tensor(5.5837, grad_fn=) tensor(5.4915, grad_fn=)\n", "201 LOSS DIFF: tensor(5.5875, grad_fn=) tensor(5.5837, grad_fn=)\n", "202 LOSS DIFF: tensor(5.5331, grad_fn=) tensor(5.4873, grad_fn=)\n", "203 LOSS DIFF: tensor(5.5345, grad_fn=) tensor(5.3964, grad_fn=)\n", "204 LOSS DIFF: tensor(5.5764, grad_fn=) tensor(5.5345, grad_fn=)\n", "205 LOSS DIFF: tensor(5.6070, grad_fn=) tensor(5.5764, grad_fn=)\n", "206 LOSS DIFF: tensor(5.5005, grad_fn=) tensor(5.3572, grad_fn=)\n", "207 LOSS DIFF: tensor(5.5520, grad_fn=) tensor(5.3860, grad_fn=)\n", "208 LOSS DIFF: tensor(5.5800, grad_fn=) tensor(5.5520, grad_fn=)\n", "209 LOSS DIFF: tensor(5.6465, grad_fn=) tensor(5.5469, grad_fn=)\n", "210 LOSS DIFF: tensor(5.5691, grad_fn=) tensor(5.5241, grad_fn=)\n", "211 LOSS DIFF: tensor(5.7237, grad_fn=) tensor(5.4803, grad_fn=)\n", "212 LOSS DIFF: tensor(5.5532, grad_fn=) tensor(5.5012, grad_fn=)\n", "213 LOSS DIFF: tensor(5.5011, grad_fn=) tensor(5.4712, grad_fn=)\n", "214 LOSS DIFF: tensor(5.5370, grad_fn=) tensor(5.5011, grad_fn=)\n", "215 LOSS DIFF: tensor(5.5579, grad_fn=) tensor(5.4126, grad_fn=)\n", "216 LOSS DIFF: tensor(5.5109, grad_fn=) tensor(5.3875, grad_fn=)\n", "217 LOSS DIFF: tensor(5.5403, grad_fn=) tensor(5.4174, grad_fn=)\n", "218 LOSS DIFF: tensor(5.5404, grad_fn=) tensor(5.5403, grad_fn=)\n", "219 LOSS DIFF: tensor(5.5593, grad_fn=) tensor(5.5404, grad_fn=)\n", "220 LOSS DIFF: tensor(5.5262, grad_fn=) tensor(5.5250, grad_fn=)\n", "221 LOSS DIFF: tensor(5.4107, grad_fn=) tensor(5.4092, grad_fn=)\n", "222 LOSS DIFF: tensor(5.4920, grad_fn=) tensor(5.3499, grad_fn=)\n", "223 LOSS DIFF: tensor(5.5064, grad_fn=) tensor(5.4920, grad_fn=)\n", "224 LOSS DIFF: tensor(5.5648, grad_fn=) tensor(5.5064, grad_fn=)\n", "225 LOSS DIFF: tensor(5.5107, grad_fn=) tensor(5.3439, grad_fn=)\n", "226 LOSS DIFF: tensor(5.4968, grad_fn=) tensor(5.4720, grad_fn=)\n", "227 LOSS DIFF: tensor(5.5473, grad_fn=) tensor(5.4854, grad_fn=)\n", "228 LOSS DIFF: tensor(5.4800, grad_fn=) tensor(5.3762, grad_fn=)\n", "229 LOSS DIFF: tensor(5.6251, grad_fn=) tensor(5.4800, grad_fn=)\n", "230 LOSS DIFF: tensor(5.6237, grad_fn=) tensor(5.4478, grad_fn=)\n", "231 LOSS DIFF: tensor(5.5439, grad_fn=) tensor(5.4108, grad_fn=)\n", "232 LOSS DIFF: tensor(5.3186, grad_fn=) tensor(5.3012, grad_fn=)\n", "233 LOSS DIFF: tensor(5.5069, grad_fn=) tensor(5.3186, grad_fn=)\n", "234 LOSS DIFF: tensor(5.5190, grad_fn=) tensor(5.5043, grad_fn=)\n", "235 LOSS DIFF: tensor(5.4706, grad_fn=) tensor(5.4560, grad_fn=)\n", "236 LOSS DIFF: tensor(5.5252, grad_fn=) tensor(5.4706, grad_fn=)\n", "237 LOSS DIFF: tensor(5.4765, grad_fn=) tensor(5.4103, grad_fn=)\n", "238 LOSS DIFF: tensor(5.5218, grad_fn=) tensor(5.4765, grad_fn=)\n", "239 LOSS DIFF: tensor(5.6028, grad_fn=) tensor(5.4596, grad_fn=)\n", "240 LOSS DIFF: tensor(5.5504, grad_fn=) tensor(5.5021, grad_fn=)\n", "241 LOSS DIFF: tensor(5.4777, grad_fn=) tensor(5.4091, grad_fn=)\n", "242 LOSS DIFF: tensor(5.4404, grad_fn=) tensor(5.3918, grad_fn=)\n", "243 LOSS DIFF: tensor(5.5580, grad_fn=) tensor(5.4404, grad_fn=)\n", "244 LOSS DIFF: tensor(5.4812, grad_fn=) tensor(5.4398, grad_fn=)\n", "500 tensor(5.5214, grad_fn=)\n", "245 LOSS DIFF: tensor(5.5214, grad_fn=) tensor(5.4142, grad_fn=)\n", "246 LOSS DIFF: tensor(5.6153, grad_fn=) tensor(5.5214, grad_fn=)\n", "247 LOSS DIFF: tensor(5.4794, grad_fn=) tensor(5.4672, grad_fn=)\n", "248 LOSS DIFF: tensor(5.5978, grad_fn=) tensor(5.4794, grad_fn=)\n", "249 LOSS DIFF: tensor(5.4549, grad_fn=) tensor(5.3421, grad_fn=)\n", "250 LOSS DIFF: tensor(5.4747, grad_fn=) tensor(5.4549, grad_fn=)\n", "251 LOSS DIFF: tensor(5.5439, grad_fn=) tensor(5.3348, grad_fn=)\n", "252 LOSS DIFF: tensor(5.5953, grad_fn=) tensor(5.5439, grad_fn=)\n", "253 LOSS DIFF: tensor(5.5308, grad_fn=) tensor(5.4385, grad_fn=)\n", "254 LOSS DIFF: tensor(5.5379, grad_fn=) tensor(5.4373, grad_fn=)\n", "255 LOSS DIFF: tensor(5.5022, grad_fn=) tensor(5.4306, grad_fn=)\n", "256 LOSS DIFF: tensor(5.5225, grad_fn=) tensor(5.4898, grad_fn=)\n", "257 LOSS DIFF: tensor(5.6141, grad_fn=) tensor(5.5225, grad_fn=)\n", "258 LOSS DIFF: tensor(5.4873, grad_fn=) tensor(5.4444, grad_fn=)\n", "259 LOSS DIFF: tensor(5.6677, grad_fn=) tensor(5.4873, grad_fn=)\n", "260 LOSS DIFF: tensor(5.5404, grad_fn=) tensor(5.4581, grad_fn=)\n", "261 LOSS DIFF: tensor(5.5603, grad_fn=) tensor(5.3583, grad_fn=)\n", "262 LOSS DIFF: tensor(5.5292, grad_fn=) tensor(5.2255, grad_fn=)\n", "263 LOSS DIFF: tensor(5.4456, grad_fn=) tensor(5.3846, grad_fn=)\n", "264 LOSS DIFF: tensor(5.4504, grad_fn=) tensor(5.4456, grad_fn=)\n", "265 LOSS DIFF: tensor(5.4899, grad_fn=) tensor(5.3406, grad_fn=)\n", "266 LOSS DIFF: tensor(5.5023, grad_fn=) tensor(5.4899, grad_fn=)\n", "267 LOSS DIFF: tensor(5.3884, grad_fn=) tensor(5.2800, grad_fn=)\n", "268 LOSS DIFF: tensor(5.4713, grad_fn=) tensor(5.3884, grad_fn=)\n", "269 LOSS DIFF: tensor(5.4810, grad_fn=) tensor(5.4713, grad_fn=)\n", "270 LOSS DIFF: tensor(5.3896, grad_fn=) tensor(5.3593, grad_fn=)\n", "271 LOSS DIFF: tensor(5.5195, grad_fn=) tensor(5.3896, grad_fn=)\n", "272 LOSS DIFF: tensor(5.4173, grad_fn=) tensor(5.3982, grad_fn=)\n", "273 LOSS DIFF: tensor(5.5428, grad_fn=) tensor(5.3779, grad_fn=)\n", "274 LOSS DIFF: tensor(5.4749, grad_fn=) tensor(5.4675, grad_fn=)\n", "275 LOSS DIFF: tensor(5.3978, grad_fn=) tensor(5.2620, grad_fn=)\n", "276 LOSS DIFF: tensor(5.4689, grad_fn=) tensor(5.3978, grad_fn=)\n", "277 LOSS DIFF: tensor(5.4733, grad_fn=) tensor(5.4689, grad_fn=)\n", "278 LOSS DIFF: tensor(5.5054, grad_fn=) tensor(5.4733, grad_fn=)\n", "279 LOSS DIFF: tensor(5.4809, grad_fn=) tensor(5.4288, grad_fn=)\n", "280 LOSS DIFF: tensor(5.5698, grad_fn=) tensor(5.4809, grad_fn=)\n", "281 LOSS DIFF: tensor(5.5550, grad_fn=) tensor(5.4103, grad_fn=)\n", "282 LOSS DIFF: tensor(5.5803, grad_fn=) tensor(5.5550, grad_fn=)\n", "283 LOSS DIFF: tensor(5.5616, grad_fn=) tensor(5.4858, grad_fn=)\n", "284 LOSS DIFF: tensor(5.4863, grad_fn=) tensor(5.3357, grad_fn=)\n", "285 LOSS DIFF: tensor(5.3506, grad_fn=) tensor(5.2871, grad_fn=)\n", "286 LOSS DIFF: tensor(5.6320, grad_fn=) tensor(5.3506, grad_fn=)\n", "287 LOSS DIFF: tensor(5.4488, grad_fn=) tensor(5.4314, grad_fn=)\n", "288 LOSS DIFF: tensor(5.4596, grad_fn=) tensor(5.4488, grad_fn=)\n", "289 LOSS DIFF: tensor(5.5325, grad_fn=) tensor(5.4596, grad_fn=)\n", "290 LOSS DIFF: tensor(5.4566, grad_fn=) tensor(5.2072, grad_fn=)\n", "291 LOSS DIFF: tensor(5.4784, grad_fn=) tensor(5.4303, grad_fn=)\n", "292 LOSS DIFF: tensor(5.4439, grad_fn=) tensor(5.3270, grad_fn=)\n", "293 LOSS DIFF: tensor(5.5160, grad_fn=) tensor(5.4439, grad_fn=)\n", "294 LOSS DIFF: tensor(5.4134, grad_fn=) tensor(5.3536, grad_fn=)\n", "295 LOSS DIFF: tensor(5.4426, grad_fn=) tensor(5.4134, grad_fn=)\n", "296 LOSS DIFF: tensor(5.3758, grad_fn=) tensor(5.3700, grad_fn=)\n", "297 LOSS DIFF: tensor(5.5559, grad_fn=) tensor(5.3758, grad_fn=)\n", "600 tensor(5.4824, grad_fn=)\n", "298 LOSS DIFF: tensor(5.3795, grad_fn=) tensor(5.3762, grad_fn=)\n", "299 LOSS DIFF: tensor(5.3878, grad_fn=) tensor(5.3795, grad_fn=)\n", "300 LOSS DIFF: tensor(5.4699, grad_fn=) tensor(5.3878, grad_fn=)\n", "301 LOSS DIFF: tensor(5.4967, grad_fn=) tensor(5.4699, grad_fn=)\n", "302 LOSS DIFF: tensor(5.5724, grad_fn=) tensor(5.4967, grad_fn=)\n", "303 LOSS DIFF: tensor(5.4520, grad_fn=) tensor(5.4072, grad_fn=)\n", "304 LOSS DIFF: tensor(5.5089, grad_fn=) tensor(5.4520, grad_fn=)\n", "305 LOSS DIFF: tensor(5.5398, grad_fn=) tensor(5.3168, grad_fn=)\n", "306 LOSS DIFF: tensor(5.3561, grad_fn=) tensor(5.3058, grad_fn=)\n", "307 LOSS DIFF: tensor(5.4668, grad_fn=) tensor(5.3448, grad_fn=)\n", "308 LOSS DIFF: tensor(5.4964, grad_fn=) tensor(5.4668, grad_fn=)\n", "309 LOSS DIFF: tensor(5.4440, grad_fn=) tensor(5.3221, grad_fn=)\n", "310 LOSS DIFF: tensor(5.4516, grad_fn=) tensor(5.4289, grad_fn=)\n", "311 LOSS DIFF: tensor(5.4969, grad_fn=) tensor(5.3983, grad_fn=)\n", "312 LOSS DIFF: tensor(5.4254, grad_fn=) tensor(5.3790, grad_fn=)\n", "313 LOSS DIFF: tensor(5.4874, grad_fn=) tensor(5.4254, grad_fn=)\n", "314 LOSS DIFF: tensor(5.3839, grad_fn=) tensor(5.3470, grad_fn=)\n", "315 LOSS DIFF: tensor(5.5822, grad_fn=) tensor(5.3839, grad_fn=)\n", "316 LOSS DIFF: tensor(5.4169, grad_fn=) tensor(5.3044, grad_fn=)\n", "317 LOSS DIFF: tensor(5.4778, grad_fn=) tensor(5.4169, grad_fn=)\n", "318 LOSS DIFF: tensor(5.3589, grad_fn=) tensor(5.2238, grad_fn=)\n", "319 LOSS DIFF: tensor(5.3547, grad_fn=) tensor(5.3184, grad_fn=)\n", "320 LOSS DIFF: tensor(5.5022, grad_fn=) tensor(5.3547, grad_fn=)\n", "321 LOSS DIFF: tensor(5.4749, grad_fn=) tensor(5.4294, grad_fn=)\n", "322 LOSS DIFF: tensor(5.3813, grad_fn=) tensor(5.3557, grad_fn=)\n", "323 LOSS DIFF: tensor(5.4019, grad_fn=) tensor(5.3813, grad_fn=)\n", "324 LOSS DIFF: tensor(5.7250, grad_fn=) tensor(5.4019, grad_fn=)\n", "325 LOSS DIFF: tensor(5.4055, grad_fn=) tensor(5.3304, grad_fn=)\n", "326 LOSS DIFF: tensor(5.4721, grad_fn=) tensor(5.4055, grad_fn=)\n", "327 LOSS DIFF: tensor(5.4590, grad_fn=) tensor(5.3773, grad_fn=)\n", "328 LOSS DIFF: tensor(5.6097, grad_fn=) tensor(5.4590, grad_fn=)\n", "329 LOSS DIFF: tensor(5.5304, grad_fn=) tensor(5.2807, grad_fn=)\n", "330 LOSS DIFF: tensor(5.4286, grad_fn=) tensor(5.3879, grad_fn=)\n", "331 LOSS DIFF: tensor(5.4221, grad_fn=) tensor(5.2779, grad_fn=)\n", "332 LOSS DIFF: tensor(5.3690, grad_fn=) tensor(5.3191, grad_fn=)\n", "333 LOSS DIFF: tensor(5.3814, grad_fn=) tensor(5.3690, grad_fn=)\n", "334 LOSS DIFF: tensor(5.4241, grad_fn=) tensor(5.3760, grad_fn=)\n", "335 LOSS DIFF: tensor(5.4727, grad_fn=) tensor(5.4241, grad_fn=)\n", "336 LOSS DIFF: tensor(5.4216, grad_fn=) tensor(5.3401, grad_fn=)\n", "337 LOSS DIFF: tensor(5.4938, grad_fn=) tensor(5.3908, grad_fn=)\n", "338 LOSS DIFF: tensor(5.4742, grad_fn=) tensor(5.3384, grad_fn=)\n", "339 LOSS DIFF: tensor(5.4628, grad_fn=) tensor(5.2785, grad_fn=)\n", "340 LOSS DIFF: tensor(5.5419, grad_fn=) tensor(5.3019, grad_fn=)\n", "341 LOSS DIFF: tensor(5.4736, grad_fn=) tensor(5.3646, grad_fn=)\n", "342 LOSS DIFF: tensor(5.4150, grad_fn=) tensor(5.3511, grad_fn=)\n", "343 LOSS DIFF: tensor(5.4531, grad_fn=) tensor(5.2982, grad_fn=)\n", "344 LOSS DIFF: tensor(5.4617, grad_fn=) tensor(5.4531, grad_fn=)\n", "345 LOSS DIFF: tensor(5.4939, grad_fn=) tensor(5.4617, grad_fn=)\n", "346 LOSS DIFF: tensor(5.4178, grad_fn=) tensor(5.3127, grad_fn=)\n", "700 tensor(5.7095, grad_fn=)\n", "347 LOSS DIFF: tensor(5.7095, grad_fn=) tensor(5.3593, grad_fn=)\n", "348 LOSS DIFF: tensor(5.4054, grad_fn=) tensor(5.3883, grad_fn=)\n", "349 LOSS DIFF: tensor(5.6016, grad_fn=) tensor(5.4054, grad_fn=)\n", "350 LOSS DIFF: tensor(5.4695, grad_fn=) tensor(5.4424, grad_fn=)\n", "351 LOSS DIFF: tensor(5.5022, grad_fn=) tensor(5.4695, grad_fn=)\n", "352 LOSS DIFF: tensor(5.5172, grad_fn=) tensor(5.4135, grad_fn=)\n", "353 LOSS DIFF: tensor(5.5003, grad_fn=) tensor(5.3490, grad_fn=)\n", "354 LOSS DIFF: tensor(5.3198, grad_fn=) tensor(5.2805, grad_fn=)\n", "355 LOSS DIFF: tensor(5.3726, grad_fn=) tensor(5.3198, grad_fn=)\n", "356 LOSS DIFF: tensor(5.3992, grad_fn=) tensor(5.3726, grad_fn=)\n", "357 LOSS DIFF: tensor(5.5122, grad_fn=) tensor(5.3992, grad_fn=)\n", "358 LOSS DIFF: tensor(5.6000, grad_fn=) tensor(5.3476, grad_fn=)\n", "359 LOSS DIFF: tensor(5.4421, grad_fn=) tensor(5.3207, grad_fn=)\n", "360 LOSS DIFF: tensor(5.6211, grad_fn=) tensor(5.4421, grad_fn=)\n", "361 LOSS DIFF: tensor(5.3617, grad_fn=) tensor(5.3425, grad_fn=)\n", "362 LOSS DIFF: tensor(5.3828, grad_fn=) tensor(5.3617, grad_fn=)\n", "363 LOSS DIFF: tensor(5.4569, grad_fn=) tensor(5.3828, grad_fn=)\n", "364 LOSS DIFF: tensor(5.4314, grad_fn=) tensor(5.2452, grad_fn=)\n", "365 LOSS DIFF: tensor(5.5384, grad_fn=) tensor(5.4314, grad_fn=)\n", "366 LOSS DIFF: tensor(5.4293, grad_fn=) tensor(5.3797, grad_fn=)\n", "367 LOSS DIFF: tensor(5.4823, grad_fn=) tensor(5.4289, grad_fn=)\n", "368 LOSS DIFF: tensor(5.4602, grad_fn=) tensor(5.3212, grad_fn=)\n", "369 LOSS DIFF: tensor(5.4459, grad_fn=) tensor(5.3457, grad_fn=)\n", "370 LOSS DIFF: tensor(5.5089, grad_fn=) tensor(5.3548, grad_fn=)\n", "371 LOSS DIFF: tensor(5.3639, grad_fn=) tensor(5.2607, grad_fn=)\n", "372 LOSS DIFF: tensor(5.4079, grad_fn=) tensor(5.3639, grad_fn=)\n", "373 LOSS DIFF: tensor(5.5557, grad_fn=) tensor(5.4079, grad_fn=)\n", "374 LOSS DIFF: tensor(5.3965, grad_fn=) tensor(5.3427, grad_fn=)\n", "375 LOSS DIFF: tensor(5.4149, grad_fn=) tensor(5.3965, grad_fn=)\n", "376 LOSS DIFF: tensor(5.3285, grad_fn=) tensor(5.3265, grad_fn=)\n", "377 LOSS DIFF: tensor(5.3672, grad_fn=) tensor(5.3285, grad_fn=)\n", "378 LOSS DIFF: tensor(5.4523, grad_fn=) tensor(5.3471, grad_fn=)\n", "379 LOSS DIFF: tensor(5.4315, grad_fn=) tensor(5.4231, grad_fn=)\n", "380 LOSS DIFF: tensor(5.5363, grad_fn=) tensor(5.4315, grad_fn=)\n", "381 LOSS DIFF: tensor(5.4404, grad_fn=) tensor(5.4114, grad_fn=)\n", "382 LOSS DIFF: tensor(5.2667, grad_fn=) tensor(5.2283, grad_fn=)\n", "383 LOSS DIFF: tensor(5.3342, grad_fn=) tensor(5.2667, grad_fn=)\n", "384 LOSS DIFF: tensor(5.4847, grad_fn=) tensor(5.3342, grad_fn=)\n", "385 LOSS DIFF: tensor(5.5349, grad_fn=) tensor(5.4847, grad_fn=)\n", "386 LOSS DIFF: tensor(5.4216, grad_fn=) tensor(5.2991, grad_fn=)\n", "387 LOSS DIFF: tensor(5.4483, grad_fn=) tensor(5.3455, grad_fn=)\n", "388 LOSS DIFF: tensor(5.4229, grad_fn=) tensor(5.3271, grad_fn=)\n", "389 LOSS DIFF: tensor(5.5482, grad_fn=) tensor(5.4229, grad_fn=)\n", "390 LOSS DIFF: tensor(5.4596, grad_fn=) tensor(5.3374, grad_fn=)\n", "391 LOSS DIFF: tensor(5.4694, grad_fn=) tensor(5.4596, grad_fn=)\n", "392 LOSS DIFF: tensor(5.4744, grad_fn=) tensor(5.3277, grad_fn=)\n", "393 LOSS DIFF: tensor(5.4301, grad_fn=) tensor(5.3380, grad_fn=)\n", "394 LOSS DIFF: tensor(5.2605, grad_fn=) tensor(5.2482, grad_fn=)\n", "395 LOSS DIFF: tensor(5.4596, grad_fn=) tensor(5.2605, grad_fn=)\n", "396 LOSS DIFF: tensor(5.3527, grad_fn=) tensor(5.2774, grad_fn=)\n", "397 LOSS DIFF: tensor(5.5415, grad_fn=) tensor(5.3283, grad_fn=)\n", "398 LOSS DIFF: tensor(5.5558, grad_fn=) tensor(5.4762, grad_fn=)\n", "399 LOSS DIFF: tensor(5.3862, grad_fn=) tensor(5.3796, grad_fn=)\n", "400 LOSS DIFF: tensor(5.5006, grad_fn=) tensor(5.2756, grad_fn=)\n", "401 LOSS DIFF: tensor(5.4776, grad_fn=) tensor(5.2884, grad_fn=)\n", "800 tensor(5.4405, grad_fn=)\n", "402 LOSS DIFF: tensor(5.5078, grad_fn=) tensor(5.2731, grad_fn=)\n", "403 LOSS DIFF: tensor(5.4186, grad_fn=) tensor(5.3394, grad_fn=)\n", "404 LOSS DIFF: tensor(5.4645, grad_fn=) tensor(5.4186, grad_fn=)\n", "405 LOSS DIFF: tensor(5.3991, grad_fn=) tensor(5.1863, grad_fn=)\n", "406 LOSS DIFF: tensor(5.4625, grad_fn=) tensor(5.3991, grad_fn=)\n", "407 LOSS DIFF: tensor(5.2887, grad_fn=) tensor(5.2630, grad_fn=)\n", "408 LOSS DIFF: tensor(5.3613, grad_fn=) tensor(5.2887, grad_fn=)\n", "409 LOSS DIFF: tensor(5.4549, grad_fn=) tensor(5.3613, grad_fn=)\n", "410 LOSS DIFF: tensor(5.4254, grad_fn=) tensor(5.3545, grad_fn=)\n", "411 LOSS DIFF: tensor(5.4779, grad_fn=) tensor(5.4254, grad_fn=)\n", "412 LOSS DIFF: tensor(5.4206, grad_fn=) tensor(5.3494, grad_fn=)\n", "413 LOSS DIFF: tensor(5.4468, grad_fn=) tensor(5.3558, grad_fn=)\n", "414 LOSS DIFF: tensor(5.3703, grad_fn=) tensor(5.3009, grad_fn=)\n", "415 LOSS DIFF: tensor(5.4129, grad_fn=) tensor(5.3703, grad_fn=)\n", "416 LOSS DIFF: tensor(5.4347, grad_fn=) tensor(5.3186, grad_fn=)\n", "417 LOSS DIFF: tensor(5.3410, grad_fn=) tensor(5.2797, grad_fn=)\n", "418 LOSS DIFF: tensor(5.4206, grad_fn=) tensor(5.3410, grad_fn=)\n", "419 LOSS DIFF: tensor(5.3961, grad_fn=) tensor(5.3201, grad_fn=)\n", "420 LOSS DIFF: tensor(5.3999, grad_fn=) tensor(5.3961, grad_fn=)\n", "421 LOSS DIFF: tensor(5.4644, grad_fn=) tensor(5.2622, grad_fn=)\n", "422 LOSS DIFF: tensor(5.3218, grad_fn=) tensor(5.3111, grad_fn=)\n", "423 LOSS DIFF: tensor(5.3554, grad_fn=) tensor(5.3218, grad_fn=)\n", "424 LOSS DIFF: tensor(5.4028, grad_fn=) tensor(5.3554, grad_fn=)\n", "425 LOSS DIFF: tensor(5.3832, grad_fn=) tensor(5.3375, grad_fn=)\n", "426 LOSS DIFF: tensor(5.4313, grad_fn=) tensor(5.3181, grad_fn=)\n", "427 LOSS DIFF: tensor(5.4721, grad_fn=) tensor(5.3831, grad_fn=)\n", "428 LOSS DIFF: tensor(5.3902, grad_fn=) tensor(5.2394, grad_fn=)\n", "429 LOSS DIFF: tensor(5.3492, grad_fn=) tensor(5.3336, grad_fn=)\n", "430 LOSS DIFF: tensor(5.3523, grad_fn=) tensor(5.3492, grad_fn=)\n", "431 LOSS DIFF: tensor(5.4211, grad_fn=) tensor(5.3486, grad_fn=)\n", "432 LOSS DIFF: tensor(5.4755, grad_fn=) tensor(5.2288, grad_fn=)\n", "433 LOSS DIFF: tensor(5.5728, grad_fn=) tensor(5.4755, grad_fn=)\n", "434 LOSS DIFF: tensor(5.3855, grad_fn=) tensor(5.3527, grad_fn=)\n", "435 LOSS DIFF: tensor(5.4776, grad_fn=) tensor(5.3855, grad_fn=)\n", "436 LOSS DIFF: tensor(5.3750, grad_fn=) tensor(5.3262, grad_fn=)\n", "437 LOSS DIFF: tensor(5.3902, grad_fn=) tensor(5.3750, grad_fn=)\n", "438 LOSS DIFF: tensor(5.3135, grad_fn=) tensor(5.2863, grad_fn=)\n", "439 LOSS DIFF: tensor(5.4483, grad_fn=) tensor(5.3135, grad_fn=)\n", "440 LOSS DIFF: tensor(5.3201, grad_fn=) tensor(5.2603, grad_fn=)\n", "441 LOSS DIFF: tensor(5.3807, grad_fn=) tensor(5.3201, grad_fn=)\n", "442 LOSS DIFF: tensor(5.5009, grad_fn=) tensor(5.2434, grad_fn=)\n", "443 LOSS DIFF: tensor(5.4282, grad_fn=) tensor(5.4278, grad_fn=)\n", "444 LOSS DIFF: tensor(5.3787, grad_fn=) tensor(5.3128, grad_fn=)\n", "445 LOSS DIFF: tensor(5.5917, grad_fn=) tensor(5.3324, grad_fn=)\n", "446 LOSS DIFF: tensor(5.4186, grad_fn=) tensor(5.3144, grad_fn=)\n", "447 LOSS DIFF: tensor(5.4553, grad_fn=) tensor(5.4186, grad_fn=)\n", "448 LOSS DIFF: tensor(5.4903, grad_fn=) tensor(5.4553, grad_fn=)\n", "449 LOSS DIFF: tensor(5.4295, grad_fn=) tensor(5.3503, grad_fn=)\n", "450 LOSS DIFF: tensor(5.3945, grad_fn=) tensor(5.3607, grad_fn=)\n", "451 LOSS DIFF: tensor(5.2822, grad_fn=) tensor(5.2387, grad_fn=)\n", "452 LOSS DIFF: tensor(5.3334, grad_fn=) tensor(5.2822, grad_fn=)\n", "453 LOSS DIFF: tensor(5.4073, grad_fn=) tensor(5.3334, grad_fn=)\n", "454 LOSS DIFF: tensor(5.3797, grad_fn=) tensor(5.3469, grad_fn=)\n", "455 LOSS DIFF: tensor(5.4848, grad_fn=) tensor(5.2529, grad_fn=)\n", "900 tensor(5.3078, grad_fn=)\n", "456 LOSS DIFF: tensor(5.4695, grad_fn=) tensor(5.3078, grad_fn=)\n", "457 LOSS DIFF: tensor(5.4369, grad_fn=) tensor(5.3834, grad_fn=)\n", "458 LOSS DIFF: tensor(5.4973, grad_fn=) tensor(5.4369, grad_fn=)\n", "459 LOSS DIFF: tensor(5.4526, grad_fn=) tensor(5.3075, grad_fn=)\n", "460 LOSS DIFF: tensor(5.4022, grad_fn=) tensor(5.2870, grad_fn=)\n", "461 LOSS DIFF: tensor(5.3850, grad_fn=) tensor(5.2879, grad_fn=)\n", "462 LOSS DIFF: tensor(5.4370, grad_fn=) tensor(5.3154, grad_fn=)\n", "463 LOSS DIFF: tensor(5.4111, grad_fn=) tensor(5.3927, grad_fn=)\n", "464 LOSS DIFF: tensor(5.4638, grad_fn=) tensor(5.4111, grad_fn=)\n", "465 LOSS DIFF: tensor(5.3719, grad_fn=) tensor(5.3195, grad_fn=)\n", "466 LOSS DIFF: tensor(5.4880, grad_fn=) tensor(5.3719, grad_fn=)\n", "467 LOSS DIFF: tensor(5.4762, grad_fn=) tensor(5.4186, grad_fn=)\n", "468 LOSS DIFF: tensor(5.3155, grad_fn=) tensor(5.2086, grad_fn=)\n", "469 LOSS DIFF: tensor(5.4985, grad_fn=) tensor(5.3155, grad_fn=)\n", "470 LOSS DIFF: tensor(5.4505, grad_fn=) tensor(5.3731, grad_fn=)\n", "471 LOSS DIFF: tensor(5.4291, grad_fn=) tensor(5.3408, grad_fn=)\n", "472 LOSS DIFF: tensor(5.3826, grad_fn=) tensor(5.3232, grad_fn=)\n", "473 LOSS DIFF: tensor(5.4152, grad_fn=) tensor(5.3468, grad_fn=)\n", "474 LOSS DIFF: tensor(5.4983, grad_fn=) tensor(5.4152, grad_fn=)\n", "475 LOSS DIFF: tensor(5.5432, grad_fn=) tensor(5.3502, grad_fn=)\n", "476 LOSS DIFF: tensor(5.3989, grad_fn=) tensor(5.3489, grad_fn=)\n", "477 LOSS DIFF: tensor(5.4624, grad_fn=) tensor(5.3761, grad_fn=)\n", "478 LOSS DIFF: tensor(5.4082, grad_fn=) tensor(5.4043, grad_fn=)\n", "479 LOSS DIFF: tensor(5.4074, grad_fn=) tensor(5.3588, grad_fn=)\n", "480 LOSS DIFF: tensor(5.4588, grad_fn=) tensor(5.4074, grad_fn=)\n", "481 LOSS DIFF: tensor(5.3339, grad_fn=) tensor(5.2172, grad_fn=)\n", "482 LOSS DIFF: tensor(5.4468, grad_fn=) tensor(5.3339, grad_fn=)\n", "483 LOSS DIFF: tensor(5.4736, grad_fn=) tensor(5.4024, grad_fn=)\n", "484 LOSS DIFF: tensor(5.3780, grad_fn=) tensor(5.3095, grad_fn=)\n", "485 LOSS DIFF: tensor(5.4251, grad_fn=) tensor(5.3780, grad_fn=)\n", "486 LOSS DIFF: tensor(5.4035, grad_fn=) tensor(5.3474, grad_fn=)\n", "487 LOSS DIFF: tensor(5.3575, grad_fn=) tensor(5.2837, grad_fn=)\n", "488 LOSS DIFF: tensor(5.4629, grad_fn=) tensor(5.3298, grad_fn=)\n", "489 LOSS DIFF: tensor(5.4593, grad_fn=) tensor(5.4124, grad_fn=)\n", "490 LOSS DIFF: tensor(5.4040, grad_fn=) tensor(5.3532, grad_fn=)\n", "491 LOSS DIFF: tensor(5.4693, grad_fn=) tensor(5.4040, grad_fn=)\n", "492 LOSS DIFF: tensor(5.4201, grad_fn=) tensor(5.3561, grad_fn=)\n", "493 LOSS DIFF: tensor(5.4786, grad_fn=) tensor(5.4201, grad_fn=)\n", "494 LOSS DIFF: tensor(5.3819, grad_fn=) tensor(5.3108, grad_fn=)\n", "495 LOSS DIFF: tensor(5.3170, grad_fn=) tensor(5.3080, grad_fn=)\n", "496 LOSS DIFF: tensor(5.3305, grad_fn=) tensor(5.2931, grad_fn=)\n", "497 LOSS DIFF: tensor(5.3719, grad_fn=) tensor(5.3305, grad_fn=)\n", "498 LOSS DIFF: tensor(5.3756, grad_fn=) tensor(5.3702, grad_fn=)\n", "499 LOSS DIFF: tensor(5.4073, grad_fn=) tensor(5.1951, grad_fn=)\n", "500 LOSS DIFF: tensor(5.4267, grad_fn=) tensor(5.3957, grad_fn=)\n", "501 LOSS DIFF: tensor(5.3842, grad_fn=) tensor(5.3569, grad_fn=)\n", "502 LOSS DIFF: tensor(5.4202, grad_fn=) tensor(5.3842, grad_fn=)\n", "503 LOSS DIFF: tensor(5.3634, grad_fn=) tensor(5.2962, grad_fn=)\n", "504 LOSS DIFF: tensor(5.4654, grad_fn=) tensor(5.3512, grad_fn=)\n", "1000 tensor(5.4063, grad_fn=)\n", "505 LOSS DIFF: tensor(5.4063, grad_fn=) tensor(5.3712, grad_fn=)\n", "506 LOSS DIFF: tensor(5.3378, grad_fn=) tensor(5.2547, grad_fn=)\n", "507 LOSS DIFF: tensor(5.3185, grad_fn=) tensor(5.2350, grad_fn=)\n", "508 LOSS DIFF: tensor(5.3049, grad_fn=) tensor(5.1821, grad_fn=)\n", "509 LOSS DIFF: tensor(5.4689, grad_fn=) tensor(5.3049, grad_fn=)\n", "510 LOSS DIFF: tensor(5.1437, grad_fn=) tensor(5.1380, grad_fn=)\n", "511 LOSS DIFF: tensor(5.3984, grad_fn=) tensor(5.1437, grad_fn=)\n", "512 LOSS DIFF: tensor(5.5009, grad_fn=) tensor(5.2426, grad_fn=)\n", "513 LOSS DIFF: tensor(5.3734, grad_fn=) tensor(5.3096, grad_fn=)\n", "514 LOSS DIFF: tensor(5.3889, grad_fn=) tensor(5.3734, grad_fn=)\n", "515 LOSS DIFF: tensor(5.4053, grad_fn=) tensor(5.3114, grad_fn=)\n", "516 LOSS DIFF: tensor(5.3912, grad_fn=) tensor(5.2357, grad_fn=)\n", "517 LOSS DIFF: tensor(5.4400, grad_fn=) tensor(5.3115, grad_fn=)\n", "518 LOSS DIFF: tensor(5.4756, grad_fn=) tensor(5.2689, grad_fn=)\n", "519 LOSS DIFF: tensor(5.3111, grad_fn=) tensor(5.1618, grad_fn=)\n", "520 LOSS DIFF: tensor(5.3974, grad_fn=) tensor(5.3030, grad_fn=)\n", "521 LOSS DIFF: tensor(5.3955, grad_fn=) tensor(5.2872, grad_fn=)\n", "522 LOSS DIFF: tensor(5.4712, grad_fn=) tensor(5.3863, grad_fn=)\n", "523 LOSS DIFF: tensor(5.4095, grad_fn=) tensor(5.3686, grad_fn=)\n", "524 LOSS DIFF: tensor(5.3285, grad_fn=) tensor(5.2293, grad_fn=)\n", "525 LOSS DIFF: tensor(5.3468, grad_fn=) tensor(5.2348, grad_fn=)\n", "526 LOSS DIFF: tensor(5.3140, grad_fn=) tensor(5.2460, grad_fn=)\n", "527 LOSS DIFF: tensor(5.3772, grad_fn=) tensor(5.3140, grad_fn=)\n", "528 LOSS DIFF: tensor(5.3576, grad_fn=) tensor(5.3363, grad_fn=)\n", "529 LOSS DIFF: tensor(5.2631, grad_fn=) tensor(5.2239, grad_fn=)\n", "530 LOSS DIFF: tensor(5.4207, grad_fn=) tensor(5.2631, grad_fn=)\n", "531 LOSS DIFF: tensor(5.4238, grad_fn=) tensor(5.2798, grad_fn=)\n", "532 LOSS DIFF: tensor(5.4496, grad_fn=) tensor(5.2819, grad_fn=)\n", "533 LOSS DIFF: tensor(5.2788, grad_fn=) tensor(5.2125, grad_fn=)\n", "534 LOSS DIFF: tensor(5.3159, grad_fn=) tensor(5.2788, grad_fn=)\n", "535 LOSS DIFF: tensor(5.3200, grad_fn=) tensor(5.3159, grad_fn=)\n", "536 LOSS DIFF: tensor(5.3934, grad_fn=) tensor(5.3087, grad_fn=)\n", "537 LOSS DIFF: tensor(5.2843, grad_fn=) tensor(5.2815, grad_fn=)\n", "538 LOSS DIFF: tensor(5.5309, grad_fn=) tensor(5.2377, grad_fn=)\n", "539 LOSS DIFF: tensor(5.4258, grad_fn=) tensor(5.3734, grad_fn=)\n", "540 LOSS DIFF: tensor(5.4562, grad_fn=) tensor(5.2893, grad_fn=)\n", "541 LOSS DIFF: tensor(5.3672, grad_fn=) tensor(5.3331, grad_fn=)\n", "542 LOSS DIFF: tensor(5.3475, grad_fn=) tensor(5.3409, grad_fn=)\n", "543 LOSS DIFF: tensor(5.3826, grad_fn=) tensor(5.3475, grad_fn=)\n", "544 LOSS DIFF: tensor(5.4529, grad_fn=) tensor(5.3826, grad_fn=)\n", "545 LOSS DIFF: tensor(5.4554, grad_fn=) tensor(5.3758, grad_fn=)\n", "546 LOSS DIFF: tensor(5.3725, grad_fn=) tensor(5.2762, grad_fn=)\n", "547 LOSS DIFF: tensor(5.3809, grad_fn=) tensor(5.3140, grad_fn=)\n", "548 LOSS DIFF: tensor(5.4411, grad_fn=) tensor(5.3809, grad_fn=)\n", "1100 tensor(5.2577, grad_fn=)\n", "549 LOSS DIFF: tensor(5.3207, grad_fn=) tensor(5.2233, grad_fn=)\n", "550 LOSS DIFF: tensor(5.3287, grad_fn=) tensor(5.3207, grad_fn=)\n", "551 LOSS DIFF: tensor(5.4455, grad_fn=) tensor(5.3140, grad_fn=)\n", "552 LOSS DIFF: tensor(5.3970, grad_fn=) tensor(5.3160, grad_fn=)\n", "553 LOSS DIFF: tensor(5.4958, grad_fn=) tensor(5.3970, grad_fn=)\n", "554 LOSS DIFF: tensor(5.4289, grad_fn=) tensor(5.3781, grad_fn=)\n", "555 LOSS DIFF: tensor(5.3988, grad_fn=) tensor(5.2830, grad_fn=)\n", "556 LOSS DIFF: tensor(5.3452, grad_fn=) tensor(5.3121, grad_fn=)\n", "557 LOSS DIFF: tensor(5.3707, grad_fn=) tensor(5.3452, grad_fn=)\n", "558 LOSS DIFF: tensor(5.4004, grad_fn=) tensor(5.3490, grad_fn=)\n", "559 LOSS DIFF: tensor(5.3442, grad_fn=) tensor(5.2255, grad_fn=)\n", "560 LOSS DIFF: tensor(5.3311, grad_fn=) tensor(5.3145, grad_fn=)\n", "561 LOSS DIFF: tensor(5.4662, grad_fn=) tensor(5.3171, grad_fn=)\n", "562 LOSS DIFF: tensor(5.3376, grad_fn=) tensor(5.3006, grad_fn=)\n", "563 LOSS DIFF: tensor(5.3617, grad_fn=) tensor(5.3376, grad_fn=)\n", "564 LOSS DIFF: tensor(5.3627, grad_fn=) tensor(5.3617, grad_fn=)\n", "565 LOSS DIFF: tensor(5.3169, grad_fn=) tensor(5.2494, grad_fn=)\n", "566 LOSS DIFF: tensor(5.3391, grad_fn=) tensor(5.2797, grad_fn=)\n", "567 LOSS DIFF: tensor(5.3793, grad_fn=) tensor(5.3391, grad_fn=)\n", "568 LOSS DIFF: tensor(5.3983, grad_fn=) tensor(5.3793, grad_fn=)\n", "569 LOSS DIFF: tensor(5.3797, grad_fn=) tensor(5.1963, grad_fn=)\n", "570 LOSS DIFF: tensor(5.3978, grad_fn=) tensor(5.3797, grad_fn=)\n", "571 LOSS DIFF: tensor(5.4648, grad_fn=) tensor(5.2794, grad_fn=)\n", "572 LOSS DIFF: tensor(5.3364, grad_fn=) tensor(5.3139, grad_fn=)\n", "573 LOSS DIFF: tensor(5.3724, grad_fn=) tensor(5.3364, grad_fn=)\n", "574 LOSS DIFF: tensor(5.4125, grad_fn=) tensor(5.3724, grad_fn=)\n", "575 LOSS DIFF: tensor(5.4216, grad_fn=) tensor(5.3249, grad_fn=)\n", "576 LOSS DIFF: tensor(5.3209, grad_fn=) tensor(5.2087, grad_fn=)\n", "577 LOSS DIFF: tensor(5.2730, grad_fn=) tensor(5.2515, grad_fn=)\n", "578 LOSS DIFF: tensor(5.3871, grad_fn=) tensor(5.2537, grad_fn=)\n", "579 LOSS DIFF: tensor(5.2357, grad_fn=) tensor(5.1883, grad_fn=)\n", "580 LOSS DIFF: tensor(5.4435, grad_fn=) tensor(5.2357, grad_fn=)\n", "581 LOSS DIFF: tensor(5.3116, grad_fn=) tensor(5.2408, grad_fn=)\n", "582 LOSS DIFF: tensor(5.4295, grad_fn=) tensor(5.3116, grad_fn=)\n", "583 LOSS DIFF: tensor(5.3725, grad_fn=) tensor(5.2704, grad_fn=)\n", "584 LOSS DIFF: tensor(5.3951, grad_fn=) tensor(5.3211, grad_fn=)\n", "585 LOSS DIFF: tensor(5.4080, grad_fn=) tensor(5.3951, grad_fn=)\n", "586 LOSS DIFF: tensor(5.3569, grad_fn=) tensor(5.2900, grad_fn=)\n", "587 LOSS DIFF: tensor(5.3004, grad_fn=) tensor(5.2806, grad_fn=)\n", "588 LOSS DIFF: tensor(5.3874, grad_fn=) tensor(5.3004, grad_fn=)\n", "589 LOSS DIFF: tensor(5.4849, grad_fn=) tensor(5.2921, grad_fn=)\n", "590 LOSS DIFF: tensor(5.2856, grad_fn=) tensor(5.2661, grad_fn=)\n", "591 LOSS DIFF: tensor(5.4242, grad_fn=) tensor(5.2856, grad_fn=)\n", "592 LOSS DIFF: tensor(5.2910, grad_fn=) tensor(5.1762, grad_fn=)\n", "593 LOSS DIFF: tensor(5.3048, grad_fn=) tensor(5.1369, grad_fn=)\n", "594 LOSS DIFF: tensor(5.3170, grad_fn=) tensor(5.3048, grad_fn=)\n", "595 LOSS DIFF: tensor(5.4164, grad_fn=) tensor(5.3170, grad_fn=)\n", "1200 tensor(5.2414, grad_fn=)\n", "596 LOSS DIFF: tensor(5.4063, grad_fn=) tensor(5.2414, grad_fn=)\n", "597 LOSS DIFF: tensor(5.3547, grad_fn=) tensor(5.2150, grad_fn=)\n", "598 LOSS DIFF: tensor(5.2713, grad_fn=) tensor(5.2182, grad_fn=)\n", "599 LOSS DIFF: tensor(5.2934, grad_fn=) tensor(5.2713, grad_fn=)\n", "600 LOSS DIFF: tensor(5.3680, grad_fn=) tensor(5.2934, grad_fn=)\n", "601 LOSS DIFF: tensor(5.3810, grad_fn=) tensor(5.2937, grad_fn=)\n", "602 LOSS DIFF: tensor(5.2992, grad_fn=) tensor(5.2390, grad_fn=)\n", "603 LOSS DIFF: tensor(5.3592, grad_fn=) tensor(5.2325, grad_fn=)\n", "604 LOSS DIFF: tensor(5.4165, grad_fn=) tensor(5.2317, grad_fn=)\n", "605 LOSS DIFF: tensor(5.5033, grad_fn=) tensor(5.4165, grad_fn=)\n", "606 LOSS DIFF: tensor(5.4137, grad_fn=) tensor(5.1996, grad_fn=)\n", "607 LOSS DIFF: tensor(5.5262, grad_fn=) tensor(5.4137, grad_fn=)\n", "608 LOSS DIFF: tensor(5.3964, grad_fn=) tensor(5.3314, grad_fn=)\n", "609 LOSS DIFF: tensor(5.3722, grad_fn=) tensor(5.3268, grad_fn=)\n", "610 LOSS DIFF: tensor(5.3378, grad_fn=) tensor(5.3186, grad_fn=)\n", "611 LOSS DIFF: tensor(5.4699, grad_fn=) tensor(5.3378, grad_fn=)\n", "612 LOSS DIFF: tensor(5.4191, grad_fn=) tensor(5.3715, grad_fn=)\n", "613 LOSS DIFF: tensor(5.3107, grad_fn=) tensor(5.2864, grad_fn=)\n", "614 LOSS DIFF: tensor(5.3746, grad_fn=) tensor(5.2844, grad_fn=)\n", "615 LOSS DIFF: tensor(5.4486, grad_fn=) tensor(5.3746, grad_fn=)\n", "616 LOSS DIFF: tensor(5.4732, grad_fn=) tensor(5.4486, grad_fn=)\n", "617 LOSS DIFF: tensor(5.3487, grad_fn=) tensor(5.2559, grad_fn=)\n", "618 LOSS DIFF: tensor(5.3737, grad_fn=) tensor(5.3487, grad_fn=)\n", "619 LOSS DIFF: tensor(5.3524, grad_fn=) tensor(5.3056, grad_fn=)\n", "620 LOSS DIFF: tensor(5.4119, grad_fn=) tensor(5.3524, grad_fn=)\n", "621 LOSS DIFF: tensor(5.3877, grad_fn=) tensor(5.3544, grad_fn=)\n", "622 LOSS DIFF: tensor(5.3305, grad_fn=) tensor(5.3165, grad_fn=)\n", "623 LOSS DIFF: tensor(5.4056, grad_fn=) tensor(5.3305, grad_fn=)\n", "624 LOSS DIFF: tensor(5.3550, grad_fn=) tensor(5.3069, grad_fn=)\n", "625 LOSS DIFF: tensor(5.3018, grad_fn=) tensor(5.2306, grad_fn=)\n", "626 LOSS DIFF: tensor(5.3613, grad_fn=) tensor(5.3018, grad_fn=)\n", "627 LOSS DIFF: tensor(5.3056, grad_fn=) tensor(5.2849, grad_fn=)\n", "628 LOSS DIFF: tensor(5.4281, grad_fn=) tensor(5.1398, grad_fn=)\n", "629 LOSS DIFF: tensor(5.3037, grad_fn=) tensor(5.2343, grad_fn=)\n", "630 LOSS DIFF: tensor(5.3630, grad_fn=) tensor(5.2993, grad_fn=)\n", "631 LOSS DIFF: tensor(5.3922, grad_fn=) tensor(5.3630, grad_fn=)\n", "632 LOSS DIFF: tensor(5.3583, grad_fn=) tensor(5.2346, grad_fn=)\n", "633 LOSS DIFF: tensor(5.3638, grad_fn=) tensor(5.3486, grad_fn=)\n", "634 LOSS DIFF: tensor(5.2703, grad_fn=) tensor(5.2605, grad_fn=)\n", "635 LOSS DIFF: tensor(5.3341, grad_fn=) tensor(5.2703, grad_fn=)\n", "636 LOSS DIFF: tensor(5.3615, grad_fn=) tensor(5.3341, grad_fn=)\n", "637 LOSS DIFF: tensor(5.3735, grad_fn=) tensor(5.3225, grad_fn=)\n", "638 LOSS DIFF: tensor(5.3535, grad_fn=) tensor(5.2765, grad_fn=)\n", "639 LOSS DIFF: tensor(5.4068, grad_fn=) tensor(5.3535, grad_fn=)\n", "640 LOSS DIFF: tensor(5.3669, grad_fn=) tensor(5.2441, grad_fn=)\n", "641 LOSS DIFF: tensor(5.3348, grad_fn=) tensor(5.2892, grad_fn=)\n", "642 LOSS DIFF: tensor(5.4134, grad_fn=) tensor(5.3348, grad_fn=)\n", "643 LOSS DIFF: tensor(5.3649, grad_fn=) tensor(5.3365, grad_fn=)\n", "644 LOSS DIFF: tensor(5.3606, grad_fn=) tensor(5.2532, grad_fn=)\n", "645 LOSS DIFF: tensor(5.3622, grad_fn=) tensor(5.2414, grad_fn=)\n", "646 LOSS DIFF: tensor(5.3985, grad_fn=) tensor(5.3297, grad_fn=)\n", "1300 tensor(5.2993, grad_fn=)\n", "647 LOSS DIFF: tensor(5.2993, grad_fn=) tensor(5.2568, grad_fn=)\n", "648 LOSS DIFF: tensor(5.3153, grad_fn=) tensor(5.2993, grad_fn=)\n", "649 LOSS DIFF: tensor(5.3619, grad_fn=) tensor(5.2734, grad_fn=)\n", "650 LOSS DIFF: tensor(5.4052, grad_fn=) tensor(5.2523, grad_fn=)\n", "651 LOSS DIFF: tensor(5.3573, grad_fn=) tensor(5.3209, grad_fn=)\n", "652 LOSS DIFF: tensor(5.2472, grad_fn=) tensor(5.2427, grad_fn=)\n", "653 LOSS DIFF: tensor(5.4110, grad_fn=) tensor(5.2472, grad_fn=)\n", "654 LOSS DIFF: tensor(5.2660, grad_fn=) tensor(5.2397, grad_fn=)\n", "655 LOSS DIFF: tensor(5.3451, grad_fn=) tensor(5.2660, grad_fn=)\n", "656 LOSS DIFF: tensor(5.2828, grad_fn=) tensor(5.1689, grad_fn=)\n", "657 LOSS DIFF: tensor(5.3989, grad_fn=) tensor(5.2828, grad_fn=)\n", "658 LOSS DIFF: tensor(5.3128, grad_fn=) tensor(5.2708, grad_fn=)\n", "659 LOSS DIFF: tensor(5.2602, grad_fn=) tensor(5.2357, grad_fn=)\n", "660 LOSS DIFF: tensor(5.3591, grad_fn=) tensor(5.2602, grad_fn=)\n", "661 LOSS DIFF: tensor(5.4472, grad_fn=) tensor(5.2953, grad_fn=)\n", "662 LOSS DIFF: tensor(5.2631, grad_fn=) tensor(5.1217, grad_fn=)\n", "663 LOSS DIFF: tensor(5.3468, grad_fn=) tensor(5.2631, grad_fn=)\n", "664 LOSS DIFF: tensor(5.3112, grad_fn=) tensor(5.1798, grad_fn=)\n", "665 LOSS DIFF: tensor(5.4536, grad_fn=) tensor(5.3112, grad_fn=)\n", "666 LOSS DIFF: tensor(5.2946, grad_fn=) tensor(5.2031, grad_fn=)\n", "667 LOSS DIFF: tensor(5.3658, grad_fn=) tensor(5.2946, grad_fn=)\n", "668 LOSS DIFF: tensor(5.3176, grad_fn=) tensor(5.3126, grad_fn=)\n", "669 LOSS DIFF: tensor(5.3397, grad_fn=) tensor(5.2761, grad_fn=)\n", "670 LOSS DIFF: tensor(5.3414, grad_fn=) tensor(5.1992, grad_fn=)\n", "671 LOSS DIFF: tensor(5.3593, grad_fn=) tensor(5.2940, grad_fn=)\n", "672 LOSS DIFF: tensor(5.3734, grad_fn=) tensor(5.3593, grad_fn=)\n", "673 LOSS DIFF: tensor(5.3879, grad_fn=) tensor(5.3734, grad_fn=)\n", "674 LOSS DIFF: tensor(5.4095, grad_fn=) tensor(5.3879, grad_fn=)\n", "675 LOSS DIFF: tensor(5.3731, grad_fn=) tensor(5.3149, grad_fn=)\n", "676 LOSS DIFF: tensor(5.3762, grad_fn=) tensor(5.2030, grad_fn=)\n", "677 LOSS DIFF: tensor(5.3640, grad_fn=) tensor(5.2093, grad_fn=)\n", "678 LOSS DIFF: tensor(5.3913, grad_fn=) tensor(5.3640, grad_fn=)\n", "679 LOSS DIFF: tensor(5.3979, grad_fn=) tensor(5.3913, grad_fn=)\n", "680 LOSS DIFF: tensor(5.3584, grad_fn=) tensor(5.2680, grad_fn=)\n", "681 LOSS DIFF: tensor(5.3767, grad_fn=) tensor(5.3584, grad_fn=)\n", "682 LOSS DIFF: tensor(5.3828, grad_fn=) tensor(5.2542, grad_fn=)\n", "683 LOSS DIFF: tensor(5.3277, grad_fn=) tensor(5.2771, grad_fn=)\n", "684 LOSS DIFF: tensor(5.2910, grad_fn=) tensor(5.2756, grad_fn=)\n", "685 LOSS DIFF: tensor(5.3150, grad_fn=) tensor(5.2910, grad_fn=)\n", "686 LOSS DIFF: tensor(5.3208, grad_fn=) tensor(5.3150, grad_fn=)\n", "687 LOSS DIFF: tensor(5.4099, grad_fn=) tensor(5.1751, grad_fn=)\n", "688 LOSS DIFF: tensor(5.3103, grad_fn=) tensor(5.1557, grad_fn=)\n", "689 LOSS DIFF: tensor(5.2464, grad_fn=) tensor(5.2038, grad_fn=)\n", "690 LOSS DIFF: tensor(5.4148, grad_fn=) tensor(5.2464, grad_fn=)\n", "691 LOSS DIFF: tensor(5.3898, grad_fn=) tensor(5.1863, grad_fn=)\n", "692 LOSS DIFF: tensor(5.3926, grad_fn=) tensor(5.3898, grad_fn=)\n", "693 LOSS DIFF: tensor(5.3975, grad_fn=) tensor(5.2156, grad_fn=)\n", "694 LOSS DIFF: tensor(5.2680, grad_fn=) tensor(5.2367, grad_fn=)\n", "695 LOSS DIFF: tensor(5.4590, grad_fn=) tensor(5.1675, grad_fn=)\n", "696 LOSS DIFF: tensor(5.3168, grad_fn=) tensor(5.2447, grad_fn=)\n", "697 LOSS DIFF: tensor(5.3581, grad_fn=) tensor(5.2256, grad_fn=)\n", "698 LOSS DIFF: tensor(5.3668, grad_fn=) tensor(5.3399, grad_fn=)\n", "1400 tensor(5.4240, grad_fn=)\n", "699 LOSS DIFF: tensor(5.4240, grad_fn=) tensor(5.2860, grad_fn=)\n", "700 LOSS DIFF: tensor(5.4507, grad_fn=) tensor(5.2273, grad_fn=)\n", "701 LOSS DIFF: tensor(5.3034, grad_fn=) tensor(5.2823, grad_fn=)\n", "702 LOSS DIFF: tensor(5.3641, grad_fn=) tensor(5.2678, grad_fn=)\n", "703 LOSS DIFF: tensor(5.3712, grad_fn=) tensor(5.3641, grad_fn=)\n", "704 LOSS DIFF: tensor(5.3199, grad_fn=) tensor(5.2634, grad_fn=)\n", "705 LOSS DIFF: tensor(5.2937, grad_fn=) tensor(5.2929, grad_fn=)\n", "706 LOSS DIFF: tensor(5.4281, grad_fn=) tensor(5.2937, grad_fn=)\n", "707 LOSS DIFF: tensor(5.3490, grad_fn=) tensor(5.2559, grad_fn=)\n", "708 LOSS DIFF: tensor(5.2956, grad_fn=) tensor(5.2263, grad_fn=)\n", "709 LOSS DIFF: tensor(5.3573, grad_fn=) tensor(5.2956, grad_fn=)\n", "710 LOSS DIFF: tensor(5.2388, grad_fn=) tensor(5.1368, grad_fn=)\n", "711 LOSS DIFF: tensor(5.4568, grad_fn=) tensor(5.2388, grad_fn=)\n", "712 LOSS DIFF: tensor(5.3657, grad_fn=) tensor(5.2206, grad_fn=)\n", "713 LOSS DIFF: tensor(5.3937, grad_fn=) tensor(5.3657, grad_fn=)\n", "714 LOSS DIFF: tensor(5.3151, grad_fn=) tensor(5.2181, grad_fn=)\n", "715 LOSS DIFF: tensor(5.3477, grad_fn=) tensor(5.3151, grad_fn=)\n", "716 LOSS DIFF: tensor(5.3319, grad_fn=) tensor(5.2977, grad_fn=)\n", "717 LOSS DIFF: tensor(5.2638, grad_fn=) tensor(5.1780, grad_fn=)\n", "718 LOSS DIFF: tensor(5.2669, grad_fn=) tensor(5.2638, grad_fn=)\n", "719 LOSS DIFF: tensor(5.2977, grad_fn=) tensor(5.2669, grad_fn=)\n", "720 LOSS DIFF: tensor(5.4203, grad_fn=) tensor(5.2977, grad_fn=)\n", "721 LOSS DIFF: tensor(5.3931, grad_fn=) tensor(5.3073, grad_fn=)\n", "722 LOSS DIFF: tensor(5.2668, grad_fn=) tensor(5.2528, grad_fn=)\n", "723 LOSS DIFF: tensor(5.2713, grad_fn=) tensor(5.2102, grad_fn=)\n", "724 LOSS DIFF: tensor(5.4657, grad_fn=) tensor(5.2713, grad_fn=)\n", "725 LOSS DIFF: tensor(5.3160, grad_fn=) tensor(5.2097, grad_fn=)\n", "726 LOSS DIFF: tensor(5.2945, grad_fn=) tensor(5.2223, grad_fn=)\n", "727 LOSS DIFF: tensor(5.2871, grad_fn=) tensor(5.2417, grad_fn=)\n", "728 LOSS DIFF: tensor(5.3049, grad_fn=) tensor(5.2871, grad_fn=)\n", "729 LOSS DIFF: tensor(5.2566, grad_fn=) tensor(5.2405, grad_fn=)\n", "730 LOSS DIFF: tensor(5.3831, grad_fn=) tensor(5.2566, grad_fn=)\n", "731 LOSS DIFF: tensor(5.3322, grad_fn=) tensor(5.2234, grad_fn=)\n", "732 LOSS DIFF: tensor(5.3731, grad_fn=) tensor(5.2365, grad_fn=)\n", "733 LOSS DIFF: tensor(5.4400, grad_fn=) tensor(5.3731, grad_fn=)\n", "734 LOSS DIFF: tensor(5.4715, grad_fn=) tensor(5.3013, grad_fn=)\n", "735 LOSS DIFF: tensor(5.4422, grad_fn=) tensor(5.4010, grad_fn=)\n", "736 LOSS DIFF: tensor(5.2298, grad_fn=) tensor(5.2163, grad_fn=)\n", "737 LOSS DIFF: tensor(5.2493, grad_fn=) tensor(5.2298, grad_fn=)\n", "738 LOSS DIFF: tensor(5.2958, grad_fn=) tensor(5.2493, grad_fn=)\n", "739 LOSS DIFF: tensor(5.4094, grad_fn=) tensor(5.2502, grad_fn=)\n", "740 LOSS DIFF: tensor(5.2576, grad_fn=) tensor(5.2305, grad_fn=)\n", "741 LOSS DIFF: tensor(5.3885, grad_fn=) tensor(5.2576, grad_fn=)\n", "742 LOSS DIFF: tensor(5.3493, grad_fn=) tensor(5.3387, grad_fn=)\n", "743 LOSS DIFF: tensor(5.2640, grad_fn=) tensor(5.1842, grad_fn=)\n", "744 LOSS DIFF: tensor(5.3568, grad_fn=) tensor(5.2640, grad_fn=)\n", "745 LOSS DIFF: tensor(5.4262, grad_fn=) tensor(5.3232, grad_fn=)\n", "746 LOSS DIFF: tensor(5.3020, grad_fn=) tensor(5.2816, grad_fn=)\n", "1500 tensor(5.1988, grad_fn=)\n", "747 LOSS DIFF: tensor(5.2921, grad_fn=) tensor(5.1988, grad_fn=)\n", "748 LOSS DIFF: tensor(5.3279, grad_fn=) tensor(5.2921, grad_fn=)\n", "749 LOSS DIFF: tensor(5.3318, grad_fn=) tensor(5.0392, grad_fn=)\n", "750 LOSS DIFF: tensor(5.4100, grad_fn=) tensor(5.1959, grad_fn=)\n", "751 LOSS DIFF: tensor(5.2634, grad_fn=) tensor(5.2334, grad_fn=)\n", "752 LOSS DIFF: tensor(5.2761, grad_fn=) tensor(5.2634, grad_fn=)\n", "753 LOSS DIFF: tensor(5.3743, grad_fn=) tensor(5.2761, grad_fn=)\n", "754 LOSS DIFF: tensor(5.4399, grad_fn=) tensor(5.2495, grad_fn=)\n", "755 LOSS DIFF: tensor(5.3723, grad_fn=) tensor(5.2125, grad_fn=)\n", "756 LOSS DIFF: tensor(5.4313, grad_fn=) tensor(5.2310, grad_fn=)\n", "757 LOSS DIFF: tensor(5.3316, grad_fn=) tensor(5.2243, grad_fn=)\n", "758 LOSS DIFF: tensor(5.3435, grad_fn=) tensor(5.3128, grad_fn=)\n", "759 LOSS DIFF: tensor(5.3396, grad_fn=) tensor(5.1988, grad_fn=)\n", "760 LOSS DIFF: tensor(5.3344, grad_fn=) tensor(5.2798, grad_fn=)\n", "761 LOSS DIFF: tensor(5.3503, grad_fn=) tensor(5.2845, grad_fn=)\n", "762 LOSS DIFF: tensor(5.3522, grad_fn=) tensor(5.3503, grad_fn=)\n", "763 LOSS DIFF: tensor(5.2487, grad_fn=) tensor(5.2103, grad_fn=)\n", "764 LOSS DIFF: tensor(5.3914, grad_fn=) tensor(5.2487, grad_fn=)\n", "765 LOSS DIFF: tensor(5.3346, grad_fn=) tensor(5.3265, grad_fn=)\n", "766 LOSS DIFF: tensor(5.3932, grad_fn=) tensor(5.2668, grad_fn=)\n", "767 LOSS DIFF: tensor(5.3308, grad_fn=) tensor(5.2136, grad_fn=)\n", "768 LOSS DIFF: tensor(5.2342, grad_fn=) tensor(5.1842, grad_fn=)\n", "769 LOSS DIFF: tensor(5.2779, grad_fn=) tensor(5.2342, grad_fn=)\n", "770 LOSS DIFF: tensor(5.3309, grad_fn=) tensor(5.2779, grad_fn=)\n", "771 LOSS DIFF: tensor(5.2772, grad_fn=) tensor(5.2208, grad_fn=)\n", "772 LOSS DIFF: tensor(5.2998, grad_fn=) tensor(5.2772, grad_fn=)\n", "773 LOSS DIFF: tensor(5.3198, grad_fn=) tensor(5.2998, grad_fn=)\n", "774 LOSS DIFF: tensor(5.4071, grad_fn=) tensor(5.2555, grad_fn=)\n", "775 LOSS DIFF: tensor(5.3407, grad_fn=) tensor(5.2137, grad_fn=)\n", "776 LOSS DIFF: tensor(5.3168, grad_fn=) tensor(5.1123, grad_fn=)\n", "777 LOSS DIFF: tensor(5.3270, grad_fn=) tensor(5.3168, grad_fn=)\n", "778 LOSS DIFF: tensor(5.2770, grad_fn=) tensor(5.1605, grad_fn=)\n", "779 LOSS DIFF: tensor(5.3174, grad_fn=) tensor(5.2770, grad_fn=)\n", "780 LOSS DIFF: tensor(5.5412, grad_fn=) tensor(5.2626, grad_fn=)\n", "781 LOSS DIFF: tensor(5.3245, grad_fn=) tensor(5.2973, grad_fn=)\n", "782 LOSS DIFF: tensor(5.2911, grad_fn=) tensor(5.2910, grad_fn=)\n", "783 LOSS DIFF: tensor(5.3198, grad_fn=) tensor(5.2911, grad_fn=)\n", "784 LOSS DIFF: tensor(5.2661, grad_fn=) tensor(5.2297, grad_fn=)\n", "785 LOSS DIFF: tensor(5.3086, grad_fn=) tensor(5.2661, grad_fn=)\n", "786 LOSS DIFF: tensor(5.3143, grad_fn=) tensor(5.3086, grad_fn=)\n", "787 LOSS DIFF: tensor(5.3467, grad_fn=) tensor(5.3143, grad_fn=)\n", "788 LOSS DIFF: tensor(5.3771, grad_fn=) tensor(5.3003, grad_fn=)\n", "789 LOSS DIFF: tensor(5.2802, grad_fn=) tensor(5.2619, grad_fn=)\n", "790 LOSS DIFF: tensor(5.3205, grad_fn=) tensor(5.2489, grad_fn=)\n", "791 LOSS DIFF: tensor(5.3028, grad_fn=) tensor(5.1770, grad_fn=)\n", "792 LOSS DIFF: tensor(5.3130, grad_fn=) tensor(5.3028, grad_fn=)\n", "793 LOSS DIFF: tensor(5.2011, grad_fn=) tensor(5.0365, grad_fn=)\n", "794 LOSS DIFF: tensor(5.2648, grad_fn=) tensor(5.2011, grad_fn=)\n", "795 LOSS DIFF: tensor(5.3135, grad_fn=) tensor(5.2648, grad_fn=)\n", "796 LOSS DIFF: tensor(5.3958, grad_fn=) tensor(5.3135, grad_fn=)\n", "797 LOSS DIFF: tensor(5.3604, grad_fn=) tensor(5.1652, grad_fn=)\n", "1600 tensor(5.3680, grad_fn=)\n", "798 LOSS DIFF: tensor(5.3680, grad_fn=) tensor(5.2941, grad_fn=)\n", "799 LOSS DIFF: tensor(5.2164, grad_fn=) tensor(5.1485, grad_fn=)\n", "800 LOSS DIFF: tensor(5.3943, grad_fn=) tensor(5.2164, grad_fn=)\n", "801 LOSS DIFF: tensor(5.2456, grad_fn=) tensor(5.1408, grad_fn=)\n", "802 LOSS DIFF: tensor(5.2624, grad_fn=) tensor(5.2268, grad_fn=)\n", "803 LOSS DIFF: tensor(5.3054, grad_fn=) tensor(5.1765, grad_fn=)\n", "804 LOSS DIFF: tensor(5.3530, grad_fn=) tensor(5.3054, grad_fn=)\n", "805 LOSS DIFF: tensor(5.3219, grad_fn=) tensor(5.2960, grad_fn=)\n", "806 LOSS DIFF: tensor(5.3445, grad_fn=) tensor(5.2025, grad_fn=)\n", "807 LOSS DIFF: tensor(5.4269, grad_fn=) tensor(5.2403, grad_fn=)\n", "808 LOSS DIFF: tensor(5.3550, grad_fn=) tensor(5.2981, grad_fn=)\n", "809 LOSS DIFF: tensor(5.2882, grad_fn=) tensor(5.2592, grad_fn=)\n", "810 LOSS DIFF: tensor(5.3459, grad_fn=) tensor(5.2882, grad_fn=)\n", "811 LOSS DIFF: tensor(5.3961, grad_fn=) tensor(5.2398, grad_fn=)\n", "812 LOSS DIFF: tensor(5.3464, grad_fn=) tensor(5.2061, grad_fn=)\n", "813 LOSS DIFF: tensor(5.4667, grad_fn=) tensor(5.3051, grad_fn=)\n", "814 LOSS DIFF: tensor(5.3144, grad_fn=) tensor(5.2452, grad_fn=)\n", "815 LOSS DIFF: tensor(5.3118, grad_fn=) tensor(5.1809, grad_fn=)\n", "816 LOSS DIFF: tensor(5.2670, grad_fn=) tensor(5.2661, grad_fn=)\n", "817 LOSS DIFF: tensor(5.2897, grad_fn=) tensor(5.2135, grad_fn=)\n", "818 LOSS DIFF: tensor(5.3138, grad_fn=) tensor(5.2798, grad_fn=)\n", "819 LOSS DIFF: tensor(5.3730, grad_fn=) tensor(5.3138, grad_fn=)\n", "820 LOSS DIFF: tensor(5.3392, grad_fn=) tensor(5.3115, grad_fn=)\n", "821 LOSS DIFF: tensor(5.3534, grad_fn=) tensor(5.2959, grad_fn=)\n", "822 LOSS DIFF: tensor(5.3893, grad_fn=) tensor(5.3500, grad_fn=)\n", "823 LOSS DIFF: tensor(5.2580, grad_fn=) tensor(5.1436, grad_fn=)\n", "824 LOSS DIFF: tensor(5.2688, grad_fn=) tensor(5.2580, grad_fn=)\n", "825 LOSS DIFF: tensor(5.3212, grad_fn=) tensor(5.2688, grad_fn=)\n", "826 LOSS DIFF: tensor(5.3839, grad_fn=) tensor(5.2897, grad_fn=)\n", "827 LOSS DIFF: tensor(5.3353, grad_fn=) tensor(5.2536, grad_fn=)\n", "828 LOSS DIFF: tensor(5.2735, grad_fn=) tensor(5.2156, grad_fn=)\n", "829 LOSS DIFF: tensor(5.3446, grad_fn=) tensor(5.2735, grad_fn=)\n", "830 LOSS DIFF: tensor(5.3156, grad_fn=) tensor(5.2965, grad_fn=)\n", "831 LOSS DIFF: tensor(5.3263, grad_fn=) tensor(5.2847, grad_fn=)\n", "832 LOSS DIFF: tensor(5.2776, grad_fn=) tensor(5.2448, grad_fn=)\n", "833 LOSS DIFF: tensor(5.3394, grad_fn=) tensor(5.2776, grad_fn=)\n", "834 LOSS DIFF: tensor(5.3633, grad_fn=) tensor(5.2746, grad_fn=)\n", "835 LOSS DIFF: tensor(5.2726, grad_fn=) tensor(5.2409, grad_fn=)\n", "836 LOSS DIFF: tensor(5.2986, grad_fn=) tensor(5.2726, grad_fn=)\n", "837 LOSS DIFF: tensor(5.2534, grad_fn=) tensor(5.1774, grad_fn=)\n", "838 LOSS DIFF: tensor(5.3111, grad_fn=) tensor(5.2534, grad_fn=)\n", "839 LOSS DIFF: tensor(5.3127, grad_fn=) tensor(5.3111, grad_fn=)\n", "840 LOSS DIFF: tensor(5.4215, grad_fn=) tensor(5.2348, grad_fn=)\n", "841 LOSS DIFF: tensor(5.2974, grad_fn=) tensor(5.1407, grad_fn=)\n", "842 LOSS DIFF: tensor(5.3341, grad_fn=) tensor(5.2498, grad_fn=)\n", "843 LOSS DIFF: tensor(5.3087, grad_fn=) tensor(5.2148, grad_fn=)\n", "844 LOSS DIFF: tensor(5.2507, grad_fn=) tensor(5.1230, grad_fn=)\n", "1700 tensor(5.3550, grad_fn=)\n", "845 LOSS DIFF: tensor(5.3550, grad_fn=) tensor(5.2507, grad_fn=)\n", "846 LOSS DIFF: tensor(5.3766, grad_fn=) tensor(5.3550, grad_fn=)\n", "847 LOSS DIFF: tensor(5.2487, grad_fn=) tensor(5.2300, grad_fn=)\n", "848 LOSS DIFF: tensor(5.3142, grad_fn=) tensor(5.2487, grad_fn=)\n", "849 LOSS DIFF: tensor(5.3734, grad_fn=) tensor(5.2986, grad_fn=)\n", "850 LOSS DIFF: tensor(5.2452, grad_fn=) tensor(5.1219, grad_fn=)\n", "851 LOSS DIFF: tensor(5.2957, grad_fn=) tensor(5.2452, grad_fn=)\n", "852 LOSS DIFF: tensor(5.2852, grad_fn=) tensor(5.2758, grad_fn=)\n", "853 LOSS DIFF: tensor(5.3498, grad_fn=) tensor(5.2852, grad_fn=)\n", "854 LOSS DIFF: tensor(5.4008, grad_fn=) tensor(5.3498, grad_fn=)\n", "855 LOSS DIFF: tensor(5.2165, grad_fn=) tensor(5.1128, grad_fn=)\n", "856 LOSS DIFF: tensor(5.2850, grad_fn=) tensor(5.2165, grad_fn=)\n", "857 LOSS DIFF: tensor(5.3881, grad_fn=) tensor(5.2850, grad_fn=)\n", "858 LOSS DIFF: tensor(5.2249, grad_fn=) tensor(5.2228, grad_fn=)\n", "859 LOSS DIFF: tensor(5.2559, grad_fn=) tensor(5.2249, grad_fn=)\n", "860 LOSS DIFF: tensor(5.2867, grad_fn=) tensor(5.2559, grad_fn=)\n", "861 LOSS DIFF: tensor(5.4387, grad_fn=) tensor(5.2314, grad_fn=)\n", "862 LOSS DIFF: tensor(5.2867, grad_fn=) tensor(5.2233, grad_fn=)\n", "863 LOSS DIFF: tensor(5.3220, grad_fn=) tensor(5.2867, grad_fn=)\n", "864 LOSS DIFF: tensor(5.2581, grad_fn=) tensor(5.2269, grad_fn=)\n", "865 LOSS DIFF: tensor(5.2703, grad_fn=) tensor(5.2581, grad_fn=)\n", "866 LOSS DIFF: tensor(5.2300, grad_fn=) tensor(5.1481, grad_fn=)\n", "867 LOSS DIFF: tensor(5.2460, grad_fn=) tensor(5.2300, grad_fn=)\n", "868 LOSS DIFF: tensor(5.3260, grad_fn=) tensor(5.2460, grad_fn=)\n", "869 LOSS DIFF: tensor(5.2582, grad_fn=) tensor(5.1454, grad_fn=)\n", "870 LOSS DIFF: tensor(5.3153, grad_fn=) tensor(5.2582, grad_fn=)\n", "871 LOSS DIFF: tensor(5.2967, grad_fn=) tensor(5.0807, grad_fn=)\n", "872 LOSS DIFF: tensor(5.3636, grad_fn=) tensor(5.2188, grad_fn=)\n", "873 LOSS DIFF: tensor(5.3807, grad_fn=) tensor(5.3636, grad_fn=)\n", "874 LOSS DIFF: tensor(5.3318, grad_fn=) tensor(5.2364, grad_fn=)\n", "875 LOSS DIFF: tensor(5.3220, grad_fn=) tensor(5.2170, grad_fn=)\n", "876 LOSS DIFF: tensor(5.2753, grad_fn=) tensor(5.1677, grad_fn=)\n", "877 LOSS DIFF: tensor(5.3142, grad_fn=) tensor(5.2753, grad_fn=)\n", "878 LOSS DIFF: tensor(5.3142, grad_fn=) tensor(5.1974, grad_fn=)\n", "879 LOSS DIFF: tensor(5.1746, grad_fn=) tensor(5.0885, grad_fn=)\n", "880 LOSS DIFF: tensor(5.3789, grad_fn=) tensor(5.1746, grad_fn=)\n", "881 LOSS DIFF: tensor(5.3057, grad_fn=) tensor(5.2196, grad_fn=)\n", "882 LOSS DIFF: tensor(5.2886, grad_fn=) tensor(5.2158, grad_fn=)\n", "883 LOSS DIFF: tensor(5.3288, grad_fn=) tensor(5.2491, grad_fn=)\n", "884 LOSS DIFF: tensor(5.4903, grad_fn=) tensor(5.3288, grad_fn=)\n", "885 LOSS DIFF: tensor(5.4034, grad_fn=) tensor(5.2798, grad_fn=)\n", "886 LOSS DIFF: tensor(5.3601, grad_fn=) tensor(5.1771, grad_fn=)\n", "887 LOSS DIFF: tensor(5.2809, grad_fn=) tensor(5.1809, grad_fn=)\n", "888 LOSS DIFF: tensor(5.3620, grad_fn=) tensor(5.2748, grad_fn=)\n", "889 LOSS DIFF: tensor(5.3855, grad_fn=) tensor(5.2573, grad_fn=)\n", "890 LOSS DIFF: tensor(5.3124, grad_fn=) tensor(5.2379, grad_fn=)\n", "891 LOSS DIFF: tensor(5.3192, grad_fn=) tensor(5.3124, grad_fn=)\n", "892 LOSS DIFF: tensor(5.3423, grad_fn=) tensor(5.3192, grad_fn=)\n", "893 LOSS DIFF: tensor(5.4086, grad_fn=) tensor(5.1976, grad_fn=)\n", "894 LOSS DIFF: tensor(5.3156, grad_fn=) tensor(5.2619, grad_fn=)\n", "895 LOSS DIFF: tensor(5.3277, grad_fn=) tensor(5.3156, grad_fn=)\n", "896 LOSS DIFF: tensor(5.2352, grad_fn=) tensor(5.2142, grad_fn=)\n", "897 LOSS DIFF: tensor(5.3471, grad_fn=) tensor(5.2059, grad_fn=)\n", "898 LOSS DIFF: tensor(5.2658, grad_fn=) tensor(5.1801, grad_fn=)\n", "1800 tensor(5.4171, grad_fn=)\n", "899 LOSS DIFF: tensor(5.4171, grad_fn=) tensor(5.2658, grad_fn=)\n", "900 LOSS DIFF: tensor(5.3919, grad_fn=) tensor(5.2872, grad_fn=)\n", "901 LOSS DIFF: tensor(5.2667, grad_fn=) tensor(5.1940, grad_fn=)\n", "902 LOSS DIFF: tensor(5.3631, grad_fn=) tensor(5.2667, grad_fn=)\n", "903 LOSS DIFF: tensor(5.3693, grad_fn=) tensor(5.2566, grad_fn=)\n", "904 LOSS DIFF: tensor(5.3239, grad_fn=) tensor(5.2152, grad_fn=)\n", "905 LOSS DIFF: tensor(5.3641, grad_fn=) tensor(5.3239, grad_fn=)\n", "906 LOSS DIFF: tensor(5.2443, grad_fn=) tensor(5.1951, grad_fn=)\n", "907 LOSS DIFF: tensor(5.4277, grad_fn=) tensor(5.1634, grad_fn=)\n", "908 LOSS DIFF: tensor(5.2730, grad_fn=) tensor(5.0604, grad_fn=)\n", "909 LOSS DIFF: tensor(5.2867, grad_fn=) tensor(5.2566, grad_fn=)\n", "910 LOSS DIFF: tensor(5.4127, grad_fn=) tensor(5.2155, grad_fn=)\n", "911 LOSS DIFF: tensor(5.3634, grad_fn=) tensor(5.3211, grad_fn=)\n", "912 LOSS DIFF: tensor(5.2831, grad_fn=) tensor(5.2335, grad_fn=)\n", "913 LOSS DIFF: tensor(5.2755, grad_fn=) tensor(5.2735, grad_fn=)\n", "914 LOSS DIFF: tensor(5.2826, grad_fn=) tensor(5.2755, grad_fn=)\n", "915 LOSS DIFF: tensor(5.3887, grad_fn=) tensor(5.0861, grad_fn=)\n", "916 LOSS DIFF: tensor(5.3065, grad_fn=) tensor(5.2729, grad_fn=)\n", "917 LOSS DIFF: tensor(5.2632, grad_fn=) tensor(5.1560, grad_fn=)\n", "918 LOSS DIFF: tensor(5.2920, grad_fn=) tensor(5.1884, grad_fn=)\n", "919 LOSS DIFF: tensor(5.3229, grad_fn=) tensor(5.2920, grad_fn=)\n", "920 LOSS DIFF: tensor(5.2855, grad_fn=) tensor(5.1965, grad_fn=)\n", "921 LOSS DIFF: tensor(5.3634, grad_fn=) tensor(5.2855, grad_fn=)\n", "922 LOSS DIFF: tensor(5.3724, grad_fn=) tensor(5.0690, grad_fn=)\n", "923 LOSS DIFF: tensor(5.2805, grad_fn=) tensor(5.2636, grad_fn=)\n", "924 LOSS DIFF: tensor(5.2306, grad_fn=) tensor(5.0033, grad_fn=)\n", "925 LOSS DIFF: tensor(5.2542, grad_fn=) tensor(5.2243, grad_fn=)\n", "926 LOSS DIFF: tensor(5.3378, grad_fn=) tensor(5.2542, grad_fn=)\n", "927 LOSS DIFF: tensor(5.2164, grad_fn=) tensor(5.1267, grad_fn=)\n", "928 LOSS DIFF: tensor(5.3090, grad_fn=) tensor(5.2164, grad_fn=)\n", "929 LOSS DIFF: tensor(5.3777, grad_fn=) tensor(5.3090, grad_fn=)\n", "930 LOSS DIFF: tensor(5.2597, grad_fn=) tensor(5.2556, grad_fn=)\n", "931 LOSS DIFF: tensor(5.4438, grad_fn=) tensor(5.2080, grad_fn=)\n", "932 LOSS DIFF: tensor(5.2762, grad_fn=) tensor(5.2386, grad_fn=)\n", "933 LOSS DIFF: tensor(5.3475, grad_fn=) tensor(5.1511, grad_fn=)\n", "934 LOSS DIFF: tensor(5.3897, grad_fn=) tensor(5.3475, grad_fn=)\n", "935 LOSS DIFF: tensor(5.2932, grad_fn=) tensor(5.1943, grad_fn=)\n", "936 LOSS DIFF: tensor(5.3678, grad_fn=) tensor(5.2932, grad_fn=)\n", "937 LOSS DIFF: tensor(5.3282, grad_fn=) tensor(5.2433, grad_fn=)\n", "938 LOSS DIFF: tensor(5.3416, grad_fn=) tensor(5.3282, grad_fn=)\n", "939 LOSS DIFF: tensor(5.2709, grad_fn=) tensor(5.1789, grad_fn=)\n", "940 LOSS DIFF: tensor(5.3140, grad_fn=) tensor(5.2709, grad_fn=)\n", "941 LOSS DIFF: tensor(5.2993, grad_fn=) tensor(5.2861, grad_fn=)\n", "942 LOSS DIFF: tensor(5.1903, grad_fn=) tensor(5.1216, grad_fn=)\n", "943 LOSS DIFF: tensor(5.2935, grad_fn=) tensor(5.1903, grad_fn=)\n", "944 LOSS DIFF: tensor(5.2984, grad_fn=) tensor(5.2935, grad_fn=)\n", "945 LOSS DIFF: tensor(5.3579, grad_fn=) tensor(5.2984, grad_fn=)\n", "946 LOSS DIFF: tensor(5.2808, grad_fn=) tensor(5.1785, grad_fn=)\n", "947 LOSS DIFF: tensor(5.2995, grad_fn=) tensor(5.2629, grad_fn=)\n", "948 LOSS DIFF: tensor(5.3437, grad_fn=) tensor(5.2995, grad_fn=)\n", "949 LOSS DIFF: tensor(5.3592, grad_fn=) tensor(5.3437, grad_fn=)\n", "950 LOSS DIFF: tensor(5.4155, grad_fn=) tensor(5.3592, grad_fn=)\n", "951 LOSS DIFF: tensor(5.3014, grad_fn=) tensor(5.2301, grad_fn=)\n", "1900 tensor(5.3040, grad_fn=)\n", "952 LOSS DIFF: tensor(5.3040, grad_fn=) tensor(5.2344, grad_fn=)\n", "953 LOSS DIFF: tensor(5.2827, grad_fn=) tensor(5.2677, grad_fn=)\n", "954 LOSS DIFF: tensor(5.3628, grad_fn=) tensor(5.2827, grad_fn=)\n", "955 LOSS DIFF: tensor(5.2943, grad_fn=) tensor(5.2210, grad_fn=)\n", "956 LOSS DIFF: tensor(5.1808, grad_fn=) tensor(5.1610, grad_fn=)\n", "957 LOSS DIFF: tensor(5.3546, grad_fn=) tensor(5.1808, grad_fn=)\n", "958 LOSS DIFF: tensor(5.1927, grad_fn=) tensor(5.1525, grad_fn=)\n", "959 LOSS DIFF: tensor(5.3402, grad_fn=) tensor(5.1927, grad_fn=)\n", "960 LOSS DIFF: tensor(5.3660, grad_fn=) tensor(5.2197, grad_fn=)\n", "961 LOSS DIFF: tensor(5.3701, grad_fn=) tensor(5.3660, grad_fn=)\n", "962 LOSS DIFF: tensor(5.1755, grad_fn=) tensor(5.1572, grad_fn=)\n", "963 LOSS DIFF: tensor(5.2423, grad_fn=) tensor(5.1755, grad_fn=)\n", "964 LOSS DIFF: tensor(5.4032, grad_fn=) tensor(5.2423, grad_fn=)\n", "965 LOSS DIFF: tensor(5.3041, grad_fn=) tensor(5.1882, grad_fn=)\n", "966 LOSS DIFF: tensor(5.3328, grad_fn=) tensor(5.3041, grad_fn=)\n", "967 LOSS DIFF: tensor(5.1994, grad_fn=) tensor(5.1086, grad_fn=)\n", "968 LOSS DIFF: tensor(5.2771, grad_fn=) tensor(5.1994, grad_fn=)\n", "969 LOSS DIFF: tensor(5.3016, grad_fn=) tensor(5.2771, grad_fn=)\n", "970 LOSS DIFF: tensor(5.3162, grad_fn=) tensor(5.3016, grad_fn=)\n", "971 LOSS DIFF: tensor(5.3276, grad_fn=) tensor(5.2404, grad_fn=)\n", "972 LOSS DIFF: tensor(5.3335, grad_fn=) tensor(5.3276, grad_fn=)\n", "973 LOSS DIFF: tensor(5.3803, grad_fn=) tensor(5.2597, grad_fn=)\n", "974 LOSS DIFF: tensor(5.2477, grad_fn=) tensor(5.1569, grad_fn=)\n", "975 LOSS DIFF: tensor(5.3720, grad_fn=) tensor(5.2477, grad_fn=)\n", "976 LOSS DIFF: tensor(5.3752, grad_fn=) tensor(5.3720, grad_fn=)\n", "977 LOSS DIFF: tensor(5.2881, grad_fn=) tensor(5.2406, grad_fn=)\n", "978 LOSS DIFF: tensor(5.4561, grad_fn=) tensor(5.2564, grad_fn=)\n", "979 LOSS DIFF: tensor(5.3796, grad_fn=) tensor(5.3418, grad_fn=)\n", "980 LOSS DIFF: tensor(5.2454, grad_fn=) tensor(5.2276, grad_fn=)\n", "981 LOSS DIFF: tensor(5.3129, grad_fn=) tensor(5.2454, grad_fn=)\n", "982 LOSS DIFF: tensor(5.3334, grad_fn=) tensor(5.3129, grad_fn=)\n", "983 LOSS DIFF: tensor(5.3955, grad_fn=) tensor(5.3334, grad_fn=)\n", "984 LOSS DIFF: tensor(5.4304, grad_fn=) tensor(5.2307, grad_fn=)\n", "985 LOSS DIFF: tensor(5.3111, grad_fn=) tensor(5.1737, grad_fn=)\n", "986 LOSS DIFF: tensor(5.3549, grad_fn=) tensor(5.3111, grad_fn=)\n", "987 LOSS DIFF: tensor(5.3662, grad_fn=) tensor(5.2584, grad_fn=)\n", "988 LOSS DIFF: tensor(5.3705, grad_fn=) tensor(5.1949, grad_fn=)\n", "989 LOSS DIFF: tensor(5.2877, grad_fn=) tensor(5.2517, grad_fn=)\n", "990 LOSS DIFF: tensor(5.2987, grad_fn=) tensor(5.2175, grad_fn=)\n", "991 LOSS DIFF: tensor(5.3813, grad_fn=) tensor(5.1823, grad_fn=)\n", "992 LOSS DIFF: tensor(5.3100, grad_fn=) tensor(5.2477, grad_fn=)\n", "993 LOSS DIFF: tensor(5.3208, grad_fn=) tensor(5.1584, grad_fn=)\n", "994 LOSS DIFF: tensor(5.3709, grad_fn=) tensor(5.3208, grad_fn=)\n", "995 LOSS DIFF: tensor(5.2744, grad_fn=) tensor(5.1538, grad_fn=)\n", "996 LOSS DIFF: tensor(5.2920, grad_fn=) tensor(5.2744, grad_fn=)\n", "997 LOSS DIFF: tensor(5.3297, grad_fn=) tensor(5.2446, grad_fn=)\n", "998 LOSS DIFF: tensor(5.3818, grad_fn=) tensor(5.3297, grad_fn=)\n", "999 LOSS DIFF: tensor(5.2615, grad_fn=) tensor(5.1173, grad_fn=)\n", "1000 LOSS DIFF: tensor(5.3420, grad_fn=) tensor(5.2615, grad_fn=)\n" ] } ], "source": [ "loss_track = []\n", "\n", "device = 'cpu'\n", "model = SimpleBigramNeuralLanguageModel(vocab_size, embed_size).to(device)\n", "data = DataLoader(train_dataset, batch_size=6000)\n", "optimizer = torch.optim.Adam(model.parameters())\n", "criterion = torch.nn.NLLLoss()\n", "\n", "last_loss = 1_000\n", "trigger_count = 0\n", "\n", "model.train()\n", "step = 0\n", "for x, y in data:\n", " x = x.to(device)\n", " y = y.to(device)\n", " optimizer.zero_grad()\n", " ypredicted = model(x)\n", " loss = criterion(torch.log(ypredicted), y)\n", " if step % 100 == 0:\n", " print(step, loss)\n", " step += 1\n", " loss.backward()\n", " optimizer.step()\n", "\n", " if loss > last_loss:\n", " trigger_count += 1 \n", " print(trigger_count, 'LOSS DIFF:', loss, last_loss)\n", "\n", " if trigger_count >= 1_000:\n", " break\n", "\n", " loss_track.append(loss)\n", " last_loss = loss" ] }, { "cell_type": "code", "execution_count": 92, "metadata": {}, "outputs": [], "source": [ "loss_track2 = [t.detach().numpy() for t in loss_track]" ] }, { "cell_type": "code", "execution_count": 98, "metadata": {}, "outputs": [ { "data": { "image/png": "iVBORw0KGgoAAAANSUhEUgAAAhYAAAGdCAYAAABO2DpVAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAABPI0lEQVR4nO3dd3hTZf8G8DtJ27Sli9JFoRRK2XuWgoAIMgXcgIg4UFH8uRHRFwVRQfFVHIjjZSkqTnAiUvZe0kIZhbLKHoUuSlfy/P5Ik+Zkp0172tP7c1292iYnyXMa6Ln7jO+jEkIIEBEREXmAWu4GEBERkXIwWBAREZHHMFgQERGRxzBYEBERkccwWBAREZHHMFgQERGRxzBYEBERkccwWBAREZHHeFX1C+r1epw7dw6BgYFQqVRV/fJERERUDkII5ObmIjo6Gmq1/X6JKg8W586dQ0xMTFW/LBEREXnA6dOn0bBhQ7v3V3mwCAwMBGBoWFBQUFW/PBEREZVDTk4OYmJiTNdxe6o8WBiHP4KCghgsiIiIahhn0xg4eZOIiIg8hsGCiIiIPIbBgoiIiDyGwYKIiIg8hsGCiIiIPIbBgoiIiDyGwYKIiIg8hsGCiIiIPIbBgoiIiDyGwYKIiIg8hsGCiIiIPIbBgoiIiDymyjchqyzv/5OGnIISTOzbFFHBvnI3h4iIqFZSTI/Fsl2nsXjrSVy9XiR3U4iIiGotxQQLdek2rnohZG4JERFR7aWgYGH4zGBBREQkH8UEC5Wpx0LmhhAREdViigkWGjWHQoiIiOSmmGBhHAoRDBZERESyUVCw4FAIERGR3BQTLEpzBXRMFkRERLJRTLDgHAsiIiL5KSZYGIdCmCuIiIjko5hgoWKBLCIiItkpJlioOceCiIhIdooJFsY5FuywICIiko9iggWHQoiIiOTndrDIzc3Fs88+i9jYWPj5+aFnz57YtWtXZbTNLWV7hcjbDiIiotrM7WAxYcIErF69Gl9//TX279+PgQMHYsCAATh79mxltM9lxlUhnGNBREQkH7eCxY0bN/Dzzz/j3XffRZ8+fRAfH4/p06cjPj4e8+fPr6w2ukRjWm7KYEFERCQXL3cOLikpgU6ng6+vr+R2Pz8/bN682eZjCgsLUVhYaPo+JyenHM10TsWhECIiItm51WMRGBiIxMREzJw5E+fOnYNOp8PSpUuxbds2nD9/3uZjZs2aheDgYNNHTEyMRxpuSc3Jm0RERLJze47F119/DSEEGjRoAK1Wi48++ghjxoyBWm37qaZOnYrs7GzTx+nTpyvcaFuML89gQUREJB+3hkIAoGnTptiwYQOuX7+OnJwc1K9fH6NGjUJcXJzN47VaLbRabYUb6gx7LIiIiORX7joWderUQf369XHt2jWsWrUKI0eO9GS73GYKFnpZm0FERFSrud1jsWrVKggh0KJFC6Snp2Py5Mlo2bIlHnroocpon8vK6liwx4KIiEgubvdYZGdnY9KkSWjZsiUeeOAB3HTTTVi1ahW8vb0ro30u41AIERGR/Nzusbj33ntx7733VkZbKkStNgYLmRtCRERUiylmrxAOhRAREclPQcGCPRZERERyU16wYLIgIiKSjWKChYpDIURERLJTTLDQcPImERGR7BQTLNTc3ZSIiEh2igkWHAohIiKSn2KChbHHQseS3kRERLJRTLDQsPImERGR7BQTLIzbpnOOBRERkXwUEyxULJBFREQkO8UEC2NJbx2TBRERkWwUEyw0XG5KREQkO8UECw6FEBERyU8xwULNVSFERESyU1CwMHzWMVgQERHJRjHBwrhXCHMFERGRfBQTLFTcNp2IiEh2igkWatNeIfK2g4iIqDZTULDg5E0iIiK5KSdYqBksiIiI5KacYMFt04mIiGSnoGDBAllERERyU1CwMHzmqhAiIiL5KCdYcI4FERGR7JQTLDgUQkREJDsFBQvDZ/ZYEBERyUdBwYKVN4mIiOSmmGBhxFhBREQkH8UEC+NeIRwJISIiko9igoVxjgVzBRERkXwUEyxKcwUnbxIREclIOcFCxS4LIiIiuSkmWJQNhTBZEBERyUUxwQKm5aYyt4OIiKgWU0ywMM6xYI8FERGRfBQTLFjSm4iISH6KCRamuZsMFkRERLJRTrAwfcVkQUREJBfFBAsOhRAREclPMcECpqEQJgsiIiK5KCZYlK0KISIiIrkoJlhwKISIiEh+igkWKg6FEBERyU5xwYKIiIjko5hgUTYUwh4LIiIiuSgmWBgxVxAREclHMcHC2GPBYEFERCQfxQQL4xwLDoUQERHJRznBorSSBWMFERGRfBQTLNSskEVERCQ7xQQLDoUQERHJTzHBAhwKISIikp1igoWalTeJiIhkp5hgoeJeIURERLJTTrAo/cxcQUREJB/FBAu18Uw4FEJERCQbxQQLYx0LDoUQERHJRzHBwjgWIjgYQkREJBvFBAvT7qZ6mRtCRERUiykmWHDyJhERkfyUEyxYx4KIiEh2igkW3DadiIhIfooJFmVDIUwWREREclFMsDCtCmGuICIiko1igoVpVQiTBRERkWwUEyy4KoSIiEh+ygkWpmUh8raDiIioNlNMsDBum86hECIiIvm4FSx0Oh2mTZuGJk2awM/PD02bNsXMmTOrRe0IdlgQERHJz8udg9955x3Mnz8fS5YsQZs2bbB792489NBDCA4OxtNPP11ZbXQR61gQERHJza1gsXXrVowcORLDhg0DADRu3Bjfffcddu7cWSmNcweHQoiIiOTn1lBIz549sWbNGhw5cgQAkJKSgs2bN2PIkCF2H1NYWIicnBzJR2VQsfImERGR7NzqsXj55ZeRk5ODli1bQqPRQKfT4a233sLYsWPtPmbWrFmYMWNGhRvqjMr5IURERFTJ3Oqx+OGHH/DNN9/g22+/xb///oslS5bgvffew5IlS+w+ZurUqcjOzjZ9nD59usKNtoUFsoiIiOTnVo/F5MmT8fLLL2P06NEAgHbt2uHUqVOYNWsWxo8fb/MxWq0WWq224i11QsWS3kRERLJzq8ciPz8farX0IRqNBnq93qONqghuQkZERCQft3oshg8fjrfeeguNGjVCmzZtsHfvXrz//vt4+OGHK6t9LisbCpG5IURERLWYW8Hi448/xrRp0/Dkk0/i0qVLiI6OxuOPP47XXnutstrnMg6FEBERyc+tYBEYGIi5c+di7ty5ldSc8lOZloUwWRAREclFQXuFcCiEiIhIbooJFqZt0zkWQkREJBvlBAtuQkZERCQ7BQWL0qEQjoUQERHJRjnBovQzYwUREZF8lBMsuAkZERGR7BQTLNSmOhZMFkRERHJRTLBQlQ6GMFYQERHJRznBorTHgrubEhERyUdxwYK5goiISD4KChYcCiEiIpKbcoJF6WdO3iQiIpKPYoKFmstNiYiIZKeYYMGS3kRERPJTTrAo/cxVIURERPJRTrDgUAgREZHsFBQsyr7mBE4iIiJ5KCdYmH3NXEFERCQPxQQLtVmXBXMFERGRPBQTLDgUQkREJD8FBYuyZKFnriAiIpKFgoJF2deCgyFERESyUE6wMPuaIyFERETyUEywkEzeZLAgIiKShWKCBYdCiIiI5KecYAH2WBAREclNOcHCrMeC+4UQERHJQ5HBgrGCiIhIHsoJFuZDIXoZG0JERFSLKSZYqDl5k4iISHaKCRYqLjclIiKSnXKChdnXnLxJREQkD+UEC07eJCIikp2CggWHQoiIiOSmmGABlPVacNt0IiIieSgqWBj3C2GsICIikoeigoVxMIQdFkRERPJQVrAoTRZcFUJERCQPhQULDoUQERHJSVnBovQzJ28SERHJQ1nBwrQqRN52EBER1VaKChamVSEMFkRERLJQVLAwDYVwlgUREZEslBUsSnss9MwVREREslBYsDB85uRNIiIieSgrWJR+ZqwgIiKSh7KChWnyJqMFERGRHBQVLNRcbkpERCQrRQULVt4kIiKSl7KCReln7hVCREQkD2UFCxbIIiIikpXCgoXhM3ssiIiI5KGsYFH6mbmCiIhIHooKFsa9QoiIiEgeigoWHAohIiKSl7KCReln5goiIiJ5KCtYsI4FERGRrBQWLAyfORRCREQkD0UGC+YKIiIieSgqWJStCmGyICIikoOigkVZSW9Zm0FERFRrKStYsKQ3ERGRrBQWLAyfBZMFERGRLJQVLEo/cyiEiIhIHsoKFqY6FkwWREREclBUsFBzUQgREZGsFBUsVKWDIRwKISIikoeygoVx8ia7LIiIiGThVrBo3LgxVCqV1cekSZMqq31u4XJTIiIieXm5c/CuXbug0+lM36empuLWW2/FPffc4/GGlUfZqhAmCyIiIjm4FSzCw8Ml38+ePRtNmzZF3759Pdqo8iobCiEiIiI5lHuORVFREZYuXYqHH37YNAQhNzWTBRERkazc6rEwt2LFCmRlZeHBBx90eFxhYSEKCwtN3+fk5JT3JZ3itulERETyKnePxYIFCzBkyBBER0c7PG7WrFkIDg42fcTExJT3JZ3i5E0iIiJ5lStYnDp1CklJSZgwYYLTY6dOnYrs7GzTx+nTp8vzki7h5E0iIiJ5lWsoZNGiRYiIiMCwYcOcHqvVaqHVasvzMm47cy0fAJB06CIGtomqktckIiKiMm73WOj1eixatAjjx4+Hl1e5p2hUiit5RQCAEh17LIiIiOTgdrBISkpCRkYGHn744cpoT4U82rsJACDIz1vmlhAREdVObnc5DBw4EKKazmHw9zGcToleL3NLiIiIaidF7RXirTFM3+RQCBERkTwUFSw0asPplHB7UyIiIlkoKliU9VhwKISIiEgOigoWXmpDsChmjwUREZEslBUsNKVDIeyxICIikoWygkVpj4WOPRZERESyUFawKO2xKOaqECIiIlkoKliYJm+yjgUREZEsFBUsvIzLTdljQUREJAtFBQuN2thjwWBBREQkB0UFi9JcwW3TiYiIZKKoYGHssWCHBRERkTwUFSzUKkOwqK6bpBERESmdooJFaa5gHQsiIiKZKCpYcCiEiIhIXooKFhwKISIikpeiggWHQoiIiOSlqGChURmHQhgsiIiI5KCoYKFWG4dCZG4IERFRLaWsYGEcCmGyICIikoXCggWHQoiIiOSkzGDBzU2JiIhkocxgwR4LIiIiWSgrWJSeDYMFERGRPJQVLFSsvElERCQnZQYLJgsiIiJZKCpYaDgUQkREJCtFBQsVh0KIiIhkpahgwaEQIiIieSkqWHCvECIiInkpKlgYdze9XqTDir1n5W0MERFRLaSoYGHchAwAnv0+Wb6GEBER1VKKChbGoRAiIiKSh6KChdoiV2TlF8nTECIiolpKUcFCY5EsOr6xGjquECEiIqoyigoW3l7Wp5NfVCJDS4iIiGonRQULH4316ew/my1DS4iIiGonRQULbxvBYsKS3TK0hIiIqHZSVLCwnGMBAPlFOhlaQkREVDspKlgQERGRvBgsiIiIyGMYLIiIiMhjFB8sBraOlLsJREREtYbig8WBczk4fjlP7mYQERHVCooPFmezbuCW/26QuxlERES1guKDBREREVUdxQWLpY8koFd8PdzZuYHk9o1HLsvUIiIiotpDccHipmZh+GZCD8SF1ZHc/sDCnUhleW8iIqJKpbhgYWRrU9O0C7lV3xAiIqJaRLHB4lzWDavbvDTWJb+JiIjIcxQbLApL9Fa3PbMsGULY6MogIiIij1BssCjWWQcLAHjvn7QqbgkREVHtUeuCxbx1x6q4JURERLWHgoMFhzyIiIiqmoKDhe0eCwDYf4bLTomIiCqDYoNFWIDW7n0Tl+6pwpYQERHVHooNFlOHtkSPuFCb9+UVllRxa4iIiGoHxQaLiEBfLH6ou837dHqB/WeykV/EgEFERORJig0WAOCtsX16eYUlGP7JZtw+b0sVt4iIiEjZFB0sNGrHlTaPXMxDVn4RACC/qAQZmflV0SwiIiLFUnSwcMX6NMOup33nrEefOetw+EKOzC0iIiKquWp9sCgqLf19ObcQALD28CU5m0NERFSj1fpgUViiM4ULAFCruFEZERFReTFYlOgx4/cDpu+dTMsgIiIiBxQfLBLj6jm8X6VS4ZsdGWXfg8mCiIiovBQfLLy9HJ+iZYzgSAgREVH5KT5Y1PHROLxfp5duVuZsiSoRERHZp/hg8crQVg7vL9ZLNytjrCAiIio/xQeLmFB/HHxjkN37Syy2V1ezx4KIiKjc3A4WZ8+exf3334969erBz88P7dq1w+7duyujbR7j7+OFt+5oa/M+y+3Vv952qiqaREREpEhe7hx87do19OrVC/369cPKlSsRHh6Oo0ePom7dupXVPo/JK7C94djK1AuS749eysO6tEto3yAY9RxsvU5ERETW3AoW77zzDmJiYrBo0SLTbU2aNPF4oyrDtfxim7enX8qzuu2hRbvQsK4fNk+5BQCQfDoL+85kYVyPWKi4bISIiMgut4ZCfvvtN3Tt2hX33HMPIiIi0KlTJ3z55ZcOH1NYWIicnBzJhxzCAnzcOv7MtRu4UaQDANw+bwte+/UAfvn3bGU0jYiISDHcChbHjx/H/Pnz0axZM6xatQpPPPEEnn76aSxZssTuY2bNmoXg4GDTR0xMTIUbXR5jE2IxPjEWXz3cHW0bBLn0mKEfbZJ8b9xHZOHmE/jvP2kebyMREVFNpxJCCOeHGfj4+KBr167YunWr6bann34au3btwrZt22w+prCwEIWFhabvc3JyEBMTg+zsbAQFuXaB9zSdXuD131KxdHuG02NPzh6Gxi//CQDo1yIcXz7QFfGvrgQAbJ7SDw3r+ldqW4mIiKqDnJwcBAcHO71+u9VjUb9+fbRu3VpyW6tWrZCRYf8CrdVqERQUJPmQm0atgsViEJfoBXDgXNlQDudbEBERSbk1ebNXr15IS5MOARw5cgSxsbEebVRV0Dop9W1k3qGz4chlbDhy2fT9hewCNAjx83jbiIiIaiq3eiyee+45bN++HW+//TbS09Px7bff4osvvsCkSZMqq32Vpo7WcalvoxvFOrv33TV/K37ec8ZTTSIiIqrx3AoW3bp1w/Lly/Hdd9+hbdu2mDlzJubOnYuxY8dWVvsqTc+mYS4dd9vHmx3e/87fhz3RHCIiIkVwaygEAG677TbcdtttldGWKtUrPgz16vgg83qRw+OOX77u8H6XZ74SERHVAorfK8SRfi0jKvwcrq+pISIiUr5aHSz0HkkFTBZERERGtTtY6CseCoQATmVex6ItJ1BgNtEz6eBFPLJ4F67kFTp4NBERkbLU6mCh80BngwAwaO5GzPj9IFpO+xvf7zLU9Jjw1W6sOXwJr/96oOIvQkREVEPU6mCh8UB9q6vXi1BQXFZta8rP+yX3H7mY6/Q5MvMKkVNge5M0IiKimqRWB4sXBrZAVJAvBreJqrTXuJZvWHWSfaMYtqqnXy8sQZc3k9B++j+V1gYiIqKqUquDRUyoP7ZNvQWfjeuC7VP7e+x5i0rKejAKivVIOZ2FDjP+weSf9lkdm3E13/S1J+Z8AMDFnAKbIYaIiKiy1epgAZTt9xEV7Cu53dWS37Z8svao6etinR4fJB0BAPxko0qn2my/kWJ9OTYwMXM26wbmrUtHwttr8JKNEENERFTZan2wsGdsQtn+Jz3iQt167K8p50xfF5bosT7tst1j1WbzPIorMJtUCIFes9dizirDXi4/stQ4ERHJgMHCjmKz7U9Hd2vk1mNPZebbve/tvw5JvlebJYuS8my5WspDoyhEREQV4nZJ79qisKSsJkX3JqFoEOKHs1k3Kvy8X2w8jvAALbo3CcUn69IlcyFOX72BEH8f0/cLNp/AtetFeHFQC6fP65liX0RERBXDYGGH+bBEaB0fzB3dEfd8ts0jz/2WRa+F0fBPNuPk7GEo1unhpVZh5h8HAQAjO0ajWWSgw+fUscuCiIiqAQYLO6bd1hoXsgsQHqiFr7cG3Rq7N8+ivOatS8cna9Px7aMJptsyrxehmZPHsceCiIiqAwYLGx7vG4fQOj747rEeVf7axsmXs1eWbceeV1Bi+nr53jPQ6YG7uzSUPM5Rh4UQAou3nkSb6GB0b1I1AakmE0KYVgsREZF7OHnTzKpn++DFgc3xbP/mNu+PCfWrsraY90BM+Go3zmffwNykI3ju+xS8+GMKUs9mS453NBSyPu0yZvx+EPd+7v5Qzr8Z11yaW7Jk60ncPm8LsvNrdgXRv1PPo9tba7DjeKbcTSEiqpEYLMy0iArEU7c0g5+Pxub9b4xoW2Vt2XXymuT7xFlrMTeprD7GbR9vRkGxDheyCwA4Lq514sr1crXh0Pkc3PnpVvSavdbpsa//dgDJp7Mwf8Oxcr1WdTFx6b+4kleI8Yt2unR8yuksPLF0DzIcrAQiIqpNGCzcUN16xwfP3Yges9bgxJXrlTLHYvfJq6avtx3LxPM/JOPa9SKHj8kvKnF4f03h6mTYkfO2YGXqBUxcuqeSW0REVDMwWLjBvErmf4a1krElBidL/0p+b1UadJUQLMxXxoz5cjt++fesVR0OMjiVWb5eISIipWGwcIN5sHi4VxMZWyL15/7zcDdXfLPjFAbP3YhTmdchhECujd1Vj13Os7rt9LWq7/I/dD4HW9OvVPnruoNrcoiIDBgs3GBeftu8YmZ1YKvr3hgMzIdJBry/AesOX8Kry1Nx+EIu+s5Zj+e+T0a76f/gxR9TJAW7vtmRYfWcKlT9eQ/5cBPu+98OnL7KeQxERNUdg4U7LK6pC8Z3RVx4HXnaYuFjs43PjPr/dwPe+P0g3vyzbPgi/VIeHlq8S3LcimTD3iY/7TmD1QcvOnwdW/NMth4r602ozHIaVdlbIkeAIiJSAgYLN7SKCpJ8379VJFY+01um1kh9t/O0zdsXbjnh1vMcvZSHG0U6pJzOcvkx9325w6XjKrotvCsXeyEE/rfpODYfrdqhE9YnIyIyYIEsN9St44Mdr/SHr3fZclR1dVsqUkF6vcD4RTux88RVm/dvPZaJ+77cjltaRuDOzg0RWsfH5nGWcguK0eXNJBSV6PGfYa0woXec1TElOj2OXb6O5pEBpgJVws0r9pb0TFMPzcnZw9x6rEQF3tbfUs5hx/FMzBjRBl4aZnciql34W89NkUG+CPbzNn2vUViw+Cv1gt1QYbT1mOHiPfHrPfgwSToEI+xMY/wt5RyKSgy7txov/EIIHL6QY9pJ9tXlqRg0dyO+3HTc9DjzYRxXftSeGi4pKtHj8a9340peoUvH3ygu27Tu6e/24psdGfh93zmPtIWIqCZhj0UFKSxX4ND5HJeP3XnyKnaelIYQdzoYlmw9iem/H8TwDtH4eEwnfL/bMJzz9l+HoRfAuawb+GrbKdefENIJthW16sBF6PTA/8Z3den4a9eLsMvs53H1evWpQrrjeCYahvqjQUjVVY8lotqJPRYV5Ik9JbrG1sVfT/dGQpNQLH0kwfkDaiBb8yM+XW+o0vl7ivVf9rNXHrYKFc5+0r+lnEPaBeslso4IIfDxmqNYn3bJ5v3nXChnbrTz5FU89nVZoazqkjn/zbiGUV9sd6mCKhFRRbHHooq1jArE4Qu5ktsWPtQNQb7e+P7xRJlaVXmEEDhupzKoJ+envLp8v83lseb+t+k4NqdfwefjukDrZZgns/bwJfx39REAtudkqN2I3panKMeK5KSDF/HxunS8f28HNA0PACCtoEpEVNnYY+Fh3hoV9k8faPq+X4twyf0LH+yGrx/pLrktyNdb8v27d7WvvAZWsvwiHT5dn47/rNiPcQt24Jllyej/3w34z4pUq2PNL7wv/ZTi9LlHfbEdeYW2S4Y7ChV5hSX436bjePPPQ1ifdhm/Jpf1kJwv3WvFHvfm0EiThcYiWWxJv4KV+887fgYh8O2ODKtN5lw14avdSDmdhRd+KPt5cuksEVUlBgsP0wsg0Ncb797VHpP6NcXUodLS33X9fdC7WbidRxtYToD8aWLN6clYvvcs3v07DUu3Z2DT0Sv4zcYwB2C9NfkPu8+49PwLN5ctny0s0WHZzgynwxUzLWp5pJ7NNi199XLSrWCrEFpeYQn+2HcO1y1CjmWPRbFOoMBsUufY/+3AE9/8a7Vb7Pv/pOH2eVtQUKzDtzsz8Mry/bjt480O2+VMjlklVXsTah0p1unx1baTSL/k3tAS0YFz2Thuo2ov1R4cCvEwYwXMe7vFmG777P4upk2qfLycZ7lAix6MLrF1PdjC6uG3lHOSi66rVqZewNP9mwEA5q07ho/WHEWgr+1/xhuOXEbf5uHYdPSy5Pavtp2CCsCMkW2dVlDdm5FlddsLPyRj1YGLGNouyuFj3/jjID5YfQTJrw+U9M78nXoBX248jnoBPogM8sXaw4b5Hb8mn8Wry617dsrDWWByZsnWk55ZtlsOqWezsevkVYxPbCxrhdvMvELUC9DK9vo10dXrRRj2kSEUV/W/G6o+2GNRBTrHhpi+tuwet6Vdg2DJ9yqVCt0bh5q+f3Wo/BugVdQzy5KR6WSnVFsOnc/B0Yu5KCzR4aM1hqWuuQW2h0fGL9yJ9Eu5OGdjuGNJ6cRQ8wuwvZoZlrevOmCoTvrX/guS220tL80tLEFeQQlKzIqDzfzjIC7kFODAuRxTqAAgOaaiNO5MDrHh34xrHmqJ+277eDNm/H4Qv+w9K1sb5q1LR5c3k/DVtpOytaEmOnutrDfO3Ro0pBwMFlUgItAXSx9JwIpJvVw6PibUH3d2bgAAuLtLQwDSZa2P9olD6oxB2Db1Fo+3tSb4+d+z+GLDcecHAhjw/kaH95sHvRNXbO9QWlCsd+m1LIOGuUUuVEC1nM9x4Fw2Jv+YUq5uZWNg0usFsm/Is+z1RpGuXL1SRgfOlW+eSUWV6PSYsyoNAPDarwdkaYMSeDAnUw3DoZAqclOzMLeOf//ejnhuQHNT3QHLOYQBWi8EaL2gUatsbkCmZJ9tOOaR5+n/3/U4drksTNzy3w02j8stKIafjwa/Jp+1ubrFmRK9Hm//ddjpcZbd/sYu5S3pV7B1an+3XtMYmB5Zsgvr0sqGgnILihHo643DF3Lw8s/78eLAFripWRj0eoGNRy/j1eWpmDyohcOQ5IpinR7tpq+CRq3CwTcGu9RTZ8nej3rZzgyknMnGW7c7H8oqjw+Sjnj8OWsjnV6U632varkFxTh5JR9tGwR5pHwAscfC4yYPauGx54oJ9Tf94qxXx/ZYb4eGwVa3ab3UmHdfZ0QF+eLD0R0R4u9t45FkHiocySkoQUGxDs8sS8Zz3ztfvWLpu52Ol8Ea2fuVdi67AAXFOuw4nokSnWu9J4fO50AIIQkVAHD3/G3YdPQyBs/dhOTTWbh/gWGfl/GLduLBRbtwNusGnv0+2enzn76aj+3HM+3en5lXhBK9QGGJHnmFJcgvKjFVWHWVvRD38i/78d3ODCQdcrxhniOzVx7Gg4t22vx5LtpystzPW9uZX5fLE8LlMHjuJgz/ZDPWH7ns/GByCYOFB3wzIQFv3dEWGybfjCdvbloprzHtttbo3jgUH4/pJLl93tjOVscWlugxrH19bH+lP0Z2bIAvxpVVjtw29Ra3JlV1bhRS7jYrxdXrRbjz063lfvx7/7j2F/Dkn/bZve+V5fsx6ovtpnklgOEvwvRLudDpBR79ajdmryzrFSks0eMfGzvVpl3MxbgFOyW35ReVYJODTduEEFi85QR6zlqDE1eu4/CFHPR+dx1Gf7FdsixWpxfYcTwT+UUlkrLshcU69J2zHjfPWY/NR6/g4cW7cCrTeahzdmHKKZ1bo9MLrD18EZlm5dePXMy1WrVj7rMNx7A+7TI2HrW+mFjOdXlvVRpGfrIZ03+rucMimXmF2H48UzLv4UpeIcYv3Im/Ux0vgS4vd4NFsU6PP/adw+Vc18roe4pxlZazpeDkOgYLD+gVH4axCbGIrVen0rrSooJ98cPERAzvEC25vX6wdYlmyxUB3ZuEYuGDXfHxmE6m47+ZkIAx3RuZjmkU6o9fnuxp9Vwfju5kdZs9NaDXs1x+SzmLg26UOq8Mv/xrmMj40dp0022Tf0rBgPc34rnvk7H64EWrIaJf/nVtCe/t87Y4vL/J1L8w/feDOJddgH7vrcfguZtM9+03CxYLNh/HqC+2o8OMf7DAbFnwpdxCXM4txNmsG7h/wQ6sPXwJzyxLlryGEMKq92Dp9gzkF5XghR9SsMNG74jxn9u3O07h4cW7MeITw3lsPXYFAz/YiKEfbbJ6jCVbE38thxY/WZeOlDPZWLz1pNPnc9fhCznYdybL5eMz8woxbsEO/FV6Ebx2vUgSFq5eL8LMPw4izaIIX7/31mP0F9ux2ixsvrcqDRuOXMbEpf9W7CTscHeI9ouNx/HUt3ud/nusLNV5Q8mMzHzMTTqCrHz3J7zLgcFCgRY/1N3qtltaRkpCSa/4MIw2WxLrrVGhc6OKLWs9PHNIhR5fXV3Irtq/oJxZXDoR1Bg27NUKMa5ecebIxfLXHPAx273129IiZcU66QUlv8h6AueZa9JaHqM+346+c9abNqozav3aKvz87xmM+mI7Nh65LPnr2rjwZWWqYT6I8S/PP/YZjjmVadiQLiu/CHOTjtjsJSm0MTG3KuYsncu6gRGfbMbguZsw4pMtkrojjsxeeRibjl7Bk9/8i7WHL6LTzNWY9mvZEuVpK1KxYPMJDJornbRs7N0xHz7acsx+L5VRiU6PvRnX3B7GAgC9mw8x9rBZ1nmpiIs5BVb/puzxdK44eeU65qw6LOlJK68752/B3KSjeMlBr2Z1wmChMLPubOfyRFHzrkovG8sTb2kZgSA/1+dn+HipER6ovHX/yaez5G6CxPTfD2LEJxUroOUpWm81sm8U41zWDav6K0aTbVRVtezd2nnyKs5m3UCqg5UgDyzcKfnrOjvfcDF2Vkp9+m8HMDfpKPrOWW8VLq4X2R8ucUSnF9hw5DKulmPJtLFN+86UnavxXJy5ZvYX6zsrDStXlm7PQPaNYmw+egUpTno/jJlpz6lrOH217AJuPqR1LuuGKVy99dch3PHpVjR7dSXWHnZvTovOhaGQG0U6HCtd9eTt4S7PIxdzkfD2Gtz9mavDmK69fkZmvqnNjoyctwXz1h1zOMTpqit5hvd927FM/PLvGWx2MHRZHTBYyGRg60gAwNiERk6OdI87/zUjg3xNX9uqzvjp2M6SLeJdEa3A3TNd3Tq9KplflOSk9dIgcdYa9Jy9VjIsYs7Yc2DO2O2cnV8s2RzNnWH56b8ftHm7eQnzu+dvxQqzEu5956yXDB3M+P0gvtlxCn+nXjBVY3XE+Jf77ynnMH7hTpvd9kIIXLBTKt742lkWS4CdrW4RQmDfmSzJ0ue0i2XDHfd8thX3L9hh1RM0N+kIPkwqm5dj/GNitcX8m4yrhvco6eBF9Jy9Fk99awhw5hNZH16822EbLbnS83P7vC3o/98N2HE8E14a94JFiU6Px77abXeV2B+lPXmu/l+x12Nx8FwOfk029A7q9AJ95qxD//9uwMUcx9sBGJd577KxV09+UQk2HLnscm+KUW5hCZ7/IcU06bq64nJTmXw4uhO2n8hEz6b1PPq87owTmocAW78DfL0NG3V1bhSCf21UoLSlaVgdpHjwL/yXh7REYlw9jJRp3JUcu5ZfZHOow5kLOQXQ6wWW7jhl0fXt3jDEtetFVqHY/L/A7lPWhb7Mhw4AmKqdjusRi5m3t3X4es1eXYmxCY1Mf7EaL8iAoWbIu6vSsOrABZy4ch0fju6IER2icfB8DnaduIpNR6/gSl4hfn6ip1XxKLUK+HR9OtYfvozFD3eDv0/Zr+b/rNiPpdsdryyyNZx1vbAEc81CBQDTj9fy9Sf/mIKdJ66awuHK1AuSycDOrD18ERvSLmNEx7LhVlcmbxrD0S//noW3xr2/c1cduIh/Dho+JvaVTpq/mFOAunV8TN9bbiFgi/mk0bf/OoSiEj2mj2hjmqsTFqCVVEFOeHsNnri5KaYMbum4oTZ+DP/37V6sOXwJT/WLx4sWKwkPnsuBSgW0qh/k+HmrMQYLmfj5aNCvRYTnn7icvYnuzuC+p0tD/LjHenLgy0Nbulwx0c9bgxtmBZTu6dIQhSV6yZyB8YmNcbWGTFiqjd792/WLj6XjV/KsegkWbj7p1nN0mrla8v0ry/eb5nrYY+8i/fX2U06DBWC94d2mo5fRu1k4Vh24IPnr+ZllyZj5xyGrHq/4V1daPacQwLt/G4Y2Wr+2CgNbR+LzcV2gUqmchgp7bBV8Mw5PWP5/v16kw+KtJ9EyKtB0m72egNNX87H12BWczy7AiA7RiAsPMPVmmPfEmPdYFOv0eO77ZCQ2rQdfLw3OZ9/AU7c0M92fcibLatdnwDDfQuulRpiN0ur2hrE+23AMs1ceRj2zYFFYojf9oWSPsRcnv6gEX2w0rGoyX+W39vAlq3lo89cfw33dGyE6xM9uzY7cwhIUlegl2zmsKa24+/X2U5JgcaNIZwoyh2cOdtrm6opDIQrj7szmBxJjAQAvuVl/Y849HWzeHhHoa/N2W5JfvxVbXy6rHvrOXe3xn9uk5crVatdWmzQI8cNyG6taqHIZx37L+9hDF6Srbf6s4JI/Z6HCmbnlKI41bsFO6PQCl2wsk3R1GM1yaew/By9ibtJRPP3dXrfbYzRhifXQhV4AP+4+jS832a4Ea+vibqnPnHWY8vN+zE06isEfSlfemPfgGJZD5+FybiF++fcM/th3Hq8uT8ULP6bgvX+O4OC5svfe1usah8m6vpmEez/bhj0WvU+WvxZOXLmO64Ulpp4W8y0DjEuIU89m4+M1R1FQrEOJTm818fmdvw+j75z1pu+LzCatLth8wua8kd7vrsOT3+yR3GY5l+eV5futHgcY/rgyZz6Jt6BYh4zMfLsBz9jrtOnoZbz/Txp0eoH9Z7IxbUVquef+eAp7LBTgk/s64alvDb+A3O2wmDGiDZ7p38xqsyVX8snaF/qaqlV+cl/ZstRnBzTD2sOXcHPzcMnySEtaLw2iQ/zw7t3tEeTrDbVaZTWJ1Eutdmnbb41aVWPTfW01Z1Wa1cVCblZDBy7adfIqXq9AnYt1aZesbvtwTfnaYnTBxhwAIUSFJxOaX1st5wiYrx45fS0f931pmAvw4sDmVs/jqNT8dzszMPWXsovxzpNXcdf8raYaPJY7Gu8/k43hDiY060pXKhl3DQ7x90aJXmCGxTyd+eulF3HLHPHG77bf41UHLmLT0cuY8tM+vDa8jWnTSaOf9pzB1CEtrX7P+vtoTJvdFRTr8J8V0mG6oR9tQp6deiz3L9iB8ACtaQ6R+e/avMISfDCqo83HVQUGCwW4rX10WbBwM1moVCqnOzjaGyQxHwfuHV+2FfyzA5rj2QHNscyi4mSv+HrYkm5dj+DermXLXi27E9Uq185Jo1Y5nfgXE+onmQlP8qpuoaIiRn+xvUKPd6XkuydUtBims8mGJWZLjc13Bi608bgNDipdmocKSz/tOYMXf0xBgLbs98+yXY57qkos1r6ezzZsAuiM5c/rh932a8MYC89Zhgqj539IwcIHu2HqL2XB7viV6+jyZhJaRAYisWk9yaRavYDdUAHA5u9SoyMXnfc8VSYOhSiE8eLbzWwX1PJ4vG8cAGDasNZ2j3mqXzwASFaM+PlY9xZYLj39ZkIPp69vWdxLpVK5VJJco1Y5nCcSGaTFppdq56ZtREYVLbPd/D/W80PMmQ9pmNfMsBUsyrPnz9t/HcKLPxqWL5tfdJ1V69TphWTCaqCvt0t/sIyc57ll3RuOXMY/By7YDCdpF3OtCrBVpJ6Kl5sTYT2NwUIhkqcNxPoXb0ZMqH+FnuflwS2x9eVb8PBNTUy3GedhGBknG/n5aJD0fF+sfaGvZGKSUZ/m4fB2cwmZrSVnWi8Npg+3Djqvm93m5aTH4renbrJ5e5OwOm61z57nb7Xu6lWCBgpcPlybVcb+He/YmcAr6bGowC635oyTKi3ZKl9vLr9Ih5Nmy543Hrns0rDxNRfri7hq6zH7vQyWvnfSC+OIp2uCuIvBQiGC/b3R2AMXSZVKZVWL4vaODeweHx8RgLjwAJv3eWvUOPrWUHz9SHdseqmfS6+v9dLgo9L9UJpFlD1vuI1JoXX9y2Z9q1UqtKofaHWMkXnNDnPuTFod0CrS7n2D2kS5/Dzl9WDPxpX+GuZOzBqKLS+zl0dJUk57vv6J5bwEW5ZsO+Xx13XHze+tR7/31pu+33Y802qDvqrwb4brw3+u7jFki7s1QTyNwYKcUqlUpotqWICPk6Ot9W4W7lZPyogO0Tg8czBWPtPbrA3Wx5l3FXppVHa7/36301sBAD3jpVVK+zYPx3v3dLC5+dr/xndFaB3b5+9Oz+NDvRpb3dYi0n4oMrI13FRZBraOrBZbSG9lsPEoWxM6qeq4Mq/DE9ytCeJpDBbkknfuaoenb4nH8id7Veh5tKVDJjGhjrvYfb01kqBg6xJnvvTL3jLbqCBftDPbWn7FpLL2J792q1Vl0VeHtcLdXRpa9dr0Li2TbutV3r6jneT1f5yYKLl///SBpq/DArR4fXgbq+f44oEuNttvro4bwaJxPesg1zzSds8SAHwxruz1PxzdEZ+Pc9weW7P8K4MSK7kSVTYGC6oR6gVo8fzAFhWew/H1Iwno2zwcS2xslOaIrdwgJHud2A4WlmPKHWNC8NPERCx/sidCSodSPru/7CJaR2t7oZTxWPN2eKlVeGNkG9yX0EhSfbJdg2DJY8330LDVzol9m9otrmPOx0uNd+9ub3X7f4a1srqtrkXPys5X+2PabfYn5CbElVWA9VKrnfZWmBc3Mjym6no37u9hXQb/tvb1K+W1+jQPl3z/8hAnVRaJqoGq/P9oC4MFVanuTUKx5OHududl2GPrQhcfUTZ8YNxr4bP7O0uGFWxNrO7aOBSdzCrotTYrnWuc9NTbYiO3uNL5K92bGFbd1PHR4MibQ/BAYmMAkFQGdFRPwzJAdG8SipeHtLS5CZylED8f3Ns1xmquha35eDNHSitIalQqaOyEhenDW0t+ETn7Y2dw6XwS8/kXAb5e2DvtVscPdNP/3RJv8/YQP+vhqOkjrHuBPOFhi2Grpm7+uyWSQ+b1oirZpdce1rGgGsH8kvjH/92Ek5nXJXX7g0p7BQa3rY/Bbeuj8ct/lt7j/D+X+fXW2IV4d5cYBPl6QycE9p3JNu1F8PYd7dA0PAB3dGog2TgqKtgXSx9JcLo01nL1jHFoyDxwvDiwOdIv5Uk2zwKABnUNwwIvDW6BphEBmFZaTMfWihzz0syGc1RZLfl74dbm+L/+hp6HArNZ+/Z6K0L8vfHPs31MIcp8xYgQ1r0kFfVwL8PKpC8f6Irpvx0w7Smi9VJj77RbsXT7Kfx3tWGCm60w91CvxpJNtIzevas9Gtb1w33/c76RU8O6fjj+9lDEvfIXAPknxXmaSlXx2hZU/ew5dQ1X8grtTlqvbOyxoBrB/GLXtkEwbmtv2Oxozt3t0ap+EKaPsN3N70pol2wfX3rh0KhVGNKuPm5rH41XhrYyTZwM8ffBCwNb2OxxualZGNpaDIMYzbqzHUL8vTHXohpeoK+X6fWMBraJwtzRnUxVBo0SS4cr/H28MK5HLCYPaoEusXUlBcaMNGoVHusTZ/pepxdWm4WZz18wH5O117MBABFBvjZ34jQOS0V58BeZcd7Kra0j8f3jZTVQNBoV6tbxQWRw2Wv52OhmCfW3DjqJcfVwb7cY9IwPc755FAzDQubnG++gx8Lyva0JtDZCaXUR5Mu/eytCzuGQ6vuvisiMvf8i93SNwcpneqNhXdtzP1xZt2/eZeipSU/GyY3GGiBjujfC3mm3okNMiOS4QK2hhyPEzxsdYkKQ0CTU5sUrLMDH6oI+qV88fn6ip83VIiqVCi+YTbCso9XA11tt9timuL1T2TJi86e2N9/D0a8p409w7uiODo5yj/mupdHBZSHIuCW5+YRZHy817ugkXRZta1+H/q3KNv4b1c06kFky/ixWP9cHPzyeaHeO0Qu3NsftnRpgVGnIMx9Ke7BnY5vBx5Fe8fWw9JEE3GSxasmW+IgApL052K3nNzKf/1PdyNmVrwRyFsliJKQawV9bvqWW7nbzeirlT+oXj0FtoiRj8raGGNrHGHo41GoVVjzZ08GkScft+mliIt5ffURSgEfrpcEf/3cThDD0ctzcIgL3dGmIDjEhuL+HtOiZ+eva6pFwVVez4SkA6N8yAocv5Eq2Rh/UJhKrDtgvaDQ2oRF0emGaMGvZppzSPSYCLf6i/WBUR2xJv2LaDMxy51QApjkxACRBy5lmZvN2vNQq06ZWRveUBoo3bm+DkZ2i0TU2FKsPXoRGrcLgtlGYOrQlnv8hBX/uc32TtZuaheHY5TxsTr/i8Lim4XWg9bL//6N1/SAcPG97meOYbjEO9/MprwYhfjibdQOJcfWw7bjzolBNwupY7cZq+TM2+u2pXhjxyRaPtFPJ3C1O6EnssaAaoUeTeri9YzQmu7kLqys9Fk3C6qB3szAMa1/fYylfpVKhWWSg3Yv0TxMT8eLA5hjdrZHkMfafz/HrdW0cim8f7YEPRnXAoge7mW5v2yDYtNxWo1Zhzj0drEKFJcuhEGOhssFtHRQBK/0xW/78OsSEYPOUfmhYt6zH4d27O2Daba2x45X+eGVoS8RHBJj+0geAt+5oh9l3Wa9+eX14a0QEajGptKT8gFaRGNauPqaardQw722x7LF4ZWhLyXwUexdjZ0ttN5oVe2tczx+HZw5GVOmwjNZLg55Nw+Djpcaw9vVNPzOtlwbz7utsetydnewXnTPnKOO9Prw17u/RyLTV+2f3d7Z5nOUcnDHdYxAZpMXM29uih9lqoHoenCPz3j0d8NYdbTFvrO02WUpoYr0Vga2quDe3CEf7hiGVtgoIKNuyoLK9NLiFzZo25hLN3p+fn0jE+ETH/3fNuTIhvLKwx4JqBLVahbmjOzk/sNQdnRpg+d6zePJm578kVCoVvn4koSLNc1vXxqHoWsF9XWy5o1PDcj82IlCLS7mFpl4Uo28f7YGkQxcxokO028+pgnVgCvbzxiOlJeMf69MUj/VpiuwbxTh19TpGOqjy+lCvJniwZ2PT82nUKqsLl/nwyF2dG2LeurKqkH4+0l93lkM+zSICsOyxHpIlx7aCofncFB8vdfl21XUSFF8caAjQ9sKmr7caD/VqIrltcNuyi233xqHYefIqAEMPUPLpLADA+MRYzBjZFkIIqFQqCCHwxsg2aBMdhN9TzlvtVwEYltjuOnEVaw6X7cA6rkcsvt5eVk1z+9T+6DFrjen7egE+SGzq2kUwtp4/+rWMwLJdpyW3fzq2s2n3ZJXKsCx7eOncKleLt52YNRQ3vbNO0mPmTFVNTWjXIBi9m4XbnGBsVMesp7ZLbCi6xIa6XMVUzh4LBgtSpHfvbo9He8c5LPNdk7hTHKu8Nk3ph6ISvdW4e3igFmO6W9eOAIBRXWPw/e7TeGZAM5v3u1q8M9jPG8seS3R6nLMLyvQRbfDoV7vxeN84xIUHIOW1gejwxj8ADMHJkdXP9zV9PbpbDPKLdIgOdjwZtbzTACw70jo0NFxkgv28ER3iZ1oOba/wm/kuorYMaB1hChZhAVocf3soSvTC1Hth/DmqVCrT8NDvKbaHaSb2bYqJfZvi4zVH8ce+8/j+8R4I8feRBIsoi5+TO3OV/vdAV8RHBODbCQmSlTrmE6TDArSSybauXjJVKpXTuRr392iEpdvL9uUY37Mxvt15GoPbRuLX5HPILbC/w2hFuFK7piJzYOSsnMuhEFIkb40araODqkVZ6or4dGxnxIXVcblLuSK0Xhq3f5G9fWc7JD3f19QDAQDD2pX95VzVP/9bW0ci5bWBmDrEUDQs2N8b04e3xl2dG+JWB3u9WJp9V3t8NKaT0/aXd1OvIl3Z0t+YUD/8+tRNeHFQCzzaJw7DzLr5A+ysjLA3/8DIPJBElq7ksbUs2ZyxW958mMa8Zsr/9W+GVc/1Mc19+bB0ou7bd7Szei7h4s9l5u1t0SwyECqVCj3jw0yvZ7kvj+U12FEVWUvO3iOdxcar9QK02PlKf7x5eztMN6uS+89zffC42UorZ5z9MeDKUlDLeUTmGoX6V9uS9+yxIKrGhrarj6HtKm88uaI0ahXiI6S/5OeO7og/90v/+q3r74Mz11zvjq6IYItaIg9aDBl4lJu5wlg3onvjUNNEzh8et99TM8RiXkuv+HrYkp6JOzs7nqPhpVbhi3FdcD67AK2jgxweaxRbrw4OvTEYvt5q/LL3LADHy1FHdmyAga2jbK5KMs89T98Sb3eC6CiLpdJTh7ZE3xbhVnMuLOf9TOgdh9yCEnxuZ7dTaVukb5JxYqmRSmWYV/O/Tcfx33s6AigbArujUwP4emvQISYYDev6m2rJuMIw38iwxHvefZ2x4cgl05bpT98S71KxNVvB4r17OmDeunR8+UAXRIf4Yecr/dH97TU2Hi0f9lgQkUfZ6gb/YFRHtGsQjC8f6CpDi+xzNjzijLs9FrteHYDfn7pJUsAs0sbOvUbeGjXGdDdcfAe3icL8+7vgw9Ed8ebtbe0+BjAEvoFtojDezR1x/Xw0kl4aZ2dnb2O8OLOJlze3jLB5TJ/m4TYKxmnQr0UE/C3mw1j2HPl6azB1qHUpe1ssl/o+2rsJPjXrAVSrDLsT/zixJxpZ7LGjVqswrH1903J288nWzpjPcYgI0uLx0iJ7ACQTqM0ncLaqLw2Bxp4bf7Of891dGmLdizebKg9HBPl6tH6MJ7DHgogqTXjphTs+IgC//5/9XWblElrHx7Q8tTza2CmIZk9YgBZhAVpsMVtC6mx57+vD22Bg6ygkxIXC38fL4QRXI8sLc3m5OqRh7v17O0jOyXxY5utHumPcgp0AYHMHYUvG5apjbewPA5T14DjyydjOuPPTrabvvTRqSS+gvXkstjgbTjLnpVZj5sg2OHb5OrrG1sWxy2XLaX3NgsLrw9ugY0wIsvKLMbp7DH7ecxavLN8PAGgdHYSk5/siPMBxAN74Uj+sTD2Pb3dkYMeJqy63sbIwWBCRx/1nWCtkXM23KlpV3XxyXye8+OM+PNPf9uRTe1Y92wc/7Tnt0qojW5zNkTDn661BPzt/9Vt6/tbm2HniKoaXYwWPLeWZQmJ5nTbPTa3qB2HVs32wLu2S1Z43tvxvfFfsO5Nt2qPH0oejO2HZzgy8988Ru8/RuVFdnJg1FE2m/mXzfneChT3hgVpctgiobRsEYZxZ3ZQSfdlkDj+LlUTmYbFxWFmviY9GbTXUaIuPlxojOzbAlbwiBgsiUqYJvV2f5Can+IhArJjUy+3HtYgKxKvD7O8W60z3xqEIrePjsER4eTztZkBypjxTUx31lnipVWgRFYgWUa6t1qqj9UJi03p27w8L0OKpW5rh9NUb2H82G9Ehfkg6ZF18rSI1YlxhK4BZ1mJpGh6A6GBfhPj7OFw1Y77ix93Jzx0auteDVlkYLIiIqpifjwbbp/aXtdaAK9zpsZg6pCVSzmRZregw75yprDLT79xtuIgLIZB9oxhTft5nt9fGukel4u+B5ZDRXZ0bSnY8BgzzZTa81M/p63WOrQtvjQpxYe6Hzq6NQ7Hwwa5oXM+6uFhVYrAgIpKBO+P1chFu9FmYT040J9nkr5KrT6lUKoT4++DzcfYnCVuGpfpOapW4IsDXC5nXi8zaYfs4V+p7BGi9sH/6oHLvW3RLS9eXVVeW6v8vm4iIZOGJLdXN/5p3pShUZetSup/N5+O6YHS3GIxzo0y2pS8f6IrmkQGSVSaA6wW87PH11lSLn1V5sceCiIgqjXk4kXMr7x2v9MeF7ALTks5BbaIwqI2D/W+cUKkMBdlubW3oIXBlhUptwR4LIiKqNOZzLOSshBsZ5IsOMSEVfp5FD3VDw7p++HZCD8ntC802/6vt3AoW06dPh0qlkny0bNnS+QOJiKjGiQn1d36QE17VfIKqu/q1iMDmKbdYrVYx3y23hu8kUGFuD4W0adMGSUlJZU/gxdEUIiIl+XZCAjYcuYxxPco//8CoY8MQDGkbZVXVUslUFZ5lUbO5nQq8vLwQFVX+cSkiIqreesaHoWd8mEeeS61WYf79XTzyXFQzuD3H4ujRo4iOjkZcXBzGjh2LjIwMh8cXFhYiJydH8kFERKRUnVwoV65kKuFGMfiVK1ciLy8PLVq0wPnz5zFjxgycPXsWqampCAy0XUlt+vTpmDFjhtXt2dnZCApybdc9IiKi6i79Ui52nbyGe7vG1Ojlovbk5OQgODjY6fXbrWBhKSsrC7GxsXj//ffxyCOP2DymsLAQhYVlNdRzcnIQExPDYEFERFSDuBosKjTzMiQkBM2bN0d6errdY7RaLbTaim1NTERERDVDhepY5OXl4dixY6hfv77zg4mIiEjx3AoWL774IjZs2ICTJ09i69atuOOOO6DRaDBmzJjKah8RERHVIG4NhZw5cwZjxoxBZmYmwsPDcdNNN2H79u0IDw+vrPYRERFRDeJWsFi2bFlltYOIiIgUgHuFEBERkccwWBAREZHHMFgQERGRxzBYEBERkccwWBAREZHHMFgQERGRxzBYEBERkccwWBAREZHHVGgTsvIwbqaak5NT1S9NRERE5WS8bjvbFL3Kg0Vubi4AICYmpqpfmoiIiCooNzcXwcHBdu9XCWfRw8P0ej3OnTuHwMBAqFQqjz1vTk4OYmJicPr0aYf7xNdkSj9HpZ8foPxzVPr5Aco/R6WfH6D8c6ys8xNCIDc3F9HR0VCr7c+kqPIeC7VajYYNG1ba8wcFBSnyH4o5pZ+j0s8PUP45Kv38AOWfo9LPD1D+OVbG+TnqqTDi5E0iIiLyGAYLIiIi8hjFBAutVovXX38dWq1W7qZUGqWfo9LPD1D+OSr9/ADln6PSzw9Q/jnKfX5VPnmTiIiIlEsxPRZEREQkPwYLIiIi8hgGCyIiIvIYBgsiIiLyGMUEi3nz5qFx48bw9fVFQkICdu7cKXeTXDJr1ix069YNgYGBiIiIwO233460tDTJMTfffDNUKpXkY+LEiZJjMjIyMGzYMPj7+yMiIgKTJ09GSUlJVZ6KTdOnT7dqe8uWLU33FxQUYNKkSahXrx4CAgJw11134eLFi5LnqK7nZtS4cWOrc1SpVJg0aRKAmvf+bdy4EcOHD0d0dDRUKhVWrFghuV8Igddeew3169eHn58fBgwYgKNHj0qOuXr1KsaOHYugoCCEhITgkUceQV5enuSYffv2oXfv3vD19UVMTAzefffdyj41E0fnWFxcjClTpqBdu3aoU6cOoqOj8cADD+DcuXOS57D1vs+ePVtyjFzn6Ow9fPDBB63aPnjwYMkxNfk9BGDz/6RKpcKcOXNMx1Tn99CVa4Onfn+uX78enTt3hlarRXx8PBYvXlyxxgsFWLZsmfDx8RELFy4UBw4cEI8++qgICQkRFy9elLtpTg0aNEgsWrRIpKamiuTkZDF06FDRqFEjkZeXZzqmb9++4tFHHxXnz583fWRnZ5vuLykpEW3bthUDBgwQe/fuFX/99ZcICwsTU6dOleOUJF5//XXRpk0bSdsvX75sun/ixIkiJiZGrFmzRuzevVv06NFD9OzZ03R/dT43o0uXLknOb/Xq1QKAWLdunRCi5r1/f/31l3j11VfFL7/8IgCI5cuXS+6fPXu2CA4OFitWrBApKSlixIgRokmTJuLGjRumYwYPHiw6dOggtm/fLjZt2iTi4+PFmDFjTPdnZ2eLyMhIMXbsWJGamiq+++474efnJz7//HPZzzErK0sMGDBAfP/99+Lw4cNi27Ztonv37qJLly6S54iNjRVvvPGG5H01/38r5zk6ew/Hjx8vBg8eLGn71atXJcfU5PdQCCE5t/Pnz4uFCxcKlUoljh07ZjqmOr+HrlwbPPH78/jx48Lf3188//zz4uDBg+Ljjz8WGo1G/P333+VuuyKCRffu3cWkSZNM3+t0OhEdHS1mzZolY6vK59KlSwKA2LBhg+m2vn37imeeecbuY/766y+hVqvFhQsXTLfNnz9fBAUFicLCwspsrlOvv/666NChg837srKyhLe3t/jxxx9Ntx06dEgAENu2bRNCVO9zs+eZZ54RTZs2FXq9XghRs98/y1/Yer1eREVFiTlz5phuy8rKElqtVnz33XdCCCEOHjwoAIhdu3aZjlm5cqVQqVTi7NmzQgghPv30U1G3bl3J+U2ZMkW0aNGiks/Imq2LkqWdO3cKAOLUqVOm22JjY8UHH3xg9zHV5RztBYuRI0fafYwS38ORI0eKW265RXJbTXkPhbC+Nnjq9+dLL70k2rRpI3mtUaNGiUGDBpW7rTV+KKSoqAh79uzBgAEDTLep1WoMGDAA27Ztk7Fl5ZOdnQ0ACA0Nldz+zTffICwsDG3btsXUqVORn59vum/btm1o164dIiMjTbcNGjQIOTk5OHDgQNU03IGjR48iOjoacXFxGDt2LDIyMgAAe/bsQXFxseS9a9myJRo1amR676r7uVkqKirC0qVL8fDDD0s22avJ75+5EydO4MKFC5L3LDg4GAkJCZL3LCQkBF27djUdM2DAAKjVauzYscN0TJ8+feDj42M6ZtCgQUhLS8O1a9eq6Gxcl52dDZVKhZCQEMnts2fPRr169dCpUyfMmTNH0sVc3c9x/fr1iIiIQIsWLfDEE08gMzPTdJ/S3sOLFy/izz//xCOPPGJ1X015Dy2vDZ76/blt2zbJcxiPqcj1s8o3IfO0K1euQKfTSX5wABAZGYnDhw/L1Kry0ev1ePbZZ9GrVy+0bdvWdPt9992H2NhYREdHY9++fZgyZQrS0tLwyy+/AAAuXLhg8/yN98kpISEBixcvRosWLXD+/HnMmDEDvXv3RmpqKi5cuAAfHx+rX9aRkZGmdlfnc7NlxYoVyMrKwoMPPmi6rSa/f5aM7bHVXvP3LCIiQnK/l5cXQkNDJcc0adLE6jmM99WtW7dS2l8eBQUFmDJlCsaMGSPZ0Onpp59G586dERoaiq1bt2Lq1Kk4f/483n//fQDV+xwHDx6MO++8E02aNMGxY8fwyiuvYMiQIdi2bRs0Go3i3sMlS5YgMDAQd955p+T2mvIe2ro2eOr3p71jcnJycOPGDfj5+bnd3hofLJRk0qRJSE1NxebNmyW3P/bYY6av27Vrh/r166N///44duwYmjZtWtXNdMuQIUNMX7dv3x4JCQmIjY3FDz/8UK5/sNXdggULMGTIEERHR5tuq8nvX21XXFyMe++9F0IIzJ8/X3Lf888/b/q6ffv28PHxweOPP45Zs2ZV+1LRo0ePNn3drl07tG/fHk2bNsX69evRv39/GVtWORYuXIixY8fC19dXcntNeQ/tXRuqqxo/FBIWFgaNRmM1E/bixYuIioqSqVXue+qpp/DHH39g3bp1TreVT0hIAACkp6cDAKKiomyev/G+6iQkJATNmzdHeno6oqKiUFRUhKysLMkx5u9dTTq3U6dOISkpCRMmTHB4XE1+/4ztcfT/LSoqCpcuXZLcX1JSgqtXr9ao99UYKk6dOoXVq1c73X46ISEBJSUlOHnyJICacY5GcXFxCAsLk/ybVMJ7CACbNm1CWlqa0/+XQPV8D+1dGzz1+9PeMUFBQeX+46/GBwsfHx906dIFa9asMd2m1+uxZs0aJCYmytgy1wgh8NRTT2H58uVYu3atVbebLcnJyQCA+vXrAwASExOxf/9+yS8C4y/C1q1bV0q7yysvLw/Hjh1D/fr10aVLF3h7e0veu7S0NGRkZJjeu5p0bosWLUJERASGDRvm8Lia/P41adIEUVFRkvcsJycHO3bskLxnWVlZ2LNnj+mYtWvXQq/Xm0JVYmIiNm7ciOLiYtMxq1evRosWLapFF7oxVBw9ehRJSUmoV6+e08ckJydDrVabhhCq+zmaO3PmDDIzMyX/Jmv6e2i0YMECdOnSBR06dHB6bHV6D51dGzz1+zMxMVHyHMZjKnT9LPe0z2pk2bJlQqvVisWLF4uDBw+Kxx57TISEhEhmwlZXTzzxhAgODhbr16+XLHnKz88XQgiRnp4u3njjDbF7925x4sQJ8euvv4q4uDjRp08f03MYlxQNHDhQJCcni7///luEh4dXiyWZL7zwgli/fr04ceKE2LJlixgwYIAICwsTly5dEkIYlks1atRIrF27VuzevVskJiaKxMRE0+Or87mZ0+l0olGjRmLKlCmS22vi+5ebmyv27t0r9u7dKwCI999/X+zdu9e0ImL27NkiJCRE/Prrr2Lfvn1i5MiRNpebdurUSezYsUNs3rxZNGvWTLJUMSsrS0RGRopx48aJ1NRUsWzZMuHv719lSxUdnWNRUZEYMWKEaNiwoUhOTpb8vzTOpN+6dav44IMPRHJysjh27JhYunSpCA8PFw888EC1OEdH55ebmytefPFFsW3bNnHixAmRlJQkOnfuLJo1ayYKCgpMz1GT30Oj7Oxs4e/vL+bPn2/1+Or+Hjq7Ngjhmd+fxuWmkydPFocOHRLz5s3jclOjjz/+WDRq1Ej4+PiI7t27i+3bt8vdJJcAsPmxaNEiIYQQGRkZok+fPiI0NFRotVoRHx8vJk+eLKmDIIQQJ0+eFEOGDBF+fn4iLCxMvPDCC6K4uFiGM5IaNWqUqF+/vvDx8RENGjQQo0aNEunp6ab7b9y4IZ588klRt25d4e/vL+644w5x/vx5yXNU13Mzt2rVKgFApKWlSW6vie/funXrbP6bHD9+vBDCsOR02rRpIjIyUmi1WtG/f3+r887MzBRjxowRAQEBIigoSDz00EMiNzdXckxKSoq46aabhFarFQ0aNBCzZ8+uqlN0eI4nTpyw+//SWJtkz549IiEhQQQHBwtfX1/RqlUr8fbbb0suzHKeo6Pzy8/PFwMHDhTh4eHC29tbxMbGikcffdTqD7Ga/B4aff7558LPz09kZWVZPb66v4fOrg1CeO7357p160THjh2Fj4+PiIuLk7xGeXDbdCIiIvKYGj/HgoiIiKoPBgsiIiLyGAYLIiIi8hgGCyIiIvIYBgsiIiLyGAYLIiIi8hgGCyIiIvIYBgsiIiLyGAYLIiIi8hgGCyIiIvIYBgsiIiLyGAYLIiIi8pj/B2QC2eM6gTtrAAAAAElFTkSuQmCC", "text/plain": [ "
" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "import matplotlib.pyplot as plt\n", "\n", "plt.plot(loss_track2)\n", "plt.show()" ] }, { "cell_type": "code", "execution_count": 99, "metadata": {}, "outputs": [], "source": [ "torch.save(model.state_dict(), 'model.bin')" ] }, { "cell_type": "code", "execution_count": 100, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "c:\\PROGRAMY\\Anaconda3\\envs\\scweet\\lib\\site-packages\\torch\\nn\\modules\\container.py:217: UserWarning: Implicit dimension choice for softmax has been deprecated. Change the call to include dim=X as an argument.\n", " input = module(input)\n" ] }, { "data": { "text/plain": [ "[('', 0, 0.1108938604593277),\n", " ('was', 12, 0.0792110487818718),\n", " ('had', 37, 0.07402306795120239),\n", " ('is', 8, 0.04529397189617157),\n", " ('has', 39, 0.03909718990325928),\n", " ('would', 48, 0.038855526596307755),\n", " ('said', 43, 0.022579118609428406),\n", " ('will', 27, 0.02008220925927162),\n", " ('went', 251, 0.013605386018753052),\n", " ('did', 151, 0.013007525354623795)]" ] }, "execution_count": 100, "metadata": {}, "output_type": "execute_result" } ], "source": [ "device = 'cpu'\n", "model = SimpleBigramNeuralLanguageModel(vocab_size, embed_size).to(device)\n", "model.load_state_dict(torch.load('model.bin'))\n", "model.eval()\n", "\n", "ixs = torch.tensor(vocab.forward(['he'])).to(device)\n", "\n", "out = model(ixs)\n", "top = torch.topk(out[0], 10)\n", "top_indices = top.indices.tolist()\n", "top_probs = top.values.tolist()\n", "top_words = vocab.lookup_tokens(top_indices)\n", "list(zip(top_words, top_indices, top_probs))" ] }, { "cell_type": "code", "execution_count": 101, "metadata": {}, "outputs": [], "source": [ "def prediction(word: str) -> str:\n", " ixs = torch.tensor(vocab.forward([word])).to(device)\n", " out = model(ixs)\n", " top = torch.topk(out[0], 5)\n", " top_indices = top.indices.tolist()\n", " top_probs = top.values.tolist()\n", " top_words = vocab.lookup_tokens(top_indices)\n", " zipped = list(zip(top_words, top_probs))\n", " for index, element in enumerate(zipped):\n", " unk = None\n", " if '' in element:\n", " unk = zipped.pop(index)\n", " zipped.append(('', unk[1]))\n", " break\n", " if unk is None:\n", " zipped[-1] = ('', zipped[-1][1])\n", " return ' '.join([f'{x[0]}:{x[1]}' for x in zipped])" ] }, { "cell_type": "code", "execution_count": 102, "metadata": {}, "outputs": [], "source": [ "def create_outputs(folder_name):\n", " print(f'Creating outputs in {folder_name}')\n", " with lzma.open(f'{folder_name}/in.tsv.xz', mode='rt', encoding='utf-8') as fid:\n", " with open(f'{folder_name}/out.tsv', 'w', encoding='utf-8', newline='\\n') as f:\n", " for line in fid:\n", " separated = line.split('\\t')\n", " prefix = separated[6].replace(r'\\n', ' ').split()[-1]\n", " output_line = prediction(prefix)\n", " f.write(output_line + '\\n')" ] }, { "cell_type": "code", "execution_count": 103, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Creating outputs in dev-0\n", "Creating outputs in test-A\n" ] } ], "source": [ "create_outputs('dev-0')\n", "create_outputs('test-A')" ] } ], "metadata": { "kernelspec": { "display_name": "scweet", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.8.15" }, "orig_nbformat": 4 }, "nbformat": 4, "nbformat_minor": 2 }