challenging-america-word-ga.../nb_nn.ipynb

{
"cells": [
{
"cell_type": "code",
"execution_count": 74,
"metadata": {},
"outputs": [],
"source": [
"import torch\n",
"import lzma\n",
"from itertools import islice\n",
"import regex as re\n",
"import sys\n",
"from torchtext.vocab import build_vocab_from_iterator\n",
"from torch import nn\n",
"from torch.utils.data import IterableDataset\n",
"import itertools\n",
"from torch.utils.data import DataLoader"
]
},
{
"cell_type": "code",
"execution_count": 75,
"metadata": {},
"outputs": [],
"source": [
"# torch.cuda.is_available()"
]
},
{
"cell_type": "code",
"execution_count": 76,
"metadata": {},
"outputs": [],
"source": [
"# torch.cuda.device_count()"
]
},
{
"cell_type": "code",
"execution_count": 77,
"metadata": {},
"outputs": [],
"source": [
"# torch.cuda.current_device()"
]
},
{
"cell_type": "code",
"execution_count": 78,
"metadata": {},
"outputs": [],
"source": [
"# torch.cuda.device(0)"
]
},
{
"cell_type": "code",
"execution_count": 79,
"metadata": {},
"outputs": [],
"source": [
"# torch.cuda.get_device_name(0)"
]
},
{
"cell_type": "code",
"execution_count": 80,
"metadata": {},
"outputs": [
{
"ename": "KeyboardInterrupt",
"evalue": "",
"output_type": "error",
"traceback": [
"\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[1;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)",
"Cell \u001b[1;32mIn[80], line 18\u001b[0m\n\u001b[0;32m 14\u001b[0m \u001b[39myield\u001b[39;00m get_words_from_line(line)\n\u001b[0;32m 16\u001b[0m vocab_size \u001b[39m=\u001b[39m \u001b[39m10_000\u001b[39m\n\u001b[1;32m---> 18\u001b[0m vocab \u001b[39m=\u001b[39m build_vocab_from_iterator(\n\u001b[0;32m 19\u001b[0m get_word_lines_from_file(\u001b[39m\"\u001b[39;49m\u001b[39mtrain/in.tsv.xz\u001b[39;49m\u001b[39m\"\u001b[39;49m),\n\u001b[0;32m 20\u001b[0m max_tokens \u001b[39m=\u001b[39;49m vocab_size,\n\u001b[0;32m 21\u001b[0m specials \u001b[39m=\u001b[39;49m [\u001b[39m'\u001b[39;49m\u001b[39m<unk>\u001b[39;49m\u001b[39m'\u001b[39;49m])\n\u001b[0;32m 23\u001b[0m vocab\u001b[39m.\u001b[39mset_default_index(vocab[\u001b[39m'\u001b[39m\u001b[39m<unk>\u001b[39m\u001b[39m'\u001b[39m])\n",
"File \u001b[1;32mc:\\PROGRAMY\\Anaconda3\\envs\\scweet\\lib\\site-packages\\torchtext\\vocab\\vocab_factory.py:99\u001b[0m, in \u001b[0;36mbuild_vocab_from_iterator\u001b[1;34m(iterator, min_freq, specials, special_first, max_tokens)\u001b[0m\n\u001b[0;32m 97\u001b[0m counter \u001b[39m=\u001b[39m Counter()\n\u001b[0;32m 98\u001b[0m \u001b[39mfor\u001b[39;00m tokens \u001b[39min\u001b[39;00m iterator:\n\u001b[1;32m---> 99\u001b[0m counter\u001b[39m.\u001b[39;49mupdate(tokens)\n\u001b[0;32m 101\u001b[0m specials \u001b[39m=\u001b[39m specials \u001b[39mor\u001b[39;00m []\n\u001b[0;32m 103\u001b[0m \u001b[39m# First sort by descending frequency, then lexicographically\u001b[39;00m\n",
"File \u001b[1;32mc:\\PROGRAMY\\Anaconda3\\envs\\scweet\\lib\\collections\\__init__.py:637\u001b[0m, in \u001b[0;36mCounter.update\u001b[1;34m(self, iterable, **kwds)\u001b[0m\n\u001b[0;32m 635\u001b[0m \u001b[39msuper\u001b[39m(Counter, \u001b[39mself\u001b[39m)\u001b[39m.\u001b[39mupdate(iterable) \u001b[39m# fast path when counter is empty\u001b[39;00m\n\u001b[0;32m 636\u001b[0m \u001b[39melse\u001b[39;00m:\n\u001b[1;32m--> 637\u001b[0m _count_elements(\u001b[39mself\u001b[39;49m, iterable)\n\u001b[0;32m 638\u001b[0m \u001b[39mif\u001b[39;00m kwds:\n\u001b[0;32m 639\u001b[0m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39mupdate(kwds)\n",
"Cell \u001b[1;32mIn[80], line 4\u001b[0m, in \u001b[0;36mget_words_from_line\u001b[1;34m(line)\u001b[0m\n\u001b[0;32m 2\u001b[0m line \u001b[39m=\u001b[39m line\u001b[39m.\u001b[39mrstrip()\n\u001b[0;32m 3\u001b[0m line \u001b[39m=\u001b[39m re\u001b[39m.\u001b[39msub(\u001b[39mr\u001b[39m\u001b[39m'\u001b[39m\u001b[39m\\\\\u001b[39;00m\u001b[39mn\u001b[39m\u001b[39m'\u001b[39m, \u001b[39m'\u001b[39m\u001b[39m \u001b[39m\u001b[39m'\u001b[39m, line)\n\u001b[1;32m----> 4\u001b[0m line \u001b[39m=\u001b[39m re\u001b[39m.\u001b[39;49msub(\u001b[39mr\u001b[39;49m\u001b[39m'\u001b[39;49m\u001b[39m[^a-zA-Z] \u001b[39;49m\u001b[39m'\u001b[39;49m, \u001b[39m'\u001b[39;49m\u001b[39m \u001b[39;49m\u001b[39m'\u001b[39;49m, line)\n\u001b[0;32m 5\u001b[0m line \u001b[39m=\u001b[39m line\u001b[39m.\u001b[39mlower()\n\u001b[0;32m 6\u001b[0m \u001b[39myield\u001b[39;00m \u001b[39m'\u001b[39m\u001b[39m<s>\u001b[39m\u001b[39m'\u001b[39m\n",
"File \u001b[1;32mc:\\PROGRAMY\\Anaconda3\\envs\\scweet\\lib\\site-packages\\regex\\regex.py:278\u001b[0m, in \u001b[0;36msub\u001b[1;34m(pattern, repl, string, count, flags, pos, endpos, concurrent, timeout, ignore_unused, **kwargs)\u001b[0m\n\u001b[0;32m 272\u001b[0m \u001b[39m\u001b[39m\u001b[39m\"\"\"Return the string obtained by replacing the leftmost (or rightmost with a\u001b[39;00m\n\u001b[0;32m 273\u001b[0m \u001b[39mreverse pattern) non-overlapping occurrences of the pattern in string by the\u001b[39;00m\n\u001b[0;32m 274\u001b[0m \u001b[39mreplacement repl. repl can be either a string or a callable; if a string,\u001b[39;00m\n\u001b[0;32m 275\u001b[0m \u001b[39mbackslash escapes in it are processed; if a callable, it's passed the match\u001b[39;00m\n\u001b[0;32m 276\u001b[0m \u001b[39mobject and must return a replacement string to be used.\"\"\"\u001b[39;00m\n\u001b[0;32m 277\u001b[0m pat \u001b[39m=\u001b[39m _compile(pattern, flags, ignore_unused, kwargs, \u001b[39mTrue\u001b[39;00m)\n\u001b[1;32m--> 278\u001b[0m \u001b[39mreturn\u001b[39;00m pat\u001b[39m.\u001b[39;49msub(repl, string, count, pos, endpos, concurrent, timeout)\n",
"\u001b[1;31mKeyboardInterrupt\u001b[0m: "
]
}
],
"source": [
"def get_words_from_line(line):\n",
" line = line.rstrip()\n",
" line = re.sub(r'\\\\n', ' ', line)\n",
" line = re.sub(r'[^a-zA-Z] ', ' ', line)\n",
" line = line.lower()\n",
" yield '<s>'\n",
" for t in line.split():\n",
" yield t\n",
" yield '</s>'\n",
"\n",
"def get_word_lines_from_file(file_name):\n",
" with lzma.open(file_name, encoding='utf8', mode=\"rt\") as fh:\n",
" for line in fh:\n",
" yield get_words_from_line(line)\n",
"\n",
"vocab_size = 10_000\n",
"\n",
"vocab = build_vocab_from_iterator(\n",
" get_word_lines_from_file(\"train/in.tsv.xz\"),\n",
" max_tokens = vocab_size,\n",
" specials = ['<unk>'])\n",
"\n",
"vocab.set_default_index(vocab['<unk>'])"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"def look_ahead_iterator(gen):\n",
" prev = None\n",
" for item in gen:\n",
" if prev is not None:\n",
" yield (prev, item)\n",
" prev = item\n",
"\n",
"class Bigrams(IterableDataset):\n",
" def __init__(self, text_file, vocabulary_size):\n",
" self.vocab = vocab\n",
" self.vocab.set_default_index(self.vocab['<unk>'])\n",
" self.vocabulary_size = vocabulary_size\n",
" self.text_file = text_file\n",
"\n",
" def __iter__(self):\n",
" return look_ahead_iterator(\n",
" (self.vocab[t] for t in itertools.chain.from_iterable(get_word_lines_from_file(self.text_file))))\n",
"\n",
"train_dataset = Bigrams(\"train/in.tsv.xz\", vocab_size)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"[tensor([ 33, 0, 226, 35, 0, 6421, 6420, 219, 5781, 1]),\n",
" tensor([ 0, 226, 35, 0, 6421, 6420, 219, 5781, 1, 113])]"
]
},
"execution_count": 67,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"next(iter(DataLoader(train_dataset, batch_size=10)))"
]
},
{
"cell_type": "code",
"execution_count": 85,
"metadata": {},
"outputs": [],
"source": [
"embed_size = 100\n",
"\n",
"class SimpleBigramNeuralLanguageModel(nn.Module):\n",
" def __init__(self, vocabulary_size, embedding_size):\n",
" super(SimpleBigramNeuralLanguageModel, self).__init__()\n",
" self.model = nn.Sequential(\n",
" nn.Embedding(vocabulary_size, embedding_size),\n",
" nn.Linear(embedding_size, 1000),\n",
" nn.ReLU(),\n",
" nn.Linear(1000, 500),\n",
" nn.ReLU(),\n",
" nn.Linear(500, vocabulary_size),\n",
" nn.Softmax()\n",
" )\n",
"\n",
" def forward(self, x):\n",
" return self.model(x)\n",
"\n",
"model = SimpleBigramNeuralLanguageModel(vocab_size, embed_size)\n",
"\n",
"vocab.set_default_index(vocab['<unk>'])\n",
"ixs = torch.tensor(vocab.forward(['is']))"
]
},
{
"cell_type": "code",
"execution_count": 86,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"tensor(8.4503e-05, grad_fn=<SelectBackward0>)"
]
},
"execution_count": 86,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"out = model(ixs)\n",
"out[0][vocab['is']]"
]
},
{
"cell_type": "code",
"execution_count": 87,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"0 tensor(9.2249, grad_fn=<NllLossBackward0>)\n",
"1 LOSS DIFF: tensor(6.9568, grad_fn=<NllLossBackward0>) tensor(6.9539, grad_fn=<NllLossBackward0>)\n",
"2 LOSS DIFF: tensor(6.5283, grad_fn=<NllLossBackward0>) tensor(6.3437, grad_fn=<NllLossBackward0>)\n",
"3 LOSS DIFF: tensor(6.4010, grad_fn=<NllLossBackward0>) tensor(6.3773, grad_fn=<NllLossBackward0>)\n",
"4 LOSS DIFF: tensor(6.4818, grad_fn=<NllLossBackward0>) tensor(6.4010, grad_fn=<NllLossBackward0>)\n",
"5 LOSS DIFF: tensor(6.4520, grad_fn=<NllLossBackward0>) tensor(6.3898, grad_fn=<NllLossBackward0>)\n",
"6 LOSS DIFF: tensor(6.2989, grad_fn=<NllLossBackward0>) tensor(6.2184, grad_fn=<NllLossBackward0>)\n",
"7 LOSS DIFF: tensor(6.3109, grad_fn=<NllLossBackward0>) tensor(6.2989, grad_fn=<NllLossBackward0>)\n",
"8 LOSS DIFF: tensor(6.3028, grad_fn=<NllLossBackward0>) tensor(6.2805, grad_fn=<NllLossBackward0>)\n",
"9 LOSS DIFF: tensor(6.3590, grad_fn=<NllLossBackward0>) tensor(6.3028, grad_fn=<NllLossBackward0>)\n",
"10 LOSS DIFF: tensor(6.1484, grad_fn=<NllLossBackward0>) tensor(6.1278, grad_fn=<NllLossBackward0>)\n",
"11 LOSS DIFF: tensor(6.2458, grad_fn=<NllLossBackward0>) tensor(6.0779, grad_fn=<NllLossBackward0>)\n",
"12 LOSS DIFF: tensor(6.3209, grad_fn=<NllLossBackward0>) tensor(6.2458, grad_fn=<NllLossBackward0>)\n",
"13 LOSS DIFF: tensor(6.2801, grad_fn=<NllLossBackward0>) tensor(6.1436, grad_fn=<NllLossBackward0>)\n",
"14 LOSS DIFF: tensor(6.1245, grad_fn=<NllLossBackward0>) tensor(6.0657, grad_fn=<NllLossBackward0>)\n",
"15 LOSS DIFF: tensor(6.2682, grad_fn=<NllLossBackward0>) tensor(6.0906, grad_fn=<NllLossBackward0>)\n",
"16 LOSS DIFF: tensor(6.0394, grad_fn=<NllLossBackward0>) tensor(6.0062, grad_fn=<NllLossBackward0>)\n",
"17 LOSS DIFF: tensor(6.1070, grad_fn=<NllLossBackward0>) tensor(6.0394, grad_fn=<NllLossBackward0>)\n",
"18 LOSS DIFF: tensor(6.2271, grad_fn=<NllLossBackward0>) tensor(6.1070, grad_fn=<NllLossBackward0>)\n",
"19 LOSS DIFF: tensor(6.0964, grad_fn=<NllLossBackward0>) tensor(6.0577, grad_fn=<NllLossBackward0>)\n",
"20 LOSS DIFF: tensor(6.0909, grad_fn=<NllLossBackward0>) tensor(6.0436, grad_fn=<NllLossBackward0>)\n",
"21 LOSS DIFF: tensor(6.0210, grad_fn=<NllLossBackward0>) tensor(6.0016, grad_fn=<NllLossBackward0>)\n",
"22 LOSS DIFF: tensor(6.0296, grad_fn=<NllLossBackward0>) tensor(6.0210, grad_fn=<NllLossBackward0>)\n",
"23 LOSS DIFF: tensor(6.1812, grad_fn=<NllLossBackward0>) tensor(6.0296, grad_fn=<NllLossBackward0>)\n",
"24 LOSS DIFF: tensor(6.1665, grad_fn=<NllLossBackward0>) tensor(6.0736, grad_fn=<NllLossBackward0>)\n",
"25 LOSS DIFF: tensor(6.0107, grad_fn=<NllLossBackward0>) tensor(5.9340, grad_fn=<NllLossBackward0>)\n",
"26 LOSS DIFF: tensor(5.9806, grad_fn=<NllLossBackward0>) tensor(5.9473, grad_fn=<NllLossBackward0>)\n",
"27 LOSS DIFF: tensor(5.9364, grad_fn=<NllLossBackward0>) tensor(5.8515, grad_fn=<NllLossBackward0>)\n",
"28 LOSS DIFF: tensor(5.9202, grad_fn=<NllLossBackward0>) tensor(5.9180, grad_fn=<NllLossBackward0>)\n",
"29 LOSS DIFF: tensor(6.0357, grad_fn=<NllLossBackward0>) tensor(5.8964, grad_fn=<NllLossBackward0>)\n",
"30 LOSS DIFF: tensor(6.1189, grad_fn=<NllLossBackward0>) tensor(5.9309, grad_fn=<NllLossBackward0>)\n",
"31 LOSS DIFF: tensor(6.0280, grad_fn=<NllLossBackward0>) tensor(5.8488, grad_fn=<NllLossBackward0>)\n",
"32 LOSS DIFF: tensor(6.1555, grad_fn=<NllLossBackward0>) tensor(6.0280, grad_fn=<NllLossBackward0>)\n",
"33 LOSS DIFF: tensor(6.0389, grad_fn=<NllLossBackward0>) tensor(5.9000, grad_fn=<NllLossBackward0>)\n",
"34 LOSS DIFF: tensor(5.8367, grad_fn=<NllLossBackward0>) tensor(5.7437, grad_fn=<NllLossBackward0>)\n",
"35 LOSS DIFF: tensor(5.9835, grad_fn=<NllLossBackward0>) tensor(5.8367, grad_fn=<NllLossBackward0>)\n",
"36 LOSS DIFF: tensor(5.9613, grad_fn=<NllLossBackward0>) tensor(5.7643, grad_fn=<NllLossBackward0>)\n",
"37 LOSS DIFF: tensor(6.0189, grad_fn=<NllLossBackward0>) tensor(5.9613, grad_fn=<NllLossBackward0>)\n",
"38 LOSS DIFF: tensor(5.9064, grad_fn=<NllLossBackward0>) tensor(5.8300, grad_fn=<NllLossBackward0>)\n",
"39 LOSS DIFF: tensor(5.9395, grad_fn=<NllLossBackward0>) tensor(5.8984, grad_fn=<NllLossBackward0>)\n",
"40 LOSS DIFF: tensor(5.9919, grad_fn=<NllLossBackward0>) tensor(5.9395, grad_fn=<NllLossBackward0>)\n",
"41 LOSS DIFF: tensor(5.8834, grad_fn=<NllLossBackward0>) tensor(5.8792, grad_fn=<NllLossBackward0>)\n",
"42 LOSS DIFF: tensor(5.7971, grad_fn=<NllLossBackward0>) tensor(5.7641, grad_fn=<NllLossBackward0>)\n",
"43 LOSS DIFF: tensor(5.8632, grad_fn=<NllLossBackward0>) tensor(5.7971, grad_fn=<NllLossBackward0>)\n",
"44 LOSS DIFF: tensor(5.8988, grad_fn=<NllLossBackward0>) tensor(5.8632, grad_fn=<NllLossBackward0>)\n",
"45 LOSS DIFF: tensor(5.9258, grad_fn=<NllLossBackward0>) tensor(5.8670, grad_fn=<NllLossBackward0>)\n",
"100 tensor(5.8536, grad_fn=<NllLossBackward0>)\n",
"46 LOSS DIFF: tensor(5.8536, grad_fn=<NllLossBackward0>) tensor(5.8226, grad_fn=<NllLossBackward0>)\n",
"47 LOSS DIFF: tensor(5.8648, grad_fn=<NllLossBackward0>) tensor(5.8536, grad_fn=<NllLossBackward0>)\n",
"48 LOSS DIFF: tensor(6.0083, grad_fn=<NllLossBackward0>) tensor(5.8648, grad_fn=<NllLossBackward0>)\n",
"49 LOSS DIFF: tensor(5.8324, grad_fn=<NllLossBackward0>) tensor(5.7953, grad_fn=<NllLossBackward0>)\n",
"50 LOSS DIFF: tensor(5.9055, grad_fn=<NllLossBackward0>) tensor(5.8324, grad_fn=<NllLossBackward0>)\n",
"51 LOSS DIFF: tensor(5.9507, grad_fn=<NllLossBackward0>) tensor(5.7720, grad_fn=<NllLossBackward0>)\n",
"52 LOSS DIFF: tensor(5.8892, grad_fn=<NllLossBackward0>) tensor(5.7376, grad_fn=<NllLossBackward0>)\n",
"53 LOSS DIFF: tensor(5.8218, grad_fn=<NllLossBackward0>) tensor(5.6474, grad_fn=<NllLossBackward0>)\n",
"54 LOSS DIFF: tensor(5.8381, grad_fn=<NllLossBackward0>) tensor(5.8218, grad_fn=<NllLossBackward0>)\n",
"55 LOSS DIFF: tensor(5.9608, grad_fn=<NllLossBackward0>) tensor(5.8381, grad_fn=<NllLossBackward0>)\n",
"56 LOSS DIFF: tensor(5.9855, grad_fn=<NllLossBackward0>) tensor(5.9496, grad_fn=<NllLossBackward0>)\n",
"57 LOSS DIFF: tensor(5.9235, grad_fn=<NllLossBackward0>) tensor(5.7299, grad_fn=<NllLossBackward0>)\n",
"58 LOSS DIFF: tensor(5.9411, grad_fn=<NllLossBackward0>) tensor(5.7029, grad_fn=<NllLossBackward0>)\n",
"59 LOSS DIFF: tensor(5.8516, grad_fn=<NllLossBackward0>) tensor(5.7566, grad_fn=<NllLossBackward0>)\n",
"60 LOSS DIFF: tensor(5.8243, grad_fn=<NllLossBackward0>) tensor(5.6658, grad_fn=<NllLossBackward0>)\n",
"61 LOSS DIFF: tensor(5.8496, grad_fn=<NllLossBackward0>) tensor(5.7968, grad_fn=<NllLossBackward0>)\n",
"62 LOSS DIFF: tensor(5.7651, grad_fn=<NllLossBackward0>) tensor(5.6680, grad_fn=<NllLossBackward0>)\n",
"63 LOSS DIFF: tensor(5.8133, grad_fn=<NllLossBackward0>) tensor(5.7651, grad_fn=<NllLossBackward0>)\n",
"64 LOSS DIFF: tensor(5.8699, grad_fn=<NllLossBackward0>) tensor(5.4926, grad_fn=<NllLossBackward0>)\n",
"65 LOSS DIFF: tensor(5.7983, grad_fn=<NllLossBackward0>) tensor(5.7203, grad_fn=<NllLossBackward0>)\n",
"66 LOSS DIFF: tensor(5.8621, grad_fn=<NllLossBackward0>) tensor(5.4968, grad_fn=<NllLossBackward0>)\n",
"67 LOSS DIFF: tensor(5.8183, grad_fn=<NllLossBackward0>) tensor(5.6879, grad_fn=<NllLossBackward0>)\n",
"68 LOSS DIFF: tensor(5.7855, grad_fn=<NllLossBackward0>) tensor(5.7245, grad_fn=<NllLossBackward0>)\n",
"69 LOSS DIFF: tensor(5.7728, grad_fn=<NllLossBackward0>) tensor(5.6484, grad_fn=<NllLossBackward0>)\n",
"70 LOSS DIFF: tensor(5.7415, grad_fn=<NllLossBackward0>) tensor(5.5859, grad_fn=<NllLossBackward0>)\n",
"71 LOSS DIFF: tensor(5.7307, grad_fn=<NllLossBackward0>) tensor(5.6239, grad_fn=<NllLossBackward0>)\n",
"72 LOSS DIFF: tensor(5.7754, grad_fn=<NllLossBackward0>) tensor(5.6253, grad_fn=<NllLossBackward0>)\n",
"73 LOSS DIFF: tensor(5.8733, grad_fn=<NllLossBackward0>) tensor(5.5662, grad_fn=<NllLossBackward0>)\n",
"74 LOSS DIFF: tensor(5.7932, grad_fn=<NllLossBackward0>) tensor(5.7448, grad_fn=<NllLossBackward0>)\n",
"75 LOSS DIFF: tensor(5.7643, grad_fn=<NllLossBackward0>) tensor(5.6964, grad_fn=<NllLossBackward0>)\n",
"76 LOSS DIFF: tensor(5.6395, grad_fn=<NllLossBackward0>) tensor(5.6045, grad_fn=<NllLossBackward0>)\n",
"77 LOSS DIFF: tensor(5.7189, grad_fn=<NllLossBackward0>) tensor(5.6395, grad_fn=<NllLossBackward0>)\n",
"78 LOSS DIFF: tensor(5.7524, grad_fn=<NllLossBackward0>) tensor(5.5841, grad_fn=<NllLossBackward0>)\n",
"79 LOSS DIFF: tensor(5.7829, grad_fn=<NllLossBackward0>) tensor(5.5593, grad_fn=<NllLossBackward0>)\n",
"80 LOSS DIFF: tensor(5.8024, grad_fn=<NllLossBackward0>) tensor(5.7829, grad_fn=<NllLossBackward0>)\n",
"81 LOSS DIFF: tensor(5.8275, grad_fn=<NllLossBackward0>) tensor(5.7907, grad_fn=<NllLossBackward0>)\n",
"82 LOSS DIFF: tensor(5.6191, grad_fn=<NllLossBackward0>) tensor(5.5317, grad_fn=<NllLossBackward0>)\n",
"83 LOSS DIFF: tensor(5.7328, grad_fn=<NllLossBackward0>) tensor(5.6191, grad_fn=<NllLossBackward0>)\n",
"84 LOSS DIFF: tensor(5.7513, grad_fn=<NllLossBackward0>) tensor(5.6999, grad_fn=<NllLossBackward0>)\n",
"85 LOSS DIFF: tensor(5.7847, grad_fn=<NllLossBackward0>) tensor(5.7513, grad_fn=<NllLossBackward0>)\n",
"86 LOSS DIFF: tensor(5.7548, grad_fn=<NllLossBackward0>) tensor(5.6437, grad_fn=<NllLossBackward0>)\n",
"87 LOSS DIFF: tensor(5.7529, grad_fn=<NllLossBackward0>) tensor(5.7198, grad_fn=<NllLossBackward0>)\n",
"88 LOSS DIFF: tensor(5.7664, grad_fn=<NllLossBackward0>) tensor(5.5831, grad_fn=<NllLossBackward0>)\n",
"89 LOSS DIFF: tensor(5.7668, grad_fn=<NllLossBackward0>) tensor(5.6415, grad_fn=<NllLossBackward0>)\n",
"90 LOSS DIFF: tensor(5.7174, grad_fn=<NllLossBackward0>) tensor(5.6232, grad_fn=<NllLossBackward0>)\n",
"91 LOSS DIFF: tensor(5.7451, grad_fn=<NllLossBackward0>) tensor(5.6730, grad_fn=<NllLossBackward0>)\n",
"92 LOSS DIFF: tensor(5.7578, grad_fn=<NllLossBackward0>) tensor(5.7451, grad_fn=<NllLossBackward0>)\n",
"93 LOSS DIFF: tensor(5.6858, grad_fn=<NllLossBackward0>) tensor(5.4322, grad_fn=<NllLossBackward0>)\n",
"94 LOSS DIFF: tensor(5.7738, grad_fn=<NllLossBackward0>) tensor(5.6858, grad_fn=<NllLossBackward0>)\n",
"200 tensor(5.7337, grad_fn=<NllLossBackward0>)\n",
"95 LOSS DIFF: tensor(5.7337, grad_fn=<NllLossBackward0>) tensor(5.6356, grad_fn=<NllLossBackward0>)\n",
"96 LOSS DIFF: tensor(5.6635, grad_fn=<NllLossBackward0>) tensor(5.5954, grad_fn=<NllLossBackward0>)\n",
"97 LOSS DIFF: tensor(5.6635, grad_fn=<NllLossBackward0>) tensor(5.6516, grad_fn=<NllLossBackward0>)\n",
"98 LOSS DIFF: tensor(5.8410, grad_fn=<NllLossBackward0>) tensor(5.6141, grad_fn=<NllLossBackward0>)\n",
"99 LOSS DIFF: tensor(5.7671, grad_fn=<NllLossBackward0>) tensor(5.6264, grad_fn=<NllLossBackward0>)\n",
"100 LOSS DIFF: tensor(5.6642, grad_fn=<NllLossBackward0>) tensor(5.6263, grad_fn=<NllLossBackward0>)\n",
"101 LOSS DIFF: tensor(5.7031, grad_fn=<NllLossBackward0>) tensor(5.6022, grad_fn=<NllLossBackward0>)\n",
"102 LOSS DIFF: tensor(5.7371, grad_fn=<NllLossBackward0>) tensor(5.7031, grad_fn=<NllLossBackward0>)\n",
"103 LOSS DIFF: tensor(5.6638, grad_fn=<NllLossBackward0>) tensor(5.6220, grad_fn=<NllLossBackward0>)\n",
"104 LOSS DIFF: tensor(5.6687, grad_fn=<NllLossBackward0>) tensor(5.6638, grad_fn=<NllLossBackward0>)\n",
"105 LOSS DIFF: tensor(5.7376, grad_fn=<NllLossBackward0>) tensor(5.6687, grad_fn=<NllLossBackward0>)\n",
"106 LOSS DIFF: tensor(5.7511, grad_fn=<NllLossBackward0>) tensor(5.7249, grad_fn=<NllLossBackward0>)\n",
"107 LOSS DIFF: tensor(5.6811, grad_fn=<NllLossBackward0>) tensor(5.6714, grad_fn=<NllLossBackward0>)\n",
"108 LOSS DIFF: tensor(5.7101, grad_fn=<NllLossBackward0>) tensor(5.5892, grad_fn=<NllLossBackward0>)\n",
"109 LOSS DIFF: tensor(5.6188, grad_fn=<NllLossBackward0>) tensor(5.5320, grad_fn=<NllLossBackward0>)\n",
"110 LOSS DIFF: tensor(5.6656, grad_fn=<NllLossBackward0>) tensor(5.6188, grad_fn=<NllLossBackward0>)\n",
"111 LOSS DIFF: tensor(5.6711, grad_fn=<NllLossBackward0>) tensor(5.5220, grad_fn=<NllLossBackward0>)\n",
"112 LOSS DIFF: tensor(5.7719, grad_fn=<NllLossBackward0>) tensor(5.6711, grad_fn=<NllLossBackward0>)\n",
"113 LOSS DIFF: tensor(5.7275, grad_fn=<NllLossBackward0>) tensor(5.6023, grad_fn=<NllLossBackward0>)\n",
"114 LOSS DIFF: tensor(5.7216, grad_fn=<NllLossBackward0>) tensor(5.6046, grad_fn=<NllLossBackward0>)\n",
"115 LOSS DIFF: tensor(5.6189, grad_fn=<NllLossBackward0>) tensor(5.5715, grad_fn=<NllLossBackward0>)\n",
"116 LOSS DIFF: tensor(5.6879, grad_fn=<NllLossBackward0>) tensor(5.6189, grad_fn=<NllLossBackward0>)\n",
"117 LOSS DIFF: tensor(5.7076, grad_fn=<NllLossBackward0>) tensor(5.6879, grad_fn=<NllLossBackward0>)\n",
"118 LOSS DIFF: tensor(5.6123, grad_fn=<NllLossBackward0>) tensor(5.5496, grad_fn=<NllLossBackward0>)\n",
"119 LOSS DIFF: tensor(5.6219, grad_fn=<NllLossBackward0>) tensor(5.6123, grad_fn=<NllLossBackward0>)\n",
"120 LOSS DIFF: tensor(5.6567, grad_fn=<NllLossBackward0>) tensor(5.4889, grad_fn=<NllLossBackward0>)\n",
"121 LOSS DIFF: tensor(5.7262, grad_fn=<NllLossBackward0>) tensor(5.6334, grad_fn=<NllLossBackward0>)\n",
"122 LOSS DIFF: tensor(5.7325, grad_fn=<NllLossBackward0>) tensor(5.6450, grad_fn=<NllLossBackward0>)\n",
"123 LOSS DIFF: tensor(5.7161, grad_fn=<NllLossBackward0>) tensor(5.5794, grad_fn=<NllLossBackward0>)\n",
"124 LOSS DIFF: tensor(5.5623, grad_fn=<NllLossBackward0>) tensor(5.5361, grad_fn=<NllLossBackward0>)\n",
"125 LOSS DIFF: tensor(5.5797, grad_fn=<NllLossBackward0>) tensor(5.5623, grad_fn=<NllLossBackward0>)\n",
"126 LOSS DIFF: tensor(5.6225, grad_fn=<NllLossBackward0>) tensor(5.5797, grad_fn=<NllLossBackward0>)\n",
"127 LOSS DIFF: tensor(5.5912, grad_fn=<NllLossBackward0>) tensor(5.5347, grad_fn=<NllLossBackward0>)\n",
"128 LOSS DIFF: tensor(5.6655, grad_fn=<NllLossBackward0>) tensor(5.5912, grad_fn=<NllLossBackward0>)\n",
"129 LOSS DIFF: tensor(5.6695, grad_fn=<NllLossBackward0>) tensor(5.6655, grad_fn=<NllLossBackward0>)\n",
"130 LOSS DIFF: tensor(5.7027, grad_fn=<NllLossBackward0>) tensor(5.6695, grad_fn=<NllLossBackward0>)\n",
"131 LOSS DIFF: tensor(5.6836, grad_fn=<NllLossBackward0>) tensor(5.5821, grad_fn=<NllLossBackward0>)\n",
"132 LOSS DIFF: tensor(5.5875, grad_fn=<NllLossBackward0>) tensor(5.5289, grad_fn=<NllLossBackward0>)\n",
"133 LOSS DIFF: tensor(5.6111, grad_fn=<NllLossBackward0>) tensor(5.4911, grad_fn=<NllLossBackward0>)\n",
"134 LOSS DIFF: tensor(5.6462, grad_fn=<NllLossBackward0>) tensor(5.6111, grad_fn=<NllLossBackward0>)\n",
"135 LOSS DIFF: tensor(5.4761, grad_fn=<NllLossBackward0>) tensor(5.3862, grad_fn=<NllLossBackward0>)\n",
"136 LOSS DIFF: tensor(5.5751, grad_fn=<NllLossBackward0>) tensor(5.4761, grad_fn=<NllLossBackward0>)\n",
"137 LOSS DIFF: tensor(5.5107, grad_fn=<NllLossBackward0>) tensor(5.3580, grad_fn=<NllLossBackward0>)\n",
"138 LOSS DIFF: tensor(5.5294, grad_fn=<NllLossBackward0>) tensor(5.5032, grad_fn=<NllLossBackward0>)\n",
"139 LOSS DIFF: tensor(5.8044, grad_fn=<NllLossBackward0>) tensor(5.5294, grad_fn=<NllLossBackward0>)\n",
"140 LOSS DIFF: tensor(5.5610, grad_fn=<NllLossBackward0>) tensor(5.4624, grad_fn=<NllLossBackward0>)\n",
"141 LOSS DIFF: tensor(5.6199, grad_fn=<NllLossBackward0>) tensor(5.5610, grad_fn=<NllLossBackward0>)\n",
"142 LOSS DIFF: tensor(5.6073, grad_fn=<NllLossBackward0>) tensor(5.5645, grad_fn=<NllLossBackward0>)\n",
"143 LOSS DIFF: tensor(5.8155, grad_fn=<NllLossBackward0>) tensor(5.6073, grad_fn=<NllLossBackward0>)\n",
"144 LOSS DIFF: tensor(5.6119, grad_fn=<NllLossBackward0>) tensor(5.5148, grad_fn=<NllLossBackward0>)\n",
"145 LOSS DIFF: tensor(5.6557, grad_fn=<NllLossBackward0>) tensor(5.5193, grad_fn=<NllLossBackward0>)\n",
"300 tensor(5.5923, grad_fn=<NllLossBackward0>)\n",
"146 LOSS DIFF: tensor(5.6352, grad_fn=<NllLossBackward0>) tensor(5.5923, grad_fn=<NllLossBackward0>)\n",
"147 LOSS DIFF: tensor(5.6034, grad_fn=<NllLossBackward0>) tensor(5.4999, grad_fn=<NllLossBackward0>)\n",
"148 LOSS DIFF: tensor(5.6058, grad_fn=<NllLossBackward0>) tensor(5.6034, grad_fn=<NllLossBackward0>)\n",
"149 LOSS DIFF: tensor(5.6262, grad_fn=<NllLossBackward0>) tensor(5.5992, grad_fn=<NllLossBackward0>)\n",
"150 LOSS DIFF: tensor(5.6428, grad_fn=<NllLossBackward0>) tensor(5.5092, grad_fn=<NllLossBackward0>)\n",
"151 LOSS DIFF: tensor(5.6501, grad_fn=<NllLossBackward0>) tensor(5.5660, grad_fn=<NllLossBackward0>)\n",
"152 LOSS DIFF: tensor(5.6203, grad_fn=<NllLossBackward0>) tensor(5.5295, grad_fn=<NllLossBackward0>)\n",
"153 LOSS DIFF: tensor(5.6420, grad_fn=<NllLossBackward0>) tensor(5.6203, grad_fn=<NllLossBackward0>)\n",
"154 LOSS DIFF: tensor(5.7322, grad_fn=<NllLossBackward0>) tensor(5.4864, grad_fn=<NllLossBackward0>)\n",
"155 LOSS DIFF: tensor(5.6117, grad_fn=<NllLossBackward0>) tensor(5.4803, grad_fn=<NllLossBackward0>)\n",
"156 LOSS DIFF: tensor(5.5395, grad_fn=<NllLossBackward0>) tensor(5.4970, grad_fn=<NllLossBackward0>)\n",
"157 LOSS DIFF: tensor(5.6619, grad_fn=<NllLossBackward0>) tensor(5.5060, grad_fn=<NllLossBackward0>)\n",
"158 LOSS DIFF: tensor(5.6368, grad_fn=<NllLossBackward0>) tensor(5.5258, grad_fn=<NllLossBackward0>)\n",
"159 LOSS DIFF: tensor(5.5889, grad_fn=<NllLossBackward0>) tensor(5.5490, grad_fn=<NllLossBackward0>)\n",
"160 LOSS DIFF: tensor(5.6312, grad_fn=<NllLossBackward0>) tensor(5.5038, grad_fn=<NllLossBackward0>)\n",
"161 LOSS DIFF: tensor(5.5349, grad_fn=<NllLossBackward0>) tensor(5.5015, grad_fn=<NllLossBackward0>)\n",
"162 LOSS DIFF: tensor(5.6371, grad_fn=<NllLossBackward0>) tensor(5.5349, grad_fn=<NllLossBackward0>)\n",
"163 LOSS DIFF: tensor(5.6482, grad_fn=<NllLossBackward0>) tensor(5.6371, grad_fn=<NllLossBackward0>)\n",
"164 LOSS DIFF: tensor(5.6638, grad_fn=<NllLossBackward0>) tensor(5.6482, grad_fn=<NllLossBackward0>)\n",
"165 LOSS DIFF: tensor(5.6737, grad_fn=<NllLossBackward0>) tensor(5.4801, grad_fn=<NllLossBackward0>)\n",
"166 LOSS DIFF: tensor(5.4878, grad_fn=<NllLossBackward0>) tensor(5.4866, grad_fn=<NllLossBackward0>)\n",
"167 LOSS DIFF: tensor(5.6624, grad_fn=<NllLossBackward0>) tensor(5.4878, grad_fn=<NllLossBackward0>)\n",
"168 LOSS DIFF: tensor(5.5738, grad_fn=<NllLossBackward0>) tensor(5.5648, grad_fn=<NllLossBackward0>)\n",
"169 LOSS DIFF: tensor(5.5267, grad_fn=<NllLossBackward0>) tensor(5.4309, grad_fn=<NllLossBackward0>)\n",
"170 LOSS DIFF: tensor(5.6041, grad_fn=<NllLossBackward0>) tensor(5.3970, grad_fn=<NllLossBackward0>)\n",
"171 LOSS DIFF: tensor(5.6640, grad_fn=<NllLossBackward0>) tensor(5.4885, grad_fn=<NllLossBackward0>)\n",
"172 LOSS DIFF: tensor(5.6136, grad_fn=<NllLossBackward0>) tensor(5.4977, grad_fn=<NllLossBackward0>)\n",
"173 LOSS DIFF: tensor(5.6567, grad_fn=<NllLossBackward0>) tensor(5.5459, grad_fn=<NllLossBackward0>)\n",
"174 LOSS DIFF: tensor(5.5721, grad_fn=<NllLossBackward0>) tensor(5.4921, grad_fn=<NllLossBackward0>)\n",
"175 LOSS DIFF: tensor(5.5685, grad_fn=<NllLossBackward0>) tensor(5.5363, grad_fn=<NllLossBackward0>)\n",
"176 LOSS DIFF: tensor(5.5438, grad_fn=<NllLossBackward0>) tensor(5.4754, grad_fn=<NllLossBackward0>)\n",
"177 LOSS DIFF: tensor(5.6087, grad_fn=<NllLossBackward0>) tensor(5.5345, grad_fn=<NllLossBackward0>)\n",
"178 LOSS DIFF: tensor(5.5624, grad_fn=<NllLossBackward0>) tensor(5.3589, grad_fn=<NllLossBackward0>)\n",
"179 LOSS DIFF: tensor(5.6284, grad_fn=<NllLossBackward0>) tensor(5.4887, grad_fn=<NllLossBackward0>)\n",
"180 LOSS DIFF: tensor(5.4859, grad_fn=<NllLossBackward0>) tensor(5.4453, grad_fn=<NllLossBackward0>)\n",
"181 LOSS DIFF: tensor(5.4949, grad_fn=<NllLossBackward0>) tensor(5.4859, grad_fn=<NllLossBackward0>)\n",
"182 LOSS DIFF: tensor(5.5938, grad_fn=<NllLossBackward0>) tensor(5.4949, grad_fn=<NllLossBackward0>)\n",
"183 LOSS DIFF: tensor(5.5222, grad_fn=<NllLossBackward0>) tensor(5.4890, grad_fn=<NllLossBackward0>)\n",
"184 LOSS DIFF: tensor(5.6673, grad_fn=<NllLossBackward0>) tensor(5.5222, grad_fn=<NllLossBackward0>)\n",
"185 LOSS DIFF: tensor(5.6337, grad_fn=<NllLossBackward0>) tensor(5.5833, grad_fn=<NllLossBackward0>)\n",
"186 LOSS DIFF: tensor(5.7171, grad_fn=<NllLossBackward0>) tensor(5.6337, grad_fn=<NllLossBackward0>)\n",
"187 LOSS DIFF: tensor(5.5721, grad_fn=<NllLossBackward0>) tensor(5.4927, grad_fn=<NllLossBackward0>)\n",
"188 LOSS DIFF: tensor(5.5771, grad_fn=<NllLossBackward0>) tensor(5.5721, grad_fn=<NllLossBackward0>)\n",
"189 LOSS DIFF: tensor(5.6379, grad_fn=<NllLossBackward0>) tensor(5.5771, grad_fn=<NllLossBackward0>)\n",
"190 LOSS DIFF: tensor(5.6032, grad_fn=<NllLossBackward0>) tensor(5.4434, grad_fn=<NllLossBackward0>)\n",
"191 LOSS DIFF: tensor(5.5389, grad_fn=<NllLossBackward0>) tensor(5.3454, grad_fn=<NllLossBackward0>)\n",
"192 LOSS DIFF: tensor(5.6966, grad_fn=<NllLossBackward0>) tensor(5.4275, grad_fn=<NllLossBackward0>)\n",
"193 LOSS DIFF: tensor(5.3675, grad_fn=<NllLossBackward0>) tensor(5.3163, grad_fn=<NllLossBackward0>)\n",
"194 LOSS DIFF: tensor(5.4924, grad_fn=<NllLossBackward0>) tensor(5.3675, grad_fn=<NllLossBackward0>)\n",
"195 LOSS DIFF: tensor(5.5475, grad_fn=<NllLossBackward0>) tensor(5.4881, grad_fn=<NllLossBackward0>)\n",
"196 LOSS DIFF: tensor(5.6223, grad_fn=<NllLossBackward0>) tensor(5.3634, grad_fn=<NllLossBackward0>)\n",
"400 tensor(5.5316, grad_fn=<NllLossBackward0>)\n",
"197 LOSS DIFF: tensor(5.5377, grad_fn=<NllLossBackward0>) tensor(5.4920, grad_fn=<NllLossBackward0>)\n",
"198 LOSS DIFF: tensor(5.6185, grad_fn=<NllLossBackward0>) tensor(5.4576, grad_fn=<NllLossBackward0>)\n",
"199 LOSS DIFF: tensor(5.4915, grad_fn=<NllLossBackward0>) tensor(5.4151, grad_fn=<NllLossBackward0>)\n",
"200 LOSS DIFF: tensor(5.5837, grad_fn=<NllLossBackward0>) tensor(5.4915, grad_fn=<NllLossBackward0>)\n",
"201 LOSS DIFF: tensor(5.5875, grad_fn=<NllLossBackward0>) tensor(5.5837, grad_fn=<NllLossBackward0>)\n",
"202 LOSS DIFF: tensor(5.5331, grad_fn=<NllLossBackward0>) tensor(5.4873, grad_fn=<NllLossBackward0>)\n",
"203 LOSS DIFF: tensor(5.5345, grad_fn=<NllLossBackward0>) tensor(5.3964, grad_fn=<NllLossBackward0>)\n",
"204 LOSS DIFF: tensor(5.5764, grad_fn=<NllLossBackward0>) tensor(5.5345, grad_fn=<NllLossBackward0>)\n",
"205 LOSS DIFF: tensor(5.6070, grad_fn=<NllLossBackward0>) tensor(5.5764, grad_fn=<NllLossBackward0>)\n",
"206 LOSS DIFF: tensor(5.5005, grad_fn=<NllLossBackward0>) tensor(5.3572, grad_fn=<NllLossBackward0>)\n",
"207 LOSS DIFF: tensor(5.5520, grad_fn=<NllLossBackward0>) tensor(5.3860, grad_fn=<NllLossBackward0>)\n",
"208 LOSS DIFF: tensor(5.5800, grad_fn=<NllLossBackward0>) tensor(5.5520, grad_fn=<NllLossBackward0>)\n",
"209 LOSS DIFF: tensor(5.6465, grad_fn=<NllLossBackward0>) tensor(5.5469, grad_fn=<NllLossBackward0>)\n",
"210 LOSS DIFF: tensor(5.5691, grad_fn=<NllLossBackward0>) tensor(5.5241, grad_fn=<NllLossBackward0>)\n",
"211 LOSS DIFF: tensor(5.7237, grad_fn=<NllLossBackward0>) tensor(5.4803, grad_fn=<NllLossBackward0>)\n",
"212 LOSS DIFF: tensor(5.5532, grad_fn=<NllLossBackward0>) tensor(5.5012, grad_fn=<NllLossBackward0>)\n",
"213 LOSS DIFF: tensor(5.5011, grad_fn=<NllLossBackward0>) tensor(5.4712, grad_fn=<NllLossBackward0>)\n",
"214 LOSS DIFF: tensor(5.5370, grad_fn=<NllLossBackward0>) tensor(5.5011, grad_fn=<NllLossBackward0>)\n",
"215 LOSS DIFF: tensor(5.5579, grad_fn=<NllLossBackward0>) tensor(5.4126, grad_fn=<NllLossBackward0>)\n",
"216 LOSS DIFF: tensor(5.5109, grad_fn=<NllLossBackward0>) tensor(5.3875, grad_fn=<NllLossBackward0>)\n",
"217 LOSS DIFF: tensor(5.5403, grad_fn=<NllLossBackward0>) tensor(5.4174, grad_fn=<NllLossBackward0>)\n",
"218 LOSS DIFF: tensor(5.5404, grad_fn=<NllLossBackward0>) tensor(5.5403, grad_fn=<NllLossBackward0>)\n",
"219 LOSS DIFF: tensor(5.5593, grad_fn=<NllLossBackward0>) tensor(5.5404, grad_fn=<NllLossBackward0>)\n",
"220 LOSS DIFF: tensor(5.5262, grad_fn=<NllLossBackward0>) tensor(5.5250, grad_fn=<NllLossBackward0>)\n",
"221 LOSS DIFF: tensor(5.4107, grad_fn=<NllLossBackward0>) tensor(5.4092, grad_fn=<NllLossBackward0>)\n",
"222 LOSS DIFF: tensor(5.4920, grad_fn=<NllLossBackward0>) tensor(5.3499, grad_fn=<NllLossBackward0>)\n",
"223 LOSS DIFF: tensor(5.5064, grad_fn=<NllLossBackward0>) tensor(5.4920, grad_fn=<NllLossBackward0>)\n",
"224 LOSS DIFF: tensor(5.5648, grad_fn=<NllLossBackward0>) tensor(5.5064, grad_fn=<NllLossBackward0>)\n",
"225 LOSS DIFF: tensor(5.5107, grad_fn=<NllLossBackward0>) tensor(5.3439, grad_fn=<NllLossBackward0>)\n",
"226 LOSS DIFF: tensor(5.4968, grad_fn=<NllLossBackward0>) tensor(5.4720, grad_fn=<NllLossBackward0>)\n",
"227 LOSS DIFF: tensor(5.5473, grad_fn=<NllLossBackward0>) tensor(5.4854, grad_fn=<NllLossBackward0>)\n",
"228 LOSS DIFF: tensor(5.4800, grad_fn=<NllLossBackward0>) tensor(5.3762, grad_fn=<NllLossBackward0>)\n",
"229 LOSS DIFF: tensor(5.6251, grad_fn=<NllLossBackward0>) tensor(5.4800, grad_fn=<NllLossBackward0>)\n",
"230 LOSS DIFF: tensor(5.6237, grad_fn=<NllLossBackward0>) tensor(5.4478, grad_fn=<NllLossBackward0>)\n",
"231 LOSS DIFF: tensor(5.5439, grad_fn=<NllLossBackward0>) tensor(5.4108, grad_fn=<NllLossBackward0>)\n",
"232 LOSS DIFF: tensor(5.3186, grad_fn=<NllLossBackward0>) tensor(5.3012, grad_fn=<NllLossBackward0>)\n",
"233 LOSS DIFF: tensor(5.5069, grad_fn=<NllLossBackward0>) tensor(5.3186, grad_fn=<NllLossBackward0>)\n",
"234 LOSS DIFF: tensor(5.5190, grad_fn=<NllLossBackward0>) tensor(5.5043, grad_fn=<NllLossBackward0>)\n",
"235 LOSS DIFF: tensor(5.4706, grad_fn=<NllLossBackward0>) tensor(5.4560, grad_fn=<NllLossBackward0>)\n",
"236 LOSS DIFF: tensor(5.5252, grad_fn=<NllLossBackward0>) tensor(5.4706, grad_fn=<NllLossBackward0>)\n",
"237 LOSS DIFF: tensor(5.4765, grad_fn=<NllLossBackward0>) tensor(5.4103, grad_fn=<NllLossBackward0>)\n",
"238 LOSS DIFF: tensor(5.5218, grad_fn=<NllLossBackward0>) tensor(5.4765, grad_fn=<NllLossBackward0>)\n",
"239 LOSS DIFF: tensor(5.6028, grad_fn=<NllLossBackward0>) tensor(5.4596, grad_fn=<NllLossBackward0>)\n",
"240 LOSS DIFF: tensor(5.5504, grad_fn=<NllLossBackward0>) tensor(5.5021, grad_fn=<NllLossBackward0>)\n",
"241 LOSS DIFF: tensor(5.4777, grad_fn=<NllLossBackward0>) tensor(5.4091, grad_fn=<NllLossBackward0>)\n",
"242 LOSS DIFF: tensor(5.4404, grad_fn=<NllLossBackward0>) tensor(5.3918, grad_fn=<NllLossBackward0>)\n",
"243 LOSS DIFF: tensor(5.5580, grad_fn=<NllLossBackward0>) tensor(5.4404, grad_fn=<NllLossBackward0>)\n",
"244 LOSS DIFF: tensor(5.4812, grad_fn=<NllLossBackward0>) tensor(5.4398, grad_fn=<NllLossBackward0>)\n",
"500 tensor(5.5214, grad_fn=<NllLossBackward0>)\n",
"245 LOSS DIFF: tensor(5.5214, grad_fn=<NllLossBackward0>) tensor(5.4142, grad_fn=<NllLossBackward0>)\n",
"246 LOSS DIFF: tensor(5.6153, grad_fn=<NllLossBackward0>) tensor(5.5214, grad_fn=<NllLossBackward0>)\n",
"247 LOSS DIFF: tensor(5.4794, grad_fn=<NllLossBackward0>) tensor(5.4672, grad_fn=<NllLossBackward0>)\n",
"248 LOSS DIFF: tensor(5.5978, grad_fn=<NllLossBackward0>) tensor(5.4794, grad_fn=<NllLossBackward0>)\n",
"249 LOSS DIFF: tensor(5.4549, grad_fn=<NllLossBackward0>) tensor(5.3421, grad_fn=<NllLossBackward0>)\n",
"250 LOSS DIFF: tensor(5.4747, grad_fn=<NllLossBackward0>) tensor(5.4549, grad_fn=<NllLossBackward0>)\n",
"251 LOSS DIFF: tensor(5.5439, grad_fn=<NllLossBackward0>) tensor(5.3348, grad_fn=<NllLossBackward0>)\n",
"252 LOSS DIFF: tensor(5.5953, grad_fn=<NllLossBackward0>) tensor(5.5439, grad_fn=<NllLossBackward0>)\n",
"253 LOSS DIFF: tensor(5.5308, grad_fn=<NllLossBackward0>) tensor(5.4385, grad_fn=<NllLossBackward0>)\n",
"254 LOSS DIFF: tensor(5.5379, grad_fn=<NllLossBackward0>) tensor(5.4373, grad_fn=<NllLossBackward0>)\n",
"255 LOSS DIFF: tensor(5.5022, grad_fn=<NllLossBackward0>) tensor(5.4306, grad_fn=<NllLossBackward0>)\n",
"256 LOSS DIFF: tensor(5.5225, grad_fn=<NllLossBackward0>) tensor(5.4898, grad_fn=<NllLossBackward0>)\n",
"257 LOSS DIFF: tensor(5.6141, grad_fn=<NllLossBackward0>) tensor(5.5225, grad_fn=<NllLossBackward0>)\n",
"258 LOSS DIFF: tensor(5.4873, grad_fn=<NllLossBackward0>) tensor(5.4444, grad_fn=<NllLossBackward0>)\n",
"259 LOSS DIFF: tensor(5.6677, grad_fn=<NllLossBackward0>) tensor(5.4873, grad_fn=<NllLossBackward0>)\n",
"260 LOSS DIFF: tensor(5.5404, grad_fn=<NllLossBackward0>) tensor(5.4581, grad_fn=<NllLossBackward0>)\n",
"261 LOSS DIFF: tensor(5.5603, grad_fn=<NllLossBackward0>) tensor(5.3583, grad_fn=<NllLossBackward0>)\n",
"262 LOSS DIFF: tensor(5.5292, grad_fn=<NllLossBackward0>) tensor(5.2255, grad_fn=<NllLossBackward0>)\n",
"263 LOSS DIFF: tensor(5.4456, grad_fn=<NllLossBackward0>) tensor(5.3846, grad_fn=<NllLossBackward0>)\n",
"264 LOSS DIFF: tensor(5.4504, grad_fn=<NllLossBackward0>) tensor(5.4456, grad_fn=<NllLossBackward0>)\n",
"265 LOSS DIFF: tensor(5.4899, grad_fn=<NllLossBackward0>) tensor(5.3406, grad_fn=<NllLossBackward0>)\n",
"266 LOSS DIFF: tensor(5.5023, grad_fn=<NllLossBackward0>) tensor(5.4899, grad_fn=<NllLossBackward0>)\n",
"267 LOSS DIFF: tensor(5.3884, grad_fn=<NllLossBackward0>) tensor(5.2800, grad_fn=<NllLossBackward0>)\n",
"268 LOSS DIFF: tensor(5.4713, grad_fn=<NllLossBackward0>) tensor(5.3884, grad_fn=<NllLossBackward0>)\n",
"269 LOSS DIFF: tensor(5.4810, grad_fn=<NllLossBackward0>) tensor(5.4713, grad_fn=<NllLossBackward0>)\n",
"270 LOSS DIFF: tensor(5.3896, grad_fn=<NllLossBackward0>) tensor(5.3593, grad_fn=<NllLossBackward0>)\n",
"271 LOSS DIFF: tensor(5.5195, grad_fn=<NllLossBackward0>) tensor(5.3896, grad_fn=<NllLossBackward0>)\n",
"272 LOSS DIFF: tensor(5.4173, grad_fn=<NllLossBackward0>) tensor(5.3982, grad_fn=<NllLossBackward0>)\n",
"273 LOSS DIFF: tensor(5.5428, grad_fn=<NllLossBackward0>) tensor(5.3779, grad_fn=<NllLossBackward0>)\n",
"274 LOSS DIFF: tensor(5.4749, grad_fn=<NllLossBackward0>) tensor(5.4675, grad_fn=<NllLossBackward0>)\n",
"275 LOSS DIFF: tensor(5.3978, grad_fn=<NllLossBackward0>) tensor(5.2620, grad_fn=<NllLossBackward0>)\n",
"276 LOSS DIFF: tensor(5.4689, grad_fn=<NllLossBackward0>) tensor(5.3978, grad_fn=<NllLossBackward0>)\n",
"277 LOSS DIFF: tensor(5.4733, grad_fn=<NllLossBackward0>) tensor(5.4689, grad_fn=<NllLossBackward0>)\n",
"278 LOSS DIFF: tensor(5.5054, grad_fn=<NllLossBackward0>) tensor(5.4733, grad_fn=<NllLossBackward0>)\n",
"279 LOSS DIFF: tensor(5.4809, grad_fn=<NllLossBackward0>) tensor(5.4288, grad_fn=<NllLossBackward0>)\n",
"280 LOSS DIFF: tensor(5.5698, grad_fn=<NllLossBackward0>) tensor(5.4809, grad_fn=<NllLossBackward0>)\n",
"281 LOSS DIFF: tensor(5.5550, grad_fn=<NllLossBackward0>) tensor(5.4103, grad_fn=<NllLossBackward0>)\n",
"282 LOSS DIFF: tensor(5.5803, grad_fn=<NllLossBackward0>) tensor(5.5550, grad_fn=<NllLossBackward0>)\n",
"283 LOSS DIFF: tensor(5.5616, grad_fn=<NllLossBackward0>) tensor(5.4858, grad_fn=<NllLossBackward0>)\n",
"284 LOSS DIFF: tensor(5.4863, grad_fn=<NllLossBackward0>) tensor(5.3357, grad_fn=<NllLossBackward0>)\n",
"285 LOSS DIFF: tensor(5.3506, grad_fn=<NllLossBackward0>) tensor(5.2871, grad_fn=<NllLossBackward0>)\n",
"286 LOSS DIFF: tensor(5.6320, grad_fn=<NllLossBackward0>) tensor(5.3506, grad_fn=<NllLossBackward0>)\n",
"287 LOSS DIFF: tensor(5.4488, grad_fn=<NllLossBackward0>) tensor(5.4314, grad_fn=<NllLossBackward0>)\n",
"288 LOSS DIFF: tensor(5.4596, grad_fn=<NllLossBackward0>) tensor(5.4488, grad_fn=<NllLossBackward0>)\n",
"289 LOSS DIFF: tensor(5.5325, grad_fn=<NllLossBackward0>) tensor(5.4596, grad_fn=<NllLossBackward0>)\n",
"290 LOSS DIFF: tensor(5.4566, grad_fn=<NllLossBackward0>) tensor(5.2072, grad_fn=<NllLossBackward0>)\n",
"291 LOSS DIFF: tensor(5.4784, grad_fn=<NllLossBackward0>) tensor(5.4303, grad_fn=<NllLossBackward0>)\n",
"292 LOSS DIFF: tensor(5.4439, grad_fn=<NllLossBackward0>) tensor(5.3270, grad_fn=<NllLossBackward0>)\n",
"293 LOSS DIFF: tensor(5.5160, grad_fn=<NllLossBackward0>) tensor(5.4439, grad_fn=<NllLossBackward0>)\n",
"294 LOSS DIFF: tensor(5.4134, grad_fn=<NllLossBackward0>) tensor(5.3536, grad_fn=<NllLossBackward0>)\n",
"295 LOSS DIFF: tensor(5.4426, grad_fn=<NllLossBackward0>) tensor(5.4134, grad_fn=<NllLossBackward0>)\n",
"296 LOSS DIFF: tensor(5.3758, grad_fn=<NllLossBackward0>) tensor(5.3700, grad_fn=<NllLossBackward0>)\n",
"297 LOSS DIFF: tensor(5.5559, grad_fn=<NllLossBackward0>) tensor(5.3758, grad_fn=<NllLossBackward0>)\n",
"600 tensor(5.4824, grad_fn=<NllLossBackward0>)\n",
"298 LOSS DIFF: tensor(5.3795, grad_fn=<NllLossBackward0>) tensor(5.3762, grad_fn=<NllLossBackward0>)\n",
"299 LOSS DIFF: tensor(5.3878, grad_fn=<NllLossBackward0>) tensor(5.3795, grad_fn=<NllLossBackward0>)\n",
"300 LOSS DIFF: tensor(5.4699, grad_fn=<NllLossBackward0>) tensor(5.3878, grad_fn=<NllLossBackward0>)\n",
"301 LOSS DIFF: tensor(5.4967, grad_fn=<NllLossBackward0>) tensor(5.4699, grad_fn=<NllLossBackward0>)\n",
"302 LOSS DIFF: tensor(5.5724, grad_fn=<NllLossBackward0>) tensor(5.4967, grad_fn=<NllLossBackward0>)\n",
"303 LOSS DIFF: tensor(5.4520, grad_fn=<NllLossBackward0>) tensor(5.4072, grad_fn=<NllLossBackward0>)\n",
"304 LOSS DIFF: tensor(5.5089, grad_fn=<NllLossBackward0>) tensor(5.4520, grad_fn=<NllLossBackward0>)\n",
"305 LOSS DIFF: tensor(5.5398, grad_fn=<NllLossBackward0>) tensor(5.3168, grad_fn=<NllLossBackward0>)\n",
"306 LOSS DIFF: tensor(5.3561, grad_fn=<NllLossBackward0>) tensor(5.3058, grad_fn=<NllLossBackward0>)\n",
"307 LOSS DIFF: tensor(5.4668, grad_fn=<NllLossBackward0>) tensor(5.3448, grad_fn=<NllLossBackward0>)\n",
"308 LOSS DIFF: tensor(5.4964, grad_fn=<NllLossBackward0>) tensor(5.4668, grad_fn=<NllLossBackward0>)\n",
"309 LOSS DIFF: tensor(5.4440, grad_fn=<NllLossBackward0>) tensor(5.3221, grad_fn=<NllLossBackward0>)\n",
"310 LOSS DIFF: tensor(5.4516, grad_fn=<NllLossBackward0>) tensor(5.4289, grad_fn=<NllLossBackward0>)\n",
"311 LOSS DIFF: tensor(5.4969, grad_fn=<NllLossBackward0>) tensor(5.3983, grad_fn=<NllLossBackward0>)\n",
"312 LOSS DIFF: tensor(5.4254, grad_fn=<NllLossBackward0>) tensor(5.3790, grad_fn=<NllLossBackward0>)\n",
"313 LOSS DIFF: tensor(5.4874, grad_fn=<NllLossBackward0>) tensor(5.4254, grad_fn=<NllLossBackward0>)\n",
"314 LOSS DIFF: tensor(5.3839, grad_fn=<NllLossBackward0>) tensor(5.3470, grad_fn=<NllLossBackward0>)\n",
"315 LOSS DIFF: tensor(5.5822, grad_fn=<NllLossBackward0>) tensor(5.3839, grad_fn=<NllLossBackward0>)\n",
"316 LOSS DIFF: tensor(5.4169, grad_fn=<NllLossBackward0>) tensor(5.3044, grad_fn=<NllLossBackward0>)\n",
"317 LOSS DIFF: tensor(5.4778, grad_fn=<NllLossBackward0>) tensor(5.4169, grad_fn=<NllLossBackward0>)\n",
"318 LOSS DIFF: tensor(5.3589, grad_fn=<NllLossBackward0>) tensor(5.2238, grad_fn=<NllLossBackward0>)\n",
"319 LOSS DIFF: tensor(5.3547, grad_fn=<NllLossBackward0>) tensor(5.3184, grad_fn=<NllLossBackward0>)\n",
"320 LOSS DIFF: tensor(5.5022, grad_fn=<NllLossBackward0>) tensor(5.3547, grad_fn=<NllLossBackward0>)\n",
"321 LOSS DIFF: tensor(5.4749, grad_fn=<NllLossBackward0>) tensor(5.4294, grad_fn=<NllLossBackward0>)\n",
"322 LOSS DIFF: tensor(5.3813, grad_fn=<NllLossBackward0>) tensor(5.3557, grad_fn=<NllLossBackward0>)\n",
"323 LOSS DIFF: tensor(5.4019, grad_fn=<NllLossBackward0>) tensor(5.3813, grad_fn=<NllLossBackward0>)\n",
"324 LOSS DIFF: tensor(5.7250, grad_fn=<NllLossBackward0>) tensor(5.4019, grad_fn=<NllLossBackward0>)\n",
"325 LOSS DIFF: tensor(5.4055, grad_fn=<NllLossBackward0>) tensor(5.3304, grad_fn=<NllLossBackward0>)\n",
"326 LOSS DIFF: tensor(5.4721, grad_fn=<NllLossBackward0>) tensor(5.4055, grad_fn=<NllLossBackward0>)\n",
"327 LOSS DIFF: tensor(5.4590, grad_fn=<NllLossBackward0>) tensor(5.3773, grad_fn=<NllLossBackward0>)\n",
"328 LOSS DIFF: tensor(5.6097, grad_fn=<NllLossBackward0>) tensor(5.4590, grad_fn=<NllLossBackward0>)\n",
"329 LOSS DIFF: tensor(5.5304, grad_fn=<NllLossBackward0>) tensor(5.2807, grad_fn=<NllLossBackward0>)\n",
"330 LOSS DIFF: tensor(5.4286, grad_fn=<NllLossBackward0>) tensor(5.3879, grad_fn=<NllLossBackward0>)\n",
"331 LOSS DIFF: tensor(5.4221, grad_fn=<NllLossBackward0>) tensor(5.2779, grad_fn=<NllLossBackward0>)\n",
"332 LOSS DIFF: tensor(5.3690, grad_fn=<NllLossBackward0>) tensor(5.3191, grad_fn=<NllLossBackward0>)\n",
"333 LOSS DIFF: tensor(5.3814, grad_fn=<NllLossBackward0>) tensor(5.3690, grad_fn=<NllLossBackward0>)\n",
"334 LOSS DIFF: tensor(5.4241, grad_fn=<NllLossBackward0>) tensor(5.3760, grad_fn=<NllLossBackward0>)\n",
"335 LOSS DIFF: tensor(5.4727, grad_fn=<NllLossBackward0>) tensor(5.4241, grad_fn=<NllLossBackward0>)\n",
"336 LOSS DIFF: tensor(5.4216, grad_fn=<NllLossBackward0>) tensor(5.3401, grad_fn=<NllLossBackward0>)\n",
"337 LOSS DIFF: tensor(5.4938, grad_fn=<NllLossBackward0>) tensor(5.3908, grad_fn=<NllLossBackward0>)\n",
"338 LOSS DIFF: tensor(5.4742, grad_fn=<NllLossBackward0>) tensor(5.3384, grad_fn=<NllLossBackward0>)\n",
"339 LOSS DIFF: tensor(5.4628, grad_fn=<NllLossBackward0>) tensor(5.2785, grad_fn=<NllLossBackward0>)\n",
"340 LOSS DIFF: tensor(5.5419, grad_fn=<NllLossBackward0>) tensor(5.3019, grad_fn=<NllLossBackward0>)\n",
"341 LOSS DIFF: tensor(5.4736, grad_fn=<NllLossBackward0>) tensor(5.3646, grad_fn=<NllLossBackward0>)\n",
"342 LOSS DIFF: tensor(5.4150, grad_fn=<NllLossBackward0>) tensor(5.3511, grad_fn=<NllLossBackward0>)\n",
"343 LOSS DIFF: tensor(5.4531, grad_fn=<NllLossBackward0>) tensor(5.2982, grad_fn=<NllLossBackward0>)\n",
"344 LOSS DIFF: tensor(5.4617, grad_fn=<NllLossBackward0>) tensor(5.4531, grad_fn=<NllLossBackward0>)\n",
"345 LOSS DIFF: tensor(5.4939, grad_fn=<NllLossBackward0>) tensor(5.4617, grad_fn=<NllLossBackward0>)\n",
"346 LOSS DIFF: tensor(5.4178, grad_fn=<NllLossBackward0>) tensor(5.3127, grad_fn=<NllLossBackward0>)\n",
"700 tensor(5.7095, grad_fn=<NllLossBackward0>)\n",
"347 LOSS DIFF: tensor(5.7095, grad_fn=<NllLossBackward0>) tensor(5.3593, grad_fn=<NllLossBackward0>)\n",
"348 LOSS DIFF: tensor(5.4054, grad_fn=<NllLossBackward0>) tensor(5.3883, grad_fn=<NllLossBackward0>)\n",
"349 LOSS DIFF: tensor(5.6016, grad_fn=<NllLossBackward0>) tensor(5.4054, grad_fn=<NllLossBackward0>)\n",
"350 LOSS DIFF: tensor(5.4695, grad_fn=<NllLossBackward0>) tensor(5.4424, grad_fn=<NllLossBackward0>)\n",
"351 LOSS DIFF: tensor(5.5022, grad_fn=<NllLossBackward0>) tensor(5.4695, grad_fn=<NllLossBackward0>)\n",
"352 LOSS DIFF: tensor(5.5172, grad_fn=<NllLossBackward0>) tensor(5.4135, grad_fn=<NllLossBackward0>)\n",
"353 LOSS DIFF: tensor(5.5003, grad_fn=<NllLossBackward0>) tensor(5.3490, grad_fn=<NllLossBackward0>)\n",
"354 LOSS DIFF: tensor(5.3198, grad_fn=<NllLossBackward0>) tensor(5.2805, grad_fn=<NllLossBackward0>)\n",
"355 LOSS DIFF: tensor(5.3726, grad_fn=<NllLossBackward0>) tensor(5.3198, grad_fn=<NllLossBackward0>)\n",
"356 LOSS DIFF: tensor(5.3992, grad_fn=<NllLossBackward0>) tensor(5.3726, grad_fn=<NllLossBackward0>)\n",
"357 LOSS DIFF: tensor(5.5122, grad_fn=<NllLossBackward0>) tensor(5.3992, grad_fn=<NllLossBackward0>)\n",
"358 LOSS DIFF: tensor(5.6000, grad_fn=<NllLossBackward0>) tensor(5.3476, grad_fn=<NllLossBackward0>)\n",
"359 LOSS DIFF: tensor(5.4421, grad_fn=<NllLossBackward0>) tensor(5.3207, grad_fn=<NllLossBackward0>)\n",
"360 LOSS DIFF: tensor(5.6211, grad_fn=<NllLossBackward0>) tensor(5.4421, grad_fn=<NllLossBackward0>)\n",
"361 LOSS DIFF: tensor(5.3617, grad_fn=<NllLossBackward0>) tensor(5.3425, grad_fn=<NllLossBackward0>)\n",
"362 LOSS DIFF: tensor(5.3828, grad_fn=<NllLossBackward0>) tensor(5.3617, grad_fn=<NllLossBackward0>)\n",
"363 LOSS DIFF: tensor(5.4569, grad_fn=<NllLossBackward0>) tensor(5.3828, grad_fn=<NllLossBackward0>)\n",
"364 LOSS DIFF: tensor(5.4314, grad_fn=<NllLossBackward0>) tensor(5.2452, grad_fn=<NllLossBackward0>)\n",
"365 LOSS DIFF: tensor(5.5384, grad_fn=<NllLossBackward0>) tensor(5.4314, grad_fn=<NllLossBackward0>)\n",
"366 LOSS DIFF: tensor(5.4293, grad_fn=<NllLossBackward0>) tensor(5.3797, grad_fn=<NllLossBackward0>)\n",
"367 LOSS DIFF: tensor(5.4823, grad_fn=<NllLossBackward0>) tensor(5.4289, grad_fn=<NllLossBackward0>)\n",
"368 LOSS DIFF: tensor(5.4602, grad_fn=<NllLossBackward0>) tensor(5.3212, grad_fn=<NllLossBackward0>)\n",
"369 LOSS DIFF: tensor(5.4459, grad_fn=<NllLossBackward0>) tensor(5.3457, grad_fn=<NllLossBackward0>)\n",
"370 LOSS DIFF: tensor(5.5089, grad_fn=<NllLossBackward0>) tensor(5.3548, grad_fn=<NllLossBackward0>)\n",
"371 LOSS DIFF: tensor(5.3639, grad_fn=<NllLossBackward0>) tensor(5.2607, grad_fn=<NllLossBackward0>)\n",
"372 LOSS DIFF: tensor(5.4079, grad_fn=<NllLossBackward0>) tensor(5.3639, grad_fn=<NllLossBackward0>)\n",
"373 LOSS DIFF: tensor(5.5557, grad_fn=<NllLossBackward0>) tensor(5.4079, grad_fn=<NllLossBackward0>)\n",
"374 LOSS DIFF: tensor(5.3965, grad_fn=<NllLossBackward0>) tensor(5.3427, grad_fn=<NllLossBackward0>)\n",
"375 LOSS DIFF: tensor(5.4149, grad_fn=<NllLossBackward0>) tensor(5.3965, grad_fn=<NllLossBackward0>)\n",
"376 LOSS DIFF: tensor(5.3285, grad_fn=<NllLossBackward0>) tensor(5.3265, grad_fn=<NllLossBackward0>)\n",
"377 LOSS DIFF: tensor(5.3672, grad_fn=<NllLossBackward0>) tensor(5.3285, grad_fn=<NllLossBackward0>)\n",
"378 LOSS DIFF: tensor(5.4523, grad_fn=<NllLossBackward0>) tensor(5.3471, grad_fn=<NllLossBackward0>)\n",
"379 LOSS DIFF: tensor(5.4315, grad_fn=<NllLossBackward0>) tensor(5.4231, grad_fn=<NllLossBackward0>)\n",
"380 LOSS DIFF: tensor(5.5363, grad_fn=<NllLossBackward0>) tensor(5.4315, grad_fn=<NllLossBackward0>)\n",
"381 LOSS DIFF: tensor(5.4404, grad_fn=<NllLossBackward0>) tensor(5.4114, grad_fn=<NllLossBackward0>)\n",
"382 LOSS DIFF: tensor(5.2667, grad_fn=<NllLossBackward0>) tensor(5.2283, grad_fn=<NllLossBackward0>)\n",
"383 LOSS DIFF: tensor(5.3342, grad_fn=<NllLossBackward0>) tensor(5.2667, grad_fn=<NllLossBackward0>)\n",
"384 LOSS DIFF: tensor(5.4847, grad_fn=<NllLossBackward0>) tensor(5.3342, grad_fn=<NllLossBackward0>)\n",
"385 LOSS DIFF: tensor(5.5349, grad_fn=<NllLossBackward0>) tensor(5.4847, grad_fn=<NllLossBackward0>)\n",
"386 LOSS DIFF: tensor(5.4216, grad_fn=<NllLossBackward0>) tensor(5.2991, grad_fn=<NllLossBackward0>)\n",
"387 LOSS DIFF: tensor(5.4483, grad_fn=<NllLossBackward0>) tensor(5.3455, grad_fn=<NllLossBackward0>)\n",
"388 LOSS DIFF: tensor(5.4229, grad_fn=<NllLossBackward0>) tensor(5.3271, grad_fn=<NllLossBackward0>)\n",
"389 LOSS DIFF: tensor(5.5482, grad_fn=<NllLossBackward0>) tensor(5.4229, grad_fn=<NllLossBackward0>)\n",
"390 LOSS DIFF: tensor(5.4596, grad_fn=<NllLossBackward0>) tensor(5.3374, grad_fn=<NllLossBackward0>)\n",
"391 LOSS DIFF: tensor(5.4694, grad_fn=<NllLossBackward0>) tensor(5.4596, grad_fn=<NllLossBackward0>)\n",
"392 LOSS DIFF: tensor(5.4744, grad_fn=<NllLossBackward0>) tensor(5.3277, grad_fn=<NllLossBackward0>)\n",
"393 LOSS DIFF: tensor(5.4301, grad_fn=<NllLossBackward0>) tensor(5.3380, grad_fn=<NllLossBackward0>)\n",
"394 LOSS DIFF: tensor(5.2605, grad_fn=<NllLossBackward0>) tensor(5.2482, grad_fn=<NllLossBackward0>)\n",
"395 LOSS DIFF: tensor(5.4596, grad_fn=<NllLossBackward0>) tensor(5.2605, grad_fn=<NllLossBackward0>)\n",
"396 LOSS DIFF: tensor(5.3527, grad_fn=<NllLossBackward0>) tensor(5.2774, grad_fn=<NllLossBackward0>)\n",
"397 LOSS DIFF: tensor(5.5415, grad_fn=<NllLossBackward0>) tensor(5.3283, grad_fn=<NllLossBackward0>)\n",
"398 LOSS DIFF: tensor(5.5558, grad_fn=<NllLossBackward0>) tensor(5.4762, grad_fn=<NllLossBackward0>)\n",
"399 LOSS DIFF: tensor(5.3862, grad_fn=<NllLossBackward0>) tensor(5.3796, grad_fn=<NllLossBackward0>)\n",
"400 LOSS DIFF: tensor(5.5006, grad_fn=<NllLossBackward0>) tensor(5.2756, grad_fn=<NllLossBackward0>)\n",
"401 LOSS DIFF: tensor(5.4776, grad_fn=<NllLossBackward0>) tensor(5.2884, grad_fn=<NllLossBackward0>)\n",
"800 tensor(5.4405, grad_fn=<NllLossBackward0>)\n",
"402 LOSS DIFF: tensor(5.5078, grad_fn=<NllLossBackward0>) tensor(5.2731, grad_fn=<NllLossBackward0>)\n",
"403 LOSS DIFF: tensor(5.4186, grad_fn=<NllLossBackward0>) tensor(5.3394, grad_fn=<NllLossBackward0>)\n",
"404 LOSS DIFF: tensor(5.4645, grad_fn=<NllLossBackward0>) tensor(5.4186, grad_fn=<NllLossBackward0>)\n",
"405 LOSS DIFF: tensor(5.3991, grad_fn=<NllLossBackward0>) tensor(5.1863, grad_fn=<NllLossBackward0>)\n",
"406 LOSS DIFF: tensor(5.4625, grad_fn=<NllLossBackward0>) tensor(5.3991, grad_fn=<NllLossBackward0>)\n",
"407 LOSS DIFF: tensor(5.2887, grad_fn=<NllLossBackward0>) tensor(5.2630, grad_fn=<NllLossBackward0>)\n",
"408 LOSS DIFF: tensor(5.3613, grad_fn=<NllLossBackward0>) tensor(5.2887, grad_fn=<NllLossBackward0>)\n",
"409 LOSS DIFF: tensor(5.4549, grad_fn=<NllLossBackward0>) tensor(5.3613, grad_fn=<NllLossBackward0>)\n",
"410 LOSS DIFF: tensor(5.4254, grad_fn=<NllLossBackward0>) tensor(5.3545, grad_fn=<NllLossBackward0>)\n",
"411 LOSS DIFF: tensor(5.4779, grad_fn=<NllLossBackward0>) tensor(5.4254, grad_fn=<NllLossBackward0>)\n",
"412 LOSS DIFF: tensor(5.4206, grad_fn=<NllLossBackward0>) tensor(5.3494, grad_fn=<NllLossBackward0>)\n",
"413 LOSS DIFF: tensor(5.4468, grad_fn=<NllLossBackward0>) tensor(5.3558, grad_fn=<NllLossBackward0>)\n",
"414 LOSS DIFF: tensor(5.3703, grad_fn=<NllLossBackward0>) tensor(5.3009, grad_fn=<NllLossBackward0>)\n",
"415 LOSS DIFF: tensor(5.4129, grad_fn=<NllLossBackward0>) tensor(5.3703, grad_fn=<NllLossBackward0>)\n",
"416 LOSS DIFF: tensor(5.4347, grad_fn=<NllLossBackward0>) tensor(5.3186, grad_fn=<NllLossBackward0>)\n",
"417 LOSS DIFF: tensor(5.3410, grad_fn=<NllLossBackward0>) tensor(5.2797, grad_fn=<NllLossBackward0>)\n",
"418 LOSS DIFF: tensor(5.4206, grad_fn=<NllLossBackward0>) tensor(5.3410, grad_fn=<NllLossBackward0>)\n",
"419 LOSS DIFF: tensor(5.3961, grad_fn=<NllLossBackward0>) tensor(5.3201, grad_fn=<NllLossBackward0>)\n",
"420 LOSS DIFF: tensor(5.3999, grad_fn=<NllLossBackward0>) tensor(5.3961, grad_fn=<NllLossBackward0>)\n",
"421 LOSS DIFF: tensor(5.4644, grad_fn=<NllLossBackward0>) tensor(5.2622, grad_fn=<NllLossBackward0>)\n",
"422 LOSS DIFF: tensor(5.3218, grad_fn=<NllLossBackward0>) tensor(5.3111, grad_fn=<NllLossBackward0>)\n",
"423 LOSS DIFF: tensor(5.3554, grad_fn=<NllLossBackward0>) tensor(5.3218, grad_fn=<NllLossBackward0>)\n",
"424 LOSS DIFF: tensor(5.4028, grad_fn=<NllLossBackward0>) tensor(5.3554, grad_fn=<NllLossBackward0>)\n",
"425 LOSS DIFF: tensor(5.3832, grad_fn=<NllLossBackward0>) tensor(5.3375, grad_fn=<NllLossBackward0>)\n",
"426 LOSS DIFF: tensor(5.4313, grad_fn=<NllLossBackward0>) tensor(5.3181, grad_fn=<NllLossBackward0>)\n",
"427 LOSS DIFF: tensor(5.4721, grad_fn=<NllLossBackward0>) tensor(5.3831, grad_fn=<NllLossBackward0>)\n",
"428 LOSS DIFF: tensor(5.3902, grad_fn=<NllLossBackward0>) tensor(5.2394, grad_fn=<NllLossBackward0>)\n",
"429 LOSS DIFF: tensor(5.3492, grad_fn=<NllLossBackward0>) tensor(5.3336, grad_fn=<NllLossBackward0>)\n",
"430 LOSS DIFF: tensor(5.3523, grad_fn=<NllLossBackward0>) tensor(5.3492, grad_fn=<NllLossBackward0>)\n",
"431 LOSS DIFF: tensor(5.4211, grad_fn=<NllLossBackward0>) tensor(5.3486, grad_fn=<NllLossBackward0>)\n",
"432 LOSS DIFF: tensor(5.4755, grad_fn=<NllLossBackward0>) tensor(5.2288, grad_fn=<NllLossBackward0>)\n",
"433 LOSS DIFF: tensor(5.5728, grad_fn=<NllLossBackward0>) tensor(5.4755, grad_fn=<NllLossBackward0>)\n",
"434 LOSS DIFF: tensor(5.3855, grad_fn=<NllLossBackward0>) tensor(5.3527, grad_fn=<NllLossBackward0>)\n",
"435 LOSS DIFF: tensor(5.4776, grad_fn=<NllLossBackward0>) tensor(5.3855, grad_fn=<NllLossBackward0>)\n",
"436 LOSS DIFF: tensor(5.3750, grad_fn=<NllLossBackward0>) tensor(5.3262, grad_fn=<NllLossBackward0>)\n",
"437 LOSS DIFF: tensor(5.3902, grad_fn=<NllLossBackward0>) tensor(5.3750, grad_fn=<NllLossBackward0>)\n",
"438 LOSS DIFF: tensor(5.3135, grad_fn=<NllLossBackward0>) tensor(5.2863, grad_fn=<NllLossBackward0>)\n",
"439 LOSS DIFF: tensor(5.4483, grad_fn=<NllLossBackward0>) tensor(5.3135, grad_fn=<NllLossBackward0>)\n",
"440 LOSS DIFF: tensor(5.3201, grad_fn=<NllLossBackward0>) tensor(5.2603, grad_fn=<NllLossBackward0>)\n",
"441 LOSS DIFF: tensor(5.3807, grad_fn=<NllLossBackward0>) tensor(5.3201, grad_fn=<NllLossBackward0>)\n",
"442 LOSS DIFF: tensor(5.5009, grad_fn=<NllLossBackward0>) tensor(5.2434, grad_fn=<NllLossBackward0>)\n",
"443 LOSS DIFF: tensor(5.4282, grad_fn=<NllLossBackward0>) tensor(5.4278, grad_fn=<NllLossBackward0>)\n",
"444 LOSS DIFF: tensor(5.3787, grad_fn=<NllLossBackward0>) tensor(5.3128, grad_fn=<NllLossBackward0>)\n",
"445 LOSS DIFF: tensor(5.5917, grad_fn=<NllLossBackward0>) tensor(5.3324, grad_fn=<NllLossBackward0>)\n",
"446 LOSS DIFF: tensor(5.4186, grad_fn=<NllLossBackward0>) tensor(5.3144, grad_fn=<NllLossBackward0>)\n",
"447 LOSS DIFF: tensor(5.4553, grad_fn=<NllLossBackward0>) tensor(5.4186, grad_fn=<NllLossBackward0>)\n",
"448 LOSS DIFF: tensor(5.4903, grad_fn=<NllLossBackward0>) tensor(5.4553, grad_fn=<NllLossBackward0>)\n",
"449 LOSS DIFF: tensor(5.4295, grad_fn=<NllLossBackward0>) tensor(5.3503, grad_fn=<NllLossBackward0>)\n",
"450 LOSS DIFF: tensor(5.3945, grad_fn=<NllLossBackward0>) tensor(5.3607, grad_fn=<NllLossBackward0>)\n",
"451 LOSS DIFF: tensor(5.2822, grad_fn=<NllLossBackward0>) tensor(5.2387, grad_fn=<NllLossBackward0>)\n",
"452 LOSS DIFF: tensor(5.3334, grad_fn=<NllLossBackward0>) tensor(5.2822, grad_fn=<NllLossBackward0>)\n",
"453 LOSS DIFF: tensor(5.4073, grad_fn=<NllLossBackward0>) tensor(5.3334, grad_fn=<NllLossBackward0>)\n",
"454 LOSS DIFF: tensor(5.3797, grad_fn=<NllLossBackward0>) tensor(5.3469, grad_fn=<NllLossBackward0>)\n",
"455 LOSS DIFF: tensor(5.4848, grad_fn=<NllLossBackward0>) tensor(5.2529, grad_fn=<NllLossBackward0>)\n",
"900 tensor(5.3078, grad_fn=<NllLossBackward0>)\n",
"456 LOSS DIFF: tensor(5.4695, grad_fn=<NllLossBackward0>) tensor(5.3078, grad_fn=<NllLossBackward0>)\n",
"457 LOSS DIFF: tensor(5.4369, grad_fn=<NllLossBackward0>) tensor(5.3834, grad_fn=<NllLossBackward0>)\n",
"458 LOSS DIFF: tensor(5.4973, grad_fn=<NllLossBackward0>) tensor(5.4369, grad_fn=<NllLossBackward0>)\n",
"459 LOSS DIFF: tensor(5.4526, grad_fn=<NllLossBackward0>) tensor(5.3075, grad_fn=<NllLossBackward0>)\n",
"460 LOSS DIFF: tensor(5.4022, grad_fn=<NllLossBackward0>) tensor(5.2870, grad_fn=<NllLossBackward0>)\n",
"461 LOSS DIFF: tensor(5.3850, grad_fn=<NllLossBackward0>) tensor(5.2879, grad_fn=<NllLossBackward0>)\n",
"462 LOSS DIFF: tensor(5.4370, grad_fn=<NllLossBackward0>) tensor(5.3154, grad_fn=<NllLossBackward0>)\n",
"463 LOSS DIFF: tensor(5.4111, grad_fn=<NllLossBackward0>) tensor(5.3927, grad_fn=<NllLossBackward0>)\n",
"464 LOSS DIFF: tensor(5.4638, grad_fn=<NllLossBackward0>) tensor(5.4111, grad_fn=<NllLossBackward0>)\n",
"465 LOSS DIFF: tensor(5.3719, grad_fn=<NllLossBackward0>) tensor(5.3195, grad_fn=<NllLossBackward0>)\n",
"466 LOSS DIFF: tensor(5.4880, grad_fn=<NllLossBackward0>) tensor(5.3719, grad_fn=<NllLossBackward0>)\n",
"467 LOSS DIFF: tensor(5.4762, grad_fn=<NllLossBackward0>) tensor(5.4186, grad_fn=<NllLossBackward0>)\n",
"468 LOSS DIFF: tensor(5.3155, grad_fn=<NllLossBackward0>) tensor(5.2086, grad_fn=<NllLossBackward0>)\n",
"469 LOSS DIFF: tensor(5.4985, grad_fn=<NllLossBackward0>) tensor(5.3155, grad_fn=<NllLossBackward0>)\n",
"470 LOSS DIFF: tensor(5.4505, grad_fn=<NllLossBackward0>) tensor(5.3731, grad_fn=<NllLossBackward0>)\n",
"471 LOSS DIFF: tensor(5.4291, grad_fn=<NllLossBackward0>) tensor(5.3408, grad_fn=<NllLossBackward0>)\n",
"472 LOSS DIFF: tensor(5.3826, grad_fn=<NllLossBackward0>) tensor(5.3232, grad_fn=<NllLossBackward0>)\n",
"473 LOSS DIFF: tensor(5.4152, grad_fn=<NllLossBackward0>) tensor(5.3468, grad_fn=<NllLossBackward0>)\n",
"474 LOSS DIFF: tensor(5.4983, grad_fn=<NllLossBackward0>) tensor(5.4152, grad_fn=<NllLossBackward0>)\n",
"475 LOSS DIFF: tensor(5.5432, grad_fn=<NllLossBackward0>) tensor(5.3502, grad_fn=<NllLossBackward0>)\n",
"476 LOSS DIFF: tensor(5.3989, grad_fn=<NllLossBackward0>) tensor(5.3489, grad_fn=<NllLossBackward0>)\n",
"477 LOSS DIFF: tensor(5.4624, grad_fn=<NllLossBackward0>) tensor(5.3761, grad_fn=<NllLossBackward0>)\n",
"478 LOSS DIFF: tensor(5.4082, grad_fn=<NllLossBackward0>) tensor(5.4043, grad_fn=<NllLossBackward0>)\n",
"479 LOSS DIFF: tensor(5.4074, grad_fn=<NllLossBackward0>) tensor(5.3588, grad_fn=<NllLossBackward0>)\n",
"480 LOSS DIFF: tensor(5.4588, grad_fn=<NllLossBackward0>) tensor(5.4074, grad_fn=<NllLossBackward0>)\n",
"481 LOSS DIFF: tensor(5.3339, grad_fn=<NllLossBackward0>) tensor(5.2172, grad_fn=<NllLossBackward0>)\n",
"482 LOSS DIFF: tensor(5.4468, grad_fn=<NllLossBackward0>) tensor(5.3339, grad_fn=<NllLossBackward0>)\n",
"483 LOSS DIFF: tensor(5.4736, grad_fn=<NllLossBackward0>) tensor(5.4024, grad_fn=<NllLossBackward0>)\n",
"484 LOSS DIFF: tensor(5.3780, grad_fn=<NllLossBackward0>) tensor(5.3095, grad_fn=<NllLossBackward0>)\n",
"485 LOSS DIFF: tensor(5.4251, grad_fn=<NllLossBackward0>) tensor(5.3780, grad_fn=<NllLossBackward0>)\n",
"486 LOSS DIFF: tensor(5.4035, grad_fn=<NllLossBackward0>) tensor(5.3474, grad_fn=<NllLossBackward0>)\n",
"487 LOSS DIFF: tensor(5.3575, grad_fn=<NllLossBackward0>) tensor(5.2837, grad_fn=<NllLossBackward0>)\n",
"488 LOSS DIFF: tensor(5.4629, grad_fn=<NllLossBackward0>) tensor(5.3298, grad_fn=<NllLossBackward0>)\n",
"489 LOSS DIFF: tensor(5.4593, grad_fn=<NllLossBackward0>) tensor(5.4124, grad_fn=<NllLossBackward0>)\n",
"490 LOSS DIFF: tensor(5.4040, grad_fn=<NllLossBackward0>) tensor(5.3532, grad_fn=<NllLossBackward0>)\n",
"491 LOSS DIFF: tensor(5.4693, grad_fn=<NllLossBackward0>) tensor(5.4040, grad_fn=<NllLossBackward0>)\n",
"492 LOSS DIFF: tensor(5.4201, grad_fn=<NllLossBackward0>) tensor(5.3561, grad_fn=<NllLossBackward0>)\n",
"493 LOSS DIFF: tensor(5.4786, grad_fn=<NllLossBackward0>) tensor(5.4201, grad_fn=<NllLossBackward0>)\n",
"494 LOSS DIFF: tensor(5.3819, grad_fn=<NllLossBackward0>) tensor(5.3108, grad_fn=<NllLossBackward0>)\n",
"495 LOSS DIFF: tensor(5.3170, grad_fn=<NllLossBackward0>) tensor(5.3080, grad_fn=<NllLossBackward0>)\n",
"496 LOSS DIFF: tensor(5.3305, grad_fn=<NllLossBackward0>) tensor(5.2931, grad_fn=<NllLossBackward0>)\n",
"497 LOSS DIFF: tensor(5.3719, grad_fn=<NllLossBackward0>) tensor(5.3305, grad_fn=<NllLossBackward0>)\n",
"498 LOSS DIFF: tensor(5.3756, grad_fn=<NllLossBackward0>) tensor(5.3702, grad_fn=<NllLossBackward0>)\n",
"499 LOSS DIFF: tensor(5.4073, grad_fn=<NllLossBackward0>) tensor(5.1951, grad_fn=<NllLossBackward0>)\n",
"500 LOSS DIFF: tensor(5.4267, grad_fn=<NllLossBackward0>) tensor(5.3957, grad_fn=<NllLossBackward0>)\n",
"501 LOSS DIFF: tensor(5.3842, grad_fn=<NllLossBackward0>) tensor(5.3569, grad_fn=<NllLossBackward0>)\n",
"502 LOSS DIFF: tensor(5.4202, grad_fn=<NllLossBackward0>) tensor(5.3842, grad_fn=<NllLossBackward0>)\n",
"503 LOSS DIFF: tensor(5.3634, grad_fn=<NllLossBackward0>) tensor(5.2962, grad_fn=<NllLossBackward0>)\n",
"504 LOSS DIFF: tensor(5.4654, grad_fn=<NllLossBackward0>) tensor(5.3512, grad_fn=<NllLossBackward0>)\n",
"1000 tensor(5.4063, grad_fn=<NllLossBackward0>)\n",
"505 LOSS DIFF: tensor(5.4063, grad_fn=<NllLossBackward0>) tensor(5.3712, grad_fn=<NllLossBackward0>)\n",
"506 LOSS DIFF: tensor(5.3378, grad_fn=<NllLossBackward0>) tensor(5.2547, grad_fn=<NllLossBackward0>)\n",
"507 LOSS DIFF: tensor(5.3185, grad_fn=<NllLossBackward0>) tensor(5.2350, grad_fn=<NllLossBackward0>)\n",
"508 LOSS DIFF: tensor(5.3049, grad_fn=<NllLossBackward0>) tensor(5.1821, grad_fn=<NllLossBackward0>)\n",
"509 LOSS DIFF: tensor(5.4689, grad_fn=<NllLossBackward0>) tensor(5.3049, grad_fn=<NllLossBackward0>)\n",
"510 LOSS DIFF: tensor(5.1437, grad_fn=<NllLossBackward0>) tensor(5.1380, grad_fn=<NllLossBackward0>)\n",
"511 LOSS DIFF: tensor(5.3984, grad_fn=<NllLossBackward0>) tensor(5.1437, grad_fn=<NllLossBackward0>)\n",
"512 LOSS DIFF: tensor(5.5009, grad_fn=<NllLossBackward0>) tensor(5.2426, grad_fn=<NllLossBackward0>)\n",
"513 LOSS DIFF: tensor(5.3734, grad_fn=<NllLossBackward0>) tensor(5.3096, grad_fn=<NllLossBackward0>)\n",
"514 LOSS DIFF: tensor(5.3889, grad_fn=<NllLossBackward0>) tensor(5.3734, grad_fn=<NllLossBackward0>)\n",
"515 LOSS DIFF: tensor(5.4053, grad_fn=<NllLossBackward0>) tensor(5.3114, grad_fn=<NllLossBackward0>)\n",
"516 LOSS DIFF: tensor(5.3912, grad_fn=<NllLossBackward0>) tensor(5.2357, grad_fn=<NllLossBackward0>)\n",
"517 LOSS DIFF: tensor(5.4400, grad_fn=<NllLossBackward0>) tensor(5.3115, grad_fn=<NllLossBackward0>)\n",
"518 LOSS DIFF: tensor(5.4756, grad_fn=<NllLossBackward0>) tensor(5.2689, grad_fn=<NllLossBackward0>)\n",
"519 LOSS DIFF: tensor(5.3111, grad_fn=<NllLossBackward0>) tensor(5.1618, grad_fn=<NllLossBackward0>)\n",
"520 LOSS DIFF: tensor(5.3974, grad_fn=<NllLossBackward0>) tensor(5.3030, grad_fn=<NllLossBackward0>)\n",
"521 LOSS DIFF: tensor(5.3955, grad_fn=<NllLossBackward0>) tensor(5.2872, grad_fn=<NllLossBackward0>)\n",
"522 LOSS DIFF: tensor(5.4712, grad_fn=<NllLossBackward0>) tensor(5.3863, grad_fn=<NllLossBackward0>)\n",
"523 LOSS DIFF: tensor(5.4095, grad_fn=<NllLossBackward0>) tensor(5.3686, grad_fn=<NllLossBackward0>)\n",
"524 LOSS DIFF: tensor(5.3285, grad_fn=<NllLossBackward0>) tensor(5.2293, grad_fn=<NllLossBackward0>)\n",
"525 LOSS DIFF: tensor(5.3468, grad_fn=<NllLossBackward0>) tensor(5.2348, grad_fn=<NllLossBackward0>)\n",
"526 LOSS DIFF: tensor(5.3140, grad_fn=<NllLossBackward0>) tensor(5.2460, grad_fn=<NllLossBackward0>)\n",
"527 LOSS DIFF: tensor(5.3772, grad_fn=<NllLossBackward0>) tensor(5.3140, grad_fn=<NllLossBackward0>)\n",
"528 LOSS DIFF: tensor(5.3576, grad_fn=<NllLossBackward0>) tensor(5.3363, grad_fn=<NllLossBackward0>)\n",
"529 LOSS DIFF: tensor(5.2631, grad_fn=<NllLossBackward0>) tensor(5.2239, grad_fn=<NllLossBackward0>)\n",
"530 LOSS DIFF: tensor(5.4207, grad_fn=<NllLossBackward0>) tensor(5.2631, grad_fn=<NllLossBackward0>)\n",
"531 LOSS DIFF: tensor(5.4238, grad_fn=<NllLossBackward0>) tensor(5.2798, grad_fn=<NllLossBackward0>)\n",
"532 LOSS DIFF: tensor(5.4496, grad_fn=<NllLossBackward0>) tensor(5.2819, grad_fn=<NllLossBackward0>)\n",
"533 LOSS DIFF: tensor(5.2788, grad_fn=<NllLossBackward0>) tensor(5.2125, grad_fn=<NllLossBackward0>)\n",
"534 LOSS DIFF: tensor(5.3159, grad_fn=<NllLossBackward0>) tensor(5.2788, grad_fn=<NllLossBackward0>)\n",
"535 LOSS DIFF: tensor(5.3200, grad_fn=<NllLossBackward0>) tensor(5.3159, grad_fn=<NllLossBackward0>)\n",
"536 LOSS DIFF: tensor(5.3934, grad_fn=<NllLossBackward0>) tensor(5.3087, grad_fn=<NllLossBackward0>)\n",
"537 LOSS DIFF: tensor(5.2843, grad_fn=<NllLossBackward0>) tensor(5.2815, grad_fn=<NllLossBackward0>)\n",
"538 LOSS DIFF: tensor(5.5309, grad_fn=<NllLossBackward0>) tensor(5.2377, grad_fn=<NllLossBackward0>)\n",
"539 LOSS DIFF: tensor(5.4258, grad_fn=<NllLossBackward0>) tensor(5.3734, grad_fn=<NllLossBackward0>)\n",
"540 LOSS DIFF: tensor(5.4562, grad_fn=<NllLossBackward0>) tensor(5.2893, grad_fn=<NllLossBackward0>)\n",
"541 LOSS DIFF: tensor(5.3672, grad_fn=<NllLossBackward0>) tensor(5.3331, grad_fn=<NllLossBackward0>)\n",
"542 LOSS DIFF: tensor(5.3475, grad_fn=<NllLossBackward0>) tensor(5.3409, grad_fn=<NllLossBackward0>)\n",
"543 LOSS DIFF: tensor(5.3826, grad_fn=<NllLossBackward0>) tensor(5.3475, grad_fn=<NllLossBackward0>)\n",
"544 LOSS DIFF: tensor(5.4529, grad_fn=<NllLossBackward0>) tensor(5.3826, grad_fn=<NllLossBackward0>)\n",
"545 LOSS DIFF: tensor(5.4554, grad_fn=<NllLossBackward0>) tensor(5.3758, grad_fn=<NllLossBackward0>)\n",
"546 LOSS DIFF: tensor(5.3725, grad_fn=<NllLossBackward0>) tensor(5.2762, grad_fn=<NllLossBackward0>)\n",
"547 LOSS DIFF: tensor(5.3809, grad_fn=<NllLossBackward0>) tensor(5.3140, grad_fn=<NllLossBackward0>)\n",
"548 LOSS DIFF: tensor(5.4411, grad_fn=<NllLossBackward0>) tensor(5.3809, grad_fn=<NllLossBackward0>)\n",
"1100 tensor(5.2577, grad_fn=<NllLossBackward0>)\n",
"549 LOSS DIFF: tensor(5.3207, grad_fn=<NllLossBackward0>) tensor(5.2233, grad_fn=<NllLossBackward0>)\n",
"550 LOSS DIFF: tensor(5.3287, grad_fn=<NllLossBackward0>) tensor(5.3207, grad_fn=<NllLossBackward0>)\n",
"551 LOSS DIFF: tensor(5.4455, grad_fn=<NllLossBackward0>) tensor(5.3140, grad_fn=<NllLossBackward0>)\n",
"552 LOSS DIFF: tensor(5.3970, grad_fn=<NllLossBackward0>) tensor(5.3160, grad_fn=<NllLossBackward0>)\n",
"553 LOSS DIFF: tensor(5.4958, grad_fn=<NllLossBackward0>) tensor(5.3970, grad_fn=<NllLossBackward0>)\n",
"554 LOSS DIFF: tensor(5.4289, grad_fn=<NllLossBackward0>) tensor(5.3781, grad_fn=<NllLossBackward0>)\n",
"555 LOSS DIFF: tensor(5.3988, grad_fn=<NllLossBackward0>) tensor(5.2830, grad_fn=<NllLossBackward0>)\n",
"556 LOSS DIFF: tensor(5.3452, grad_fn=<NllLossBackward0>) tensor(5.3121, grad_fn=<NllLossBackward0>)\n",
"557 LOSS DIFF: tensor(5.3707, grad_fn=<NllLossBackward0>) tensor(5.3452, grad_fn=<NllLossBackward0>)\n",
"558 LOSS DIFF: tensor(5.4004, grad_fn=<NllLossBackward0>) tensor(5.3490, grad_fn=<NllLossBackward0>)\n",
"559 LOSS DIFF: tensor(5.3442, grad_fn=<NllLossBackward0>) tensor(5.2255, grad_fn=<NllLossBackward0>)\n",
"560 LOSS DIFF: tensor(5.3311, grad_fn=<NllLossBackward0>) tensor(5.3145, grad_fn=<NllLossBackward0>)\n",
"561 LOSS DIFF: tensor(5.4662, grad_fn=<NllLossBackward0>) tensor(5.3171, grad_fn=<NllLossBackward0>)\n",
"562 LOSS DIFF: tensor(5.3376, grad_fn=<NllLossBackward0>) tensor(5.3006, grad_fn=<NllLossBackward0>)\n",
"563 LOSS DIFF: tensor(5.3617, grad_fn=<NllLossBackward0>) tensor(5.3376, grad_fn=<NllLossBackward0>)\n",
"564 LOSS DIFF: tensor(5.3627, grad_fn=<NllLossBackward0>) tensor(5.3617, grad_fn=<NllLossBackward0>)\n",
"565 LOSS DIFF: tensor(5.3169, grad_fn=<NllLossBackward0>) tensor(5.2494, grad_fn=<NllLossBackward0>)\n",
"566 LOSS DIFF: tensor(5.3391, grad_fn=<NllLossBackward0>) tensor(5.2797, grad_fn=<NllLossBackward0>)\n",
"567 LOSS DIFF: tensor(5.3793, grad_fn=<NllLossBackward0>) tensor(5.3391, grad_fn=<NllLossBackward0>)\n",
"568 LOSS DIFF: tensor(5.3983, grad_fn=<NllLossBackward0>) tensor(5.3793, grad_fn=<NllLossBackward0>)\n",
"569 LOSS DIFF: tensor(5.3797, grad_fn=<NllLossBackward0>) tensor(5.1963, grad_fn=<NllLossBackward0>)\n",
"570 LOSS DIFF: tensor(5.3978, grad_fn=<NllLossBackward0>) tensor(5.3797, grad_fn=<NllLossBackward0>)\n",
"571 LOSS DIFF: tensor(5.4648, grad_fn=<NllLossBackward0>) tensor(5.2794, grad_fn=<NllLossBackward0>)\n",
"572 LOSS DIFF: tensor(5.3364, grad_fn=<NllLossBackward0>) tensor(5.3139, grad_fn=<NllLossBackward0>)\n",
"573 LOSS DIFF: tensor(5.3724, grad_fn=<NllLossBackward0>) tensor(5.3364, grad_fn=<NllLossBackward0>)\n",
"574 LOSS DIFF: tensor(5.4125, grad_fn=<NllLossBackward0>) tensor(5.3724, grad_fn=<NllLossBackward0>)\n",
"575 LOSS DIFF: tensor(5.4216, grad_fn=<NllLossBackward0>) tensor(5.3249, grad_fn=<NllLossBackward0>)\n",
"576 LOSS DIFF: tensor(5.3209, grad_fn=<NllLossBackward0>) tensor(5.2087, grad_fn=<NllLossBackward0>)\n",
"577 LOSS DIFF: tensor(5.2730, grad_fn=<NllLossBackward0>) tensor(5.2515, grad_fn=<NllLossBackward0>)\n",
"578 LOSS DIFF: tensor(5.3871, grad_fn=<NllLossBackward0>) tensor(5.2537, grad_fn=<NllLossBackward0>)\n",
"579 LOSS DIFF: tensor(5.2357, grad_fn=<NllLossBackward0>) tensor(5.1883, grad_fn=<NllLossBackward0>)\n",
"580 LOSS DIFF: tensor(5.4435, grad_fn=<NllLossBackward0>) tensor(5.2357, grad_fn=<NllLossBackward0>)\n",
"581 LOSS DIFF: tensor(5.3116, grad_fn=<NllLossBackward0>) tensor(5.2408, grad_fn=<NllLossBackward0>)\n",
"582 LOSS DIFF: tensor(5.4295, grad_fn=<NllLossBackward0>) tensor(5.3116, grad_fn=<NllLossBackward0>)\n",
"583 LOSS DIFF: tensor(5.3725, grad_fn=<NllLossBackward0>) tensor(5.2704, grad_fn=<NllLossBackward0>)\n",
"584 LOSS DIFF: tensor(5.3951, grad_fn=<NllLossBackward0>) tensor(5.3211, grad_fn=<NllLossBackward0>)\n",
"585 LOSS DIFF: tensor(5.4080, grad_fn=<NllLossBackward0>) tensor(5.3951, grad_fn=<NllLossBackward0>)\n",
"586 LOSS DIFF: tensor(5.3569, grad_fn=<NllLossBackward0>) tensor(5.2900, grad_fn=<NllLossBackward0>)\n",
"587 LOSS DIFF: tensor(5.3004, grad_fn=<NllLossBackward0>) tensor(5.2806, grad_fn=<NllLossBackward0>)\n",
"588 LOSS DIFF: tensor(5.3874, grad_fn=<NllLossBackward0>) tensor(5.3004, grad_fn=<NllLossBackward0>)\n",
"589 LOSS DIFF: tensor(5.4849, grad_fn=<NllLossBackward0>) tensor(5.2921, grad_fn=<NllLossBackward0>)\n",
"590 LOSS DIFF: tensor(5.2856, grad_fn=<NllLossBackward0>) tensor(5.2661, grad_fn=<NllLossBackward0>)\n",
"591 LOSS DIFF: tensor(5.4242, grad_fn=<NllLossBackward0>) tensor(5.2856, grad_fn=<NllLossBackward0>)\n",
"592 LOSS DIFF: tensor(5.2910, grad_fn=<NllLossBackward0>) tensor(5.1762, grad_fn=<NllLossBackward0>)\n",
"593 LOSS DIFF: tensor(5.3048, grad_fn=<NllLossBackward0>) tensor(5.1369, grad_fn=<NllLossBackward0>)\n",
"594 LOSS DIFF: tensor(5.3170, grad_fn=<NllLossBackward0>) tensor(5.3048, grad_fn=<NllLossBackward0>)\n",
"595 LOSS DIFF: tensor(5.4164, grad_fn=<NllLossBackward0>) tensor(5.3170, grad_fn=<NllLossBackward0>)\n",
"1200 tensor(5.2414, grad_fn=<NllLossBackward0>)\n",
"596 LOSS DIFF: tensor(5.4063, grad_fn=<NllLossBackward0>) tensor(5.2414, grad_fn=<NllLossBackward0>)\n",
"597 LOSS DIFF: tensor(5.3547, grad_fn=<NllLossBackward0>) tensor(5.2150, grad_fn=<NllLossBackward0>)\n",
"598 LOSS DIFF: tensor(5.2713, grad_fn=<NllLossBackward0>) tensor(5.2182, grad_fn=<NllLossBackward0>)\n",
"599 LOSS DIFF: tensor(5.2934, grad_fn=<NllLossBackward0>) tensor(5.2713, grad_fn=<NllLossBackward0>)\n",
"600 LOSS DIFF: tensor(5.3680, grad_fn=<NllLossBackward0>) tensor(5.2934, grad_fn=<NllLossBackward0>)\n",
"601 LOSS DIFF: tensor(5.3810, grad_fn=<NllLossBackward0>) tensor(5.2937, grad_fn=<NllLossBackward0>)\n",
"602 LOSS DIFF: tensor(5.2992, grad_fn=<NllLossBackward0>) tensor(5.2390, grad_fn=<NllLossBackward0>)\n",
"603 LOSS DIFF: tensor(5.3592, grad_fn=<NllLossBackward0>) tensor(5.2325, grad_fn=<NllLossBackward0>)\n",
"604 LOSS DIFF: tensor(5.4165, grad_fn=<NllLossBackward0>) tensor(5.2317, grad_fn=<NllLossBackward0>)\n",
"605 LOSS DIFF: tensor(5.5033, grad_fn=<NllLossBackward0>) tensor(5.4165, grad_fn=<NllLossBackward0>)\n",
"606 LOSS DIFF: tensor(5.4137, grad_fn=<NllLossBackward0>) tensor(5.1996, grad_fn=<NllLossBackward0>)\n",
"607 LOSS DIFF: tensor(5.5262, grad_fn=<NllLossBackward0>) tensor(5.4137, grad_fn=<NllLossBackward0>)\n",
"608 LOSS DIFF: tensor(5.3964, grad_fn=<NllLossBackward0>) tensor(5.3314, grad_fn=<NllLossBackward0>)\n",
"609 LOSS DIFF: tensor(5.3722, grad_fn=<NllLossBackward0>) tensor(5.3268, grad_fn=<NllLossBackward0>)\n",
"610 LOSS DIFF: tensor(5.3378, grad_fn=<NllLossBackward0>) tensor(5.3186, grad_fn=<NllLossBackward0>)\n",
"611 LOSS DIFF: tensor(5.4699, grad_fn=<NllLossBackward0>) tensor(5.3378, grad_fn=<NllLossBackward0>)\n",
"612 LOSS DIFF: tensor(5.4191, grad_fn=<NllLossBackward0>) tensor(5.3715, grad_fn=<NllLossBackward0>)\n",
"613 LOSS DIFF: tensor(5.3107, grad_fn=<NllLossBackward0>) tensor(5.2864, grad_fn=<NllLossBackward0>)\n",
"614 LOSS DIFF: tensor(5.3746, grad_fn=<NllLossBackward0>) tensor(5.2844, grad_fn=<NllLossBackward0>)\n",
"615 LOSS DIFF: tensor(5.4486, grad_fn=<NllLossBackward0>) tensor(5.3746, grad_fn=<NllLossBackward0>)\n",
"616 LOSS DIFF: tensor(5.4732, grad_fn=<NllLossBackward0>) tensor(5.4486, grad_fn=<NllLossBackward0>)\n",
"617 LOSS DIFF: tensor(5.3487, grad_fn=<NllLossBackward0>) tensor(5.2559, grad_fn=<NllLossBackward0>)\n",
"618 LOSS DIFF: tensor(5.3737, grad_fn=<NllLossBackward0>) tensor(5.3487, grad_fn=<NllLossBackward0>)\n",
"619 LOSS DIFF: tensor(5.3524, grad_fn=<NllLossBackward0>) tensor(5.3056, grad_fn=<NllLossBackward0>)\n",
"620 LOSS DIFF: tensor(5.4119, grad_fn=<NllLossBackward0>) tensor(5.3524, grad_fn=<NllLossBackward0>)\n",
"621 LOSS DIFF: tensor(5.3877, grad_fn=<NllLossBackward0>) tensor(5.3544, grad_fn=<NllLossBackward0>)\n",
"622 LOSS DIFF: tensor(5.3305, grad_fn=<NllLossBackward0>) tensor(5.3165, grad_fn=<NllLossBackward0>)\n",
"623 LOSS DIFF: tensor(5.4056, grad_fn=<NllLossBackward0>) tensor(5.3305, grad_fn=<NllLossBackward0>)\n",
"624 LOSS DIFF: tensor(5.3550, grad_fn=<NllLossBackward0>) tensor(5.3069, grad_fn=<NllLossBackward0>)\n",
"625 LOSS DIFF: tensor(5.3018, grad_fn=<NllLossBackward0>) tensor(5.2306, grad_fn=<NllLossBackward0>)\n",
"626 LOSS DIFF: tensor(5.3613, grad_fn=<NllLossBackward0>) tensor(5.3018, grad_fn=<NllLossBackward0>)\n",
"627 LOSS DIFF: tensor(5.3056, grad_fn=<NllLossBackward0>) tensor(5.2849, grad_fn=<NllLossBackward0>)\n",
"628 LOSS DIFF: tensor(5.4281, grad_fn=<NllLossBackward0>) tensor(5.1398, grad_fn=<NllLossBackward0>)\n",
"629 LOSS DIFF: tensor(5.3037, grad_fn=<NllLossBackward0>) tensor(5.2343, grad_fn=<NllLossBackward0>)\n",
"630 LOSS DIFF: tensor(5.3630, grad_fn=<NllLossBackward0>) tensor(5.2993, grad_fn=<NllLossBackward0>)\n",
"631 LOSS DIFF: tensor(5.3922, grad_fn=<NllLossBackward0>) tensor(5.3630, grad_fn=<NllLossBackward0>)\n",
"632 LOSS DIFF: tensor(5.3583, grad_fn=<NllLossBackward0>) tensor(5.2346, grad_fn=<NllLossBackward0>)\n",
"633 LOSS DIFF: tensor(5.3638, grad_fn=<NllLossBackward0>) tensor(5.3486, grad_fn=<NllLossBackward0>)\n",
"634 LOSS DIFF: tensor(5.2703, grad_fn=<NllLossBackward0>) tensor(5.2605, grad_fn=<NllLossBackward0>)\n",
"635 LOSS DIFF: tensor(5.3341, grad_fn=<NllLossBackward0>) tensor(5.2703, grad_fn=<NllLossBackward0>)\n",
"636 LOSS DIFF: tensor(5.3615, grad_fn=<NllLossBackward0>) tensor(5.3341, grad_fn=<NllLossBackward0>)\n",
"637 LOSS DIFF: tensor(5.3735, grad_fn=<NllLossBackward0>) tensor(5.3225, grad_fn=<NllLossBackward0>)\n",
"638 LOSS DIFF: tensor(5.3535, grad_fn=<NllLossBackward0>) tensor(5.2765, grad_fn=<NllLossBackward0>)\n",
"639 LOSS DIFF: tensor(5.4068, grad_fn=<NllLossBackward0>) tensor(5.3535, grad_fn=<NllLossBackward0>)\n",
"640 LOSS DIFF: tensor(5.3669, grad_fn=<NllLossBackward0>) tensor(5.2441, grad_fn=<NllLossBackward0>)\n",
"641 LOSS DIFF: tensor(5.3348, grad_fn=<NllLossBackward0>) tensor(5.2892, grad_fn=<NllLossBackward0>)\n",
"642 LOSS DIFF: tensor(5.4134, grad_fn=<NllLossBackward0>) tensor(5.3348, grad_fn=<NllLossBackward0>)\n",
"643 LOSS DIFF: tensor(5.3649, grad_fn=<NllLossBackward0>) tensor(5.3365, grad_fn=<NllLossBackward0>)\n",
"644 LOSS DIFF: tensor(5.3606, grad_fn=<NllLossBackward0>) tensor(5.2532, grad_fn=<NllLossBackward0>)\n",
"645 LOSS DIFF: tensor(5.3622, grad_fn=<NllLossBackward0>) tensor(5.2414, grad_fn=<NllLossBackward0>)\n",
"646 LOSS DIFF: tensor(5.3985, grad_fn=<NllLossBackward0>) tensor(5.3297, grad_fn=<NllLossBackward0>)\n",
"1300 tensor(5.2993, grad_fn=<NllLossBackward0>)\n",
"647 LOSS DIFF: tensor(5.2993, grad_fn=<NllLossBackward0>) tensor(5.2568, grad_fn=<NllLossBackward0>)\n",
"648 LOSS DIFF: tensor(5.3153, grad_fn=<NllLossBackward0>) tensor(5.2993, grad_fn=<NllLossBackward0>)\n",
"649 LOSS DIFF: tensor(5.3619, grad_fn=<NllLossBackward0>) tensor(5.2734, grad_fn=<NllLossBackward0>)\n",
"650 LOSS DIFF: tensor(5.4052, grad_fn=<NllLossBackward0>) tensor(5.2523, grad_fn=<NllLossBackward0>)\n",
"651 LOSS DIFF: tensor(5.3573, grad_fn=<NllLossBackward0>) tensor(5.3209, grad_fn=<NllLossBackward0>)\n",
"652 LOSS DIFF: tensor(5.2472, grad_fn=<NllLossBackward0>) tensor(5.2427, grad_fn=<NllLossBackward0>)\n",
"653 LOSS DIFF: tensor(5.4110, grad_fn=<NllLossBackward0>) tensor(5.2472, grad_fn=<NllLossBackward0>)\n",
"654 LOSS DIFF: tensor(5.2660, grad_fn=<NllLossBackward0>) tensor(5.2397, grad_fn=<NllLossBackward0>)\n",
"655 LOSS DIFF: tensor(5.3451, grad_fn=<NllLossBackward0>) tensor(5.2660, grad_fn=<NllLossBackward0>)\n",
"656 LOSS DIFF: tensor(5.2828, grad_fn=<NllLossBackward0>) tensor(5.1689, grad_fn=<NllLossBackward0>)\n",
"657 LOSS DIFF: tensor(5.3989, grad_fn=<NllLossBackward0>) tensor(5.2828, grad_fn=<NllLossBackward0>)\n",
"658 LOSS DIFF: tensor(5.3128, grad_fn=<NllLossBackward0>) tensor(5.2708, grad_fn=<NllLossBackward0>)\n",
"659 LOSS DIFF: tensor(5.2602, grad_fn=<NllLossBackward0>) tensor(5.2357, grad_fn=<NllLossBackward0>)\n",
"660 LOSS DIFF: tensor(5.3591, grad_fn=<NllLossBackward0>) tensor(5.2602, grad_fn=<NllLossBackward0>)\n",
"661 LOSS DIFF: tensor(5.4472, grad_fn=<NllLossBackward0>) tensor(5.2953, grad_fn=<NllLossBackward0>)\n",
"662 LOSS DIFF: tensor(5.2631, grad_fn=<NllLossBackward0>) tensor(5.1217, grad_fn=<NllLossBackward0>)\n",
"663 LOSS DIFF: tensor(5.3468, grad_fn=<NllLossBackward0>) tensor(5.2631, grad_fn=<NllLossBackward0>)\n",
"664 LOSS DIFF: tensor(5.3112, grad_fn=<NllLossBackward0>) tensor(5.1798, grad_fn=<NllLossBackward0>)\n",
"665 LOSS DIFF: tensor(5.4536, grad_fn=<NllLossBackward0>) tensor(5.3112, grad_fn=<NllLossBackward0>)\n",
"666 LOSS DIFF: tensor(5.2946, grad_fn=<NllLossBackward0>) tensor(5.2031, grad_fn=<NllLossBackward0>)\n",
"667 LOSS DIFF: tensor(5.3658, grad_fn=<NllLossBackward0>) tensor(5.2946, grad_fn=<NllLossBackward0>)\n",
"668 LOSS DIFF: tensor(5.3176, grad_fn=<NllLossBackward0>) tensor(5.3126, grad_fn=<NllLossBackward0>)\n",
"669 LOSS DIFF: tensor(5.3397, grad_fn=<NllLossBackward0>) tensor(5.2761, grad_fn=<NllLossBackward0>)\n",
"670 LOSS DIFF: tensor(5.3414, grad_fn=<NllLossBackward0>) tensor(5.1992, grad_fn=<NllLossBackward0>)\n",
"671 LOSS DIFF: tensor(5.3593, grad_fn=<NllLossBackward0>) tensor(5.2940, grad_fn=<NllLossBackward0>)\n",
"672 LOSS DIFF: tensor(5.3734, grad_fn=<NllLossBackward0>) tensor(5.3593, grad_fn=<NllLossBackward0>)\n",
"673 LOSS DIFF: tensor(5.3879, grad_fn=<NllLossBackward0>) tensor(5.3734, grad_fn=<NllLossBackward0>)\n",
"674 LOSS DIFF: tensor(5.4095, grad_fn=<NllLossBackward0>) tensor(5.3879, grad_fn=<NllLossBackward0>)\n",
"675 LOSS DIFF: tensor(5.3731, grad_fn=<NllLossBackward0>) tensor(5.3149, grad_fn=<NllLossBackward0>)\n",
"676 LOSS DIFF: tensor(5.3762, grad_fn=<NllLossBackward0>) tensor(5.2030, grad_fn=<NllLossBackward0>)\n",
"677 LOSS DIFF: tensor(5.3640, grad_fn=<NllLossBackward0>) tensor(5.2093, grad_fn=<NllLossBackward0>)\n",
"678 LOSS DIFF: tensor(5.3913, grad_fn=<NllLossBackward0>) tensor(5.3640, grad_fn=<NllLossBackward0>)\n",
"679 LOSS DIFF: tensor(5.3979, grad_fn=<NllLossBackward0>) tensor(5.3913, grad_fn=<NllLossBackward0>)\n",
"680 LOSS DIFF: tensor(5.3584, grad_fn=<NllLossBackward0>) tensor(5.2680, grad_fn=<NllLossBackward0>)\n",
"681 LOSS DIFF: tensor(5.3767, grad_fn=<NllLossBackward0>) tensor(5.3584, grad_fn=<NllLossBackward0>)\n",
"682 LOSS DIFF: tensor(5.3828, grad_fn=<NllLossBackward0>) tensor(5.2542, grad_fn=<NllLossBackward0>)\n",
"683 LOSS DIFF: tensor(5.3277, grad_fn=<NllLossBackward0>) tensor(5.2771, grad_fn=<NllLossBackward0>)\n",
"684 LOSS DIFF: tensor(5.2910, grad_fn=<NllLossBackward0>) tensor(5.2756, grad_fn=<NllLossBackward0>)\n",
"685 LOSS DIFF: tensor(5.3150, grad_fn=<NllLossBackward0>) tensor(5.2910, grad_fn=<NllLossBackward0>)\n",
"686 LOSS DIFF: tensor(5.3208, grad_fn=<NllLossBackward0>) tensor(5.3150, grad_fn=<NllLossBackward0>)\n",
"687 LOSS DIFF: tensor(5.4099, grad_fn=<NllLossBackward0>) tensor(5.1751, grad_fn=<NllLossBackward0>)\n",
"688 LOSS DIFF: tensor(5.3103, grad_fn=<NllLossBackward0>) tensor(5.1557, grad_fn=<NllLossBackward0>)\n",
"689 LOSS DIFF: tensor(5.2464, grad_fn=<NllLossBackward0>) tensor(5.2038, grad_fn=<NllLossBackward0>)\n",
"690 LOSS DIFF: tensor(5.4148, grad_fn=<NllLossBackward0>) tensor(5.2464, grad_fn=<NllLossBackward0>)\n",
"691 LOSS DIFF: tensor(5.3898, grad_fn=<NllLossBackward0>) tensor(5.1863, grad_fn=<NllLossBackward0>)\n",
"692 LOSS DIFF: tensor(5.3926, grad_fn=<NllLossBackward0>) tensor(5.3898, grad_fn=<NllLossBackward0>)\n",
"693 LOSS DIFF: tensor(5.3975, grad_fn=<NllLossBackward0>) tensor(5.2156, grad_fn=<NllLossBackward0>)\n",
"694 LOSS DIFF: tensor(5.2680, grad_fn=<NllLossBackward0>) tensor(5.2367, grad_fn=<NllLossBackward0>)\n",
"695 LOSS DIFF: tensor(5.4590, grad_fn=<NllLossBackward0>) tensor(5.1675, grad_fn=<NllLossBackward0>)\n",
"696 LOSS DIFF: tensor(5.3168, grad_fn=<NllLossBackward0>) tensor(5.2447, grad_fn=<NllLossBackward0>)\n",
"697 LOSS DIFF: tensor(5.3581, grad_fn=<NllLossBackward0>) tensor(5.2256, grad_fn=<NllLossBackward0>)\n",
"698 LOSS DIFF: tensor(5.3668, grad_fn=<NllLossBackward0>) tensor(5.3399, grad_fn=<NllLossBackward0>)\n",
"1400 tensor(5.4240, grad_fn=<NllLossBackward0>)\n",
"699 LOSS DIFF: tensor(5.4240, grad_fn=<NllLossBackward0>) tensor(5.2860, grad_fn=<NllLossBackward0>)\n",
"700 LOSS DIFF: tensor(5.4507, grad_fn=<NllLossBackward0>) tensor(5.2273, grad_fn=<NllLossBackward0>)\n",
"701 LOSS DIFF: tensor(5.3034, grad_fn=<NllLossBackward0>) tensor(5.2823, grad_fn=<NllLossBackward0>)\n",
"702 LOSS DIFF: tensor(5.3641, grad_fn=<NllLossBackward0>) tensor(5.2678, grad_fn=<NllLossBackward0>)\n",
"703 LOSS DIFF: tensor(5.3712, grad_fn=<NllLossBackward0>) tensor(5.3641, grad_fn=<NllLossBackward0>)\n",
"704 LOSS DIFF: tensor(5.3199, grad_fn=<NllLossBackward0>) tensor(5.2634, grad_fn=<NllLossBackward0>)\n",
"705 LOSS DIFF: tensor(5.2937, grad_fn=<NllLossBackward0>) tensor(5.2929, grad_fn=<NllLossBackward0>)\n",
"706 LOSS DIFF: tensor(5.4281, grad_fn=<NllLossBackward0>) tensor(5.2937, grad_fn=<NllLossBackward0>)\n",
"707 LOSS DIFF: tensor(5.3490, grad_fn=<NllLossBackward0>) tensor(5.2559, grad_fn=<NllLossBackward0>)\n",
"708 LOSS DIFF: tensor(5.2956, grad_fn=<NllLossBackward0>) tensor(5.2263, grad_fn=<NllLossBackward0>)\n",
"709 LOSS DIFF: tensor(5.3573, grad_fn=<NllLossBackward0>) tensor(5.2956, grad_fn=<NllLossBackward0>)\n",
"710 LOSS DIFF: tensor(5.2388, grad_fn=<NllLossBackward0>) tensor(5.1368, grad_fn=<NllLossBackward0>)\n",
"711 LOSS DIFF: tensor(5.4568, grad_fn=<NllLossBackward0>) tensor(5.2388, grad_fn=<NllLossBackward0>)\n",
"712 LOSS DIFF: tensor(5.3657, grad_fn=<NllLossBackward0>) tensor(5.2206, grad_fn=<NllLossBackward0>)\n",
"713 LOSS DIFF: tensor(5.3937, grad_fn=<NllLossBackward0>) tensor(5.3657, grad_fn=<NllLossBackward0>)\n",
"714 LOSS DIFF: tensor(5.3151, grad_fn=<NllLossBackward0>) tensor(5.2181, grad_fn=<NllLossBackward0>)\n",
"715 LOSS DIFF: tensor(5.3477, grad_fn=<NllLossBackward0>) tensor(5.3151, grad_fn=<NllLossBackward0>)\n",
"716 LOSS DIFF: tensor(5.3319, grad_fn=<NllLossBackward0>) tensor(5.2977, grad_fn=<NllLossBackward0>)\n",
"717 LOSS DIFF: tensor(5.2638, grad_fn=<NllLossBackward0>) tensor(5.1780, grad_fn=<NllLossBackward0>)\n",
"718 LOSS DIFF: tensor(5.2669, grad_fn=<NllLossBackward0>) tensor(5.2638, grad_fn=<NllLossBackward0>)\n",
"719 LOSS DIFF: tensor(5.2977, grad_fn=<NllLossBackward0>) tensor(5.2669, grad_fn=<NllLossBackward0>)\n",
"720 LOSS DIFF: tensor(5.4203, grad_fn=<NllLossBackward0>) tensor(5.2977, grad_fn=<NllLossBackward0>)\n",
"721 LOSS DIFF: tensor(5.3931, grad_fn=<NllLossBackward0>) tensor(5.3073, grad_fn=<NllLossBackward0>)\n",
"722 LOSS DIFF: tensor(5.2668, grad_fn=<NllLossBackward0>) tensor(5.2528, grad_fn=<NllLossBackward0>)\n",
"723 LOSS DIFF: tensor(5.2713, grad_fn=<NllLossBackward0>) tensor(5.2102, grad_fn=<NllLossBackward0>)\n",
"724 LOSS DIFF: tensor(5.4657, grad_fn=<NllLossBackward0>) tensor(5.2713, grad_fn=<NllLossBackward0>)\n",
"725 LOSS DIFF: tensor(5.3160, grad_fn=<NllLossBackward0>) tensor(5.2097, grad_fn=<NllLossBackward0>)\n",
"726 LOSS DIFF: tensor(5.2945, grad_fn=<NllLossBackward0>) tensor(5.2223, grad_fn=<NllLossBackward0>)\n",
"727 LOSS DIFF: tensor(5.2871, grad_fn=<NllLossBackward0>) tensor(5.2417, grad_fn=<NllLossBackward0>)\n",
"728 LOSS DIFF: tensor(5.3049, grad_fn=<NllLossBackward0>) tensor(5.2871, grad_fn=<NllLossBackward0>)\n",
"729 LOSS DIFF: tensor(5.2566, grad_fn=<NllLossBackward0>) tensor(5.2405, grad_fn=<NllLossBackward0>)\n",
"730 LOSS DIFF: tensor(5.3831, grad_fn=<NllLossBackward0>) tensor(5.2566, grad_fn=<NllLossBackward0>)\n",
"731 LOSS DIFF: tensor(5.3322, grad_fn=<NllLossBackward0>) tensor(5.2234, grad_fn=<NllLossBackward0>)\n",
"732 LOSS DIFF: tensor(5.3731, grad_fn=<NllLossBackward0>) tensor(5.2365, grad_fn=<NllLossBackward0>)\n",
"733 LOSS DIFF: tensor(5.4400, grad_fn=<NllLossBackward0>) tensor(5.3731, grad_fn=<NllLossBackward0>)\n",
"734 LOSS DIFF: tensor(5.4715, grad_fn=<NllLossBackward0>) tensor(5.3013, grad_fn=<NllLossBackward0>)\n",
"735 LOSS DIFF: tensor(5.4422, grad_fn=<NllLossBackward0>) tensor(5.4010, grad_fn=<NllLossBackward0>)\n",
"736 LOSS DIFF: tensor(5.2298, grad_fn=<NllLossBackward0>) tensor(5.2163, grad_fn=<NllLossBackward0>)\n",
"737 LOSS DIFF: tensor(5.2493, grad_fn=<NllLossBackward0>) tensor(5.2298, grad_fn=<NllLossBackward0>)\n",
"738 LOSS DIFF: tensor(5.2958, grad_fn=<NllLossBackward0>) tensor(5.2493, grad_fn=<NllLossBackward0>)\n",
"739 LOSS DIFF: tensor(5.4094, grad_fn=<NllLossBackward0>) tensor(5.2502, grad_fn=<NllLossBackward0>)\n",
"740 LOSS DIFF: tensor(5.2576, grad_fn=<NllLossBackward0>) tensor(5.2305, grad_fn=<NllLossBackward0>)\n",
"741 LOSS DIFF: tensor(5.3885, grad_fn=<NllLossBackward0>) tensor(5.2576, grad_fn=<NllLossBackward0>)\n",
"742 LOSS DIFF: tensor(5.3493, grad_fn=<NllLossBackward0>) tensor(5.3387, grad_fn=<NllLossBackward0>)\n",
"743 LOSS DIFF: tensor(5.2640, grad_fn=<NllLossBackward0>) tensor(5.1842, grad_fn=<NllLossBackward0>)\n",
"744 LOSS DIFF: tensor(5.3568, grad_fn=<NllLossBackward0>) tensor(5.2640, grad_fn=<NllLossBackward0>)\n",
"745 LOSS DIFF: tensor(5.4262, grad_fn=<NllLossBackward0>) tensor(5.3232, grad_fn=<NllLossBackward0>)\n",
"746 LOSS DIFF: tensor(5.3020, grad_fn=<NllLossBackward0>) tensor(5.2816, grad_fn=<NllLossBackward0>)\n",
"1500 tensor(5.1988, grad_fn=<NllLossBackward0>)\n",
"747 LOSS DIFF: tensor(5.2921, grad_fn=<NllLossBackward0>) tensor(5.1988, grad_fn=<NllLossBackward0>)\n",
"748 LOSS DIFF: tensor(5.3279, grad_fn=<NllLossBackward0>) tensor(5.2921, grad_fn=<NllLossBackward0>)\n",
"749 LOSS DIFF: tensor(5.3318, grad_fn=<NllLossBackward0>) tensor(5.0392, grad_fn=<NllLossBackward0>)\n",
"750 LOSS DIFF: tensor(5.4100, grad_fn=<NllLossBackward0>) tensor(5.1959, grad_fn=<NllLossBackward0>)\n",
"751 LOSS DIFF: tensor(5.2634, grad_fn=<NllLossBackward0>) tensor(5.2334, grad_fn=<NllLossBackward0>)\n",
"752 LOSS DIFF: tensor(5.2761, grad_fn=<NllLossBackward0>) tensor(5.2634, grad_fn=<NllLossBackward0>)\n",
"753 LOSS DIFF: tensor(5.3743, grad_fn=<NllLossBackward0>) tensor(5.2761, grad_fn=<NllLossBackward0>)\n",
"754 LOSS DIFF: tensor(5.4399, grad_fn=<NllLossBackward0>) tensor(5.2495, grad_fn=<NllLossBackward0>)\n",
"755 LOSS DIFF: tensor(5.3723, grad_fn=<NllLossBackward0>) tensor(5.2125, grad_fn=<NllLossBackward0>)\n",
"756 LOSS DIFF: tensor(5.4313, grad_fn=<NllLossBackward0>) tensor(5.2310, grad_fn=<NllLossBackward0>)\n",
"757 LOSS DIFF: tensor(5.3316, grad_fn=<NllLossBackward0>) tensor(5.2243, grad_fn=<NllLossBackward0>)\n",
"758 LOSS DIFF: tensor(5.3435, grad_fn=<NllLossBackward0>) tensor(5.3128, grad_fn=<NllLossBackward0>)\n",
"759 LOSS DIFF: tensor(5.3396, grad_fn=<NllLossBackward0>) tensor(5.1988, grad_fn=<NllLossBackward0>)\n",
"760 LOSS DIFF: tensor(5.3344, grad_fn=<NllLossBackward0>) tensor(5.2798, grad_fn=<NllLossBackward0>)\n",
"761 LOSS DIFF: tensor(5.3503, grad_fn=<NllLossBackward0>) tensor(5.2845, grad_fn=<NllLossBackward0>)\n",
"762 LOSS DIFF: tensor(5.3522, grad_fn=<NllLossBackward0>) tensor(5.3503, grad_fn=<NllLossBackward0>)\n",
"763 LOSS DIFF: tensor(5.2487, grad_fn=<NllLossBackward0>) tensor(5.2103, grad_fn=<NllLossBackward0>)\n",
"764 LOSS DIFF: tensor(5.3914, grad_fn=<NllLossBackward0>) tensor(5.2487, grad_fn=<NllLossBackward0>)\n",
"765 LOSS DIFF: tensor(5.3346, grad_fn=<NllLossBackward0>) tensor(5.3265, grad_fn=<NllLossBackward0>)\n",
"766 LOSS DIFF: tensor(5.3932, grad_fn=<NllLossBackward0>) tensor(5.2668, grad_fn=<NllLossBackward0>)\n",
"767 LOSS DIFF: tensor(5.3308, grad_fn=<NllLossBackward0>) tensor(5.2136, grad_fn=<NllLossBackward0>)\n",
"768 LOSS DIFF: tensor(5.2342, grad_fn=<NllLossBackward0>) tensor(5.1842, grad_fn=<NllLossBackward0>)\n",
"769 LOSS DIFF: tensor(5.2779, grad_fn=<NllLossBackward0>) tensor(5.2342, grad_fn=<NllLossBackward0>)\n",
"770 LOSS DIFF: tensor(5.3309, grad_fn=<NllLossBackward0>) tensor(5.2779, grad_fn=<NllLossBackward0>)\n",
"771 LOSS DIFF: tensor(5.2772, grad_fn=<NllLossBackward0>) tensor(5.2208, grad_fn=<NllLossBackward0>)\n",
"772 LOSS DIFF: tensor(5.2998, grad_fn=<NllLossBackward0>) tensor(5.2772, grad_fn=<NllLossBackward0>)\n",
"773 LOSS DIFF: tensor(5.3198, grad_fn=<NllLossBackward0>) tensor(5.2998, grad_fn=<NllLossBackward0>)\n",
"774 LOSS DIFF: tensor(5.4071, grad_fn=<NllLossBackward0>) tensor(5.2555, grad_fn=<NllLossBackward0>)\n",
"775 LOSS DIFF: tensor(5.3407, grad_fn=<NllLossBackward0>) tensor(5.2137, grad_fn=<NllLossBackward0>)\n",
"776 LOSS DIFF: tensor(5.3168, grad_fn=<NllLossBackward0>) tensor(5.1123, grad_fn=<NllLossBackward0>)\n",
"777 LOSS DIFF: tensor(5.3270, grad_fn=<NllLossBackward0>) tensor(5.3168, grad_fn=<NllLossBackward0>)\n",
"778 LOSS DIFF: tensor(5.2770, grad_fn=<NllLossBackward0>) tensor(5.1605, grad_fn=<NllLossBackward0>)\n",
"779 LOSS DIFF: tensor(5.3174, grad_fn=<NllLossBackward0>) tensor(5.2770, grad_fn=<NllLossBackward0>)\n",
"780 LOSS DIFF: tensor(5.5412, grad_fn=<NllLossBackward0>) tensor(5.2626, grad_fn=<NllLossBackward0>)\n",
"781 LOSS DIFF: tensor(5.3245, grad_fn=<NllLossBackward0>) tensor(5.2973, grad_fn=<NllLossBackward0>)\n",
"782 LOSS DIFF: tensor(5.2911, grad_fn=<NllLossBackward0>) tensor(5.2910, grad_fn=<NllLossBackward0>)\n",
"783 LOSS DIFF: tensor(5.3198, grad_fn=<NllLossBackward0>) tensor(5.2911, grad_fn=<NllLossBackward0>)\n",
"784 LOSS DIFF: tensor(5.2661, grad_fn=<NllLossBackward0>) tensor(5.2297, grad_fn=<NllLossBackward0>)\n",
"785 LOSS DIFF: tensor(5.3086, grad_fn=<NllLossBackward0>) tensor(5.2661, grad_fn=<NllLossBackward0>)\n",
"786 LOSS DIFF: tensor(5.3143, grad_fn=<NllLossBackward0>) tensor(5.3086, grad_fn=<NllLossBackward0>)\n",
"787 LOSS DIFF: tensor(5.3467, grad_fn=<NllLossBackward0>) tensor(5.3143, grad_fn=<NllLossBackward0>)\n",
"788 LOSS DIFF: tensor(5.3771, grad_fn=<NllLossBackward0>) tensor(5.3003, grad_fn=<NllLossBackward0>)\n",
"789 LOSS DIFF: tensor(5.2802, grad_fn=<NllLossBackward0>) tensor(5.2619, grad_fn=<NllLossBackward0>)\n",
"790 LOSS DIFF: tensor(5.3205, grad_fn=<NllLossBackward0>) tensor(5.2489, grad_fn=<NllLossBackward0>)\n",
"791 LOSS DIFF: tensor(5.3028, grad_fn=<NllLossBackward0>) tensor(5.1770, grad_fn=<NllLossBackward0>)\n",
"792 LOSS DIFF: tensor(5.3130, grad_fn=<NllLossBackward0>) tensor(5.3028, grad_fn=<NllLossBackward0>)\n",
"793 LOSS DIFF: tensor(5.2011, grad_fn=<NllLossBackward0>) tensor(5.0365, grad_fn=<NllLossBackward0>)\n",
"794 LOSS DIFF: tensor(5.2648, grad_fn=<NllLossBackward0>) tensor(5.2011, grad_fn=<NllLossBackward0>)\n",
"795 LOSS DIFF: tensor(5.3135, grad_fn=<NllLossBackward0>) tensor(5.2648, grad_fn=<NllLossBackward0>)\n",
"796 LOSS DIFF: tensor(5.3958, grad_fn=<NllLossBackward0>) tensor(5.3135, grad_fn=<NllLossBackward0>)\n",
"797 LOSS DIFF: tensor(5.3604, grad_fn=<NllLossBackward0>) tensor(5.1652, grad_fn=<NllLossBackward0>)\n",
"1600 tensor(5.3680, grad_fn=<NllLossBackward0>)\n",
"798 LOSS DIFF: tensor(5.3680, grad_fn=<NllLossBackward0>) tensor(5.2941, grad_fn=<NllLossBackward0>)\n",
"799 LOSS DIFF: tensor(5.2164, grad_fn=<NllLossBackward0>) tensor(5.1485, grad_fn=<NllLossBackward0>)\n",
"800 LOSS DIFF: tensor(5.3943, grad_fn=<NllLossBackward0>) tensor(5.2164, grad_fn=<NllLossBackward0>)\n",
"801 LOSS DIFF: tensor(5.2456, grad_fn=<NllLossBackward0>) tensor(5.1408, grad_fn=<NllLossBackward0>)\n",
"802 LOSS DIFF: tensor(5.2624, grad_fn=<NllLossBackward0>) tensor(5.2268, grad_fn=<NllLossBackward0>)\n",
"803 LOSS DIFF: tensor(5.3054, grad_fn=<NllLossBackward0>) tensor(5.1765, grad_fn=<NllLossBackward0>)\n",
"804 LOSS DIFF: tensor(5.3530, grad_fn=<NllLossBackward0>) tensor(5.3054, grad_fn=<NllLossBackward0>)\n",
"805 LOSS DIFF: tensor(5.3219, grad_fn=<NllLossBackward0>) tensor(5.2960, grad_fn=<NllLossBackward0>)\n",
"806 LOSS DIFF: tensor(5.3445, grad_fn=<NllLossBackward0>) tensor(5.2025, grad_fn=<NllLossBackward0>)\n",
"807 LOSS DIFF: tensor(5.4269, grad_fn=<NllLossBackward0>) tensor(5.2403, grad_fn=<NllLossBackward0>)\n",
"808 LOSS DIFF: tensor(5.3550, grad_fn=<NllLossBackward0>) tensor(5.2981, grad_fn=<NllLossBackward0>)\n",
"809 LOSS DIFF: tensor(5.2882, grad_fn=<NllLossBackward0>) tensor(5.2592, grad_fn=<NllLossBackward0>)\n",
"810 LOSS DIFF: tensor(5.3459, grad_fn=<NllLossBackward0>) tensor(5.2882, grad_fn=<NllLossBackward0>)\n",
"811 LOSS DIFF: tensor(5.3961, grad_fn=<NllLossBackward0>) tensor(5.2398, grad_fn=<NllLossBackward0>)\n",
"812 LOSS DIFF: tensor(5.3464, grad_fn=<NllLossBackward0>) tensor(5.2061, grad_fn=<NllLossBackward0>)\n",
"813 LOSS DIFF: tensor(5.4667, grad_fn=<NllLossBackward0>) tensor(5.3051, grad_fn=<NllLossBackward0>)\n",
"814 LOSS DIFF: tensor(5.3144, grad_fn=<NllLossBackward0>) tensor(5.2452, grad_fn=<NllLossBackward0>)\n",
"815 LOSS DIFF: tensor(5.3118, grad_fn=<NllLossBackward0>) tensor(5.1809, grad_fn=<NllLossBackward0>)\n",
"816 LOSS DIFF: tensor(5.2670, grad_fn=<NllLossBackward0>) tensor(5.2661, grad_fn=<NllLossBackward0>)\n",
"817 LOSS DIFF: tensor(5.2897, grad_fn=<NllLossBackward0>) tensor(5.2135, grad_fn=<NllLossBackward0>)\n",
"818 LOSS DIFF: tensor(5.3138, grad_fn=<NllLossBackward0>) tensor(5.2798, grad_fn=<NllLossBackward0>)\n",
"819 LOSS DIFF: tensor(5.3730, grad_fn=<NllLossBackward0>) tensor(5.3138, grad_fn=<NllLossBackward0>)\n",
"820 LOSS DIFF: tensor(5.3392, grad_fn=<NllLossBackward0>) tensor(5.3115, grad_fn=<NllLossBackward0>)\n",
"821 LOSS DIFF: tensor(5.3534, grad_fn=<NllLossBackward0>) tensor(5.2959, grad_fn=<NllLossBackward0>)\n",
"822 LOSS DIFF: tensor(5.3893, grad_fn=<NllLossBackward0>) tensor(5.3500, grad_fn=<NllLossBackward0>)\n",
"823 LOSS DIFF: tensor(5.2580, grad_fn=<NllLossBackward0>) tensor(5.1436, grad_fn=<NllLossBackward0>)\n",
"824 LOSS DIFF: tensor(5.2688, grad_fn=<NllLossBackward0>) tensor(5.2580, grad_fn=<NllLossBackward0>)\n",
"825 LOSS DIFF: tensor(5.3212, grad_fn=<NllLossBackward0>) tensor(5.2688, grad_fn=<NllLossBackward0>)\n",
"826 LOSS DIFF: tensor(5.3839, grad_fn=<NllLossBackward0>) tensor(5.2897, grad_fn=<NllLossBackward0>)\n",
"827 LOSS DIFF: tensor(5.3353, grad_fn=<NllLossBackward0>) tensor(5.2536, grad_fn=<NllLossBackward0>)\n",
"828 LOSS DIFF: tensor(5.2735, grad_fn=<NllLossBackward0>) tensor(5.2156, grad_fn=<NllLossBackward0>)\n",
"829 LOSS DIFF: tensor(5.3446, grad_fn=<NllLossBackward0>) tensor(5.2735, grad_fn=<NllLossBackward0>)\n",
"830 LOSS DIFF: tensor(5.3156, grad_fn=<NllLossBackward0>) tensor(5.2965, grad_fn=<NllLossBackward0>)\n",
"831 LOSS DIFF: tensor(5.3263, grad_fn=<NllLossBackward0>) tensor(5.2847, grad_fn=<NllLossBackward0>)\n",
"832 LOSS DIFF: tensor(5.2776, grad_fn=<NllLossBackward0>) tensor(5.2448, grad_fn=<NllLossBackward0>)\n",
"833 LOSS DIFF: tensor(5.3394, grad_fn=<NllLossBackward0>) tensor(5.2776, grad_fn=<NllLossBackward0>)\n",
"834 LOSS DIFF: tensor(5.3633, grad_fn=<NllLossBackward0>) tensor(5.2746, grad_fn=<NllLossBackward0>)\n",
"835 LOSS DIFF: tensor(5.2726, grad_fn=<NllLossBackward0>) tensor(5.2409, grad_fn=<NllLossBackward0>)\n",
"836 LOSS DIFF: tensor(5.2986, grad_fn=<NllLossBackward0>) tensor(5.2726, grad_fn=<NllLossBackward0>)\n",
"837 LOSS DIFF: tensor(5.2534, grad_fn=<NllLossBackward0>) tensor(5.1774, grad_fn=<NllLossBackward0>)\n",
"838 LOSS DIFF: tensor(5.3111, grad_fn=<NllLossBackward0>) tensor(5.2534, grad_fn=<NllLossBackward0>)\n",
"839 LOSS DIFF: tensor(5.3127, grad_fn=<NllLossBackward0>) tensor(5.3111, grad_fn=<NllLossBackward0>)\n",
"840 LOSS DIFF: tensor(5.4215, grad_fn=<NllLossBackward0>) tensor(5.2348, grad_fn=<NllLossBackward0>)\n",
"841 LOSS DIFF: tensor(5.2974, grad_fn=<NllLossBackward0>) tensor(5.1407, grad_fn=<NllLossBackward0>)\n",
"842 LOSS DIFF: tensor(5.3341, grad_fn=<NllLossBackward0>) tensor(5.2498, grad_fn=<NllLossBackward0>)\n",
"843 LOSS DIFF: tensor(5.3087, grad_fn=<NllLossBackward0>) tensor(5.2148, grad_fn=<NllLossBackward0>)\n",
"844 LOSS DIFF: tensor(5.2507, grad_fn=<NllLossBackward0>) tensor(5.1230, grad_fn=<NllLossBackward0>)\n",
"1700 tensor(5.3550, grad_fn=<NllLossBackward0>)\n",
"845 LOSS DIFF: tensor(5.3550, grad_fn=<NllLossBackward0>) tensor(5.2507, grad_fn=<NllLossBackward0>)\n",
"846 LOSS DIFF: tensor(5.3766, grad_fn=<NllLossBackward0>) tensor(5.3550, grad_fn=<NllLossBackward0>)\n",
"847 LOSS DIFF: tensor(5.2487, grad_fn=<NllLossBackward0>) tensor(5.2300, grad_fn=<NllLossBackward0>)\n",
"848 LOSS DIFF: tensor(5.3142, grad_fn=<NllLossBackward0>) tensor(5.2487, grad_fn=<NllLossBackward0>)\n",
"849 LOSS DIFF: tensor(5.3734, grad_fn=<NllLossBackward0>) tensor(5.2986, grad_fn=<NllLossBackward0>)\n",
"850 LOSS DIFF: tensor(5.2452, grad_fn=<NllLossBackward0>) tensor(5.1219, grad_fn=<NllLossBackward0>)\n",
"851 LOSS DIFF: tensor(5.2957, grad_fn=<NllLossBackward0>) tensor(5.2452, grad_fn=<NllLossBackward0>)\n",
"852 LOSS DIFF: tensor(5.2852, grad_fn=<NllLossBackward0>) tensor(5.2758, grad_fn=<NllLossBackward0>)\n",
"853 LOSS DIFF: tensor(5.3498, grad_fn=<NllLossBackward0>) tensor(5.2852, grad_fn=<NllLossBackward0>)\n",
"854 LOSS DIFF: tensor(5.4008, grad_fn=<NllLossBackward0>) tensor(5.3498, grad_fn=<NllLossBackward0>)\n",
"855 LOSS DIFF: tensor(5.2165, grad_fn=<NllLossBackward0>) tensor(5.1128, grad_fn=<NllLossBackward0>)\n",
"856 LOSS DIFF: tensor(5.2850, grad_fn=<NllLossBackward0>) tensor(5.2165, grad_fn=<NllLossBackward0>)\n",
"857 LOSS DIFF: tensor(5.3881, grad_fn=<NllLossBackward0>) tensor(5.2850, grad_fn=<NllLossBackward0>)\n",
"858 LOSS DIFF: tensor(5.2249, grad_fn=<NllLossBackward0>) tensor(5.2228, grad_fn=<NllLossBackward0>)\n",
"859 LOSS DIFF: tensor(5.2559, grad_fn=<NllLossBackward0>) tensor(5.2249, grad_fn=<NllLossBackward0>)\n",
"860 LOSS DIFF: tensor(5.2867, grad_fn=<NllLossBackward0>) tensor(5.2559, grad_fn=<NllLossBackward0>)\n",
"861 LOSS DIFF: tensor(5.4387, grad_fn=<NllLossBackward0>) tensor(5.2314, grad_fn=<NllLossBackward0>)\n",
"862 LOSS DIFF: tensor(5.2867, grad_fn=<NllLossBackward0>) tensor(5.2233, grad_fn=<NllLossBackward0>)\n",
"863 LOSS DIFF: tensor(5.3220, grad_fn=<NllLossBackward0>) tensor(5.2867, grad_fn=<NllLossBackward0>)\n",
"864 LOSS DIFF: tensor(5.2581, grad_fn=<NllLossBackward0>) tensor(5.2269, grad_fn=<NllLossBackward0>)\n",
"865 LOSS DIFF: tensor(5.2703, grad_fn=<NllLossBackward0>) tensor(5.2581, grad_fn=<NllLossBackward0>)\n",
"866 LOSS DIFF: tensor(5.2300, grad_fn=<NllLossBackward0>) tensor(5.1481, grad_fn=<NllLossBackward0>)\n",
"867 LOSS DIFF: tensor(5.2460, grad_fn=<NllLossBackward0>) tensor(5.2300, grad_fn=<NllLossBackward0>)\n",
"868 LOSS DIFF: tensor(5.3260, grad_fn=<NllLossBackward0>) tensor(5.2460, grad_fn=<NllLossBackward0>)\n",
"869 LOSS DIFF: tensor(5.2582, grad_fn=<NllLossBackward0>) tensor(5.1454, grad_fn=<NllLossBackward0>)\n",
"870 LOSS DIFF: tensor(5.3153, grad_fn=<NllLossBackward0>) tensor(5.2582, grad_fn=<NllLossBackward0>)\n",
"871 LOSS DIFF: tensor(5.2967, grad_fn=<NllLossBackward0>) tensor(5.0807, grad_fn=<NllLossBackward0>)\n",
"872 LOSS DIFF: tensor(5.3636, grad_fn=<NllLossBackward0>) tensor(5.2188, grad_fn=<NllLossBackward0>)\n",
"873 LOSS DIFF: tensor(5.3807, grad_fn=<NllLossBackward0>) tensor(5.3636, grad_fn=<NllLossBackward0>)\n",
"874 LOSS DIFF: tensor(5.3318, grad_fn=<NllLossBackward0>) tensor(5.2364, grad_fn=<NllLossBackward0>)\n",
"875 LOSS DIFF: tensor(5.3220, grad_fn=<NllLossBackward0>) tensor(5.2170, grad_fn=<NllLossBackward0>)\n",
"876 LOSS DIFF: tensor(5.2753, grad_fn=<NllLossBackward0>) tensor(5.1677, grad_fn=<NllLossBackward0>)\n",
"877 LOSS DIFF: tensor(5.3142, grad_fn=<NllLossBackward0>) tensor(5.2753, grad_fn=<NllLossBackward0>)\n",
"878 LOSS DIFF: tensor(5.3142, grad_fn=<NllLossBackward0>) tensor(5.1974, grad_fn=<NllLossBackward0>)\n",
"879 LOSS DIFF: tensor(5.1746, grad_fn=<NllLossBackward0>) tensor(5.0885, grad_fn=<NllLossBackward0>)\n",
"880 LOSS DIFF: tensor(5.3789, grad_fn=<NllLossBackward0>) tensor(5.1746, grad_fn=<NllLossBackward0>)\n",
"881 LOSS DIFF: tensor(5.3057, grad_fn=<NllLossBackward0>) tensor(5.2196, grad_fn=<NllLossBackward0>)\n",
"882 LOSS DIFF: tensor(5.2886, grad_fn=<NllLossBackward0>) tensor(5.2158, grad_fn=<NllLossBackward0>)\n",
"883 LOSS DIFF: tensor(5.3288, grad_fn=<NllLossBackward0>) tensor(5.2491, grad_fn=<NllLossBackward0>)\n",
"884 LOSS DIFF: tensor(5.4903, grad_fn=<NllLossBackward0>) tensor(5.3288, grad_fn=<NllLossBackward0>)\n",
"885 LOSS DIFF: tensor(5.4034, grad_fn=<NllLossBackward0>) tensor(5.2798, grad_fn=<NllLossBackward0>)\n",
"886 LOSS DIFF: tensor(5.3601, grad_fn=<NllLossBackward0>) tensor(5.1771, grad_fn=<NllLossBackward0>)\n",
"887 LOSS DIFF: tensor(5.2809, grad_fn=<NllLossBackward0>) tensor(5.1809, grad_fn=<NllLossBackward0>)\n",
"888 LOSS DIFF: tensor(5.3620, grad_fn=<NllLossBackward0>) tensor(5.2748, grad_fn=<NllLossBackward0>)\n",
"889 LOSS DIFF: tensor(5.3855, grad_fn=<NllLossBackward0>) tensor(5.2573, grad_fn=<NllLossBackward0>)\n",
"890 LOSS DIFF: tensor(5.3124, grad_fn=<NllLossBackward0>) tensor(5.2379, grad_fn=<NllLossBackward0>)\n",
"891 LOSS DIFF: tensor(5.3192, grad_fn=<NllLossBackward0>) tensor(5.3124, grad_fn=<NllLossBackward0>)\n",
"892 LOSS DIFF: tensor(5.3423, grad_fn=<NllLossBackward0>) tensor(5.3192, grad_fn=<NllLossBackward0>)\n",
"893 LOSS DIFF: tensor(5.4086, grad_fn=<NllLossBackward0>) tensor(5.1976, grad_fn=<NllLossBackward0>)\n",
"894 LOSS DIFF: tensor(5.3156, grad_fn=<NllLossBackward0>) tensor(5.2619, grad_fn=<NllLossBackward0>)\n",
"895 LOSS DIFF: tensor(5.3277, grad_fn=<NllLossBackward0>) tensor(5.3156, grad_fn=<NllLossBackward0>)\n",
"896 LOSS DIFF: tensor(5.2352, grad_fn=<NllLossBackward0>) tensor(5.2142, grad_fn=<NllLossBackward0>)\n",
"897 LOSS DIFF: tensor(5.3471, grad_fn=<NllLossBackward0>) tensor(5.2059, grad_fn=<NllLossBackward0>)\n",
"898 LOSS DIFF: tensor(5.2658, grad_fn=<NllLossBackward0>) tensor(5.1801, grad_fn=<NllLossBackward0>)\n",
"1800 tensor(5.4171, grad_fn=<NllLossBackward0>)\n",
"899 LOSS DIFF: tensor(5.4171, grad_fn=<NllLossBackward0>) tensor(5.2658, grad_fn=<NllLossBackward0>)\n",
"900 LOSS DIFF: tensor(5.3919, grad_fn=<NllLossBackward0>) tensor(5.2872, grad_fn=<NllLossBackward0>)\n",
"901 LOSS DIFF: tensor(5.2667, grad_fn=<NllLossBackward0>) tensor(5.1940, grad_fn=<NllLossBackward0>)\n",
"902 LOSS DIFF: tensor(5.3631, grad_fn=<NllLossBackward0>) tensor(5.2667, grad_fn=<NllLossBackward0>)\n",
"903 LOSS DIFF: tensor(5.3693, grad_fn=<NllLossBackward0>) tensor(5.2566, grad_fn=<NllLossBackward0>)\n",
"904 LOSS DIFF: tensor(5.3239, grad_fn=<NllLossBackward0>) tensor(5.2152, grad_fn=<NllLossBackward0>)\n",
"905 LOSS DIFF: tensor(5.3641, grad_fn=<NllLossBackward0>) tensor(5.3239, grad_fn=<NllLossBackward0>)\n",
"906 LOSS DIFF: tensor(5.2443, grad_fn=<NllLossBackward0>) tensor(5.1951, grad_fn=<NllLossBackward0>)\n",
"907 LOSS DIFF: tensor(5.4277, grad_fn=<NllLossBackward0>) tensor(5.1634, grad_fn=<NllLossBackward0>)\n",
"908 LOSS DIFF: tensor(5.2730, grad_fn=<NllLossBackward0>) tensor(5.0604, grad_fn=<NllLossBackward0>)\n",
"909 LOSS DIFF: tensor(5.2867, grad_fn=<NllLossBackward0>) tensor(5.2566, grad_fn=<NllLossBackward0>)\n",
"910 LOSS DIFF: tensor(5.4127, grad_fn=<NllLossBackward0>) tensor(5.2155, grad_fn=<NllLossBackward0>)\n",
"911 LOSS DIFF: tensor(5.3634, grad_fn=<NllLossBackward0>) tensor(5.3211, grad_fn=<NllLossBackward0>)\n",
"912 LOSS DIFF: tensor(5.2831, grad_fn=<NllLossBackward0>) tensor(5.2335, grad_fn=<NllLossBackward0>)\n",
"913 LOSS DIFF: tensor(5.2755, grad_fn=<NllLossBackward0>) tensor(5.2735, grad_fn=<NllLossBackward0>)\n",
"914 LOSS DIFF: tensor(5.2826, grad_fn=<NllLossBackward0>) tensor(5.2755, grad_fn=<NllLossBackward0>)\n",
"915 LOSS DIFF: tensor(5.3887, grad_fn=<NllLossBackward0>) tensor(5.0861, grad_fn=<NllLossBackward0>)\n",
"916 LOSS DIFF: tensor(5.3065, grad_fn=<NllLossBackward0>) tensor(5.2729, grad_fn=<NllLossBackward0>)\n",
"917 LOSS DIFF: tensor(5.2632, grad_fn=<NllLossBackward0>) tensor(5.1560, grad_fn=<NllLossBackward0>)\n",
"918 LOSS DIFF: tensor(5.2920, grad_fn=<NllLossBackward0>) tensor(5.1884, grad_fn=<NllLossBackward0>)\n",
"919 LOSS DIFF: tensor(5.3229, grad_fn=<NllLossBackward0>) tensor(5.2920, grad_fn=<NllLossBackward0>)\n",
"920 LOSS DIFF: tensor(5.2855, grad_fn=<NllLossBackward0>) tensor(5.1965, grad_fn=<NllLossBackward0>)\n",
"921 LOSS DIFF: tensor(5.3634, grad_fn=<NllLossBackward0>) tensor(5.2855, grad_fn=<NllLossBackward0>)\n",
"922 LOSS DIFF: tensor(5.3724, grad_fn=<NllLossBackward0>) tensor(5.0690, grad_fn=<NllLossBackward0>)\n",
"923 LOSS DIFF: tensor(5.2805, grad_fn=<NllLossBackward0>) tensor(5.2636, grad_fn=<NllLossBackward0>)\n",
"924 LOSS DIFF: tensor(5.2306, grad_fn=<NllLossBackward0>) tensor(5.0033, grad_fn=<NllLossBackward0>)\n",
"925 LOSS DIFF: tensor(5.2542, grad_fn=<NllLossBackward0>) tensor(5.2243, grad_fn=<NllLossBackward0>)\n",
"926 LOSS DIFF: tensor(5.3378, grad_fn=<NllLossBackward0>) tensor(5.2542, grad_fn=<NllLossBackward0>)\n",
"927 LOSS DIFF: tensor(5.2164, grad_fn=<NllLossBackward0>) tensor(5.1267, grad_fn=<NllLossBackward0>)\n",
"928 LOSS DIFF: tensor(5.3090, grad_fn=<NllLossBackward0>) tensor(5.2164, grad_fn=<NllLossBackward0>)\n",
"929 LOSS DIFF: tensor(5.3777, grad_fn=<NllLossBackward0>) tensor(5.3090, grad_fn=<NllLossBackward0>)\n",
"930 LOSS DIFF: tensor(5.2597, grad_fn=<NllLossBackward0>) tensor(5.2556, grad_fn=<NllLossBackward0>)\n",
"931 LOSS DIFF: tensor(5.4438, grad_fn=<NllLossBackward0>) tensor(5.2080, grad_fn=<NllLossBackward0>)\n",
"932 LOSS DIFF: tensor(5.2762, grad_fn=<NllLossBackward0>) tensor(5.2386, grad_fn=<NllLossBackward0>)\n",
"933 LOSS DIFF: tensor(5.3475, grad_fn=<NllLossBackward0>) tensor(5.1511, grad_fn=<NllLossBackward0>)\n",
"934 LOSS DIFF: tensor(5.3897, grad_fn=<NllLossBackward0>) tensor(5.3475, grad_fn=<NllLossBackward0>)\n",
"935 LOSS DIFF: tensor(5.2932, grad_fn=<NllLossBackward0>) tensor(5.1943, grad_fn=<NllLossBackward0>)\n",
"936 LOSS DIFF: tensor(5.3678, grad_fn=<NllLossBackward0>) tensor(5.2932, grad_fn=<NllLossBackward0>)\n",
"937 LOSS DIFF: tensor(5.3282, grad_fn=<NllLossBackward0>) tensor(5.2433, grad_fn=<NllLossBackward0>)\n",
"938 LOSS DIFF: tensor(5.3416, grad_fn=<NllLossBackward0>) tensor(5.3282, grad_fn=<NllLossBackward0>)\n",
"939 LOSS DIFF: tensor(5.2709, grad_fn=<NllLossBackward0>) tensor(5.1789, grad_fn=<NllLossBackward0>)\n",
"940 LOSS DIFF: tensor(5.3140, grad_fn=<NllLossBackward0>) tensor(5.2709, grad_fn=<NllLossBackward0>)\n",
"941 LOSS DIFF: tensor(5.2993, grad_fn=<NllLossBackward0>) tensor(5.2861, grad_fn=<NllLossBackward0>)\n",
"942 LOSS DIFF: tensor(5.1903, grad_fn=<NllLossBackward0>) tensor(5.1216, grad_fn=<NllLossBackward0>)\n",
"943 LOSS DIFF: tensor(5.2935, grad_fn=<NllLossBackward0>) tensor(5.1903, grad_fn=<NllLossBackward0>)\n",
"944 LOSS DIFF: tensor(5.2984, grad_fn=<NllLossBackward0>) tensor(5.2935, grad_fn=<NllLossBackward0>)\n",
"945 LOSS DIFF: tensor(5.3579, grad_fn=<NllLossBackward0>) tensor(5.2984, grad_fn=<NllLossBackward0>)\n",
"946 LOSS DIFF: tensor(5.2808, grad_fn=<NllLossBackward0>) tensor(5.1785, grad_fn=<NllLossBackward0>)\n",
"947 LOSS DIFF: tensor(5.2995, grad_fn=<NllLossBackward0>) tensor(5.2629, grad_fn=<NllLossBackward0>)\n",
"948 LOSS DIFF: tensor(5.3437, grad_fn=<NllLossBackward0>) tensor(5.2995, grad_fn=<NllLossBackward0>)\n",
"949 LOSS DIFF: tensor(5.3592, grad_fn=<NllLossBackward0>) tensor(5.3437, grad_fn=<NllLossBackward0>)\n",
"950 LOSS DIFF: tensor(5.4155, grad_fn=<NllLossBackward0>) tensor(5.3592, grad_fn=<NllLossBackward0>)\n",
"951 LOSS DIFF: tensor(5.3014, grad_fn=<NllLossBackward0>) tensor(5.2301, grad_fn=<NllLossBackward0>)\n",
"1900 tensor(5.3040, grad_fn=<NllLossBackward0>)\n",
"952 LOSS DIFF: tensor(5.3040, grad_fn=<NllLossBackward0>) tensor(5.2344, grad_fn=<NllLossBackward0>)\n",
"953 LOSS DIFF: tensor(5.2827, grad_fn=<NllLossBackward0>) tensor(5.2677, grad_fn=<NllLossBackward0>)\n",
"954 LOSS DIFF: tensor(5.3628, grad_fn=<NllLossBackward0>) tensor(5.2827, grad_fn=<NllLossBackward0>)\n",
"955 LOSS DIFF: tensor(5.2943, grad_fn=<NllLossBackward0>) tensor(5.2210, grad_fn=<NllLossBackward0>)\n",
"956 LOSS DIFF: tensor(5.1808, grad_fn=<NllLossBackward0>) tensor(5.1610, grad_fn=<NllLossBackward0>)\n",
"957 LOSS DIFF: tensor(5.3546, grad_fn=<NllLossBackward0>) tensor(5.1808, grad_fn=<NllLossBackward0>)\n",
"958 LOSS DIFF: tensor(5.1927, grad_fn=<NllLossBackward0>) tensor(5.1525, grad_fn=<NllLossBackward0>)\n",
"959 LOSS DIFF: tensor(5.3402, grad_fn=<NllLossBackward0>) tensor(5.1927, grad_fn=<NllLossBackward0>)\n",
"960 LOSS DIFF: tensor(5.3660, grad_fn=<NllLossBackward0>) tensor(5.2197, grad_fn=<NllLossBackward0>)\n",
"961 LOSS DIFF: tensor(5.3701, grad_fn=<NllLossBackward0>) tensor(5.3660, grad_fn=<NllLossBackward0>)\n",
"962 LOSS DIFF: tensor(5.1755, grad_fn=<NllLossBackward0>) tensor(5.1572, grad_fn=<NllLossBackward0>)\n",
"963 LOSS DIFF: tensor(5.2423, grad_fn=<NllLossBackward0>) tensor(5.1755, grad_fn=<NllLossBackward0>)\n",
"964 LOSS DIFF: tensor(5.4032, grad_fn=<NllLossBackward0>) tensor(5.2423, grad_fn=<NllLossBackward0>)\n",
"965 LOSS DIFF: tensor(5.3041, grad_fn=<NllLossBackward0>) tensor(5.1882, grad_fn=<NllLossBackward0>)\n",
"966 LOSS DIFF: tensor(5.3328, grad_fn=<NllLossBackward0>) tensor(5.3041, grad_fn=<NllLossBackward0>)\n",
"967 LOSS DIFF: tensor(5.1994, grad_fn=<NllLossBackward0>) tensor(5.1086, grad_fn=<NllLossBackward0>)\n",
"968 LOSS DIFF: tensor(5.2771, grad_fn=<NllLossBackward0>) tensor(5.1994, grad_fn=<NllLossBackward0>)\n",
"969 LOSS DIFF: tensor(5.3016, grad_fn=<NllLossBackward0>) tensor(5.2771, grad_fn=<NllLossBackward0>)\n",
"970 LOSS DIFF: tensor(5.3162, grad_fn=<NllLossBackward0>) tensor(5.3016, grad_fn=<NllLossBackward0>)\n",
"971 LOSS DIFF: tensor(5.3276, grad_fn=<NllLossBackward0>) tensor(5.2404, grad_fn=<NllLossBackward0>)\n",
"972 LOSS DIFF: tensor(5.3335, grad_fn=<NllLossBackward0>) tensor(5.3276, grad_fn=<NllLossBackward0>)\n",
"973 LOSS DIFF: tensor(5.3803, grad_fn=<NllLossBackward0>) tensor(5.2597, grad_fn=<NllLossBackward0>)\n",
"974 LOSS DIFF: tensor(5.2477, grad_fn=<NllLossBackward0>) tensor(5.1569, grad_fn=<NllLossBackward0>)\n",
"975 LOSS DIFF: tensor(5.3720, grad_fn=<NllLossBackward0>) tensor(5.2477, grad_fn=<NllLossBackward0>)\n",
"976 LOSS DIFF: tensor(5.3752, grad_fn=<NllLossBackward0>) tensor(5.3720, grad_fn=<NllLossBackward0>)\n",
"977 LOSS DIFF: tensor(5.2881, grad_fn=<NllLossBackward0>) tensor(5.2406, grad_fn=<NllLossBackward0>)\n",
"978 LOSS DIFF: tensor(5.4561, grad_fn=<NllLossBackward0>) tensor(5.2564, grad_fn=<NllLossBackward0>)\n",
"979 LOSS DIFF: tensor(5.3796, grad_fn=<NllLossBackward0>) tensor(5.3418, grad_fn=<NllLossBackward0>)\n",
"980 LOSS DIFF: tensor(5.2454, grad_fn=<NllLossBackward0>) tensor(5.2276, grad_fn=<NllLossBackward0>)\n",
"981 LOSS DIFF: tensor(5.3129, grad_fn=<NllLossBackward0>) tensor(5.2454, grad_fn=<NllLossBackward0>)\n",
"982 LOSS DIFF: tensor(5.3334, grad_fn=<NllLossBackward0>) tensor(5.3129, grad_fn=<NllLossBackward0>)\n",
"983 LOSS DIFF: tensor(5.3955, grad_fn=<NllLossBackward0>) tensor(5.3334, grad_fn=<NllLossBackward0>)\n",
"984 LOSS DIFF: tensor(5.4304, grad_fn=<NllLossBackward0>) tensor(5.2307, grad_fn=<NllLossBackward0>)\n",
"985 LOSS DIFF: tensor(5.3111, grad_fn=<NllLossBackward0>) tensor(5.1737, grad_fn=<NllLossBackward0>)\n",
"986 LOSS DIFF: tensor(5.3549, grad_fn=<NllLossBackward0>) tensor(5.3111, grad_fn=<NllLossBackward0>)\n",
"987 LOSS DIFF: tensor(5.3662, grad_fn=<NllLossBackward0>) tensor(5.2584, grad_fn=<NllLossBackward0>)\n",
"988 LOSS DIFF: tensor(5.3705, grad_fn=<NllLossBackward0>) tensor(5.1949, grad_fn=<NllLossBackward0>)\n",
"989 LOSS DIFF: tensor(5.2877, grad_fn=<NllLossBackward0>) tensor(5.2517, grad_fn=<NllLossBackward0>)\n",
"990 LOSS DIFF: tensor(5.2987, grad_fn=<NllLossBackward0>) tensor(5.2175, grad_fn=<NllLossBackward0>)\n",
"991 LOSS DIFF: tensor(5.3813, grad_fn=<NllLossBackward0>) tensor(5.1823, grad_fn=<NllLossBackward0>)\n",
"992 LOSS DIFF: tensor(5.3100, grad_fn=<NllLossBackward0>) tensor(5.2477, grad_fn=<NllLossBackward0>)\n",
"993 LOSS DIFF: tensor(5.3208, grad_fn=<NllLossBackward0>) tensor(5.1584, grad_fn=<NllLossBackward0>)\n",
"994 LOSS DIFF: tensor(5.3709, grad_fn=<NllLossBackward0>) tensor(5.3208, grad_fn=<NllLossBackward0>)\n",
"995 LOSS DIFF: tensor(5.2744, grad_fn=<NllLossBackward0>) tensor(5.1538, grad_fn=<NllLossBackward0>)\n",
"996 LOSS DIFF: tensor(5.2920, grad_fn=<NllLossBackward0>) tensor(5.2744, grad_fn=<NllLossBackward0>)\n",
"997 LOSS DIFF: tensor(5.3297, grad_fn=<NllLossBackward0>) tensor(5.2446, grad_fn=<NllLossBackward0>)\n",
"998 LOSS DIFF: tensor(5.3818, grad_fn=<NllLossBackward0>) tensor(5.3297, grad_fn=<NllLossBackward0>)\n",
"999 LOSS DIFF: tensor(5.2615, grad_fn=<NllLossBackward0>) tensor(5.1173, grad_fn=<NllLossBackward0>)\n",
"1000 LOSS DIFF: tensor(5.3420, grad_fn=<NllLossBackward0>) tensor(5.2615, grad_fn=<NllLossBackward0>)\n"
]
}
],
"source": [
"loss_track = []\n",
"\n",
"device = 'cpu'\n",
"model = SimpleBigramNeuralLanguageModel(vocab_size, embed_size).to(device)\n",
"data = DataLoader(train_dataset, batch_size=6000)\n",
"optimizer = torch.optim.Adam(model.parameters())\n",
"criterion = torch.nn.NLLLoss()\n",
"\n",
"last_loss = 1_000\n",
"trigger_count = 0\n",
"\n",
"model.train()\n",
"step = 0\n",
"for x, y in data:\n",
" x = x.to(device)\n",
" y = y.to(device)\n",
" optimizer.zero_grad()\n",
" ypredicted = model(x)\n",
" loss = criterion(torch.log(ypredicted), y)\n",
" if step % 100 == 0:\n",
" print(step, loss)\n",
" step += 1\n",
" loss.backward()\n",
" optimizer.step()\n",
"\n",
" if loss > last_loss:\n",
" trigger_count += 1 \n",
" print(trigger_count, 'LOSS DIFF:', loss, last_loss)\n",
"\n",
" if trigger_count >= 1_000:\n",
" break\n",
"\n",
" loss_track.append(loss)\n",
" last_loss = loss"
]
},
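  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# A rough sanity check (a sketch, not part of the original run): NLLLoss with\n",
    "# its default 'mean' reduction returns the average negative log-likelihood per\n",
    "# token, so exponentiating the final batch loss gives an approximate perplexity.\n",
    "print(f'approx. perplexity of the last batch: {torch.exp(loss).item():.2f}')"
   ]
  },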
{
"cell_type": "code",
"execution_count": 92,
"metadata": {},
"outputs": [],
"source": [
"loss_track2 = [t.detach().numpy() for t in loss_track]"
]
},
{
"cell_type": "code",
"execution_count": 98,
"metadata": {},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAhYAAAGdCAYAAABO2DpVAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAABPI0lEQVR4nO3dd3hTZf8G8DtJ27Sli9JFoRRK2XuWgoAIMgXcgIg4UFH8uRHRFwVRQfFVHIjjZSkqTnAiUvZe0kIZhbLKHoUuSlfy/P5Ik+Zkp0172tP7c1292iYnyXMa6Ln7jO+jEkIIEBEREXmAWu4GEBERkXIwWBAREZHHMFgQERGRxzBYEBERkccwWBAREZHHMFgQERGRxzBYEBERkccwWBAREZHHeFX1C+r1epw7dw6BgYFQqVRV/fJERERUDkII5ObmIjo6Gmq1/X6JKg8W586dQ0xMTFW/LBEREXnA6dOn0bBhQ7v3V3mwCAwMBGBoWFBQUFW/PBEREZVDTk4OYmJiTNdxe6o8WBiHP4KCghgsiIiIahhn0xg4eZOIiIg8hsGCiIiIPIbBgoiIiDyGwYKIiIg8hsGCiIiIPIbBgoiIiDyGwYKIiIg8hsGCiIiIPIbBgoiIiDyGwYKIiIg8hsGCiIiIPIbBgoiIiDymyjchqyzv/5OGnIISTOzbFFHBvnI3h4iIqFZSTI/Fsl2nsXjrSVy9XiR3U4iIiGotxQQLdek2rnohZG4JERFR7aWgYGH4zGBBREQkH8UEC5Wpx0LmhhAREdViigkWGjWHQoiIiOSmmGBhHAoRDBZERESyUVCw4FAIERGR3BQTLEpzBXRMFkRERLJRTLDgHAsiIiL5KSZYGIdCmCuIiIjko5hgoWKBLCIiItkpJlioOceCiIhIdooJFsY5FuywICIiko9iggWHQoiIiOTndrDIzc3Fs88+i9jYWPj5+aFnz57YtWtXZbTNLWV7hcjbDiIiotrM7WAxYcIErF69Gl9//TX279+PgQMHYsCAATh79mxltM9lxlUhnGNBREQkH7eCxY0bN/Dzzz/j3XffRZ8+fRAfH4/p06cjPj4e8+fPr6w2ukRjWm7KYEFERCQXL3cOLikpgU6ng6+vr+R2Pz8/bN682eZjCgsLUVhYaPo+JyenHM10TsWhECIiItm51WMRGBiIxMREzJw5E+fOnYNOp8PSpUuxbds2nD9/3uZjZs2aheDgYNNHTEyMRxpuSc3Jm0RERLJze47F119/DSEEGjRoAK1Wi48++ghjxoyBWm37qaZOnYrs7GzTx+nTpyvcaFuML89gQUREJB+3hkIAoGnTptiwYQOuX7+OnJwc1K9fH6NGjUJcXJzN47VaLbRabYUb6gx7LIiIiORX7joWderUQf369XHt2jWsWrUKI0eO9GS73GYKFnpZm0FERFSrud1jsWrVKggh0KJFC6Snp2Py5Mlo2bIlHnroocpon8vK6liwx4KIiEgubvdYZGdnY9KkSWjZsiUeeOAB3HTTTVi1ahW8vb0ro30u41AIERGR/Nzusbj33ntx7733VkZbKkStNgYLmRtCRERUiylmrxAOhRAREclPQcGCPRZERERyU16wYLIgIiKSjWKChYpDIURERLJTTLDQcPImERGR7BQTLNTc3ZSIiEh2igkWHAohIiKSn2KChbHHQseS3kRERLJRTLDQsPImERGR7BQTLIzbpnOOBRERkXwUEyxULJBFREQkO8UEC2NJbx2TBRERkWwUEyw0XG5KREQkO8UECw6FEBERyU8xwULNVSFERESyU1CwMHzWMVgQERHJRjHBwrhXCHMFERGRfBQTLFTcNp2IiEh2igkWatNeIfK2g4iIqDZTULDg5E0iIiK5KSdYqBksiIiI5KacYMFt04mIiGSnoGDBAllERERyU1CwMHzmqhAiIiL5KCdYcI4FERGR7JQTLDgUQkREJDsFBQvDZ/ZYEBERyUdBwYKVN4mIiOSmmGBhxFhBREQkH8UEC+NeIRwJISIiko9igoVxjgVzBRERkXwUEyxKcwUnbxIREclIOcFCxS4LIiIiuSkmWJQNhTBZEBERyUUxwQKm5aYyt4OIiKgWU0ywMM6xYI8FERGRfBQTLFjSm4iISH6KCRamuZsMFkRERLJRTrAwfcVkQUREJBfFBAsOhRAREclPMcECpqEQJgsiIiK5KCZYlK0KISIiIrkoJlhwKISIiEh+igkWKg6FEBERyU5xwYKIiIjko5hgUTYUwh4LIiIiuSgmWBgxVxAREclHMcHC2GPBYEFERCQfxQQL4xwLDoUQERHJRznBorSSBWMFERGRfBQTLNSskEVERCQ7xQQLDoUQERHJTzHBAhwKISIikp1igoWalTeJiIhkp5hgoeJeIURERLJTTrAo/cxcQUREJB/FBAu18Uw4FEJERCQbxQQLYx0LDoUQERHJRzHBwjgWIjgYQkREJBvFBAvT7qZ6mRtCRERUiykmWHDyJhERkfyUEyxYx4KIiEh2igkW3DadiIhIfooJFmVDIUwWREREclFMsDCtCmGuICIiko1igoVpVQiTBRERkWwUEyy4KoSIiEh+ygkWpmUh8raDiIioNlNMsDBum86hECIiIvm4FSx0Oh2mTZuGJk2awM/PD02bNsXMmTOrRe0IdlgQERHJz8udg9955x3Mnz8fS5YsQZs2bbB792489NBDCA4OxtNPP11ZbXQR61gQERHJza1gsXXrVowcORLDhg0DADRu3Bjfffcddu7cWSmNcweHQoiIiOTn1lBIz549sWbNGhw5cgQAkJKSgs2bN2PIkCF2H1NYWIicnBzJR2VQsfImERGR7NzqsXj55ZeRk5ODli1bQqPRQKfT4a233sLYsWPtPmbWrFmYMWNGhRvqjMr5IURERFTJ3Oqx+OGHH/DNN9/g22+/xb///oslS5bgvffew5IlS+w+ZurUqcjOzjZ9nD59usKNtoUFsoiIiOTnVo/F5MmT8fLLL2P06NEAgHbt2uHUqVOYNWsWxo8fb/MxWq0WWq224i11QsWS3kRERLJzq8ciPz8farX0IRqNBnq93qONqghuQkZERCQft3oshg8fjrfeeguNGjVCmzZtsHfvXrz//vt4+OGHK6t9LisbCpG5IURERLWYW8Hi448/xrRp0/Dkk0/i0qVLiI6OxuOPP47XXnutstrnMg6FEBERyc+tYBEYGIi5c+di7ty5ldSc8lOZloUwWRAREclFQXuFcCiEiIhIbooJFqZt0zkWQkREJBvlBAtuQkZERCQ7BQWL0qEQjoUQERHJRjnBovQzYwUREZF8lBMsuAkZERGR7BQTLNSmOhZMFkRERHJRTLBQlQ6GMFYQERHJRznBorTHgrubEhERyUdxwYK5goiISD4KChYcCiEiIpKbcoJF6WdO3iQiIpKPYoKFmstNiYiIZKeYYMGS3kRERPJTTrAo/cxVIURERPJRTrDgUAgREZHsFBQsyr7mBE4iIiJ5KCdYmH3NXEFERCQPxQQLtVmXBXMFERGRPBQTLDgUQkREJD8FBYuyZ
KFnriAiIpKFgoJF2deCgyFERESyUE6wMPuaIyFERETyUEywkEzeZLAgIiKShWKCBYdCiIiI5KecYAH2WBAREclNOcHCrMeC+4UQERHJQ5HBgrGCiIhIHsoJFuZDIXoZG0JERFSLKSZYqDl5k4iISHaKCRYqLjclIiKSnXKChdnXnLxJREQkD+UEC07eJCIikp2CggWHQoiIiOSmmGABlPVacNt0IiIieSgqWBj3C2GsICIikoeigoVxMIQdFkRERPJQVrAoTRZcFUJERCQPhQULDoUQERHJSVnBovQzJ28SERHJQ1nBwrQqRN52EBER1VaKChamVSEMFkRERLJQVLAwDYVwlgUREZEslBUsSnss9MwVREREslBYsDB85uRNIiIieSgrWJR+ZqwgIiKSh7KChWnyJqMFERGRHBQVLNRcbkpERCQrRQULVt4kIiKSl7KCReln7hVCREQkD2UFCxbIIiIikpXCgoXhM3ssiIiI5KGsYFH6mbmCiIhIHooKFsa9QoiIiEgeigoWHAohIiK
"text/plain": [
"<Figure size 640x480 with 1 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"import matplotlib.pyplot as plt\n",
"\n",
"plt.plot(loss_track2)\n",
"plt.show()"
]
},
{
"cell_type": "code",
"execution_count": 99,
"metadata": {},
"outputs": [],
"source": [
"torch.save(model.state_dict(), 'model.bin')"
]
},
{
"cell_type": "code",
"execution_count": 100,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"c:\\PROGRAMY\\Anaconda3\\envs\\scweet\\lib\\site-packages\\torch\\nn\\modules\\container.py:217: UserWarning: Implicit dimension choice for softmax has been deprecated. Change the call to include dim=X as an argument.\n",
" input = module(input)\n"
]
},
{
"data": {
"text/plain": [
"[('<unk>', 0, 0.1108938604593277),\n",
" ('was', 12, 0.0792110487818718),\n",
" ('had', 37, 0.07402306795120239),\n",
" ('is', 8, 0.04529397189617157),\n",
" ('has', 39, 0.03909718990325928),\n",
" ('would', 48, 0.038855526596307755),\n",
" ('said', 43, 0.022579118609428406),\n",
" ('will', 27, 0.02008220925927162),\n",
" ('went', 251, 0.013605386018753052),\n",
" ('did', 151, 0.013007525354623795)]"
]
},
"execution_count": 100,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"device = 'cpu'\n",
"model = SimpleBigramNeuralLanguageModel(vocab_size, embed_size).to(device)\n",
"model.load_state_dict(torch.load('model.bin'))\n",
"model.eval()\n",
"\n",
"ixs = torch.tensor(vocab.forward(['he'])).to(device)\n",
"\n",
"out = model(ixs)\n",
"top = torch.topk(out[0], 10)\n",
"top_indices = top.indices.tolist()\n",
"top_probs = top.values.tolist()\n",
"top_words = vocab.lookup_tokens(top_indices)\n",
"list(zip(top_words, top_indices, top_probs))"
]
},
{
"cell_type": "code",
"execution_count": 101,
"metadata": {},
"outputs": [],
"source": [
"def prediction(word: str) -> str:\n",
" ixs = torch.tensor(vocab.forward([word])).to(device)\n",
" out = model(ixs)\n",
" top = torch.topk(out[0], 5)\n",
" top_indices = top.indices.tolist()\n",
" top_probs = top.values.tolist()\n",
" top_words = vocab.lookup_tokens(top_indices)\n",
" zipped = list(zip(top_words, top_probs))\n",
" for index, element in enumerate(zipped):\n",
" unk = None\n",
" if '<unk>' in element:\n",
" unk = zipped.pop(index)\n",
" zipped.append(('', unk[1]))\n",
" break\n",
" if unk is None:\n",
" zipped[-1] = ('', zipped[-1][1])\n",
" return ' '.join([f'{x[0]}:{x[1]}' for x in zipped])"
]
},
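  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# A quick usage sketch (not executed in the original run): the expected shape of\n",
    "# the line is 'w1:p1 w2:p2 w3:p3 w4:p4 :p5', where the empty-string entry holds\n",
    "# the '<unk>' (or leftover) probability mass; the words depend on the trained weights.\n",
    "print(prediction('he'))"
   ]
  },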
{
"cell_type": "code",
"execution_count": 102,
"metadata": {},
"outputs": [],
"source": [
"def create_outputs(folder_name):\n",
" print(f'Creating outputs in {folder_name}')\n",
" with lzma.open(f'{folder_name}/in.tsv.xz', mode='rt', encoding='utf-8') as fid:\n",
" with open(f'{folder_name}/out.tsv', 'w', encoding='utf-8', newline='\\n') as f:\n",
" for line in fid:\n",
" separated = line.split('\\t')\n",
" prefix = separated[6].replace(r'\\n', ' ').split()[-1]\n",
" output_line = prediction(prefix)\n",
" f.write(output_line + '\\n')"
]
},
{
"cell_type": "code",
"execution_count": 103,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Creating outputs in dev-0\n",
"Creating outputs in test-A\n"
]
}
],
"source": [
"create_outputs('dev-0')\n",
"create_outputs('test-A')"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "scweet",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.15"
},
"orig_nbformat": 4
},
"nbformat": 4,
"nbformat_minor": 2
}