ff-lm-bengio/model lm bengio - ODPOWIEDZI.ipynb

imports

from gensim.utils import tokenize
import numpy as np
import torch
from tqdm.notebook import tqdm
/media/kuba/ssdsam/anaconda3/lib/python3.8/site-packages/gensim/similarities/__init__.py:15: UserWarning: The gensim.similarities.levenshtein submodule is disabled, because the optional Levenshtein package <https://pypi.org/project/python-Levenshtein/> is unavailable. Install Levenhstein (e.g. `pip install python-Levenshtein`) to suppress this warning.
  warnings.warn(msg)
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
#device = 'cpu'
print('Using {} device'.format(device))
Using cuda device
device
device(type='cuda')

dataset preparation

pan_tadeusz_path_train = '/home/kuba/Syncthing/przedmioty/2020-02/ISI/zajecia9_ngramowy_model_jezykowy/pan-tadeusz-train.txt'
corpora_train = open(pan_tadeusz_path_train).read()
corpora_train_tokenized = list(tokenize(corpora_train, lowercase=True))
vocab_itos = sorted(set(corpora_train_tokenized))
len(vocab_itos)
16598
vocab_itos = vocab_itos[:15005]  # cap the vocabulary at 15 005 entries
vocab_itos[15001] = "<UNK>"      # out-of-vocabulary tokens
vocab_itos[15002] = "<BOS>"      # beginning of sequence
vocab_itos[15003] = "<EOS>"      # end of sequence
vocab_itos[15004] = "<PAD>"      # left padding for the first n-grams
len(vocab_itos)
15005
# reverse mapping: token -> id
vocab_stoi = {token: i for i, token in enumerate(vocab_itos)}
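A quick sanity check of the two mappings (a minimal sketch; 'tadeusz' is just an arbitrary example word, and the .get fallback mirrors the <UNK> handling used below):

# round-trip a special token and look up a possibly unseen word
assert vocab_itos[vocab_stoi["<BOS>"]] == "<BOS>"
print(vocab_stoi.get("tadeusz", vocab_stoi["<UNK>"]))  # the token's id, or the <UNK> id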
NGRAMS = 4
def get_token_id(dataset):
    # left-pad with <PAD> ids and a <BOS> id so even the first token
    # already has a full (NGRAMS-1)-token context
    token_ids = [vocab_stoi['<PAD>']] * (NGRAMS - 1) + [vocab_stoi['<BOS>']]
    for token in dataset:
        try:
            token_ids.append(vocab_stoi[token])
        except KeyError:
            # token outside the capped vocabulary
            token_ids.append(vocab_stoi['<UNK>'])
    token_ids.append(vocab_stoi['<EOS>'])
    return token_ids
train_ids = get_token_id(corpora_train_tokenized)
train_ids[:30]
[15004,
 15004,
 15004,
 15002,
 7,
 5002,
 7247,
 11955,
 1432,
 7018,
 14739,
 5506,
 4696,
 4276,
 7505,
 2642,
 8477,
 7259,
 10870,
 10530,
 7506,
 12968,
 7997,
 1911,
 12479,
 11129,
 13069,
 11797,
 5819,
 6268]
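To verify the encoding, the leading ids can be decoded back through vocab_itos (a minimal check; 15004 and 15002 are the <PAD> and <BOS> slots assigned above):

# decode the first few ids back into tokens
print([vocab_itos[i] for i in train_ids[:6]])
# expected: three '<PAD>'s, '<BOS>', then the opening tokens of the poem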
def get_samples(dataset):
    # slide a window of NGRAMS ids over the corpus; each sample is
    # NGRAMS-1 context ids followed by one target id
    samples = []
    for i in range(len(dataset) - NGRAMS + 1):  # + 1 keeps the final window, whose target is <EOS>
        samples.append(dataset[i:i+NGRAMS])
    return samples
train_ids = get_samples(train_ids)
train_ids = torch.tensor(train_ids, device=device)
train_ids[:30]
tensor([[15004, 15004, 15004, 15002],
        [15004, 15004, 15002,     7],
        [15004, 15002,     7,  5002],
        [15002,     7,  5002,  7247],
        [    7,  5002,  7247, 11955],
        [ 5002,  7247, 11955,  1432],
        [ 7247, 11955,  1432,  7018],
        [11955,  1432,  7018, 14739],
        [ 1432,  7018, 14739,  5506],
        [ 7018, 14739,  5506,  4696],
        [14739,  5506,  4696,  4276],
        [ 5506,  4696,  4276,  7505],
        [ 4696,  4276,  7505,  2642],
        [ 4276,  7505,  2642,  8477],
        [ 7505,  2642,  8477,  7259],
        [ 2642,  8477,  7259, 10870],
        [ 8477,  7259, 10870, 10530],
        [ 7259, 10870, 10530,  7506],
        [10870, 10530,  7506, 12968],
        [10530,  7506, 12968,  7997],
        [ 7506, 12968,  7997,  1911],
        [12968,  7997,  1911, 12479],
        [ 7997,  1911, 12479, 11129],
        [ 1911, 12479, 11129, 13069],
        [12479, 11129, 13069, 11797],
        [11129, 13069, 11797,  5819],
        [13069, 11797,  5819,  6268],
        [11797,  5819,  6268,  2807],
        [ 5819,  6268,  2807,  7831],
        [ 6268,  2807,  7831, 12893]], device='cuda:0')
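Each row above is a single training example: the first NGRAMS-1 = 3 ids form the context and the last id is the target, which is exactly the slicing the training loop below applies per batch:

# split the first sample into context x and target y (same slicing as in the training loop)
x, y = train_ids[0, :NGRAMS-1], train_ids[0, NGRAMS-1]
print(x.tolist(), '->', y.item())  # [15004, 15004, 15004] -> 15002: three <PAD>s predict <BOS>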

model

class NeuralLM(torch.nn.Module):

    def __init__(self):
        super(NeuralLM, self).__init__()
        # one 200-dimensional embedding per vocabulary entry
        self.emb = torch.nn.Embedding(len(vocab_itos), 200)
        # a single linear layer maps the concatenated context embeddings
        # straight to logits over the whole vocabulary
        self.fc1 = torch.nn.Linear((NGRAMS - 1) * 200, len(vocab_itos))

    def forward(self, x):
        x = self.emb(x)                          # (batch, NGRAMS-1, 200)
        x = x.reshape(-1, (NGRAMS - 1) * 200)    # concatenate the context embeddings
        x = self.fc1(x)                          # (batch, vocab) logits
        return x
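This is the simplest feed-forward variant: the context embeddings are concatenated and projected directly to logits. The model of Bengio et al. (2003) additionally passes the concatenation through a tanh hidden layer; a minimal sketch of that variant (the hidden size of 100 is an arbitrary choice, not something used in this notebook):

class BengioLM(torch.nn.Module):
    """Hypothetical variant with the tanh hidden layer of Bengio et al. (2003)."""

    def __init__(self, hidden_size=100):
        super().__init__()
        self.emb = torch.nn.Embedding(len(vocab_itos), 200)
        self.hidden = torch.nn.Linear((NGRAMS - 1) * 200, hidden_size)
        self.out = torch.nn.Linear(hidden_size, len(vocab_itos))

    def forward(self, x):
        x = self.emb(x).reshape(-1, (NGRAMS - 1) * 200)
        x = torch.tanh(self.hidden(x))
        return self.out(x)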
lm = NeuralLM().to(device)
criterion = torch.nn.CrossEntropyLoss()
optimizer = torch.optim.Adam(lm.parameters())
BATCH_SIZE = 50
EPOCHS = 2
lm.train()
for epoch in range(EPOCHS):
    batches = 0
    loss_sum = 0

    for i in tqdm(range(0, len(train_ids) - BATCH_SIZE + 1, BATCH_SIZE)):
        X = train_ids[i:i+BATCH_SIZE, :NGRAMS-1]  # context: the first 3 ids of each sample
        Y = train_ids[i:i+BATCH_SIZE, NGRAMS-1]   # target: the id to predict
        predictions = lm(X)
        loss = criterion(predictions, Y)

        optimizer.zero_grad()
        loss.backward()
        optimizer.step()

        loss_sum += loss.item()
        batches += 1

        if i % 100 == 0:  # i grows by BATCH_SIZE=50, so this fires every other batch
            loss_current = loss_sum / batches
            print(f'updates: {i // BATCH_SIZE}/{len(train_ids) / BATCH_SIZE}', end='\t')
            print('loss: ', round(loss_current, 5), end='\t')
            print(np.exp(loss_current))  # exp(mean cross-entropy) = running perplexity
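After the two epochs logged below finish, the trained model can be queried directly; a minimal greedy-prediction sketch (the padded <BOS> context matches the first training samples above):

# predict the most likely token following the padded <BOS> context
lm.eval()
with torch.no_grad():
    ctx = torch.tensor([[vocab_stoi['<PAD>'], vocab_stoi['<PAD>'], vocab_stoi['<BOS>']]],
                       device=device)
    next_id = lm(ctx).argmax(dim=1).item()
print(vocab_itos[next_id])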
updates: 0/1140.44	loss:  9.86444	19234.062158106106
updates: 2/1140.44	loss:  9.8484	18928.01779784466
updates: 4/1140.44	loss:  9.80547	18132.650966417892
updates: 6/1140.44	loss:  9.79965	18027.479440297415
updates: 8/1140.44	loss:  9.77038	17507.36347175203
updates: 10/1140.44	loss:  9.76398	17395.75469455178
updates: 12/1140.44	loss:  9.73982	16980.495266286664
updates: 14/1140.44	loss:  9.72326	16701.657281817905
updates: 16/1140.44	loss:  9.69892	16299.98679638986
updates: 18/1140.44	loss:  9.68147	16018.075454520396
updates: 20/1140.44	loss:  9.66146	15700.642679967945
updates: 22/1140.44	loss:  9.64514	15446.55612236878
updates: 24/1140.44	loss:  9.63481	15287.844411692564
updates: 26/1140.44	loss:  9.61562	14997.145144959657
updates: 28/1140.44	loss:  9.59612	14707.667295823418
updates: 30/1140.44	loss:  9.59734	14725.558518710543
updates: 32/1140.44	loss:  9.57319	14374.268338397032
updates: 34/1140.44	loss:  9.57122	14345.899296320918
updates: 36/1140.44	loss:  9.56701	14285.56806118898
updates: 38/1140.44	loss:  9.56705	14286.279366692625
updates: 40/1140.44	loss:  9.55483	14112.716199852684
updates: 42/1140.44	loss:  9.54847	14023.185526381143
updates: 44/1140.44	loss:  9.53628	13853.311093806895
updates: 46/1140.44	loss:  9.52915	13754.94314273374
updates: 48/1140.44	loss:  9.5163	13579.329368267778
updates: 50/1140.44	loss:  9.50758	13461.412437213186
updates: 52/1140.44	loss:  9.5012	13375.76489148916
updates: 54/1140.44	loss:  9.48498	13160.520211069877
updates: 56/1140.44	loss:  9.48351	13141.24414135997
updates: 58/1140.44	loss:  9.4793	13086.040619782183
updates: 60/1140.44	loss:  9.4693	12955.826789589635
updates: 62/1140.44	loss:  9.45711	12798.820739889548
updates: 64/1140.44	loss:  9.45498	12771.672536565737
updates: 66/1140.44	loss:  9.45263	12741.660749820296
updates: 68/1140.44	loss:  9.45132	12724.96446668678
updates: 70/1140.44	loss:  9.44457	12639.30335760076
updates: 72/1140.44	loss:  9.44033	12585.829880659916
updates: 74/1140.44	loss:  9.42779	12429.002202080706
updates: 76/1140.44	loss:  9.42272	12366.192784197907
updates: 78/1140.44	loss:  9.41757	12302.617375412821
updates: 80/1140.44	loss:  9.41148	12227.90597875709
updates: 82/1140.44	loss:  9.40725	12176.355513954843
updates: 84/1140.44	loss:  9.40192	12111.580565146021
updates: 86/1140.44	loss:  9.39689	12050.849471389334
updates: 88/1140.44	loss:  9.39121	11982.567013411552
updates: 90/1140.44	loss:  9.38475	11905.433821472729
updates: 92/1140.44	loss:  9.37616	11803.56663859979
updates: 94/1140.44	loss:  9.37446	11783.554123177306
updates: 96/1140.44	loss:  9.3765	11807.568931733695
updates: 98/1140.44	loss:  9.37796	11824.901494761023
updates: 100/1140.44	loss:  9.36802	11707.938330346582
updates: 102/1140.44	loss:  9.35723	11582.300252923045
updates: 104/1140.44	loss:  9.34826	11478.822758999582
updates: 106/1140.44	loss:  9.34181	11405.040953617527
updates: 108/1140.44	loss:  9.33673	11347.296568327127
updates: 110/1140.44	loss:  9.33414	11317.909510987543
updates: 112/1140.44	loss:  9.33469	11324.171257525839
updates: 114/1140.44	loss:  9.32007	11159.772464268459
updates: 116/1140.44	loss:  9.31356	11087.346440070609
updates: 118/1140.44	loss:  9.30683	11012.981115321441
updates: 120/1140.44	loss:  9.30269	10967.449950595814
updates: 122/1140.44	loss:  9.30209	10960.855147155384
updates: 124/1140.44	loss:  9.29887	10925.65400385966
updates: 126/1140.44	loss:  9.29767	10912.571610483568
updates: 128/1140.44	loss:  9.2952	10885.674474219924
updates: 130/1140.44	loss:  9.29539	10887.706529252391
updates: 132/1140.44	loss:  9.28469	10771.87999170365
updates: 134/1140.44	loss:  9.27856	10706.002529591899
updates: 136/1140.44	loss:  9.2749	10666.912725763812
updates: 138/1140.44	loss:  9.27104	10625.805688136119
updates: 140/1140.44	loss:  9.27178	10633.638810210972
updates: 142/1140.44	loss:  9.26796	10593.16940404968
updates: 144/1140.44	loss:  9.26434	10554.89283611906
updates: 146/1140.44	loss:  9.25682	10475.799020913953
updates: 148/1140.44	loss:  9.25293	10435.125131344192
updates: 150/1140.44	loss:  9.24837	10387.624944391351
updates: 152/1140.44	loss:  9.24303	10332.301963151596
updates: 154/1140.44	loss:  9.23939	10294.757515776588
updates: 156/1140.44	loss:  9.23526	10252.276330557435
updates: 158/1140.44	loss:  9.23783	10278.664350821904
updates: 160/1140.44	loss:  9.23373	10236.628631193564
updates: 162/1140.44	loss:  9.22935	10191.903426264354
updates: 164/1140.44	loss:  9.22971	10195.543594286717
updates: 166/1140.44	loss:  9.22622	10160.070461524845
updates: 168/1140.44	loss:  9.22417	10139.300673417529
updates: 170/1140.44	loss:  9.22285	10125.849838600767
updates: 172/1140.44	loss:  9.22212	10118.496531489998
updates: 174/1140.44	loss:  9.2224	10121.276969352899
updates: 176/1140.44	loss:  9.21642	10061.019910965366
updates: 178/1140.44	loss:  9.21146	10011.1668307345
updates: 180/1140.44	loss:  9.21059	10002.453365938658
updates: 182/1140.44	loss:  9.20718	9968.420839794166
updates: 184/1140.44	loss:  9.2095	9991.615915489974
updates: 186/1140.44	loss:  9.20842	9980.829506740562
updates: 188/1140.44	loss:  9.20886	9985.165047905835
updates: 190/1140.44	loss:  9.20843	9980.926034425029
updates: 192/1140.44	loss:  9.20236	9920.531416037213
updates: 194/1140.44	loss:  9.20061	9903.127896045871
updates: 196/1140.44	loss:  9.20071	9904.12472691749
updates: 198/1140.44	loss:  9.19935	9890.73411023782
updates: 200/1140.44	loss:  9.19465	9844.305532936287
updates: 202/1140.44	loss:  9.19114	9809.788602353941
updates: 204/1140.44	loss:  9.18912	9790.045663913235
updates: 206/1140.44	loss:  9.18473	9747.176568240211
updates: 208/1140.44	loss:  9.18817	9780.723701608218
updates: 210/1140.44	loss:  9.1895	9793.771820867336
updates: 212/1140.44	loss:  9.18649	9764.291211430418
updates: 214/1140.44	loss:  9.1835	9735.129089116019
updates: 216/1140.44	loss:  9.17781	9679.882943011597
updates: 218/1140.44	loss:  9.17744	9676.382649921077
updates: 220/1140.44	loss:  9.17801	9681.881839327794
updates: 222/1140.44	loss:  9.179	9691.48638532776
updates: 224/1140.44	loss:  9.17897	9691.197748483914
updates: 226/1140.44	loss:  9.17599	9662.341299710737
updates: 228/1140.44	loss:  9.17061	9610.485448004398
updates: 230/1140.44	loss:  9.1676	9581.648580685025
updates: 232/1140.44	loss:  9.1647	9553.890663138567
updates: 234/1140.44	loss:  9.16223	9530.239763253736
updates: 236/1140.44	loss:  9.16127	9521.102027211478
updates: 238/1140.44	loss:  9.16133	9521.75668331657
updates: 240/1140.44	loss:  9.15926	9502.027094969206
updates: 242/1140.44	loss:  9.1573	9483.403699087496
updates: 244/1140.44	loss:  9.15631	9474.030325927635
updates: 246/1140.44	loss:  9.15513	9462.90772040226
updates: 248/1140.44	loss:  9.15532	9464.622658511986
updates: 250/1140.44	loss:  9.15412	9453.310729424482
updates: 252/1140.44	loss:  9.15471	9458.857110199508
updates: 254/1140.44	loss:  9.15572	9468.46380613989
updates: 256/1140.44	loss:  9.15187	9432.082785606884
updates: 258/1140.44	loss:  9.15025	9416.791998203951
updates: 260/1140.44	loss:  9.15052	9419.300914563584
updates: 262/1140.44	loss:  9.15038	9418.003352624599
updates: 264/1140.44	loss:  9.15099	9423.736601555034
updates: 266/1140.44	loss:  9.14782	9393.958591486986
updates: 268/1140.44	loss:  9.14835	9398.94184785059
updates: 270/1140.44	loss:  9.14965	9411.187963150725
updates: 272/1140.44	loss:  9.15352	9447.671965318474
updates: 274/1140.44	loss:  9.1493	9407.853420842304
updates: 276/1140.44	loss:  9.14674	9383.76797933217
updates: 278/1140.44	loss:  9.14543	9371.50878865904
updates: 280/1140.44	loss:  9.1428	9346.907603776232
updates: 282/1140.44	loss:  9.14249	9343.998562576648
updates: 284/1140.44	loss:  9.13787	9300.928445146052
updates: 286/1140.44	loss:  9.13851	9306.846738092006
updates: 288/1140.44	loss:  9.13677	9290.71177689869
updates: 290/1140.44	loss:  9.13579	9281.643445190206
updates: 292/1140.44	loss:  9.13569	9280.71571440055
updates: 294/1140.44	loss:  9.13353	9260.691146466801
updates: 296/1140.44	loss:  9.13338	9259.247322912079
updates: 298/1140.44	loss:  9.13344	9259.86484396417
updates: 300/1140.44	loss:  9.13213	9247.719277922852
updates: 302/1140.44	loss:  9.12989	9226.97765137891
updates: 304/1140.44	loss:  9.12948	9223.214803908799
updates: 306/1140.44	loss:  9.12774	9207.203140235111
updates: 308/1140.44	loss:  9.1236	9169.192305510742
updates: 310/1140.44	loss:  9.12402	9173.046358419884
updates: 312/1140.44	loss:  9.12333	9166.714734206307
updates: 314/1140.44	loss:  9.12098	9145.117142994583
updates: 316/1140.44	loss:  9.11582	9098.084746979452
updates: 318/1140.44	loss:  9.11473	9088.169131675442
updates: 320/1140.44	loss:  9.11329	9075.093827812107
updates: 322/1140.44	loss:  9.1097	9042.613073898101
updates: 324/1140.44	loss:  9.10955	9041.229702202678
updates: 326/1140.44	loss:  9.11028	9047.861062324793
updates: 328/1140.44	loss:  9.1115	9058.895763138527
updates: 330/1140.44	loss:  9.11307	9073.127538588904
updates: 332/1140.44	loss:  9.11521	9092.55656620755
updates: 334/1140.44	loss:  9.11411	9082.506276953594
updates: 336/1140.44	loss:  9.11571	9097.134109738194
updates: 338/1140.44	loss:  9.11643	9103.684259568952
updates: 340/1140.44	loss:  9.11472	9088.128314022937
updates: 342/1140.44	loss:  9.11445	9085.648108887002
updates: 344/1140.44	loss:  9.11275	9070.196598788874
updates: 346/1140.44	loss:  9.11072	9051.816586981453
updates: 348/1140.44	loss:  9.10746	9022.324096148663
updates: 350/1140.44	loss:  9.09881	8944.644785746445
updates: 352/1140.44	loss:  9.0974	8932.002982678183
updates: 354/1140.44	loss:  9.09248	8888.200263524552
updates: 356/1140.44	loss:  9.09368	8898.855668455715
updates: 358/1140.44	loss:  9.09367	8898.789644671228
updates: 360/1140.44	loss:  9.09273	8890.413009589885
updates: 362/1140.44	loss:  9.09339	8896.294440774267
updates: 364/1140.44	loss:  9.09519	8912.316074222827
updates: 366/1140.44	loss:  9.09604	8919.91231027586
updates: 368/1140.44	loss:  9.09951	8950.921811933047
updates: 370/1140.44	loss:  9.09892	8945.662433327912
updates: 372/1140.44	loss:  9.09873	8943.89945718563
updates: 374/1140.44	loss:  9.09771	8934.834157857957
updates: 376/1140.44	loss:  9.09728	8931.007643071827
updates: 378/1140.44	loss:  9.09325	8895.030230358148
updates: 380/1140.44	loss:  9.0895	8861.743956641705
updates: 382/1140.44	loss:  9.08525	8824.197561894589
updates: 384/1140.44	loss:  9.08501	8822.030752768092
updates: 386/1140.44	loss:  9.08426	8815.41989963531
updates: 388/1140.44	loss:  9.08617	8832.274096754125
updates: 390/1140.44	loss:  9.08548	8826.224195814959
updates: 392/1140.44	loss:  9.08631	8833.53368930957
updates: 394/1140.44	loss:  9.08719	8841.263721737376
updates: 396/1140.44	loss:  9.08692	8838.959423335322
updates: 398/1140.44	loss:  9.08476	8819.889140191066
updates: 400/1140.44	loss:  9.08658	8835.958099393036
updates: 402/1140.44	loss:  9.0857	8828.18014101433
updates: 404/1140.44	loss:  9.08615	8832.147733587592
updates: 406/1140.44	loss:  9.08567	8827.839648411116
updates: 408/1140.44	loss:  9.08686	8838.365040014993
updates: 410/1140.44	loss:  9.08863	8854.084694087665
updates: 412/1140.44	loss:  9.08866	8854.304579066786
updates: 414/1140.44	loss:  9.08758	8844.77982277106
updates: 416/1140.44	loss:  9.08785	8847.173710199388
updates: 418/1140.44	loss:  9.08459	8818.35560453962
updates: 420/1140.44	loss:  9.08352	8808.891855794196
updates: 422/1140.44	loss:  9.08335	8807.394598673405
updates: 424/1140.44	loss:  9.08266	8801.374313064687
updates: 426/1140.44	loss:  9.07867	8766.301894892924
updates: 428/1140.44	loss:  9.07949	8773.516991606317
updates: 430/1140.44	loss:  9.08041	8781.537306517563
updates: 432/1140.44	loss:  9.0793	8771.803126714849
updates: 434/1140.44	loss:  9.07985	8776.685802900252
updates: 436/1140.44	loss:  9.07944	8773.036828882445
updates: 438/1140.44	loss:  9.07886	8767.977413527424
updates: 440/1140.44	loss:  9.07839	8763.849793503234
updates: 442/1140.44	loss:  9.07851	8764.8593427907
updates: 444/1140.44	loss:  9.07826	8762.680788400196
updates: 446/1140.44	loss:  9.07605	8743.332069467539
updates: 448/1140.44	loss:  9.07496	8733.837753444433
updates: 450/1140.44	loss:  9.07534	8737.142040932065
updates: 452/1140.44	loss:  9.07153	8703.919047427298
updates: 454/1140.44	loss:  9.07272	8714.328192635765
updates: 456/1140.44	loss:  9.07181	8706.354073836805
updates: 458/1140.44	loss:  9.06945	8685.802743071248
updates: 460/1140.44	loss:  9.07033	8693.51334541033
updates: 462/1140.44	loss:  9.0702	8692.319522693146
updates: 464/1140.44	loss:  9.07022	8692.566151113931
updates: 466/1140.44	loss:  9.0666	8661.134518137636
updates: 468/1140.44	loss:  9.06612	8656.96053287048
updates: 470/1140.44	loss:  9.06449	8642.900779190313
updates: 472/1140.44	loss:  9.06406	8639.129498028884
updates: 474/1140.44	loss:  9.06156	8617.595476266433
updates: 476/1140.44	loss:  9.06201	8621.43501121626
updates: 478/1140.44	loss:  9.0622	8623.118518575466
updates: 480/1140.44	loss:  9.06207	8622.02225808244
updates: 482/1140.44	loss:  9.06192	8620.700979185298
updates: 484/1140.44	loss:  9.06229	8623.892989926508
updates: 486/1140.44	loss:  9.06179	8619.576546062819
updates: 488/1140.44	loss:  9.0618	8619.61906600263
updates: 490/1140.44	loss:  9.05986	8602.936368012024
updates: 492/1140.44	loss:  9.05947	8599.629713170123
updates: 494/1140.44	loss:  9.05811	8587.918960088837
updates: 496/1140.44	loss:  9.05734	8581.292628473404
updates: 498/1140.44	loss:  9.0537	8550.102149378348
updates: 500/1140.44	loss:  9.05228	8538.003685550006
updates: 502/1140.44	loss:  9.04898	8509.824903969211
updates: 504/1140.44	loss:  9.04815	8502.817093001939
updates: 506/1140.44	loss:  9.04616	8485.91273753108
updates: 508/1140.44	loss:  9.04539	8479.334663312318
updates: 510/1140.44	loss:  9.04473	8473.791051700722
updates: 512/1140.44	loss:  9.04566	8481.65679096124
updates: 514/1140.44	loss:  9.04514	8477.22556791467
updates: 516/1140.44	loss:  9.04668	8490.274804561239
updates: 518/1140.44	loss:  9.04617	8486.011914202878
updates: 520/1140.44	loss:  9.04223	8452.59383170006
updates: 522/1140.44	loss:  9.04083	8440.766701984167
updates: 524/1140.44	loss:  9.0413	8444.712972939778
updates: 526/1140.44	loss:  9.03879	8423.556099811856
updates: 528/1140.44	loss:  9.03736	8411.532842304867
updates: 530/1140.44	loss:  9.03648	8404.116457065558
updates: 532/1140.44	loss:  9.03351	8379.227843054143
updates: 534/1140.44	loss:  9.03201	8366.683642857659
updates: 536/1140.44	loss:  9.02952	8345.877443781555
updates: 538/1140.44	loss:  9.02777	8331.276220675938
updates: 540/1140.44	loss:  9.02609	8317.288623731472
updates: 542/1140.44	loss:  9.02483	8306.802290058027
updates: 544/1140.44	loss:  9.02267	8288.855643469982
updates: 546/1140.44	loss:  9.0161	8234.633057620846
updates: 548/1140.44	loss:  9.01824	8252.219859078456
updates: 550/1140.44	loss:  9.01816	8251.591976160538
updates: 552/1140.44	loss:  9.01714	8243.157360273013
updates: 554/1140.44	loss:  9.01667	8239.285183525617
updates: 556/1140.44	loss:  9.01672	8239.678225649683
updates: 558/1140.44	loss:  9.01543	8229.079401329047
updates: 560/1140.44	loss:  9.01426	8219.444251173154
updates: 562/1140.44	loss:  9.01521	8227.250554985285
updates: 564/1140.44	loss:  9.01442	8220.760409112627
updates: 566/1140.44	loss:  9.01495	8225.164838972247
updates: 568/1140.44	loss:  9.01397	8217.050433058013
updates: 570/1140.44	loss:  9.01258	8205.630708626553
updates: 572/1140.44	loss:  9.01168	8198.312863853129
updates: 574/1140.44	loss:  9.01209	8201.638443263619
updates: 576/1140.44	loss:  9.01145	8196.408918365063
updates: 578/1140.44	loss:  9.01265	8206.220439458102
updates: 580/1140.44	loss:  9.012	8200.888136116835
updates: 582/1140.44	loss:  9.01228	8203.242828423887
updates: 584/1140.44	loss:  9.01288	8208.110501498863
updates: 586/1140.44	loss:  9.01273	8206.932549997924
updates: 588/1140.44	loss:  9.01135	8195.58833862466
updates: 590/1140.44	loss:  9.01162	8197.796027398748
updates: 592/1140.44	loss:  9.01145	8196.372344435464
updates: 594/1140.44	loss:  9.01147	8196.53415806482
updates: 596/1140.44	loss:  9.01207	8201.489605409859
updates: 598/1140.44	loss:  9.01061	8189.539771787929
updates: 600/1140.44	loss:  9.00899	8176.234046059826
updates: 602/1140.44	loss:  9.00765	8165.272708628003
updates: 604/1140.44	loss:  9.0046	8140.421866345255
updates: 606/1140.44	loss:  9.00398	8135.423506721505
updates: 608/1140.44	loss:  9.00329	8129.753336361415
updates: 610/1140.44	loss:  9.00349	8131.4172339677625
updates: 612/1140.44	loss:  9.00389	8134.644242417064
updates: 614/1140.44	loss:  9.00326	8129.512427183549
updates: 616/1140.44	loss:  9.0039	8134.749288209742
updates: 618/1140.44	loss:  9.00429	8137.954455246367
updates: 620/1140.44	loss:  9.00274	8125.338097810221
updates: 622/1140.44	loss:  9.00256	8123.829228598916
updates: 624/1140.44	loss:  9.00345	8131.074136323915
updates: 626/1140.44	loss:  9.00474	8141.603582112368
updates: 628/1140.44	loss:  9.00381	8134.03979952955
updates: 630/1140.44	loss:  9.00413	8136.639691755087
updates: 632/1140.44	loss:  9.00452	8139.769199827493
updates: 634/1140.44	loss:  9.00513	8144.740769142489
updates: 636/1140.44	loss:  9.00598	8151.682033374385
updates: 638/1140.44	loss:  9.00658	8156.609762768971
updates: 640/1140.44	loss:  9.00636	8154.773839699974
updates: 642/1140.44	loss:  9.00477	8141.787649224005
updates: 644/1140.44	loss:  9.0026	8124.1642112627915
updates: 646/1140.44	loss:  9.00122	8113.011969210169
updates: 648/1140.44	loss:  8.99956	8099.556719233404
updates: 650/1140.44	loss:  8.99833	8089.526728829583
updates: 652/1140.44	loss:  8.99874	8092.867817213899
updates: 654/1140.44	loss:  8.99561	8067.610422155493
updates: 656/1140.44	loss:  8.99594	8070.215986075882
updates: 658/1140.44	loss:  8.99553	8066.930390011858
updates: 660/1140.44	loss:  8.99527	8064.840906278532
updates: 662/1140.44	loss:  8.99473	8060.4751285435195
updates: 664/1140.44	loss:  8.99453	8058.861914746701
updates: 666/1140.44	loss:  8.99364	8051.727595812018
updates: 668/1140.44	loss:  8.99451	8058.6848486262925
updates: 670/1140.44	loss:  8.99405	8055.021602502067
updates: 672/1140.44	loss:  8.99419	8056.105948832661
updates: 674/1140.44	loss:  8.99388	8053.611608336102
updates: 676/1140.44	loss:  8.99369	8052.094117881097
updates: 678/1140.44	loss:  8.99224	8040.458152382287
updates: 680/1140.44	loss:  8.99194	8038.053335002441
updates: 682/1140.44	loss:  8.99141	8033.74830832689
updates: 684/1140.44	loss:  8.99209	8039.207644204204
updates: 686/1140.44	loss:  8.99282	8045.106282708816
updates: 688/1140.44	loss:  8.99282	8045.111940490653
updates: 690/1140.44	loss:  8.99322	8048.354424117917
updates: 692/1140.44	loss:  8.99375	8052.572291234546
updates: 694/1140.44	loss:  8.99274	8044.494845967595
updates: 696/1140.44	loss:  8.99331	8049.071159556381
updates: 698/1140.44	loss:  8.99081	8028.964131562491
updates: 700/1140.44	loss:  8.99127	8032.6636958293075
updates: 702/1140.44	loss:  8.99018	8023.862262169628
updates: 704/1140.44	loss:  8.98941	8017.710676411549
updates: 706/1140.44	loss:  8.98929	8016.730363645408
updates: 708/1140.44	loss:  8.98904	8014.729440681696
updates: 710/1140.44	loss:  8.98832	8009.0216676135815
updates: 712/1140.44	loss:  8.99018	8023.906081049602
updates: 714/1140.44	loss:  8.9896	8019.229896337034
updates: 716/1140.44	loss:  8.98825	8008.391911281847
updates: 718/1140.44	loss:  8.98911	8015.297601816686
updates: 720/1140.44	loss:  8.98947	8018.176824631876
updates: 722/1140.44	loss:  8.98778	8004.675657960088
updates: 724/1140.44	loss:  8.98804	8006.730553628521
updates: 726/1140.44	loss:  8.98782	8004.948462630163
updates: 728/1140.44	loss:  8.98686	7997.279722081281
updates: 730/1140.44	loss:  8.98591	7989.7315902008795
updates: 732/1140.44	loss:  8.98683	7997.055258801125
updates: 734/1140.44	loss:  8.98664	7995.510913220034
updates: 736/1140.44	loss:  8.98797	8006.161534643583
updates: 738/1140.44	loss:  8.98922	8016.196851359066
updates: 740/1140.44	loss:  8.98986	8021.3311400431185
updates: 742/1140.44	loss:  8.99084	8029.189731639376
updates: 744/1140.44	loss:  8.9894	8017.675713396739
updates: 746/1140.44	loss:  8.98952	8018.593487462703
updates: 748/1140.44	loss:  8.98902	8014.5760536679445
updates: 750/1140.44	loss:  8.98906	8014.8885867280715
updates: 752/1140.44	loss:  8.98835	8009.253483323332
updates: 754/1140.44	loss:  8.98734	8001.147709649025
updates: 756/1140.44	loss:  8.98687	7997.383572627127
updates: 758/1140.44	loss:  8.98625	7992.430066670758
updates: 760/1140.44	loss:  8.98402	7974.644379501137
updates: 762/1140.44	loss:  8.98327	7968.6854113098425
updates: 764/1140.44	loss:  8.98309	7967.221461487253
updates: 766/1140.44	loss:  8.98217	7959.885949639521
updates: 768/1140.44	loss:  8.98169	7956.051249110791
updates: 770/1140.44	loss:  8.98202	7958.682944959775
updates: 772/1140.44	loss:  8.98271	7964.149274491275
updates: 774/1140.44	loss:  8.98119	7952.057678840022
updates: 776/1140.44	loss:  8.97984	7941.3469704569525
updates: 778/1140.44	loss:  8.98011	7943.5056936303945
updates: 780/1140.44	loss:  8.97924	7936.609588434691
updates: 782/1140.44	loss:  8.97786	7925.6196863906935
updates: 784/1140.44	loss:  8.9767	7916.498082472749
updates: 786/1140.44	loss:  8.97703	7919.09614236027
updates: 788/1140.44	loss:  8.97629	7913.212451738551
updates: 790/1140.44	loss:  8.97574	7908.897953039073
updates: 792/1140.44	loss:  8.97638	7913.946275919675
updates: 794/1140.44	loss:  8.97597	7910.66792039204
updates: 796/1140.44	loss:  8.97621	7912.558558414883
updates: 798/1140.44	loss:  8.97686	7917.719900576509
updates: 800/1140.44	loss:  8.97664	7915.969119568859
updates: 802/1140.44	loss:  8.97558	7907.596798512669
updates: 804/1140.44	loss:  8.97564	7908.043874025064
updates: 806/1140.44	loss:  8.97507	7903.563669947768
updates: 808/1140.44	loss:  8.97449	7898.96426311424
updates: 810/1140.44	loss:  8.97428	7897.32052990529
updates: 812/1140.44	loss:  8.97372	7892.94464445439
updates: 814/1140.44	loss:  8.97376	7893.185508744869
updates: 816/1140.44	loss:  8.97301	7887.310701414765
updates: 818/1140.44	loss:  8.97371	7892.801827406501
updates: 820/1140.44	loss:  8.97206	7879.8279044977935
updates: 822/1140.44	loss:  8.96983	7862.231427273825
updates: 824/1140.44	loss:  8.96917	7857.113099635124
updates: 826/1140.44	loss:  8.96896	7855.4512300902425
updates: 828/1140.44	loss:  8.96819	7849.39142286545
updates: 830/1140.44	loss:  8.96884	7854.460936241637
updates: 832/1140.44	loss:  8.96856	7852.265892830345
updates: 834/1140.44	loss:  8.96792	7847.245320783718
updates: 836/1140.44	loss:  8.9681	7848.657556064398
updates: 838/1140.44	loss:  8.96815	7849.0640777449335
updates: 840/1140.44	loss:  8.9678	7846.334358288989
updates: 842/1140.44	loss:  8.96716	7841.288748619282
updates: 844/1140.44	loss:  8.96627	7834.33809553313
updates: 846/1140.44	loss:  8.96596	7831.928063663619
updates: 848/1140.44	loss:  8.96612	7833.137041147441
updates: 850/1140.44	loss:  8.96663	7837.149466956712
updates: 852/1140.44	loss:  8.9654	7827.510277227882
updates: 854/1140.44	loss:  8.96598	7832.038193765055
updates: 856/1140.44	loss:  8.96442	7819.822507098414
updates: 858/1140.44	loss:  8.96443	7819.908001932807
updates: 860/1140.44	loss:  8.96359	7813.328232371184
updates: 862/1140.44	loss:  8.96265	7806.026454510492
updates: 864/1140.44	loss:  8.96198	7800.8199109018515
updates: 866/1140.44	loss:  8.96138	7796.090109960083
updates: 868/1140.44	loss:  8.96103	7793.412446219686
updates: 870/1140.44	loss:  8.95984	7784.091896058413
updates: 872/1140.44	loss:  8.95869	7775.17509650934
updates: 874/1140.44	loss:  8.95839	7772.8054142594865
updates: 876/1140.44	loss:  8.95923	7779.37594833349
updates: 878/1140.44	loss:  8.95909	7778.249456936049
updates: 880/1140.44	loss:  8.95908	7778.220658020769
updates: 882/1140.44	loss:  8.95778	7768.111713478755
updates: 884/1140.44	loss:  8.95718	7763.422835058488
updates: 886/1140.44	loss:  8.95598	7754.100166228005
updates: 888/1140.44	loss:  8.95574	7752.231721496373
updates: 890/1140.44	loss:  8.95568	7751.826930496252
updates: 892/1140.44	loss:  8.95627	7756.360022733085
updates: 894/1140.44	loss:  8.95601	7754.341560442558
updates: 896/1140.44	loss:  8.95467	7743.956638920663
updates: 898/1140.44	loss:  8.95393	7738.206770449914
updates: 900/1140.44	loss:  8.95535	7749.213062822288
updates: 902/1140.44	loss:  8.95569	7751.8839561395735
updates: 904/1140.44	loss:  8.95629	7756.516052408002
updates: 906/1140.44	loss:  8.95812	7770.713239945776
updates: 908/1140.44	loss:  8.9581	7770.543149893761
updates: 910/1140.44	loss:  8.95694	7761.551247886195
updates: 912/1140.44	loss:  8.95737	7764.9440280099225
updates: 914/1140.44	loss:  8.95665	7759.337463864273
updates: 916/1140.44	loss:  8.95533	7749.100510782227
updates: 918/1140.44	loss:  8.95621	7755.872876754079
updates: 920/1140.44	loss:  8.95525	7748.4418630098835
updates: 922/1140.44	loss:  8.95506	7747.009096797362
updates: 924/1140.44	loss:  8.95474	7744.506584947785
updates: 926/1140.44	loss:  8.95353	7735.1182926463
updates: 928/1140.44	loss:  8.95344	7734.4369025890255
updates: 930/1140.44	loss:  8.95288	7730.107877701697
updates: 932/1140.44	loss:  8.95218	7724.677327676585
updates: 934/1140.44	loss:  8.95166	7720.723989555122
updates: 936/1140.44	loss:  8.95178	7721.588966517435
updates: 938/1140.44	loss:  8.95182	7721.943320988648
updates: 940/1140.44	loss:  8.95024	7709.71133934535
updates: 942/1140.44	loss:  8.94946	7703.699970573108
updates: 944/1140.44	loss:  8.94963	7705.072964421285
updates: 946/1140.44	loss:  8.95023	7709.667564618739
updates: 948/1140.44	loss:  8.95106	7716.075290625088
updates: 950/1140.44	loss:  8.95038	7710.82386706687
updates: 952/1140.44	loss:  8.94947	7703.769346834255
updates: 954/1140.44	loss:  8.94967	7705.344969363359
updates: 956/1140.44	loss:  8.94987	7706.908773723352
updates: 958/1140.44	loss:  8.94937	7703.000870476472
updates: 960/1140.44	loss:  8.94854	7696.629512378626
updates: 962/1140.44	loss:  8.94906	7700.682226412791
updates: 964/1140.44	loss:  8.94955	7704.4390716149865
updates: 966/1140.44	loss:  8.94917	7701.490650960408
updates: 968/1140.44	loss:  8.95018	7709.241493188176
updates: 970/1140.44	loss:  8.95035	7710.583747323928
updates: 972/1140.44	loss:  8.95045	7711.333567053296
updates: 974/1140.44	loss:  8.95069	7713.230505095142
updates: 976/1140.44	loss:  8.95146	7719.120767871713
updates: 978/1140.44	loss:  8.95094	7715.165326798026
updates: 980/1140.44	loss:  8.9507	7713.271815402412
updates: 982/1140.44	loss:  8.95019	7709.379071409926
updates: 984/1140.44	loss:  8.95023	7709.691118605801
updates: 986/1140.44	loss:  8.95043	7711.222550455951
updates: 988/1140.44	loss:  8.95099	7715.543392956979
updates: 990/1140.44	loss:  8.9501	7708.6611897298435
updates: 992/1140.44	loss:  8.94978	7706.195891875126
updates: 994/1140.44	loss:  8.94928	7702.321762185466
updates: 996/1140.44	loss:  8.94891	7699.501410081127
updates: 998/1140.44	loss:  8.94893	7699.672462898058
updates: 1000/1140.44	loss:  8.94846	7696.01866164988
updates: 1002/1140.44	loss:  8.94898	7700.036144682901
updates: 1004/1140.44	loss:  8.95002	7708.070308864663
updates: 1006/1140.44	loss:  8.95076	7713.746500033021
updates: 1008/1140.44	loss:  8.95178	7721.589750170172
updates: 1010/1140.44	loss:  8.95239	7726.314794093976
updates: 1012/1140.44	loss:  8.95248	7727.001743843481
updates: 1014/1140.44	loss:  8.95125	7717.554998534523
updates: 1016/1140.44	loss:  8.95245	7726.771794653452
updates: 1018/1140.44	loss:  8.95343	7734.361554381286
updates: 1020/1140.44	loss:  8.95281	7729.597111874374
updates: 1022/1140.44	loss:  8.95345	7734.552456172466
updates: 1024/1140.44	loss:  8.95393	7738.231207628671
updates: 1026/1140.44	loss:  8.9532	7732.616378307852
updates: 1028/1140.44	loss:  8.95325	7733.018758238669
updates: 1030/1140.44	loss:  8.95164	7720.556926368948
updates: 1032/1140.44	loss:  8.95086	7714.549583754533
updates: 1034/1140.44	loss:  8.94979	7706.241743498811
updates: 1036/1140.44	loss:  8.94927	7702.301058775793
updates: 1038/1140.44	loss:  8.9482	7694.018857019395
updates: 1040/1140.44	loss:  8.94789	7691.618307518843
updates: 1042/1140.44	loss:  8.94575	7675.231650685546
updates: 1044/1140.44	loss:  8.94501	7669.537334045489
updates: 1046/1140.44	loss:  8.94266	7651.492606099927
updates: 1048/1140.44	loss:  8.94137	7641.679672999777
updates: 1050/1140.44	loss:  8.94041	7634.305114791558
updates: 1052/1140.44	loss:  8.94003	7631.454741447617
updates: 1054/1140.44	loss:  8.9397	7628.872511464441
updates: 1056/1140.44	loss:  8.93941	7626.702179810916
updates: 1058/1140.44	loss:  8.9382	7617.446856720688
updates: 1060/1140.44	loss:  8.93679	7606.767764126965
updates: 1062/1140.44	loss:  8.93634	7603.314408594677
updates: 1064/1140.44	loss:  8.93666	7605.7679294894315
updates: 1066/1140.44	loss:  8.93669	7605.95684069711
updates: 1068/1140.44	loss:  8.93697	7608.145966506117
updates: 1070/1140.44	loss:  8.93696	7608.05732999003
updates: 1072/1140.44	loss:  8.93547	7596.736755483472
updates: 1074/1140.44	loss:  8.93509	7593.832058177845
updates: 1076/1140.44	loss:  8.93471	7590.930871471421
updates: 1078/1140.44	loss:  8.93446	7589.023438448109
updates: 1080/1140.44	loss:  8.93346	7581.444808278803
updates: 1082/1140.44	loss:  8.93285	7576.860430017206
updates: 1084/1140.44	loss:  8.93213	7571.3601665330025
updates: 1086/1140.44	loss:  8.9321	7571.1756737283395
updates: 1088/1140.44	loss:  8.93066	7560.272919127606
updates: 1090/1140.44	loss:  8.92949	7551.423964421477
updates: 1092/1140.44	loss:  8.9288	7546.190006987697
updates: 1094/1140.44	loss:  8.92803	7540.410783677575
updates: 1096/1140.44	loss:  8.92751	7536.452413119469
updates: 1098/1140.44	loss:  8.92647	7528.657563686831
updates: 1100/1140.44	loss:  8.92545	7520.931956571003
updates: 1102/1140.44	loss:  8.92603	7525.302405312761
updates: 1104/1140.44	loss:  8.92564	7522.3671745357415
updates: 1106/1140.44	loss:  8.92367	7507.590440773665
updates: 1108/1140.44	loss:  8.92323	7504.318468178457
updates: 1110/1140.44	loss:  8.92279	7500.976858755862
updates: 1112/1140.44	loss:  8.92301	7502.605313367174
updates: 1114/1140.44	loss:  8.9223	7497.293872562188
updates: 1116/1140.44	loss:  8.9215	7491.32056362267
updates: 1118/1140.44	loss:  8.92214	7496.129080768256
updates: 1120/1140.44	loss:  8.9207	7485.2989281949385
updates: 1122/1140.44	loss:  8.92007	7480.604082986556
updates: 1124/1140.44	loss:  8.91949	7476.269643224187
updates: 1126/1140.44	loss:  8.91955	7476.711473269333
updates: 1128/1140.44	loss:  8.91958	7476.912050159236
updates: 1130/1140.44	loss:  8.91945	7476.008985797601
updates: 1132/1140.44	loss:  8.92042	7483.221702510888
updates: 1134/1140.44	loss:  8.92017	7481.369967678803
updates: 1136/1140.44	loss:  8.91993	7479.552894624172
updates: 1138/1140.44	loss:  8.91985	7478.933625171214

updates: 0/1140.44	loss:  6.15737	472.1866640173176
updates: 2/1140.44	loss:  6.49404	661.187925979436
updates: 4/1140.44	loss:  6.65829	779.2170420167108
updates: 6/1140.44	loss:  6.62956	757.1489470634459
updates: 8/1140.44	loss:  6.65585	777.3152225941299
updates: 10/1140.44	loss:  6.67206	790.0180958432777
updates: 12/1140.44	loss:  6.63402	760.5324281009048
updates: 14/1140.44	loss:  6.68404	799.5459559090896
updates: 16/1140.44	loss:  6.61777	748.2778980564127
updates: 18/1140.44	loss:  6.57939	720.0980719875246
updates: 20/1140.44	loss:  6.51658	676.2620199169293
updates: 22/1140.44	loss:  6.46151	640.0248403780723
updates: 24/1140.44	loss:  6.46224	640.494371451859
updates: 26/1140.44	loss:  6.43054	620.5063001785295
updates: 28/1140.44	loss:  6.40764	606.4634738269821
updates: 30/1140.44	loss:  6.41106	608.5407457017043
updates: 32/1140.44	loss:  6.38026	590.0824163849289
updates: 34/1140.44	loss:  6.39267	597.4521930632917
updates: 36/1140.44	loss:  6.37786	588.665120539213
updates: 38/1140.44	loss:  6.37151	584.9397097863659
updates: 40/1140.44	loss:  6.37751	588.458199663264
updates: 42/1140.44	loss:  6.3009	545.0640733758702
updates: 44/1140.44	loss:  6.2839	535.8732075668663
updates: 46/1140.44	loss:  6.27901	533.2595611052308
updates: 48/1140.44	loss:  6.27316	530.1487077924804
updates: 50/1140.44	loss:  6.26251	524.5344628664483
updates: 52/1140.44	loss:  6.25503	520.6237906429659
updates: 54/1140.44	loss:  6.23855	512.1154248393076
updates: 56/1140.44	loss:  6.2447	515.2723374271866
updates: 58/1140.44	loss:  6.23928	512.4901010155256
updates: 60/1140.44	loss:  6.22163	503.5240792575841
updates: 62/1140.44	loss:  6.21763	501.51320207096137
updates: 64/1140.44	loss:  6.21714	501.2696911172825
updates: 66/1140.44	loss:  6.21107	498.235628095491
updates: 68/1140.44	loss:  6.2073	496.3612082402405
updates: 70/1140.44	loss:  6.18807	486.90358525578864
updates: 72/1140.44	loss:  6.17432	480.25650426520906
updates: 74/1140.44	loss:  6.16409	475.3688493192041
updates: 76/1140.44	loss:  6.15602	471.54631007074704
updates: 78/1140.44	loss:  6.14851	468.01992064447154
updates: 80/1140.44	loss:  6.1386	463.4057972387367
updates: 82/1140.44	loss:  6.13323	460.9236122992137
updates: 84/1140.44	loss:  6.12747	458.27388850340475
updates: 86/1140.44	loss:  6.122	455.773554795045
updates: 88/1140.44	loss:  6.11497	452.58469973861037
updates: 90/1140.44	loss:  6.10429	447.77501598925204
updates: 92/1140.44	loss:  6.09777	444.86518014689705
updates: 94/1140.44	loss:  6.09071	441.7366333466082
updates: 96/1140.44	loss:  6.08432	438.92176452078127
updates: 98/1140.44	loss:  6.08019	437.112509180127
updates: 100/1140.44	loss:  6.0703	432.80978936725774
updates: 102/1140.44	loss:  6.06095	428.78223616627025
updates: 104/1140.44	loss:  6.04885	423.6235817600417
updates: 106/1140.44	loss:  6.03673	418.523653880456
updates: 108/1140.44	loss:  6.02745	414.65486504304357
updates: 110/1140.44	loss:  6.02087	411.93610749721574
updates: 112/1140.44	loss:  6.02	411.57848714328964
updates: 114/1140.44	loss:  5.99905	403.04627272427297
updates: 116/1140.44	loss:  5.98804	398.6308599311958
updates: 118/1140.44	loss:  5.98103	395.84917246270834
updates: 120/1140.44	loss:  5.97341	392.8411739696762
updates: 122/1140.44	loss:  5.96803	390.73682061038744
updates: 124/1140.44	loss:  5.96061	387.845055699313
updates: 126/1140.44	loss:  5.95848	387.0229103793977
updates: 128/1140.44	loss:  5.95155	384.34701093567406
updates: 130/1140.44	loss:  5.94659	382.44806768448285
updates: 132/1140.44	loss:  5.93701	378.80089560778583
updates: 134/1140.44	loss:  5.92836	375.53994038876647
updates: 136/1140.44	loss:  5.92213	373.20472846874554
updates: 138/1140.44	loss:  5.91754	371.4975753596078
updates: 140/1140.44	loss:  5.91072	368.97208504881235
updates: 142/1140.44	loss:  5.906	367.232637061756
updates: 144/1140.44	loss:  5.89585	363.5266772184239
updates: 146/1140.44	loss:  5.88458	359.4516029872989
updates: 148/1140.44	loss:  5.87676	356.653017517491
updates: 150/1140.44	loss:  5.8708	354.53424809794444
updates: 152/1140.44	loss:  5.86518	352.54599535551364
updates: 154/1140.44	loss:  5.86111	351.114304926248
updates: 156/1140.44	loss:  5.85429	348.7269412966721
updates: 158/1140.44	loss:  5.84822	346.61634801479084
updates: 160/1140.44	loss:  5.84281	344.74759416140495
updates: 162/1140.44	loss:  5.83799	343.08828549777746
updates: 164/1140.44	loss:  5.83457	341.9188864341528
updates: 166/1140.44	loss:  5.82922	340.0949121742286
updates: 168/1140.44	loss:  5.82415	338.373022304464
updates: 170/1140.44	loss:  5.81994	336.95343918938676
updates: 172/1140.44	loss:  5.81335	334.73740302850166
updates: 174/1140.44	loss:  5.80952	333.4592702161141
updates: 176/1140.44	loss:  5.80299	331.28755900772256
updates: 178/1140.44	loss:  5.79532	328.7564611337489
updates: 180/1140.44	loss:  5.79274	327.9100864133935
updates: 182/1140.44	loss:  5.78713	326.07542834612093
updates: 184/1140.44	loss:  5.78572	325.6153673622597
updates: 186/1140.44	loss:  5.78009	323.7884433429742
updates: 188/1140.44	loss:  5.77621	322.5346868309791
updates: 190/1140.44	loss:  5.77097	320.8503710764777
updates: 192/1140.44	loss:  5.76412	318.6585233800739
updates: 194/1140.44	loss:  5.75758	316.581319084982
updates: 196/1140.44	loss:  5.75128	314.5944327926127
updates: 198/1140.44	loss:  5.74775	313.48367554152355
updates: 200/1140.44	loss:  5.74132	311.47373624436716
updates: 202/1140.44	loss:  5.73513	309.5524400350446
updates: 204/1140.44	loss:  5.73075	308.1998302051544
updates: 206/1140.44	loss:  5.72581	306.6818523479379
updates: 208/1140.44	loss:  5.72116	305.25835858263673
updates: 210/1140.44	loss:  5.7163	303.77951810441556
updates: 212/1140.44	loss:  5.71026	301.9504922308215
updates: 214/1140.44	loss:  5.70538	300.4785725417534
updates: 216/1140.44	loss:  5.69917	298.617995898629
updates: 218/1140.44	loss:  5.69245	296.6195039257595
updates: 220/1140.44	loss:  5.6906	296.0722667888474
updates: 222/1140.44	loss:  5.68605	294.72618135516217
updates: 224/1140.44	loss:  5.68173	293.456833756658
updates: 226/1140.44	loss:  5.67957	292.8220624954049
updates: 228/1140.44	loss:  5.67498	291.4814919415812
updates: 230/1140.44	loss:  5.66977	289.96699353012025
updates: 232/1140.44	loss:  5.66448	288.4379793439296
updates: 234/1140.44	loss:  5.65966	287.0510666149674
updates: 236/1140.44	loss:  5.65662	286.1788754506921
updates: 238/1140.44	loss:  5.6541	285.4588849288081
updates: 240/1140.44	loss:  5.64957	284.1701385385757
updates: 242/1140.44	loss:  5.64592	283.13428798909194
updates: 244/1140.44	loss:  5.6418	281.969378659925
updates: 246/1140.44	loss:  5.63719	280.6721507040315
updates: 248/1140.44	loss:  5.62994	278.6441850973377
updates: 250/1140.44	loss:  5.62809	278.1295418321608
updates: 252/1140.44	loss:  5.6226	276.60654702654523
updates: 254/1140.44	loss:  5.6144	274.3488133528427
updates: 256/1140.44	loss:  5.61036	273.24248799019193
updates: 258/1140.44	loss:  5.60581	272.00306530277715
updates: 260/1140.44	loss:  5.6016	270.8582919795496
updates: 262/1140.44	loss:  5.59881	270.10598047035654
updates: 264/1140.44	loss:  5.59527	269.15052162384757
updates: 266/1140.44	loss:  5.59362	268.7070719453792
updates: 268/1140.44	loss:  5.58952	267.6078529667354
updates: 270/1140.44	loss:  5.58594	266.6504752558538
updates: 272/1140.44	loss:  5.58434	266.2250553553925
updates: 274/1140.44	loss:  5.58181	265.55245049081697
updates: 276/1140.44	loss:  5.5792	264.86069316145955
updates: 278/1140.44	loss:  5.57418	263.53335155904153
updates: 280/1140.44	loss:  5.57245	263.07727019970287
updates: 282/1140.44	loss:  5.56651	261.5190215531252
updates: 284/1140.44	loss:  5.56325	260.6676951314574
updates: 286/1140.44	loss:  5.55923	259.62226447849787
updates: 288/1140.44	loss:  5.55579	258.7318118011902
updates: 290/1140.44	loss:  5.54953	257.11560151787006
updates: 292/1140.44	loss:  5.54344	255.55506142863246
updates: 294/1140.44	loss:  5.53861	254.32381360685616
updates: 296/1140.44	loss:  5.53681	253.86812846845052
updates: 298/1140.44	loss:  5.53392	253.13447240681526
updates: 300/1140.44	loss:  5.53079	252.3424405360357
updates: 302/1140.44	loss:  5.52735	251.4759994226179
updates: 304/1140.44	loss:  5.52398	250.62935247208165
updates: 306/1140.44	loss:  5.52032	249.71507378687863
updates: 308/1140.44	loss:  5.51676	248.82745916816165
updates: 310/1140.44	loss:  5.51356	248.0336971908984
updates: 312/1140.44	loss:  5.5102	247.201408269126
updates: 314/1140.44	loss:  5.50696	246.40183786528783
updates: 316/1140.44	loss:  5.50358	245.57053619965893
updates: 318/1140.44	loss:  5.5011	244.9604517990709
updates: 320/1140.44	loss:  5.49911	244.473800957751
updates: 322/1140.44	loss:  5.49582	243.6719470917393
updates: 324/1140.44	loss:  5.49355	243.11797347584906
updates: 326/1140.44	loss:  5.49157	242.6387042884375
updates: 328/1140.44	loss:  5.48668	241.45535882752625
updates: 330/1140.44	loss:  5.48293	240.55122041113518
updates: 332/1140.44	loss:  5.47759	239.26826156457057
updates: 334/1140.44	loss:  5.47425	238.47132434042138
updates: 336/1140.44	loss:  5.47124	237.75381036089195
updates: 338/1140.44	loss:  5.46828	237.0514708229553
updates: 340/1140.44	loss:  5.46633	236.59107588196017
updates: 342/1140.44	loss:  5.46323	235.85918885196514
updates: 344/1140.44	loss:  5.45979	235.0481202501975
updates: 346/1140.44	loss:  5.45482	233.88276728733683
updates: 348/1140.44	loss:  5.45129	233.05750880340034
updates: 350/1140.44	loss:  5.44064	230.59063053924396
updates: 352/1140.44	loss:  5.43762	229.89499424574507
updates: 354/1140.44	loss:  5.43238	228.69365939854498
updates: 356/1140.44	loss:  5.43083	228.33822474373926
updates: 358/1140.44	loss:  5.42814	227.72580915545606
updates: 360/1140.44	loss:  5.42688	227.43841499756414
updates: 362/1140.44	loss:  5.42288	226.5301297628514
updates: 364/1140.44	loss:  5.41899	225.65009437116206
updates: 366/1140.44	loss:  5.41739	225.29117644466837
updates: 368/1140.44	loss:  5.41514	224.78295260106574
updates: 370/1140.44	loss:  5.4114	223.94547906882872
updates: 372/1140.44	loss:  5.4089	223.38676084502666
updates: 374/1140.44	loss:  5.40472	222.45343077120768
updates: 376/1140.44	loss:  5.40144	221.7258330259122
updates: 378/1140.44	loss:  5.39783	220.92570010983235
updates: 380/1140.44	loss:  5.39468	220.23275470588226
updates: 382/1140.44	loss:  5.39349	219.96885473056952
updates: 384/1140.44	loss:  5.39158	219.54905427546566
updates: 386/1140.44	loss:  5.38896	218.9761599431377
updates: 388/1140.44	loss:  5.38573	218.26852683064286
updates: 390/1140.44	loss:  5.38286	217.64462250140053
updates: 392/1140.44	loss:  5.3798	216.97949042357538
updates: 394/1140.44	loss:  5.37687	216.34439814430746
updates: 396/1140.44	loss:  5.37364	215.6460197600609
updates: 398/1140.44	loss:  5.3713	215.14169319359053
updates: 400/1140.44	loss:  5.36834	214.50661440891156
updates: 402/1140.44	loss:  5.36445	213.67390110531161
updates: 404/1140.44	loss:  5.36152	213.04829910421992
updates: 406/1140.44	loss:  5.35835	212.37378919199918
updates: 408/1140.44	loss:  5.35575	211.82384134450692
updates: 410/1140.44	loss:  5.35432	211.5200160998817
updates: 412/1140.44	loss:  5.35335	211.3159282515888
updates: 414/1140.44	loss:  5.35055	210.72334447697864
updates: 416/1140.44	loss:  5.34858	210.3090103593823
updates: 418/1140.44	loss:  5.34596	209.75819265402245
updates: 420/1140.44	loss:  5.34519	209.5983784854467
updates: 422/1140.44	loss:  5.34264	209.06430274497603
updates: 424/1140.44	loss:  5.34093	208.70689285418769
updates: 426/1140.44	loss:  5.33689	207.8655547419628
updates: 428/1140.44	loss:  5.3339	207.24532904815095
updates: 430/1140.44	loss:  5.32916	206.2650057769366
updates: 432/1140.44	loss:  5.326	205.61369834091673
updates: 434/1140.44	loss:  5.32563	205.53703671856917
updates: 436/1140.44	loss:  5.32428	205.26138915974593
updates: 438/1140.44	loss:  5.32304	205.00545669815472
updates: 440/1140.44	loss:  5.31978	204.33961237237702
updates: 442/1140.44	loss:  5.317	203.77125972190726
updates: 444/1140.44	loss:  5.31616	203.60143740881375
updates: 446/1140.44	loss:  5.31321	203.00030949906213
updates: 448/1140.44	loss:  5.31173	202.69979762489731
updates: 450/1140.44	loss:  5.30868	202.08257423072865
updates: 452/1140.44	loss:  5.30618	201.57883733320676
updates: 454/1140.44	loss:  5.30263	200.8646951235837
updates: 456/1140.44	loss:  5.29883	200.10288536631774
updates: 458/1140.44	loss:  5.29357	199.0531136521789
updates: 460/1140.44	loss:  5.2919	198.71976385049962
updates: 462/1140.44	loss:  5.29045	198.43353973304892
updates: 464/1140.44	loss:  5.288	197.94782425416147
updates: 466/1140.44	loss:  5.28619	197.58959794865265
updates: 468/1140.44	loss:  5.28259	196.8782858899296
updates: 470/1140.44	loss:  5.27992	196.35452807008033
updates: 472/1140.44	loss:  5.27836	196.04723224695607
updates: 474/1140.44	loss:  5.2766	195.70347468492204
updates: 476/1140.44	loss:  5.27276	194.95421268633942
updates: 478/1140.44	loss:  5.26973	194.36336442071033
updates: 480/1140.44	loss:  5.26826	194.07886720242004
updates: 482/1140.44	loss:  5.26743	193.9170460343976
updates: 484/1140.44	loss:  5.26431	193.3136847446249
updates: 486/1140.44	loss:  5.26304	193.06824874014157
updates: 488/1140.44	loss:  5.26182	192.83278808194754
updates: 490/1140.44	loss:  5.26103	192.67969527505144
updates: 492/1140.44	loss:  5.25923	192.3329322433901
updates: 494/1140.44	loss:  5.25786	192.07031944671186
updates: 496/1140.44	loss:  5.25454	191.43275654066395
updates: 498/1140.44	loss:  5.25181	190.91190195412432
updates: 500/1140.44	loss:  5.24981	190.53081748990076
updates: 502/1140.44	loss:  5.24514	189.64319627747597
updates: 504/1140.44	loss:  5.24294	189.22505741108492
updates: 506/1140.44	loss:  5.24044	188.75379936266089
updates: 508/1140.44	loss:  5.23763	188.22385516379444
updates: 510/1140.44	loss:  5.23571	187.86240625340645
updates: 512/1140.44	loss:  5.23256	187.27147327706467
updates: 514/1140.44	loss:  5.2306	186.90535639446006
updates: 516/1140.44	loss:  5.22918	186.63877143168546
updates: 518/1140.44	loss:  5.22728	186.2860657417691
updates: 520/1140.44	loss:  5.22515	185.88957577816927
updates: 522/1140.44	loss:  5.22351	185.58366890350663
updates: 524/1140.44	loss:  5.2201	184.9524213997427
updates: 526/1140.44	loss:  5.21901	184.75128740560405
updates: 528/1140.44	loss:  5.21624	184.23952750435114
updates: 530/1140.44	loss:  5.21351	183.7378539804468
updates: 532/1140.44	loss:  5.21206	183.4709172417711
updates: 534/1140.44	loss:  5.21004	183.10118271818627
updates: 536/1140.44	loss:  5.20801	182.72932130942553
updates: 538/1140.44	loss:  5.20563	182.2964359156573
updates: 540/1140.44	loss:  5.20346	181.90043830178504
updates: 542/1140.44	loss:  5.2008	181.41780255394502
updates: 544/1140.44	loss:  5.19983	181.24123295096277
updates: 546/1140.44	loss:  5.19376	180.14487831830533
updates: 548/1140.44	loss:  5.19152	179.74098773090623
updates: 550/1140.44	loss:  5.1903	179.5231198631703
updates: 552/1140.44	loss:  5.1884	179.18090188185164
updates: 554/1140.44	loss:  5.18712	178.9531990350105
updates: 556/1140.44	loss:  5.18607	178.76529763477583
updates: 558/1140.44	loss:  5.18363	178.32825098756712
updates: 560/1140.44	loss:  5.1813	177.91480773965375
updates: 562/1140.44	loss:  5.17965	177.62035347281832
updates: 564/1140.44	loss:  5.1787	177.4516863584692
updates: 566/1140.44	loss:  5.17675	177.10638069360067
updates: 568/1140.44	loss:  5.17488	176.77584151538676
updates: 570/1140.44	loss:  5.17272	176.39319442059136
updates: 572/1140.44	loss:  5.17002	175.9183771630561
updates: 574/1140.44	loss:  5.16774	175.51845864608714
updates: 576/1140.44	loss:  5.16588	175.19237030607496
updates: 578/1140.44	loss:  5.16506	175.04806365030512
updates: 580/1140.44	loss:  5.16283	174.65752813128586
updates: 582/1140.44	loss:  5.16064	174.27547981855798
updates: 584/1140.44	loss:  5.15854	173.9097859082381
updates: 586/1140.44	loss:  5.1564	173.53910044396457
updates: 588/1140.44	loss:  5.15399	173.12105274604974
updates: 590/1140.44	loss:  5.15328	172.99759259062054
updates: 592/1140.44	loss:  5.15146	172.6828232374232
updates: 594/1140.44	loss:  5.14972	172.38393042477952
updates: 596/1140.44	loss:  5.1475	172.00044013768235
updates: 598/1140.44	loss:  5.14556	171.66819896526215
updates: 600/1140.44	loss:  5.14416	171.427785864427
updates: 602/1140.44	loss:  5.14253	171.14869865582867
updates: 604/1140.44	loss:  5.14028	170.763030620135
updates: 606/1140.44	loss:  5.13924	170.5858995805144
updates: 608/1140.44	loss:  5.13711	170.22332882824523
updates: 610/1140.44	loss:  5.13471	169.81549941621464
updates: 612/1140.44	loss:  5.13227	169.40144962831624
updates: 614/1140.44	loss:  5.13054	169.10884821894737
updates: 616/1140.44	loss:  5.12866	168.79127836059965
updates: 618/1140.44	loss:  5.12577	168.30342636614958
updates: 620/1140.44	loss:  5.12373	167.96004242668295
updates: 622/1140.44	loss:  5.12213	167.69188500100066
updates: 624/1140.44	loss:  5.11907	167.18060903237767
updates: 626/1140.44	loss:  5.11661	166.7696981844726
updates: 628/1140.44	loss:  5.11491	166.48589160578675
updates: 630/1140.44	loss:  5.11252	166.0887036691771
updates: 632/1140.44	loss:  5.11082	165.8066992477346
updates: 634/1140.44	loss:  5.10878	165.46815090855665
updates: 636/1140.44	loss:  5.10586	164.98566820888246
updates: 638/1140.44	loss:  5.10516	164.87094439654294
updates: 640/1140.44	loss:  5.10386	164.6559827092754
updates: 642/1140.44	loss:  5.10223	164.38781165796206
updates: 644/1140.44	loss:  5.10086	164.1624307143506
updates: 646/1140.44	loss:  5.09852	163.7798679381167
updates: 648/1140.44	loss:  5.09688	163.51079953071942
updates: 650/1140.44	loss:  5.09504	163.21101215164546
updates: 652/1140.44	loss:  5.09391	163.02544960539223
updates: 654/1140.44	loss:  5.09205	162.7232503588977
updates: 656/1140.44	loss:  5.09091	162.53747717430312
updates: 658/1140.44	loss:  5.08875	162.18721622206178
updates: 660/1140.44	loss:  5.0864	161.80597538187828
updates: 662/1140.44	loss:  5.08581	161.71160682141726
updates: 664/1140.44	loss:  5.08408	161.43214885667237
updates: 666/1140.44	loss:  5.08276	161.21816905386402
updates: 668/1140.44	loss:  5.08035	160.8303843062857
updates: 670/1140.44	loss:  5.07962	160.71266685347777
updates: 672/1140.44	loss:  5.07849	160.53077894316107
updates: 674/1140.44	loss:  5.0772	160.32454480905386
updates: 676/1140.44	loss:  5.07498	159.96868295986656
updates: 678/1140.44	loss:  5.07317	159.67927840044993
updates: 680/1140.44	loss:  5.07184	159.46688719152698
updates: 682/1140.44	loss:  5.06961	159.1128510694478
updates: 684/1140.44	loss:  5.06725	158.73788880465125
updates: 686/1140.44	loss:  5.06451	158.30269174571254
updates: 688/1140.44	loss:  5.06235	157.96098967621054
updates: 690/1140.44	loss:  5.06079	157.71571287983164
updates: 692/1140.44	loss:  5.05792	157.26378561246895
updates: 694/1140.44	loss:  5.05509	156.81901538312877
updates: 696/1140.44	loss:  5.05214	156.3570371113183
updates: 698/1140.44	loss:  5.05113	156.1993894696543
updates: 700/1140.44	loss:  5.04873	155.8249690999114
updates: 702/1140.44	loss:  5.04718	155.5831647396487
updates: 704/1140.44	loss:  5.0459	155.38387527354084
updates: 706/1140.44	loss:  5.04431	155.13655026387187
updates: 708/1140.44	loss:  5.04195	154.77218748451742
updates: 710/1140.44	loss:  5.03991	154.4554314768767
updates: 712/1140.44	loss:  5.03753	154.08917489535014
updates: 714/1140.44	loss:  5.0359	153.83851338059765
updates: 716/1140.44	loss:  5.03382	153.5184951626968
updates: 718/1140.44	loss:  5.03187	153.2186790427144
updates: 720/1140.44	loss:  5.03033	152.98292879248987
updates: 722/1140.44	loss:  5.02867	152.72915133054374
updates: 724/1140.44	loss:  5.02727	152.51668123925296
updates: 726/1140.44	loss:  5.02592	152.31063407488082
updates: 728/1140.44	loss:  5.02443	152.0840401128804
updates: 730/1140.44	loss:  5.02267	151.8162276635985
updates: 732/1140.44	loss:  5.0211	151.57834771462157
updates: 734/1140.44	loss:  5.01958	151.34783794597308
updates: 736/1140.44	loss:  5.0183	151.15459855947398
updates: 738/1140.44	loss:  5.01731	151.00441585215967
updates: 740/1140.44	loss:  5.01576	150.7712153259336
updates: 742/1140.44	loss:  5.01426	150.5450421230418
updates: 744/1140.44	loss:  5.01366	150.45437446792147
updates: 746/1140.44	loss:  5.01273	150.3142088236546
updates: 748/1140.44	loss:  5.01194	150.19650761751447
updates: 750/1140.44	loss:  5.01119	150.08347269994076
updates: 752/1140.44	loss:  5.01014	149.92572092758147
updates: 754/1140.44	loss:  5.00906	149.76355150343846
updates: 756/1140.44	loss:  5.00763	149.55043855376064
updates: 758/1140.44	loss:  5.00751	149.53212076335726
updates: 760/1140.44	loss:  5.00685	149.4338864934081
updates: 762/1140.44	loss:  5.00596	149.3003365744637
updates: 764/1140.44	loss:  5.00557	149.24217646407442
updates: 766/1140.44	loss:  5.00534	149.20823456076894
updates: 768/1140.44	loss:  5.00369	148.9618168331061
updates: 770/1140.44	loss:  5.00335	148.91119651220188
updates: 772/1140.44	loss:  5.00221	148.74084465535046
updates: 774/1140.44	loss:  5.00129	148.6050607790947
updates: 776/1140.44	loss:  5.00021	148.44422999667364
updates: 778/1140.44	loss:  4.99967	148.36480192920428
updates: 780/1140.44	loss:  4.9982	148.14618155760726
updates: 782/1140.44	loss:  4.99675	147.9313527718069
updates: 784/1140.44	loss:  4.99606	147.8300197006474
updates: 786/1140.44	loss:  4.99431	147.57046340377556
updates: 788/1140.44	loss:  4.99246	147.2985124253983
updates: 790/1140.44	loss:  4.99075	147.04606201404837
updates: 792/1140.44	loss:  4.98948	146.86057098824264
updates: 794/1140.44	loss:  4.98789	146.62614624003967
updates: 796/1140.44	loss:  4.98501	146.20528806088265
updates: 798/1140.44	loss:  4.98223	145.79966087151047
updates: 800/1140.44	loss:  4.98089	145.6043520592682
updates: 802/1140.44	loss:  4.97952	145.40450369362028
updates: 804/1140.44	loss:  4.9778	145.1552502235043
updates: 806/1140.44	loss:  4.97642	144.95399961989833
updates: 808/1140.44	loss:  4.97503	144.7529560699372
updates: 810/1140.44	loss:  4.97414	144.62456524329926
updates: 812/1140.44	loss:  4.97227	144.35351336118322
updates: 814/1140.44	loss:  4.97085	144.15002262811717
updates: 816/1140.44	loss:  4.96931	143.9275300353944
updates: 818/1140.44	loss:  4.96818	143.76540805776415
updates: 820/1140.44	loss:  4.96699	143.5940078310464
updates: 822/1140.44	loss:  4.9647	143.26532739634524
updates: 824/1140.44	loss:  4.96356	143.10251908914398
updates: 826/1140.44	loss:  4.96293	143.01280058858913
updates: 828/1140.44	loss:  4.96153	142.81238966391962
updates: 830/1140.44	loss:  4.96072	142.69641884552675
updates: 832/1140.44	loss:  4.95955	142.52904300497647
updates: 834/1140.44	loss:  4.95922	142.48233517613994
updates: 836/1140.44	loss:  4.95822	142.3399406990929
updates: 838/1140.44	loss:  4.95711	142.18224786759262
updates: 840/1140.44	loss:  4.95599	142.023584461179
updates: 842/1140.44	loss:  4.95483	141.85899401912837
updates: 844/1140.44	loss:  4.95442	141.8010081177304
updates: 846/1140.44	loss:  4.9535	141.67047825096407
updates: 848/1140.44	loss:  4.95305	141.60642522678611
updates: 850/1140.44	loss:  4.95307	141.60943595633213
updates: 852/1140.44	loss:  4.95254	141.53418758223145
updates: 854/1140.44	loss:  4.95158	141.39776901277395
updates: 856/1140.44	loss:  4.95084	141.29356658581509
updates: 858/1140.44	loss:  4.94942	141.09272310958931
updates: 860/1140.44	loss:  4.94822	140.92420727525692
updates: 862/1140.44	loss:  4.94738	140.8056817280763
updates: 864/1140.44	loss:  4.94673	140.7136484057234
updates: 866/1140.44	loss:  4.9453	140.51324900046293
updates: 868/1140.44	loss:  4.94502	140.47357187264276
updates: 870/1140.44	loss:  4.94456	140.40840140285422
updates: 872/1140.44	loss:  4.94372	140.29072068616713
updates: 874/1140.44	loss:  4.94176	140.016140866644
updates: 876/1140.44	loss:  4.94054	139.84558049822917
updates: 878/1140.44	loss:  4.93993	139.76114426225692
updates: 880/1140.44	loss:  4.93893	139.62084973561946
updates: 882/1140.44	loss:  4.93876	139.59750967426882
updates: 884/1140.44	loss:  4.93702	139.3541457878889
updates: 886/1140.44	loss:  4.93579	139.1834377319142
updates: 888/1140.44	loss:  4.93445	138.99695625289823
updates: 890/1140.44	loss:  4.93303	138.79986272706907
updates: 892/1140.44	loss:  4.93116	138.54030079294867
updates: 894/1140.44	loss:  4.92893	138.2311605596206
updates: 896/1140.44	loss:  4.92804	138.10874081845333
updates: 898/1140.44	loss:  4.92641	137.8836914308507
updates: 900/1140.44	loss:  4.92386	137.53186333137697
updates: 902/1140.44	loss:  4.92131	137.1825685524929
updates: 904/1140.44	loss:  4.9188	136.83887682116068
updates: 906/1140.44	loss:  4.91751	136.66187752031414
updates: 908/1140.44	loss:  4.91669	136.5492045104482
updates: 910/1140.44	loss:  4.91641	136.5121362607243
updates: 912/1140.44	loss:  4.91522	136.34864389483693
updates: 914/1140.44	loss:  4.91424	136.21639032188818
updates: 916/1140.44	loss:  4.91238	135.96259159911844
updates: 918/1140.44	loss:  4.9106	135.72078866152333
updates: 920/1140.44	loss:  4.90945	135.564812545854
updates: 922/1140.44	loss:  4.90932	135.5474165292021
updates: 924/1140.44	loss:  4.90775	135.33395188314884
updates: 926/1140.44	loss:  4.90742	135.29037266363366
updates: 928/1140.44	loss:  4.90692	135.2228540215766
updates: 930/1140.44	loss:  4.90553	135.0347265091675
updates: 932/1140.44	loss:  4.90331	134.73450784078562
updates: 934/1140.44	loss:  4.90185	134.53850550674758
updates: 936/1140.44	loss:  4.90075	134.39112486580774
updates: 938/1140.44	loss:  4.90056	134.36440287802202
updates: 940/1140.44	loss:  4.89993	134.28030104287527
updates: 942/1140.44	loss:  4.89877	134.1252271755654
updates: 944/1140.44	loss:  4.8982	134.0486217165225
updates: 946/1140.44	loss:  4.89744	133.94673804646683
updates: 948/1140.44	loss:  4.89638	133.80497182298237
updates: 950/1140.44	loss:  4.8959	133.74007718011157
updates: 952/1140.44	loss:  4.89543	133.67779107156508
updates: 954/1140.44	loss:  4.89475	133.58636158688435
updates: 956/1140.44	loss:  4.89316	133.37411718095845
updates: 958/1140.44	loss:  4.89194	133.2117580527046
updates: 960/1140.44	loss:  4.89168	133.1775555558632
updates: 962/1140.44	loss:  4.89058	133.0311688175584
updates: 964/1140.44	loss:  4.88962	132.902648246087
updates: 966/1140.44	loss:  4.88926	132.85502821576438
updates: 968/1140.44	loss:  4.88861	132.76940883967544
updates: 970/1140.44	loss:  4.88723	132.5851582446218
updates: 972/1140.44	loss:  4.88569	132.38138375491155
updates: 974/1140.44	loss:  4.88577	132.3926236289328
updates: 976/1140.44	loss:  4.88476	132.25815168966014
updates: 978/1140.44	loss:  4.88351	132.09348082073683
updates: 980/1140.44	loss:  4.88281	132.0017145994689
updates: 982/1140.44	loss:  4.88197	131.89039746525916
updates: 984/1140.44	loss:  4.88208	131.90483558438606
updates: 986/1140.44	loss:  4.88115	131.7822545430455
updates: 988/1140.44	loss:  4.87926	131.53386401852387
updates: 990/1140.44	loss:  4.87919	131.52447663850384
updates: 992/1140.44	loss:  4.87884	131.4784036228562
updates: 994/1140.44	loss:  4.87908	131.50993802792814
updates: 996/1140.44	loss:  4.87795	131.36167617574662
updates: 998/1140.44	loss:  4.8773	131.2752720962407
updates: 1000/1140.44	loss:  4.87727	131.27237122439055
updates: 1002/1140.44	loss:  4.87594	131.0969155552494
updates: 1004/1140.44	loss:  4.87398	130.84073101607936
updates: 1006/1140.44	loss:  4.87329	130.75092553694728
updates: 1008/1140.44	loss:  4.87267	130.66971440876225
updates: 1010/1140.44	loss:  4.87085	130.43130800560004
updates: 1012/1140.44	loss:  4.86962	130.27148542684589
updates: 1014/1140.44	loss:  4.86827	130.09519435102845
updates: 1016/1140.44	loss:  4.86702	129.93340453801687
updates: 1018/1140.44	loss:  4.86637	129.8480674565626
updates: 1020/1140.44	loss:  4.86515	129.69023051144623
updates: 1022/1140.44	loss:  4.86318	129.4356906505186
updates: 1024/1140.44	loss:  4.86209	129.29402291855033
updates: 1026/1140.44	loss:  4.86096	129.1487131766509
updates: 1028/1140.44	loss:  4.85994	129.01682401556738
updates: 1030/1140.44	loss:  4.85924	128.92603097761628
updates: 1032/1140.44	loss:  4.85834	128.81061582186257
updates: 1034/1140.44	loss:  4.85806	128.7737779615554
updates: 1036/1140.44	loss:  4.85808	128.77709147187798
updates: 1038/1140.44	loss:  4.85683	128.6156449527718
updates: 1040/1140.44	loss:  4.85576	128.4776743645171
updates: 1042/1140.44	loss:  4.85416	128.2725933528742
updates: 1044/1140.44	loss:  4.85432	128.29383798212407
updates: 1046/1140.44	loss:  4.85345	128.18221392754577
updates: 1048/1140.44	loss:  4.85213	128.01267189980373
updates: 1050/1140.44	loss:  4.85136	127.9141155939983
updates: 1052/1140.44	loss:  4.85021	127.76665453554867
updates: 1054/1140.44	loss:  4.8497	127.70247001025903
updates: 1056/1140.44	loss:  4.84919	127.636758290674
updates: 1058/1140.44	loss:  4.84834	127.5281848635421
updates: 1060/1140.44	loss:  4.84709	127.36966885836036
updates: 1062/1140.44	loss:  4.84665	127.31306256224546
updates: 1064/1140.44	loss:  4.84519	127.12706815957381
updates: 1066/1140.44	loss:  4.84352	126.91553052440344
updates: 1068/1140.44	loss:  4.84168	126.68157966181045
updates: 1070/1140.44	loss:  4.84101	126.59656546362845
updates: 1072/1140.44	loss:  4.84095	126.59001104873029
updates: 1074/1140.44	loss:  4.841	126.59545824687845
updates: 1076/1140.44	loss:  4.84058	126.54293842006372
updates: 1078/1140.44	loss:  4.83974	126.43687704802781
updates: 1080/1140.44	loss:  4.83928	126.37820954505321
updates: 1082/1140.44	loss:  4.83945	126.39927992768688
updates: 1084/1140.44	loss:  4.83913	126.35940796794382
updates: 1086/1140.44	loss:  4.8389	126.32984520177155
updates: 1088/1140.44	loss:  4.83849	126.27815702758163
updates: 1090/1140.44	loss:  4.8375	126.15303370524808
updates: 1092/1140.44	loss:  4.83684	126.07006075398033
updates: 1094/1140.44	loss:  4.83601	125.96623438490424
updates: 1096/1140.44	loss:  4.83539	125.88809170272184
updates: 1098/1140.44	loss:  4.83444	125.76805500286181
updates: 1100/1140.44	loss:  4.83399	125.7121040294529
updates: 1102/1140.44	loss:  4.83349	125.6489488142964
updates: 1104/1140.44	loss:  4.83283	125.56569900080676
updates: 1106/1140.44	loss:  4.83189	125.44783103973653
updates: 1108/1140.44	loss:  4.83169	125.42259491603347
updates: 1110/1140.44	loss:  4.83141	125.388073018812
updates: 1112/1140.44	loss:  4.83081	125.31278363691462
updates: 1114/1140.44	loss:  4.82998	125.20810953985931
updates: 1116/1140.44	loss:  4.82878	125.05859369237062
updates: 1118/1140.44	loss:  4.82766	124.91879187540883
updates: 1120/1140.44	loss:  4.82747	124.89488799149356
updates: 1122/1140.44	loss:  4.82709	124.84750827625616
updates: 1124/1140.44	loss:  4.82671	124.79916803988598
updates: 1126/1140.44	loss:  4.82583	124.68999994273116
updates: 1128/1140.44	loss:  4.82479	124.5600589585652
updates: 1130/1140.44	loss:  4.82376	124.43179282419189
updates: 1132/1140.44	loss:  4.82308	124.34783719542669
updates: 1134/1140.44	loss:  4.8215	124.1509023840513
updates: 1136/1140.44	loss:  4.82027	123.99908809258902
updates: 1138/1140.44	loss:  4.81874	123.80952102600538

evaluation

task:

  • load the test set
  • write a function that computes perplexity for an arbitrary dataset, and use it to compute the perplexity of the test set (the formula is recalled below)
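For reference (standard definition, consistent with the np.exp(loss) calls below): perplexity is the exponentiated average cross-entropy of the next-token predictions,

$$\mathrm{PPL} = \exp\left(-\frac{1}{N}\sum_{i=1}^{N} \log p(w_i \mid w_{i-3}, w_{i-2}, w_{i-1})\right)$$

so a model that spreads probability uniformly over the 15005-token vocabulary has perplexity exactly 15005.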
pan_tadeusz_path_test= '/home/kuba/Syncthing/przedmioty/2020-02/ISI/zajecia9_ngramowy_model_jDDezykowy/pan-tadeusz-test.txt'
corpora_test = open(pan_tadeusz_path_test).read()
corpora_test_tokenized = list(tokenize(corpora_test,lowercase = True))
test_ids = get_token_id(corpora_test_tokenized)
test_ids = torch.tensor(get_samples(test_ids), dtype = torch.long, device = device)
test_ids
tensor([[15004, 15004, 15004, 15002],
        [15004, 15004, 15002,  5873],
        [15004, 15002,  5873, 15001],
        ...,
        [14552, 15001, 15001,  3116],
        [15001, 15001,  3116, 15001],
        [15001,  3116, 15001, 15001]], device='cuda:0')
def get_ppl(dataset_ids):
    lm.eval()

    batches = 0
    loss_sum = 0

    with torch.no_grad():  # evaluation only, no gradients needed
        for i in tqdm(range(0, len(dataset_ids)-BATCH_SIZE+1, BATCH_SIZE)):
            X = dataset_ids[i:i+BATCH_SIZE, :3]   # 3-token context
            Y = dataset_ids[i:i+BATCH_SIZE, 3]    # next token to predict
            predictions = lm(X)
            
            # uniform ("equally distributed") baseline:
            # predictions = torch.zeros_like(predictions)
            
            loss = criterion(predictions, Y)

            loss_sum += loss.item()
            batches += 1

            if i % 1000 == 0:
                loss_current = loss_sum / batches
                print(f'updates: {i // BATCH_SIZE}/{len(dataset_ids) / BATCH_SIZE}', end='\t')
                print('loss: ', round(loss_current, 5), end='\t')
                print(np.exp(loss_current))

    # perplexity over the whole dataset, not just up to the last printed update
    return np.exp(loss_sum / batches)
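A quick sanity check for the commented-out "equally distributed" baseline above (an illustrative snippet, not part of the original notebook): constant logits give a uniform distribution over the vocabulary, so the loss is log(V) and the perplexity equals the vocabulary size.

# Illustrative check: uniform logits => perplexity == len(vocab_itos) == 15005
uniform_logits = torch.zeros(1, len(vocab_itos), device=device)
uniform_loss = torch.nn.functional.cross_entropy(uniform_logits, torch.tensor([0], device=device))
print(torch.exp(uniform_loss))  # tensor(15005.)

The test perplexity of about 1174 reported below is far below this uninformed baseline, though still well above the ~124 reached on the training set.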
get_ppl(test_ids)
updates: 0/1140.44	loss:  7.93691	2798.69023798398
updates: 20/1140.44	loss:  7.2917	1468.0696196794395
updates: 40/1140.44	loss:  7.02505	1124.4520230095873
updates: 60/1140.44	loss:  7.02153	1120.498903823149
updates: 80/1140.44	loss:  6.97483	1069.377989926981
updates: 100/1140.44	loss:  7.01626	1114.614360205092
updates: 120/1140.44	loss:  7.01344	1111.4692099745646
updates: 140/1140.44	loss:  7.02334	1122.5331375146593
updates: 160/1140.44	loss:  7.02724	1126.9124589152598
updates: 180/1140.44	loss:  7.047	1149.409103844335
updates: 200/1140.44	loss:  7.05975	1164.1482767554144
updates: 220/1140.44	loss:  7.06818	1174.0069599198582

1174.0069599198582

model parameters

list(lm.parameters())
[Parameter containing:
 tensor([[ 1.2962,  2.2548, -0.5289,  ...,  0.7910,  0.3451,  3.1669],
         [ 0.6231, -1.0074,  0.4776,  ...,  1.1894,  1.4892,  1.0638],
         [ 0.6153,  0.2596, -1.9959,  ..., -0.5816,  0.6102,  1.2497],
         ...,
         [ 3.6131, -1.3489, -0.8311,  ...,  0.9109,  2.3637, -0.5194],
         [ 0.1942,  0.9902, -0.4598,  ...,  0.5558,  0.7963, -0.0882],
         [ 0.1043,  1.7519, -0.5018,  ..., -0.0734,  0.5121, -1.0132]],
        device='cuda:0', requires_grad=True),
 Parameter containing:
 tensor([[ 0.0084, -0.0304, -0.0707,  ...,  0.0578, -0.0801, -0.0080],
         [-0.1369, -0.1782,  0.2099,  ..., -0.0111, -0.0396, -0.1987],
         [-0.0470, -0.0926,  0.0525,  ..., -0.0092,  0.0290, -0.0435],
         ...,
         [-0.0967,  0.0055,  0.0719,  ..., -0.0688,  0.0067, -0.0878],
         [-0.1023, -0.1981,  0.1966,  ..., -0.0676, -0.0961, -0.1946],
         [-0.1015, -0.1989,  0.1953,  ..., -0.0674, -0.0961, -0.1947]],
        device='cuda:0', requires_grad=True),
 Parameter containing:
 tensor([ 0.7597, -0.4984, -0.0507,  ..., -0.2868, -1.0517, -1.0112],
        device='cuda:0', requires_grad=True)]
list(lm.parameters())[0].shape
torch.Size([15005, 200])
list(lm.parameters())[1].shape
torch.Size([15005, 600])
list(lm.parameters())[2].shape
torch.Size([15005])
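The three tensors above account for the whole model: a 15005×200 embedding matrix, a 600×15005 output weight matrix, and 15005 output biases. A quick way to total them (an illustrative one-liner, not from the original notebook):

sum(p.numel() for p in lm.parameters())  # 3_001_000 + 9_003_000 + 15_005 = 12_019_005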

Inference

'Gości innych nie widział oprócz spółleśników'
'Gości innych nie widział oprócz spółleśników'
tokenized = list(tokenize('Gości innych nie widział oprócz spółleśników',lowercase = True))
tokenized = tokenized[-NGRAMS:-1]  # last NGRAMS-1 tokens as context; the final word is the one to predict
tokenized
['nie', 'widział', 'oprócz']
ids = []
for word in tokenized:
    if word in vocab_stoi:
        ids.append(vocab_stoi[word])
    else:
        ids.append(vocab_stoi['<UNK>'])
ids
[5873, 13240, 6938]
lm.eval()
NeuralLM(
  (emb): Embedding(15005, 200)
  (fc1): Linear(in_features=600, out_features=15005, bias=True)
)
ids = torch.tensor(ids, dtype = torch.long, device = device)
ids
tensor([ 5873, 13240,  6938], device='cuda:0')
preds = lm(ids)  # ids is already a tensor on the right device; re-wrapping it with torch.tensor() would trigger a copy-construct UserWarning
torch.argmax(torch.softmax(preds,1),1).item()
10861
torch.max(torch.softmax(preds,1),1)
torch.return_types.max(
values=tensor([0.2408], device='cuda:0', grad_fn=<MaxBackward0>),
indices=tensor([10861], device='cuda:0'))
vocab_itos[torch.argmax(torch.softmax(preds,1),1).item()]
'sposobu'
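The same forward pass can be looped to generate text greedily. A minimal sketch (an illustration, not part of the original notebook; it reuses lm, vocab_stoi, vocab_itos, NGRAMS, device and tokenize defined above):

def generate(prompt, max_tokens=10):
    # tokenize the prompt and map it to ids, falling back to <UNK>
    ids = [vocab_stoi.get(t, vocab_stoi['<UNK>'])
           for t in tokenize(prompt, lowercase=True)]
    # left-pad so there are always NGRAMS-1 context tokens available
    ids = [vocab_stoi['<PAD>']] * max(0, NGRAMS - 1 - len(ids)) + ids
    generated = []
    lm.eval()
    with torch.no_grad():
        for _ in range(max_tokens):
            context = torch.tensor([ids[-(NGRAMS - 1):]],
                                   dtype=torch.long, device=device)
            next_id = torch.argmax(lm(context), dim=1).item()
            if next_id == vocab_stoi['<EOS>']:
                break
            ids.append(next_id)
            generated.append(vocab_itos[next_id])
    return ' '.join(generated)

generate('nie widział oprócz')

Greedy argmax tends to loop on frequent words; sampling from torch.softmax(preds, 1) instead would give more varied continuations.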