ium_470618/dane.ipynb


!kaggle competitions download -c titanic
/home/gedin/.local/lib/python3.10/site-packages/requests/__init__.py:102: RequestsDependencyWarning: urllib3 (1.26.13) or chardet (5.1.0)/charset_normalizer (2.0.12) doesn't match a supported version!
  warnings.warn("urllib3 ({}) or chardet ({})/charset_normalizer ({}) doesn't match a supported "
titanic.zip: Skipping, found more recently modified local copy (use --force to force download)
!unzip titanic.zip
Archive:  titanic.zip
  inflating: gender_submission.csv   
  inflating: test.csv                
  inflating: train.csv               

About the file

!wc -l train.csv
!wc -l test.csv
892 train.csv
419 test.csv

Each count includes the CSV header line, so there are 891 training records and 418 test records.
import pandas as pd
df = pd.read_csv("train.csv")
df.head(5)
PassengerId Survived Pclass Name Sex Age SibSp Parch Ticket Fare Cabin Embarked
0 1 0 3 Braund, Mr. Owen Harris male 22.0 1 0 A/5 21171 7.2500 NaN S
1 2 1 1 Cumings, Mrs. John Bradley (Florence Briggs Th... female 38.0 1 0 PC 17599 71.2833 C85 C
2 3 1 3 Heikkinen, Miss. Laina female 26.0 0 0 STON/O2. 3101282 7.9250 NaN S
3 4 1 1 Futrelle, Mrs. Jacques Heath (Lily May Peel) female 35.0 1 0 113803 53.1000 C123 S
4 5 0 3 Allen, Mr. William Henry male 35.0 0 0 373450 8.0500 NaN S
df.describe()
PassengerId Survived Pclass Age SibSp Parch Fare
count 891.000000 891.000000 891.000000 714.000000 891.000000 891.000000 891.000000
mean 446.000000 0.383838 2.308642 29.699118 0.523008 0.381594 32.204208
std 257.353842 0.486592 0.836071 14.526497 1.102743 0.806057 49.693429
min 1.000000 0.000000 1.000000 0.420000 0.000000 0.000000 0.000000
25% 223.500000 0.000000 2.000000 20.125000 0.000000 0.000000 7.910400
50% 446.000000 0.000000 3.000000 28.000000 0.000000 0.000000 14.454200
75% 668.500000 1.000000 3.000000 38.000000 1.000000 0.000000 31.000000
max 891.000000 1.000000 3.000000 80.000000 8.000000 6.000000 512.329200
df.hist(["Survived", "Pclass"])
array([[<Axes: title={'center': 'Survived'}>,
        <Axes: title={'center': 'Pclass'}>]], dtype=object)
embarked = df.value_counts("Embarked")
# Embarked will later be one-hot encoded (see the sketch after the plot output below)
embarked.plot.bar()
<Axes: xlabel='Embarked'>
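The one-hot step promised in the comment above never actually appears later in the notebook; a minimal sketch of how it could look with pandas (an assumed step, not executed here):

# Hypothetical one-hot encoding of Embarked (not run in this notebook)
embarked_onehot = pd.get_dummies(df["Embarked"], prefix="Embarked")
df = df.join(embarked_onehot)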
# Options considered for handling missing values: df.dropna() / df.fillna()
# Min-max normalization of the numeric columns to the [0, 1] range
columns_to_normalize = ['Age', 'Fare']
for colname in columns_to_normalize:
    df[colname] = (df[colname] - df[colname].min()) / (df[colname].max() - df[colname].min())
df.head(5)
PassengerId Survived Pclass Name Sex Age SibSp Parch Ticket Fare Cabin Embarked
0 1 0 3 Braund, Mr. Owen Harris male 0.271174 1 0 A/5 21171 0.014151 NaN S
1 2 1 1 Cumings, Mrs. John Bradley (Florence Briggs Th... female 0.472229 1 0 PC 17599 0.139136 C85 C
2 3 1 3 Heikkinen, Miss. Laina female 0.321438 0 0 STON/O2. 3101282 0.015469 NaN S
3 4 1 1 Futrelle, Mrs. Jacques Heath (Lily May Peel) female 0.434531 1 0 113803 0.103644 C123 S
4 5 0 3 Allen, Mr. William Henry male 0.434531 0 0 373450 0.015713 NaN S
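The same [0, 1] scaling can be expressed with scikit-learn; a minimal equivalent sketch (an alternative formulation, not what the notebook ran):

# Equivalent min-max scaling via scikit-learn (sketch)
from sklearn.preprocessing import MinMaxScaler
scaler = MinMaxScaler()
df[['Age', 'Fare']] = scaler.fit_transform(df[['Age', 'Fare']])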

Lab 5: ML

# data: inspect the available columns
cols = df.columns
print(cols)
Index(['PassengerId', 'Survived', 'Pclass', 'Name', 'Sex', 'Age', 'SibSp',
       'Parch', 'Ticket', 'Fare', 'Cabin', 'Embarked'],
      dtype='object')
import numpy as np
import torch
from torch import nn
import torch.nn.functional as F
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score
class Model(nn.Module):
    def __init__(self, input_dim):
        super(Model, self).__init__()
        self.layer1 = nn.Linear(input_dim, 50)
        self.layer2 = nn.Linear(50, 20)
        self.layer3 = nn.Linear(20, 2)

    def forward(self, x):
        x = F.relu(self.layer1(x))
        x = F.relu(self.layer2(x))
        # dim=1 makes the softmax dimension explicit (fixes the deprecation warning)
        x = F.softmax(self.layer3(x), dim=1)
        return x

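Note that nn.CrossEntropyLoss applies log-softmax internally, so feeding it softmax probabilities (as above) squashes the outputs twice; training still works, but the conventional pattern is to return raw logits and take the softmax only at inference time. A minimal sketch of that variant (hypothetical, not the class used below):

# Sketch: return raw logits and pair directly with nn.CrossEntropyLoss
class LogitModel(nn.Module):
    def __init__(self, input_dim):
        super().__init__()
        self.layer1 = nn.Linear(input_dim, 50)
        self.layer2 = nn.Linear(50, 20)
        self.layer3 = nn.Linear(20, 2)

    def forward(self, x):
        x = F.relu(self.layer1(x))
        x = F.relu(self.layer2(x))
        return self.layer3(x)  # probabilities via F.softmax(logits, dim=1) at inference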
df = df.dropna()  # drops every row with any NaN (Cabin included); 183 rows remain
X = df[['Pclass', 'Sex', 'Age', 'SibSp', 'Fare']].copy()
Y = df[['Survived']]

# .copy() above avoids the SettingWithCopyWarning on the assignment below
X['Sex'] = X['Sex'].map({'female': 0, 'male': 1})

X
Pclass Sex Age SibSp Fare
1 1 0 0.472229 1 0.139136
3 1 0 0.434531 1 0.103644
6 1 1 0.673285 0 0.101229
10 3 0 0.044986 1 0.032596
11 1 0 0.723549 0 0.051822
... ... ... ... ... ...
871 1 0 0.585323 1 0.102579
872 1 1 0.409399 0 0.009759
879 1 0 0.698417 0 0.162314
887 1 0 0.233476 0 0.058556
889 1 1 0.321438 0 0.058556

183 rows × 5 columns

from sklearn.preprocessing import LabelEncoder
Y = np.ravel(Y)
# Survived is already a 0/1 label, so the encoder is effectively an identity mapping here
encoder = LabelEncoder()
encoder.fit(Y)
Y = encoder.transform(Y)
print(Y)
[1 1 0 1 1 1 1 0 1 0 0 1 0 1 0 0 1 0 0 0 1 0 1 0 0 0 1 0 0 0 1 1 1 1 0 1 1
 1 1 1 0 1 0 0 1 0 0 1 1 0 1 1 0 0 1 1 1 1 1 1 1 1 1 1 1 0 0 0 1 0 1 1 1 1
 1 1 1 0 1 1 1 1 1 1 0 1 0 1 1 0 1 0 1 0 1 1 1 0 0 1 0 1 0 1 0 1 1 1 0 1 1
 0 1 1 1 1 1 1 1 0 1 1 1 1 1 1 1 0 1 1 1 1 1 1 0 0 0 1 1 1 1 0 0 1 1 1 1 1
 0 1 1 1 1 1 0 1 0 0 1 1 1 1 0 1 1 0 0 1 1 0 1 1 1 1 1 1 1 0 1 0 1 1 1]
X_train, X_test, Y_train, Y_test = train_test_split(X, Y, random_state=42, shuffle=True)
Xt = torch.tensor(X_train.values, dtype=torch.float32)
Yt = torch.tensor(Y_train, dtype=torch.long)
Yt.shape
torch.Size([137])
model = Model(Xt.shape[1])
optimizer = torch.optim.Adam(model.parameters(), lr=0.01)
loss_fn = nn.CrossEntropyLoss()
epochs = 500

def print_(loss):
    print("The loss calculated: ", loss)

# DataLoader is unused here (the loop trains on the full batch);
# a mini-batch sketch follows the training log below
from torch.utils.data import DataLoader

for epoch in range(1, epochs+1):
    print("Epoch #", epoch)
    y_pred = model(Xt)
    loss = loss_fn(y_pred, Yt)
    print_(loss.item())

    optimizer.zero_grad()
    loss.backward()
    optimizer.step()
Epoch # 1
The loss calculated:  0.6927047371864319
Epoch # 2
The loss calculated:  0.6760580539703369
Epoch # 3
The loss calculated:  0.6577760577201843
...
Epoch # 498
The loss calculated:  0.4367024898529053
Epoch # 499
The loss calculated:  0.43679192662239075
Epoch # 500
The loss calculated:  0.436893105506897
(log truncated: the loss falls from 0.693 at epoch 1 to 0.437 at epoch 500)
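The training cell imports DataLoader but never uses it; a minimal mini-batch sketch of how the same loop could consume it (an assumed variant, not what produced the log above):

# Hypothetical mini-batch version of the training loop (not executed)
from torch.utils.data import TensorDataset, DataLoader
loader = DataLoader(TensorDataset(Xt, Yt), batch_size=32, shuffle=True)
for epoch in range(1, epochs + 1):
    for xb, yb in loader:
        loss = loss_fn(model(xb), yb)
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()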
x_test = torch.tensor(X_test.values, dtype=torch.float32)
pred = model(x_test)
pred = pred.detach().numpy()
pred
array([[1.3141002e-01, 8.6859006e-01],
       [3.0172759e-16, 1.0000000e+00],
       [5.9731257e-21, 1.0000000e+00],
       [8.7287611e-01, 1.2712391e-01],
       [3.3298880e-01, 6.6701120e-01],
       [9.9992323e-01, 7.6730175e-05],
       [6.9742590e-01, 3.0257410e-01],
       [1.8122771e-10, 1.0000000e+00],
       [8.1137923e-18, 1.0000000e+00],
       [9.9391985e-01, 6.0801902e-03],
       [9.9800962e-01, 1.9904438e-03],
       [1.4347603e-12, 1.0000000e+00],
       [8.8945550e-01, 1.1054446e-01],
       [5.3068206e-19, 1.0000000e+00],
       [4.4245785e-01, 5.5754209e-01],
       [3.9323148e-01, 6.0676849e-01],
       [5.0538932e-23, 1.0000000e+00],
       [6.8482041e-01, 3.1517953e-01],
       [9.9650586e-01, 3.4941665e-03],
       [3.6827392e-24, 1.0000000e+00],
       [3.4629088e-12, 1.0000000e+00],
       [2.4781654e-11, 1.0000000e+00],
       [8.4075117e-01, 1.5924890e-01],
       [9.9999881e-01, 1.2382451e-06],
       [9.9950111e-01, 4.9885432e-04],
       [1.1888127e-14, 1.0000000e+00],
       [1.5869159e-14, 1.0000000e+00],
       [9.4683814e-01, 5.3161871e-02],
       [7.3645154e-08, 9.9999988e-01],
       [1.2287432e-11, 1.0000000e+00],
       [5.7253930e-15, 1.0000000e+00],
       [7.9019060e-08, 9.9999988e-01],
       [5.5769521e-01, 4.4230482e-01],
       [1.8103112e-14, 1.0000000e+00],
       [9.9812454e-01, 1.8754901e-03],
       [2.5346470e-05, 9.9997461e-01],
       [1.6169167e-17, 1.0000000e+00],
       [9.3050295e-01, 6.9496997e-02],
       [6.1799776e-02, 9.3820024e-01],
       [9.7120519e-06, 9.9999034e-01],
       [9.9844283e-01, 1.5571705e-03],
       [8.0438519e-01, 1.9561480e-01],
       [2.0653886e-16, 1.0000000e+00],
       [7.0155847e-01, 2.9844159e-01],
       [9.9505252e-01, 4.9475045e-03],
       [9.3824464e-01, 6.1755374e-02]], dtype=float32)
print ("The accuracy is", accuracy_score(Y_test, np.argmax(pred, axis=1)))
The accuracy is 0.7391304347826086
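test.csv and gender_submission.csv were unzipped at the top but never used; a sketch of producing a Kaggle submission in the PassengerId,Survived format, assuming the test set gets the same preprocessing as train.csv (the fillna(0) is a simplification for the test set's missing Age/Fare values):

# Hypothetical submission step (not part of the original notebook)
test_df = pd.read_csv("test.csv")
test_df['Sex'] = test_df['Sex'].map({'female': 0, 'male': 1})
for colname in ['Age', 'Fare']:
    test_df[colname] = (test_df[colname] - test_df[colname].min()) / (test_df[colname].max() - test_df[colname].min())
features = test_df[['Pclass', 'Sex', 'Age', 'SibSp', 'Fare']].fillna(0)
with torch.no_grad():
    preds = model(torch.tensor(features.values, dtype=torch.float32)).argmax(dim=1).numpy()
pd.DataFrame({'PassengerId': test_df['PassengerId'], 'Survived': preds}).to_csv("submission.csv", index=False)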