From e933463a3201c0d9eed633ca3443a71a201b8375 Mon Sep 17 00:00:00 2001
From: Jakub Pokrywka
Date: Wed, 9 Dec 2020 10:12:35 +0100
Subject: [PATCH] pytorch tutorial

---
 iris.data            | 100 ++++++++++++++++++++++++++++
 iris.data.multilabel | 151 +++++++++++++++++++++++++++++++++++++++++++
 iris.names           |  69 ++++++++++++++++++++
 mieszkania.tsv       | 121 ++++++++++++++++++++++++++++++++++
 pytorch1.py          |  22 +++++++
 pytorch10.py         |  71 ++++++++++++++++++++
 pytorch2.py          |  29 +++++++++
 pytorch3.py          |  27 ++++++++
 pytorch4.py          |  29 +++++++++
 pytorch5.py          |  32 +++++++++
 pytorch6.py          |  29 +++++++++
 pytorch7.py          |  41 ++++++++++++
 pytorch8.py          |  68 +++++++++++++++++++
 pytorch9.py          |  69 ++++++++++++++++++++
 14 files changed, 858 insertions(+)
 create mode 100644 iris.data
 create mode 100644 iris.data.multilabel
 create mode 100644 iris.names
 create mode 100644 mieszkania.tsv
 create mode 100755 pytorch1.py
 create mode 100755 pytorch10.py
 create mode 100755 pytorch2.py
 create mode 100755 pytorch3.py
 create mode 100755 pytorch4.py
 create mode 100755 pytorch5.py
 create mode 100755 pytorch6.py
 create mode 100755 pytorch7.py
 create mode 100755 pytorch8.py
 create mode 100755 pytorch9.py

diff --git a/iris.data b/iris.data
new file mode 100644
index 0000000..a5ec827
--- /dev/null
+++ b/iris.data
@@ -0,0 +1,100 @@
+5.9,3.0,4.2,1.5,Iris-versicolor
+5.9,3.2,4.8,1.8,Iris-versicolor
+6.7,3.1,4.7,1.5,Iris-versicolor
+6.1,2.8,4.0,1.3,Iris-versicolor
+5.2,4.1,1.5,0.1,Iris-setosa
+5.4,3.4,1.7,0.2,Iris-setosa
+6.6,2.9,4.6,1.3,Iris-versicolor
+4.9,2.4,3.3,1.0,Iris-versicolor
+4.9,3.1,1.5,0.1,Iris-setosa
+5.0,3.3,1.4,0.2,Iris-setosa
+4.8,3.4,1.9,0.2,Iris-setosa
+5.6,2.9,3.6,1.3,Iris-versicolor
+6.7,3.1,4.4,1.4,Iris-versicolor
+5.1,3.8,1.5,0.3,Iris-setosa
+5.7,3.0,4.2,1.2,Iris-versicolor
+5.8,2.7,4.1,1.0,Iris-versicolor
+6.1,2.8,4.7,1.2,Iris-versicolor
+4.4,2.9,1.4,0.2,Iris-setosa
+6.4,2.9,4.3,1.3,Iris-versicolor
+6.0,2.9,4.5,1.5,Iris-versicolor
+5.2,3.4,1.4,0.2,Iris-setosa
+6.5,2.8,4.6,1.5,Iris-versicolor
+6.8,2.8,4.8,1.4,Iris-versicolor
+5.1,3.8,1.9,0.4,Iris-setosa
+4.5,2.3,1.3,0.3,Iris-setosa
+5.0,3.5,1.6,0.6,Iris-setosa
+5.4,3.9,1.3,0.4,Iris-setosa
+7.0,3.2,4.7,1.4,Iris-versicolor
+4.7,3.2,1.3,0.2,Iris-setosa
+5.6,3.0,4.5,1.5,Iris-versicolor
+5.5,2.5,4.0,1.3,Iris-versicolor
+5.7,3.8,1.7,0.3,Iris-setosa
+5.0,3.2,1.2,0.2,Iris-setosa
+6.7,3.0,5.0,1.7,Iris-versicolor
+5.2,2.7,3.9,1.4,Iris-versicolor
+5.5,2.6,4.4,1.2,Iris-versicolor
+5.4,3.7,1.5,0.2,Iris-setosa
+5.0,2.0,3.5,1.0,Iris-versicolor
+5.7,2.9,4.2,1.3,Iris-versicolor
+6.6,3.0,4.4,1.4,Iris-versicolor
+5.1,3.8,1.6,0.2,Iris-setosa
+5.7,2.8,4.1,1.3,Iris-versicolor
+5.8,2.6,4.0,1.2,Iris-versicolor
+4.9,3.1,1.5,0.1,Iris-setosa
+5.6,2.5,3.9,1.1,Iris-versicolor
+4.8,3.0,1.4,0.1,Iris-setosa
+5.1,3.3,1.7,0.5,Iris-setosa
+5.8,2.7,3.9,1.2,Iris-versicolor
+5.7,2.8,4.5,1.3,Iris-versicolor
+6.0,2.7,5.1,1.6,Iris-versicolor
+5.5,2.3,4.0,1.3,Iris-versicolor
+6.1,3.0,4.6,1.4,Iris-versicolor
+5.1,3.4,1.5,0.2,Iris-setosa
+5.4,3.4,1.5,0.4,Iris-setosa
+6.0,2.2,4.0,1.0,Iris-versicolor
+5.0,3.4,1.6,0.4,Iris-setosa
+6.3,3.3,4.7,1.6,Iris-versicolor
+5.7,2.6,3.5,1.0,Iris-versicolor
+4.6,3.2,1.4,0.2,Iris-setosa
+5.1,3.5,1.4,0.2,Iris-setosa
+6.4,3.2,4.5,1.5,Iris-versicolor
+5.0,3.5,1.3,0.3,Iris-setosa
+4.6,3.4,1.4,0.3,Iris-setosa
+4.9,3.0,1.4,0.2,Iris-setosa
+5.1,2.5,3.0,1.1,Iris-versicolor
+5.6,2.7,4.2,1.3,Iris-versicolor
+6.2,2.9,4.3,1.3,Iris-versicolor
+6.0,3.4,4.5,1.6,Iris-versicolor
+4.4,3.2,1.3,0.2,Iris-setosa
+5.2,3.5,1.5,0.2,Iris-setosa
+5.8,4.0,1.2,0.2,Iris-setosa
+5.0,3.6,1.4,0.2,Iris-setosa
+4.3,3.0,1.1,0.1,Iris-setosa
+5.7,4.4,1.5,0.4,Iris-setosa
+5.3,3.7,1.5,0.2,Iris-setosa
+4.8,3.1,1.6,0.2,Iris-setosa
+5.4,3.9,1.7,0.4,Iris-setosa
+5.6,3.0,4.1,1.3,Iris-versicolor
+4.8,3.4,1.6,0.2,Iris-setosa
+4.7,3.2,1.6,0.2,Iris-setosa
+5.0,3.4,1.5,0.2,Iris-setosa
+6.3,2.3,4.4,1.3,Iris-versicolor
+5.5,3.5,1.3,0.2,Iris-setosa
+6.2,2.2,4.5,1.5,Iris-versicolor
+5.1,3.5,1.4,0.3,Iris-setosa
+4.6,3.6,1.0,0.2,Iris-setosa
+6.1,2.9,4.7,1.4,Iris-versicolor
+4.4,3.0,1.3,0.2,Iris-setosa
+4.9,3.1,1.5,0.1,Iris-setosa
+5.0,2.3,3.3,1.0,Iris-versicolor
+6.3,2.5,4.9,1.5,Iris-versicolor
+5.5,4.2,1.4,0.2,Iris-setosa
+5.5,2.4,3.7,1.0,Iris-versicolor
+5.1,3.7,1.5,0.4,Iris-setosa
+5.0,3.0,1.6,0.2,Iris-setosa
+4.6,3.1,1.5,0.2,Iris-setosa
+4.8,3.0,1.4,0.3,Iris-setosa
+5.5,2.4,3.8,1.1,Iris-versicolor
+5.4,3.0,4.5,1.5,Iris-versicolor
+6.9,3.1,4.9,1.5,Iris-versicolor
diff --git a/iris.data.multilabel b/iris.data.multilabel
new file mode 100644
index 0000000..d59b708
--- /dev/null
+++ b/iris.data.multilabel
@@ -0,0 +1,151 @@
+6.4,3.2,5.3,2.3,Iris-virginica
+5.1,3.8,1.5,0.3,Iris-setosa
+5.0,3.5,1.6,0.6,Iris-setosa
+5.7,3.0,4.2,1.2,Iris-versicolor
+5.7,2.5,5.0,2.0,Iris-virginica
+5.0,3.4,1.6,0.4,Iris-setosa
+5.4,3.4,1.5,0.4,Iris-setosa
+5.6,2.5,3.9,1.1,Iris-versicolor
+4.9,3.1,1.5,0.1,Iris-setosa
+4.9,2.4,3.3,1.0,Iris-versicolor
+5.9,3.2,4.8,1.8,Iris-versicolor
+6.7,3.1,5.6,2.4,Iris-virginica
+6.5,3.0,5.2,2.0,Iris-virginica
+5.5,2.3,4.0,1.3,Iris-versicolor
+6.7,3.3,5.7,2.1,Iris-virginica
+6.3,2.3,4.4,1.3,Iris-versicolor
+5.4,3.9,1.3,0.4,Iris-setosa
+6.4,3.2,4.5,1.5,Iris-versicolor
+4.9,3.1,1.5,0.1,Iris-setosa
+7.4,2.8,6.1,1.9,Iris-virginica
+4.8,3.4,1.6,0.2,Iris-setosa
+5.1,3.4,1.5,0.2,Iris-setosa
+6.0,2.2,4.0,1.0,Iris-versicolor
+6.5,3.0,5.5,1.8,Iris-virginica
+4.4,3.2,1.3,0.2,Iris-setosa
+5.0,3.2,1.2,0.2,Iris-setosa
+7.6,3.0,6.6,2.1,Iris-virginica
+5.0,2.3,3.3,1.0,Iris-versicolor
+5.7,2.9,4.2,1.3,Iris-versicolor
+5.1,3.3,1.7,0.5,Iris-setosa
+5.8,2.7,4.1,1.0,Iris-versicolor
+5.7,2.8,4.5,1.3,Iris-versicolor
+6.1,2.8,4.7,1.2,Iris-versicolor
+4.3,3.0,1.1,0.1,Iris-setosa
+5.5,2.4,3.8,1.1,Iris-versicolor
+5.8,2.7,5.1,1.9,Iris-virginica
+6.1,2.9,4.7,1.4,Iris-versicolor
+6.3,2.9,5.6,1.8,Iris-virginica
+6.1,3.0,4.9,1.8,Iris-virginica
+5.2,3.5,1.5,0.2,Iris-setosa
+4.6,3.1,1.5,0.2,Iris-setosa
+5.2,2.7,3.9,1.4,Iris-versicolor
+6.4,2.7,5.3,1.9,Iris-virginica
+6.3,2.5,4.9,1.5,Iris-versicolor
+5.5,4.2,1.4,0.2,Iris-setosa
+6.1,2.6,5.6,1.4,Iris-virginica
+4.8,3.0,1.4,0.3,Iris-setosa
+5.8,2.7,5.1,1.9,Iris-virginica
+4.9,3.1,1.5,0.1,Iris-setosa
+6.2,2.9,4.3,1.3,Iris-versicolor
+7.0,3.2,4.7,1.4,Iris-versicolor
+6.7,3.0,5.0,1.7,Iris-versicolor
+6.3,3.4,5.6,2.4,Iris-virginica
+5.6,3.0,4.5,1.5,Iris-versicolor
+5.9,3.0,5.1,1.8,Iris-virginica
+5.0,3.5,1.3,0.3,Iris-setosa
+7.2,3.0,5.8,1.6,Iris-virginica
+5.1,3.5,1.4,0.3,Iris-setosa
+5.1,3.8,1.9,0.4,Iris-setosa
+4.7,3.2,1.6,0.2,Iris-setosa
+7.2,3.6,6.1,2.5,Iris-virginica
+
+5.5,2.4,3.7,1.0,Iris-versicolor
+5.6,3.0,4.1,1.3,Iris-versicolor
+6.8,3.2,5.9,2.3,Iris-virginica
+5.4,3.7,1.5,0.2,Iris-setosa
+6.3,2.5,5.0,1.9,Iris-virginica
+4.6,3.4,1.4,0.3,Iris-setosa
+4.9,2.5,4.5,1.7,Iris-virginica
+5.0,3.3,1.4,0.2,Iris-setosa
+5.7,3.8,1.7,0.3,Iris-setosa
+4.4,2.9,1.4,0.2,Iris-setosa
+7.7,3.8,6.7,2.2,Iris-virginica
+6.5,3.0,5.8,2.2,Iris-virginica
+6.7,2.5,5.8,1.8,Iris-virginica
+7.3,2.9,6.3,1.8,Iris-virginica
+6.2,2.2,4.5,1.5,Iris-versicolor
+6.0,2.7,5.1,1.6,Iris-versicolor
+6.3,3.3,4.7,1.6,Iris-versicolor
+6.8,2.8,4.8,1.4,Iris-versicolor
+6.5,2.8,4.6,1.5,Iris-versicolor
+6.3,2.7,4.9,1.8,Iris-virginica
+6.6,2.9,4.6,1.3,Iris-versicolor
+6.9,3.1,5.1,2.3,Iris-virginica
+5.4,3.9,1.7,0.4,Iris-setosa
+5.7,4.4,1.5,0.4,Iris-setosa
+6.5,3.2,5.1,2.0,Iris-virginica
+6.9,3.2,5.7,2.3,Iris-virginica
+7.1,3.0,5.9,2.1,Iris-virginica
+5.8,4.0,1.2,0.2,Iris-setosa
+5.7,2.6,3.5,1.0,Iris-versicolor
+7.7,3.0,6.1,2.3,Iris-virginica
+6.3,3.3,6.0,2.5,Iris-virginica
+4.7,3.2,1.3,0.2,Iris-setosa
+5.6,2.9,3.6,1.3,Iris-versicolor
+6.0,3.4,4.5,1.6,Iris-versicolor
+5.6,2.7,4.2,1.3,Iris-versicolor
+4.4,3.0,1.3,0.2,Iris-setosa
+5.8,2.8,5.1,2.4,Iris-virginica
+6.7,3.1,4.7,1.5,Iris-versicolor
+6.6,3.0,4.4,1.4,Iris-versicolor
+4.8,3.1,1.6,0.2,Iris-setosa
+5.5,2.5,4.0,1.3,Iris-versicolor
+6.4,2.9,4.3,1.3,Iris-versicolor
+6.3,2.8,5.1,1.5,Iris-virginica
+5.1,2.5,3.0,1.1,Iris-versicolor
+6.4,2.8,5.6,2.1,Iris-virginica
+4.5,2.3,1.3,0.3,Iris-setosa
+6.4,2.8,5.6,2.2,Iris-virginica
+4.8,3.0,1.4,0.1,Iris-setosa
+7.9,3.8,6.4,2.0,Iris-virginica
+6.7,3.1,4.4,1.4,Iris-versicolor
+5.0,2.0,3.5,1.0,Iris-versicolor
+5.4,3.0,4.5,1.5,Iris-versicolor
+5.0,3.4,1.5,0.2,Iris-setosa
+5.2,3.4,1.4,0.2,Iris-setosa
+5.9,3.0,4.2,1.5,Iris-versicolor
+5.1,3.7,1.5,0.4,Iris-setosa
+4.9,3.0,1.4,0.2,Iris-setosa
+4.8,3.4,1.9,0.2,Iris-setosa
+6.2,2.8,4.8,1.8,Iris-virginica
+5.3,3.7,1.5,0.2,Iris-setosa
+5.5,2.6,4.4,1.2,Iris-versicolor
+6.2,3.4,5.4,2.3,Iris-virginica
+5.1,3.8,1.6,0.2,Iris-setosa
+5.8,2.7,3.9,1.2,Iris-versicolor
+6.9,3.1,4.9,1.5,Iris-versicolor
+6.1,2.8,4.0,1.3,Iris-versicolor
+6.7,3.3,5.7,2.5,Iris-virginica
+7.7,2.8,6.7,2.0,Iris-virginica
+6.1,3.0,4.6,1.4,Iris-versicolor
+5.0,3.0,1.6,0.2,Iris-setosa
+5.4,3.4,1.7,0.2,Iris-setosa
+6.8,3.0,5.5,2.1,Iris-virginica
+5.7,2.8,4.1,1.3,Iris-versicolor
+5.8,2.6,4.0,1.2,Iris-versicolor
+5.6,2.8,4.9,2.0,Iris-virginica
+4.6,3.2,1.4,0.2,Iris-setosa
+6.4,3.1,5.5,1.8,Iris-virginica
+7.7,2.6,6.9,2.3,Iris-virginica
+5.1,3.5,1.4,0.2,Iris-setosa
+6.7,3.0,5.2,2.3,Iris-virginica
+5.0,3.6,1.4,0.2,Iris-setosa
+6.9,3.1,5.4,2.1,Iris-virginica
+4.6,3.6,1.0,0.2,Iris-setosa
+5.5,3.5,1.3,0.2,Iris-setosa
+5.2,4.1,1.5,0.1,Iris-setosa
+6.0,2.2,5.0,1.5,Iris-virginica
+6.0,3.0,4.8,1.8,Iris-virginica
+7.2,3.2,6.0,1.8,Iris-virginica
+6.0,2.9,4.5,1.5,Iris-versicolor
diff --git a/iris.names b/iris.names
new file mode 100644
index 0000000..062b486
--- /dev/null
+++ b/iris.names
@@ -0,0 +1,69 @@
+1. Title: Iris Plants Database
+   Updated Sept 21 by C.Blake - Added discrepancy information
+
+2. Sources:
+     (a) Creator: R.A. Fisher
+     (b) Donor: Michael Marshall (MARSHALL%PLU@io.arc.nasa.gov)
+     (c) Date: July, 1988
+
+3. Past Usage:
+   - Publications: too many to mention!!!  Here are a few.
+   1. Fisher,R.A. "The use of multiple measurements in taxonomic problems"
+      Annual Eugenics, 7, Part II, 179-188 (1936); also in "Contributions
+      to Mathematical Statistics" (John Wiley, NY, 1950).
+   2. Duda,R.O., & Hart,P.E. (1973) Pattern Classification and Scene Analysis.
+      (Q327.D83) John Wiley & Sons.  ISBN 0-471-22361-1.  See page 218.
+   3. Dasarathy, B.V. (1980) "Nosing Around the Neighborhood: A New System
+      Structure and Classification Rule for Recognition in Partially Exposed
+      Environments".  IEEE Transactions on Pattern Analysis and Machine
+      Intelligence, Vol. PAMI-2, No. 1, 67-71.
+      -- Results:
+         -- very low misclassification rates (0% for the setosa class)
+   4. Gates, G.W. (1972) "The Reduced Nearest Neighbor Rule".  IEEE
+      Transactions on Information Theory, May 1972, 431-433.
+      -- Results:
+         -- very low misclassification rates again
+   5. See also: 1988 MLC Proceedings, 54-64.  Cheeseman et al's AUTOCLASS II
+      conceptual clustering system finds 3 classes in the data.
+
+4. Relevant Information:
+   --- This is perhaps the best known database to be found in the pattern
+       recognition literature.  Fisher's paper is a classic in the field
+       and is referenced frequently to this day.  (See Duda & Hart, for
+       example.)  The data set contains 3 classes of 50 instances each,
+       where each class refers to a type of iris plant.  One class is
+       linearly separable from the other 2; the latter are NOT linearly
+       separable from each other.
+   --- Predicted attribute: class of iris plant.
+   --- This is an exceedingly simple domain.
+   --- This data differs from the data presented in Fisher's article
+       (identified by Steve Chadwick, spchadwick@espeedaz.net)
+       The 35th sample should be: 4.9,3.1,1.5,0.2,"Iris-setosa"
+       where the error is in the fourth feature.
+       The 38th sample: 4.9,3.6,1.4,0.1,"Iris-setosa"
+       where the errors are in the second and third features.
+
+5. Number of Instances: 150 (50 in each of three classes)
+
+6. Number of Attributes: 4 numeric, predictive attributes and the class
+
+7. Attribute Information:
+   1. sepal length in cm
+   2. sepal width in cm
+   3. petal length in cm
+   4. petal width in cm
+   5. class:
+      -- Iris Setosa
+      -- Iris Versicolour
+      -- Iris Virginica
+
+8. Missing Attribute Values: None
+
+Summary Statistics:
+                 Min  Max   Mean    SD   Class Correlation
+   sepal length: 4.3  7.9   5.84  0.83    0.7826
+    sepal width: 2.0  4.4   3.05  0.43   -0.4194
+   petal length: 1.0  6.9   3.76  1.76    0.9490  (high!)
+    petal width: 0.1  2.5   1.20  0.76    0.9565  (high!)
+
+9. Class Distribution: 33.3% for each of 3 classes.
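A quick illustration, not part of the patch files, of how the attribute list in iris.names maps onto the CSV columns. The column names below are an assumption taken from iris.names; the scripts in this patch instead load the file with header=None and address the columns by integer index. Note that iris.data above contains only the two classes used for binary classification, while iris.data.multilabel contains all three.

    import pandas as pd

    # hypothetical named-column load; pytorch5.py..pytorch10.py keep integer column labels
    columns = ['sepal_length', 'sepal_width', 'petal_length', 'petal_width', 'class']
    iris = pd.read_csv('iris.data', sep=',', header=None, names=columns)
    print(iris['class'].value_counts())  # 50 Iris-setosa, 50 Iris-versicolor
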
diff --git a/mieszkania.tsv b/mieszkania.tsv
new file mode 100644
index 0000000..e893fea
--- /dev/null
+++ b/mieszkania.tsv
@@ -0,0 +1,121 @@
+powierzchnia	cena
+53	215000
+60.01	219990
+54	285000
+60	330000
+63	212000
+39	219000
+76.11	399000
+48	119000
+42.19	260000
+53.41	323000
+65.65	555000
+65	185000
+55	247000
+100	280000
+56	224000
+39	230000
+42.3	179000
+49.65	305000
+68	345000
+37	145000
+103	529000
+62.3	209000
+17.65	42000
+45	500000
+36.15	140000
+45	159000
+50	130000
+48	84000
+36	359000
+39.3	116400
+49.48	136950
+26	85000
+72	469000
+64	239000
+55	435000
+90	175903
+90	175903
+90	175903
+127.88	1710000
+59	649000
+48.7	240000
+73	259000
+32.9	275000
+64	170000
+44.72	174408
+68	275000
+38	323000
+35	110000
+63	165000
+25	69000
+50	290000
+76.312	572325
+65	429000
+52.5	499000
+58	145000
+34	95000
+46	280000
+38	120000
+52	269000
+47	105000
+63	266000
+67.79	275000
+60	550000
+107	1230000
+53	228000
+48.65	148000
+39	140000
+23	170000
+35	195000
+71.19	245000
+75	329000
+53	185000
+51	135000
+42	133000
+38	142000
+45.6	470000
+50	194000
+29	158999
+28.8	199000
+36	199000
+57.43	385621
+57.71	402305
+60.12	395000
+38	210000
+56.28	419000
+60	346800
+41	295000
+28.7	219000
+39	275000
+37	105000
+47	330000
+64	435000
+96	151200
+35.34	87000
+101	489000
+50	129000
+49.5	315000
+14	2000
+31	110000
+50.9	265000
+117	129000
+52.2	250000
+28	140000
+15	5000
+41.7	249000
+56.4	490000
+30.9	161000
+42.3	229000
+53	270000
+72.4	409000
+52.9	370000
+37.77	135000
+82	260000
+32	195000
+59	590000
+62.01	205000
+52.5	543000
+56	170000
+67.61	285000
+51	494000
diff --git a/pytorch1.py b/pytorch1.py
new file mode 100755
index 0000000..deaca89
--- /dev/null
+++ b/pytorch1.py
@@ -0,0 +1,22 @@
+#!/usr/bin/python3
+
+import torch
+
+
+def fun(x):
+    return 2*x**4 - x**3 + 3.5*x + 10
+
+
+x = torch.tensor(5., requires_grad=True)
+
+learning_rate = torch.tensor(0.01)
+
+for _ in range(100):
+    y = fun(x)
+    print(x, " => ", y)
+    y.backward()
+
+    with torch.no_grad():
+        x = x - learning_rate * x.grad
+
+    x.requires_grad_(True)
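For comparison, a minimal sketch (not included in the patch) of the same scalar minimisation as pytorch1.py, but with the manual update and requires_grad bookkeeping replaced by torch.optim.SGD; pytorch6.py introduces the optimizer in the same way later on.

    import torch

    def fun(x):
        return 2*x**4 - x**3 + 3.5*x + 10

    x = torch.tensor(5., requires_grad=True)
    optimizer = torch.optim.SGD([x], lr=0.01)  # same learning rate as pytorch1.py

    for _ in range(100):
        optimizer.zero_grad()  # clear the gradient accumulated in the previous step
        y = fun(x)
        y.backward()           # compute dy/dx
        optimizer.step()       # x <- x - lr * x.grad
        print(x.item(), '=>', y.item())
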
diff --git a/pytorch10.py b/pytorch10.py
new file mode 100755
index 0000000..944cb60
--- /dev/null
+++ b/pytorch10.py
@@ -0,0 +1,71 @@
+#!/usr/bin/python3
+
+import torch
+import pandas as pd
+from sklearn.model_selection import train_test_split
+
+data = pd.read_csv('iris.data.multilabel', sep=',', header=None)
+NAMES_DICT = {
+    'Iris-setosa': 0,
+    'Iris-versicolor': 1,
+    'Iris-virginica': 2}
+
+data[5] = data[4].apply(lambda x: NAMES_DICT[x])
+
+x = torch.tensor(data[[0,1,2,3]].values, dtype=torch.float)
+y = torch.tensor(data[5], dtype=torch.long)
+
+X_train, X_test, y_train, y_test = train_test_split(x, y, random_state=42)
+
+
+class Network(torch.nn.Module):
+
+    def __init__(self):
+        super(Network, self).__init__()
+        self.fc1 = torch.nn.Linear(4, 4)
+        self.fc2 = torch.nn.Linear(4, 3)
+
+    def forward(self, x):
+        x = self.fc1(x)
+        x = self.fc2(x)
+        x = torch.nn.functional.softmax(x, dim=1)
+        return x
+
+
+network = Network()
+optimizer = torch.optim.SGD(network.parameters(), lr=0.002)
+criterion = torch.nn.CrossEntropyLoss(reduction='sum')
+
+samples_in_batch = 5
+
+for epoch in range(3000):
+
+    network.train()
+    for i in range(0, len(X_train), samples_in_batch):
+        batch_x = X_train[i:i + samples_in_batch]
+        batch_y = y_train[i:i + samples_in_batch]
+        optimizer.zero_grad()
+        ypredicted = network(batch_x)
+
+        loss = criterion(ypredicted, batch_y)
+
+        loss.backward()
+        optimizer.step()
+
+    network.eval()
+    predicted_correct = 0
+    loss_sum = 0
+    for i in range(0, len(X_test), samples_in_batch):
+        batch_x = X_test[i:i + samples_in_batch]
+        batch_y = y_test[i:i + samples_in_batch]
+        optimizer.zero_grad()
+        ypredicted = network(batch_x)
+        y_most_probable_class = torch.max(ypredicted, 1)[1]
+
+        loss = criterion(ypredicted, batch_y)
+
+        predicted_correct += sum(y_most_probable_class == batch_y).item()
+
+
+    accuracy = 100 * predicted_correct / len(y_test)
+    print('{:.3}'.format(loss.item()), "\t => ", accuracy, '% accuracy')
diff --git a/pytorch2.py b/pytorch2.py
new file mode 100755
index 0000000..4864dca
--- /dev/null
+++ b/pytorch2.py
@@ -0,0 +1,29 @@
+#!/usr/bin/python3
+
+import torch
+
+m = torch.tensor([[2., 1.], [-1., 2.]])
+
+
+def fun(x):
+    return m @ x
+
+
+def loss(y):
+    return torch.sum((y - torch.tensor([3., 2.]))**2)
+
+
+x = torch.rand(2, requires_grad=True)
+
+learning_rate = torch.tensor(0.01)
+
+for _ in range(100):
+    y = fun(x)
+    cost = loss(y)
+    print(x, " => ", y, " ", cost)
+    cost.backward()
+
+    with torch.no_grad():
+        x = x - learning_rate * x.grad
+
+    x.requires_grad_(True)
diff --git a/pytorch3.py b/pytorch3.py
new file mode 100755
index 0000000..5b066fa
--- /dev/null
+++ b/pytorch3.py
@@ -0,0 +1,27 @@
+#!/usr/bin/python3
+
+import torch
+import pandas
+
+
+data = pandas.read_csv('mieszkania.tsv', sep='\t')
+
+x = torch.tensor(data['powierzchnia'], dtype=torch.float)
+y = torch.tensor(data['cena'], dtype=torch.float)
+
+w = torch.rand(1, requires_grad=True)
+
+learning_rate = torch.tensor(0.0000001)
+
+for _ in range(100):
+    ypredicted = w * x
+    cost = torch.sum((ypredicted - y) ** 2)
+
+    print(w, " => ", cost)
+
+    cost.backward()
+
+    with torch.no_grad():
+        w = w - learning_rate * w.grad
+
+    w.requires_grad_(True)
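As a sanity check (not part of the patch), the weights that the gradient-descent loops in pytorch3.py and pytorch4.py converge towards can also be computed in closed form from the normal equations; a sketch assuming the same mieszkania.tsv file and the same bias-column construction as pytorch4.py:

    import torch
    import pandas

    data = pandas.read_csv('mieszkania.tsv', sep='\t')
    x1 = torch.tensor(data['powierzchnia'], dtype=torch.float)
    y = torch.tensor(data['cena'], dtype=torch.float)
    X = torch.stack((torch.ones_like(x1), x1)).transpose(0, 1)  # bias column + area

    # normal equations: w = (X^T X)^{-1} X^T y
    w = torch.inverse(X.t() @ X) @ (X.t() @ y)
    print(w)  # [intercept, price per square metre]
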
diff --git a/pytorch4.py b/pytorch4.py
new file mode 100755
index 0000000..6832940
--- /dev/null
+++ b/pytorch4.py
@@ -0,0 +1,29 @@
+#!/usr/bin/python3
+
+import torch
+import pandas
+
+
+data = pandas.read_csv('mieszkania.tsv', sep='\t')
+
+x1 = torch.tensor(data['powierzchnia'], dtype=torch.float)
+x0 = torch.ones(x1.size(0))
+x = torch.stack((x0, x1)).transpose(0, 1)
+y = torch.tensor(data['cena'], dtype=torch.float)
+
+w = torch.rand(2, requires_grad=True)
+
+learning_rate = torch.tensor(0.000002)
+
+for _ in range(400000):
+    ypredicted = x @ w
+    cost = torch.sum((ypredicted - y) ** 2)
+
+    print(w, " => ", cost)
+
+    cost.backward()
+
+    with torch.no_grad():
+        w = w - learning_rate * w.grad
+
+    w.requires_grad_(True)
diff --git a/pytorch5.py b/pytorch5.py
new file mode 100755
index 0000000..18d33d3
--- /dev/null
+++ b/pytorch5.py
@@ -0,0 +1,32 @@
+#!/usr/bin/python3
+
+import torch
+import pandas as pd
+
+
+data = pd.read_csv('iris.data', sep=',', header=None)
+data[5] = data[4].apply(lambda x: 1 if x == 'Iris-versicolor' else 0)
+
+x1 = torch.tensor(data[0], dtype=torch.float)
+x0 = torch.ones(x1.size(0))
+x = torch.stack((x0, x1)).transpose(0, 1)
+y = torch.tensor(data[5], dtype=torch.float)
+
+w = torch.rand(2, requires_grad=True)
+
+learning_rate = torch.tensor(0.005)
+
+for _ in range(3000):
+    ypredicted = torch.sigmoid(x @ w)
+
+    # cost = torch.sum((ypredicted - y) ** 2)
+    cost = - (torch.sum(y*torch.log(ypredicted) + (torch.ones_like(y) - y) * torch.log(1 - ypredicted)))
+    accuracy = 100 * sum((ypredicted > 0.5) == y).item() / len(ypredicted)
+    print(w, " => ", cost, " => ", accuracy, '% accuracy')
+
+    cost.backward()
+
+    with torch.no_grad():
+        w = w - learning_rate * w.grad
+
+    w.requires_grad_(True)
diff --git a/pytorch6.py b/pytorch6.py
new file mode 100755
index 0000000..5c9b80a
--- /dev/null
+++ b/pytorch6.py
@@ -0,0 +1,29 @@
+#!/usr/bin/python3
+
+import torch
+import pandas as pd
+
+
+data = pd.read_csv('iris.data', sep=',', header=None)
+data[5] = data[4].apply(lambda x: 1 if x == 'Iris-versicolor' else 0)
+
+x1 = torch.tensor(data[0], dtype=torch.float)
+x0 = torch.ones(x1.size(0))
+x = torch.stack((x0, x1)).transpose(0, 1)
+y = torch.tensor(data[5], dtype=torch.float)
+
+w = torch.rand(2, requires_grad=True)
+
+optimizer = torch.optim.SGD([w], lr=0.005)
+
+for _ in range(3000):
+    optimizer.zero_grad()
+    ypredicted = torch.sigmoid(x @ w)
+
+    cost = - (torch.sum(y*torch.log(ypredicted) + (torch.ones_like(y) - y) * torch.log(1 - ypredicted)))
+    accuracy = 100 * sum((ypredicted > 0.5) == y).item() / len(ypredicted)
+    print(w, " => ", cost, " => ", accuracy, '% accuracy')
+
+    cost.backward()
+    optimizer.step()
+
diff --git a/pytorch7.py b/pytorch7.py
new file mode 100755
index 0000000..0d81b90
--- /dev/null
+++ b/pytorch7.py
@@ -0,0 +1,41 @@
+#!/usr/bin/python3
+
+import torch
+import pandas as pd
+
+
+data = pd.read_csv('iris.data', sep=',', header=None)
+data[5] = data[4].apply(lambda x: 1 if x == 'Iris-versicolor' else 0)
+
+x = torch.tensor(data[[0,1]].values, dtype=torch.float)
+y = torch.tensor(data[5], dtype=torch.float)
+
+y = y.reshape(100, 1)
+
+class Network(torch.nn.Module):
+
+    def __init__(self):
+        super(Network, self).__init__()
+        self.fc = torch.nn.Linear(2, 1)
+
+    def forward(self, x):
+        x = self.fc(x)
+        x = torch.sigmoid(x)
+
+        return x
+
+network = Network()
+optimizer = torch.optim.SGD(network.parameters(), lr=0.002)
+criterion = torch.nn.BCELoss()
+
+for _ in range(3000):
+    optimizer.zero_grad()
+    ypredicted = network(x)
+
+    loss = criterion(ypredicted, y)
+    accuracy = 100 * sum((ypredicted > 0.5) == y).item() / len(ypredicted)
+    print('{:.3}'.format(loss.item()), "\t => ", accuracy, '% accuracy')
+
+    loss.backward()
+    optimizer.step()
+
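A small check, not in the patch, showing that the torch.nn.BCELoss used by pytorch7.py computes the same binary cross-entropy that pytorch5.py and pytorch6.py write out by hand; note that BCELoss averages over samples by default, so reduction='sum' is needed to match the hand-written sum exactly. The probabilities and labels below are made-up example values.

    import torch

    p = torch.tensor([0.9, 0.2, 0.7])  # example predicted probabilities
    y = torch.tensor([1., 0., 1.])     # example binary labels

    manual = -torch.sum(y * torch.log(p) + (1 - y) * torch.log(1 - p))
    bce = torch.nn.BCELoss(reduction='sum')(p, y)
    print(manual.item(), bce.item())   # identical values
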
diff --git a/pytorch8.py b/pytorch8.py
new file mode 100755
index 0000000..fabf1f6
--- /dev/null
+++ b/pytorch8.py
@@ -0,0 +1,68 @@
+#!/usr/bin/python3
+
+import torch
+import pandas as pd
+from sklearn.model_selection import train_test_split
+
+
+data = pd.read_csv('iris.data', sep=',', header=None)
+data[5] = data[4].apply(lambda x: 1 if x == 'Iris-versicolor' else 0)
+
+
+x = torch.tensor(data[[0,1,2,3]].values, dtype=torch.float)
+y = torch.tensor(data[5], dtype=torch.float)
+
+y = y.reshape(100, 1)
+
+X_train, X_test, y_train, y_test = train_test_split(x, y, random_state=42)
+
+class Network(torch.nn.Module):
+
+    def __init__(self):
+        super(Network, self).__init__()
+        self.fc = torch.nn.Linear(4, 1)
+
+    def forward(self, x):
+        x = self.fc(x)
+        x = torch.sigmoid(x)
+
+        return x
+
+network = Network()
+optimizer = torch.optim.SGD(network.parameters(), lr=0.002)
+criterion = torch.nn.BCELoss()
+
+samples_in_batch = 5
+
+for epoch in range(3000):
+
+    network.train()
+    for i in range(0, len(X_train), samples_in_batch):
+        batch_x = X_train[i:i + samples_in_batch]
+        batch_y = y_train[i:i + samples_in_batch]
+        optimizer.zero_grad()
+        ypredicted = network(batch_x)
+
+        loss = criterion(ypredicted, batch_y)
+
+        loss.backward()
+        optimizer.step()
+
+
+    network.eval()
+    predicted_correct = 0
+    loss_sum = 0
+    for i in range(0, len(X_test), samples_in_batch):
+        batch_x = X_test[i:i + samples_in_batch]
+        batch_y = y_test[i:i + samples_in_batch]
+        optimizer.zero_grad()
+        ypredicted = network(batch_x)
+
+        loss_sum += criterion(ypredicted, batch_y)
+
+        predicted_correct += sum(((ypredicted > 0.5) == batch_y)).item()
+
+    accuracy = 100 * predicted_correct / len(y_test)
+    print('{:.3}'.format(loss_sum.item()), "\t => ", accuracy, '% accuracy')
+
+
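pytorch8.py, pytorch9.py and pytorch10.py batch the data by slicing X_train manually; the idiomatic alternative is torch.utils.data.DataLoader, which also shuffles between epochs. A sketch, not part of the patch, of what the inner training loop of pytorch8.py could look like, reusing its X_train, y_train, network, optimizer and criterion:

    from torch.utils.data import TensorDataset, DataLoader

    train_loader = DataLoader(TensorDataset(X_train, y_train),
                              batch_size=5, shuffle=True)  # shuffling is the main gain over slicing

    for epoch in range(3000):
        network.train()
        for batch_x, batch_y in train_loader:
            optimizer.zero_grad()
            loss = criterion(network(batch_x), batch_y)
            loss.backward()
            optimizer.step()
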
diff --git a/pytorch9.py b/pytorch9.py
new file mode 100755
index 0000000..f834e2a
--- /dev/null
+++ b/pytorch9.py
@@ -0,0 +1,69 @@
+#!/usr/bin/python3
+
+import torch
+import pandas as pd
+from sklearn.model_selection import train_test_split
+
+data = pd.read_csv('iris.data.multilabel', sep=',', header=None)
+NAMES_DICT = {
+    'Iris-setosa': 0,
+    'Iris-versicolor': 1,
+    'Iris-virginica': 2}
+
+data[5] = data[4].apply(lambda x: NAMES_DICT[x])
+
+x = torch.tensor(data[[0,1,2,3]].values, dtype=torch.float)
+y = torch.tensor(data[5], dtype=torch.long)
+
+X_train, X_test, y_train, y_test = train_test_split(x, y, random_state=42)
+
+
+class Network(torch.nn.Module):
+
+    def __init__(self):
+        super(Network, self).__init__()
+        self.fc = torch.nn.Linear(4, 3)
+
+    def forward(self, x):
+        x = self.fc(x)
+        x = torch.nn.functional.softmax(x, dim=1)
+        return x
+
+
+network = Network()
+optimizer = torch.optim.SGD(network.parameters(), lr=0.002)
+criterion = torch.nn.CrossEntropyLoss(reduction='sum')
+
+samples_in_batch = 5
+
+for epoch in range(3000):
+
+    network.train()
+    for i in range(0, len(X_train), samples_in_batch):
+        batch_x = X_train[i:i + samples_in_batch]
+        batch_y = y_train[i:i + samples_in_batch]
+        optimizer.zero_grad()
+        ypredicted = network(batch_x)
+
+        loss = criterion(ypredicted, batch_y)
+
+        loss.backward()
+        optimizer.step()
+
+    network.eval()
+    predicted_correct = 0
+    loss_sum = 0
+    for i in range(0, len(X_test), samples_in_batch):
+        batch_x = X_test[i:i + samples_in_batch]
+        batch_y = y_test[i:i + samples_in_batch]
+        optimizer.zero_grad()
+        ypredicted = network(batch_x)
+        y_most_probable_class = torch.max(ypredicted, 1)[1]
+
+        loss = criterion(ypredicted, batch_y)
+
+        predicted_correct += sum(y_most_probable_class == batch_y).item()
+
+
+    accuracy = 100 * predicted_correct / len(y_test)
+    print('{:.3}'.format(loss.item()), "\t => ", accuracy, '% accuracy')
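One caveat about pytorch9.py and pytorch10.py (an observation, not a change to the patch): torch.nn.CrossEntropyLoss already applies log-softmax internally, so feeding it the softmax output of forward() applies softmax twice and flattens the gradients. A sketch of the more usual variant that returns raw logits; the accuracy computation stays the same, because softmax does not change which class has the highest score.

    import torch

    class LogitsNetwork(torch.nn.Module):
        def __init__(self):
            super(LogitsNetwork, self).__init__()
            self.fc = torch.nn.Linear(4, 3)

        def forward(self, x):
            return self.fc(x)  # raw logits; CrossEntropyLoss handles the softmax

    network = LogitsNetwork()
    criterion = torch.nn.CrossEntropyLoss(reduction='sum')
    # torch.max(network(batch_x), 1)[1] still yields the predicted class index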