Compare commits


No commits in common. "master" and "carrying_packages_and_dynamic_wozek_image" have entirely different histories.

2059 changed files with 187 additions and 2862 deletions

.gitignore

@@ -1,160 +0,0 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/#use-with-ide
.pdm.toml
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/


@@ -1,5 +0,0 @@
{
"python.analysis.extraPaths": [
"./DecisionTree"
]
}


@@ -1,200 +0,0 @@
10010010
00101101
20110001
22101110
10010001
21001100
10001001
11010001
00101110
02000110
00100101
00000110
02101100
20001000
21010111
01101110
02011101
12101100
00111101
00011001
11111010
12100111
22110111
12101101
01000101
11000101
01000111
21010101
01101100
21010110
12100011
12010111
02010101
21101111
02010001
01100110
22100011
10000010
00110100
22011100
12110001
12010011
01011110
01001100
01011000
11101101
11110110
21110110
22001100
10010101
21111010
00001100
21110101
12111011
02001111
21011000
02111011
12011110
02000101
12000100
20010111
21100011
01110100
21011100
02010000
21001001
11001100
20010011
20111011
22011000
01011101
10111000
20011111
10000001
21100001
00001101
01010001
22010000
02111101
22100110
12001110
01110001
11101000
20110011
20101010
22000110
11011011
20000011
12001101
12110000
00111110
02110100
21100010
10011000
22011101
20011100
02100000
12111001
00000111
22111011
01001010
21101100
01111111
12111010
20111110
10110010
02001001
22000100
02001100
01000011
10000101
21000010
01100100
10101010
20001100
00000000
00101000
10100000
02100001
20011101
02011110
02111111
12010110
02100100
20111111
00011111
12011000
12011001
22010010
22000010
00010010
10101000
02000000
20101111
02100011
02101111
22101010
11111111
01101000
21100111
00101111
01101010
20010010
11011110
20011110
00100110
10101111
01000001
02011001
21101011
11111011
10110011
10011001
21110010
10000000
00011110
10110001
21111011
12010101
11000110
22101101
00000010
02000111
21000011
00011100
10100110
20001111
12100001
22000101
01100010
02001010
11001111
00010011
01100111
22011010
10101011
11010011
20110010
20100010
11110111
21101000
02011000
12110100
21111101
02010111
02101001
01100011
10011011
22110000
01100000
20110100
01100001
00111010
02000010
20010110
00101011
22001011
22010001
22010101
12100101

Binary file not shown (image, 504 KiB).


@@ -1,197 +0,0 @@
digraph Tree {
node [shape=box, style="filled, rounded", color="black", fontname="helvetica"] ;
edge [fontname="helvetica"] ;
0 [label="g > d <= 0.5\nentropy = 0.997\nsamples = 200\nvalue = [94, 106]\nclass = 1", fillcolor="#e9f4fc"] ;
1 [label="waga, <= 0.5\nentropy = 0.803\nsamples = 98\nvalue = [74, 24]\nclass = 0", fillcolor="#edaa79"] ;
0 -> 1 [labeldistance=2.5, labelangle=45, headlabel="True"] ;
2 [label="wielkosc <= 1.5\nentropy = 0.998\nsamples = 34\nvalue = [16, 18]\nclass = 1", fillcolor="#e9f4fc"] ;
1 -> 2 ;
3 [label="priorytet <= 0.5\nentropy = 0.887\nsamples = 23\nvalue = [7, 16]\nclass = 1", fillcolor="#90c8f0"] ;
2 -> 3 ;
4 [label="kruchosc <= 0.5\nentropy = 0.439\nsamples = 11\nvalue = [1, 10]\nclass = 1", fillcolor="#4da7e8"] ;
3 -> 4 ;
5 [label="entropy = 0.0\nsamples = 7\nvalue = [0, 7]\nclass = 1", fillcolor="#399de5"] ;
4 -> 5 ;
6 [label="wielkosc <= 0.5\nentropy = 0.811\nsamples = 4\nvalue = [1, 3]\nclass = 1", fillcolor="#7bbeee"] ;
4 -> 6 ;
7 [label="ksztalt <= 0.5\nentropy = 0.918\nsamples = 3\nvalue = [1, 2]\nclass = 1", fillcolor="#9ccef2"] ;
6 -> 7 ;
8 [label="entropy = 0.0\nsamples = 1\nvalue = [0, 1]\nclass = 1", fillcolor="#399de5"] ;
7 -> 8 ;
9 [label="gorna <= 0.5\nentropy = 1.0\nsamples = 2\nvalue = [1, 1]\nclass = 0", fillcolor="#ffffff"] ;
7 -> 9 ;
10 [label="entropy = 0.0\nsamples = 1\nvalue = [1, 0]\nclass = 0", fillcolor="#e58139"] ;
9 -> 10 ;
11 [label="entropy = 0.0\nsamples = 1\nvalue = [0, 1]\nclass = 1", fillcolor="#399de5"] ;
9 -> 11 ;
12 [label="entropy = 0.0\nsamples = 1\nvalue = [0, 1]\nclass = 1", fillcolor="#399de5"] ;
6 -> 12 ;
13 [label="kruchosc <= 0.5\nentropy = 1.0\nsamples = 12\nvalue = [6, 6]\nclass = 0", fillcolor="#ffffff"] ;
3 -> 13 ;
14 [label="entropy = 0.0\nsamples = 5\nvalue = [5, 0]\nclass = 0", fillcolor="#e58139"] ;
13 -> 14 ;
15 [label="ksztalt <= 0.5\nentropy = 0.592\nsamples = 7\nvalue = [1, 6]\nclass = 1", fillcolor="#5aade9"] ;
13 -> 15 ;
16 [label="entropy = 0.0\nsamples = 4\nvalue = [0, 4]\nclass = 1", fillcolor="#399de5"] ;
15 -> 16 ;
17 [label="gorna <= 0.5\nentropy = 0.918\nsamples = 3\nvalue = [1, 2]\nclass = 1", fillcolor="#9ccef2"] ;
15 -> 17 ;
18 [label="entropy = 0.0\nsamples = 1\nvalue = [1, 0]\nclass = 0", fillcolor="#e58139"] ;
17 -> 18 ;
19 [label="entropy = 0.0\nsamples = 2\nvalue = [0, 2]\nclass = 1", fillcolor="#399de5"] ;
17 -> 19 ;
20 [label="ksztalt <= 0.5\nentropy = 0.684\nsamples = 11\nvalue = [9, 2]\nclass = 0", fillcolor="#eb9d65"] ;
2 -> 20 ;
21 [label="dolna <= 0.5\nentropy = 1.0\nsamples = 4\nvalue = [2, 2]\nclass = 0", fillcolor="#ffffff"] ;
20 -> 21 ;
22 [label="kruchosc <= 0.5\nentropy = 0.918\nsamples = 3\nvalue = [1, 2]\nclass = 1", fillcolor="#9ccef2"] ;
21 -> 22 ;
23 [label="entropy = 0.0\nsamples = 1\nvalue = [1, 0]\nclass = 0", fillcolor="#e58139"] ;
22 -> 23 ;
24 [label="entropy = 0.0\nsamples = 2\nvalue = [0, 2]\nclass = 1", fillcolor="#399de5"] ;
22 -> 24 ;
25 [label="entropy = 0.0\nsamples = 1\nvalue = [1, 0]\nclass = 0", fillcolor="#e58139"] ;
21 -> 25 ;
26 [label="entropy = 0.0\nsamples = 7\nvalue = [7, 0]\nclass = 0", fillcolor="#e58139"] ;
20 -> 26 ;
27 [label="gorna <= 0.5\nentropy = 0.449\nsamples = 64\nvalue = [58, 6]\nclass = 0", fillcolor="#e88e4d"] ;
1 -> 27 ;
28 [label="entropy = 0.0\nsamples = 33\nvalue = [33, 0]\nclass = 0", fillcolor="#e58139"] ;
27 -> 28 ;
29 [label="wielkosc <= 1.5\nentropy = 0.709\nsamples = 31\nvalue = [25, 6]\nclass = 0", fillcolor="#eb9f69"] ;
27 -> 29 ;
30 [label="ksztalt <= 0.5\nentropy = 0.918\nsamples = 18\nvalue = [12, 6]\nclass = 0", fillcolor="#f2c09c"] ;
29 -> 30 ;
31 [label="kruchosc <= 0.5\nentropy = 1.0\nsamples = 10\nvalue = [5, 5]\nclass = 0", fillcolor="#ffffff"] ;
30 -> 31 ;
32 [label="dolna <= 0.5\nentropy = 0.722\nsamples = 5\nvalue = [4, 1]\nclass = 0", fillcolor="#eca06a"] ;
31 -> 32 ;
33 [label="priorytet <= 0.5\nentropy = 1.0\nsamples = 2\nvalue = [1, 1]\nclass = 0", fillcolor="#ffffff"] ;
32 -> 33 ;
34 [label="entropy = 0.0\nsamples = 1\nvalue = [0, 1]\nclass = 1", fillcolor="#399de5"] ;
33 -> 34 ;
35 [label="entropy = 0.0\nsamples = 1\nvalue = [1, 0]\nclass = 0", fillcolor="#e58139"] ;
33 -> 35 ;
36 [label="entropy = 0.0\nsamples = 3\nvalue = [3, 0]\nclass = 0", fillcolor="#e58139"] ;
32 -> 36 ;
37 [label="dolna <= 0.5\nentropy = 0.722\nsamples = 5\nvalue = [1, 4]\nclass = 1", fillcolor="#6ab6ec"] ;
31 -> 37 ;
38 [label="entropy = 0.0\nsamples = 3\nvalue = [0, 3]\nclass = 1", fillcolor="#399de5"] ;
37 -> 38 ;
39 [label="waga, <= 1.5\nentropy = 1.0\nsamples = 2\nvalue = [1, 1]\nclass = 0", fillcolor="#ffffff"] ;
37 -> 39 ;
40 [label="entropy = 0.0\nsamples = 1\nvalue = [1, 0]\nclass = 0", fillcolor="#e58139"] ;
39 -> 40 ;
41 [label="entropy = 0.0\nsamples = 1\nvalue = [0, 1]\nclass = 1", fillcolor="#399de5"] ;
39 -> 41 ;
42 [label="waga, <= 1.5\nentropy = 0.544\nsamples = 8\nvalue = [7, 1]\nclass = 0", fillcolor="#e99355"] ;
30 -> 42 ;
43 [label="entropy = 0.0\nsamples = 4\nvalue = [4, 0]\nclass = 0", fillcolor="#e58139"] ;
42 -> 43 ;
44 [label="wielkosc <= 0.5\nentropy = 0.811\nsamples = 4\nvalue = [3, 1]\nclass = 0", fillcolor="#eeab7b"] ;
42 -> 44 ;
45 [label="entropy = 0.0\nsamples = 1\nvalue = [1, 0]\nclass = 0", fillcolor="#e58139"] ;
44 -> 45 ;
46 [label="kruchosc <= 0.5\nentropy = 0.918\nsamples = 3\nvalue = [2, 1]\nclass = 0", fillcolor="#f2c09c"] ;
44 -> 46 ;
47 [label="entropy = 0.0\nsamples = 1\nvalue = [1, 0]\nclass = 0", fillcolor="#e58139"] ;
46 -> 47 ;
48 [label="priorytet <= 0.5\nentropy = 1.0\nsamples = 2\nvalue = [1, 1]\nclass = 0", fillcolor="#ffffff"] ;
46 -> 48 ;
49 [label="entropy = 0.0\nsamples = 1\nvalue = [0, 1]\nclass = 1", fillcolor="#399de5"] ;
48 -> 49 ;
50 [label="entropy = 0.0\nsamples = 1\nvalue = [1, 0]\nclass = 0", fillcolor="#e58139"] ;
48 -> 50 ;
51 [label="entropy = 0.0\nsamples = 13\nvalue = [13, 0]\nclass = 0", fillcolor="#e58139"] ;
29 -> 51 ;
52 [label="wielkosc <= 1.5\nentropy = 0.714\nsamples = 102\nvalue = [20, 82]\nclass = 1", fillcolor="#69b5eb"] ;
0 -> 52 [labeldistance=2.5, labelangle=-45, headlabel="False"] ;
53 [label="waga, <= 0.5\nentropy = 0.469\nsamples = 70\nvalue = [7, 63]\nclass = 1", fillcolor="#4fa8e8"] ;
52 -> 53 ;
54 [label="entropy = 0.0\nsamples = 21\nvalue = [0, 21]\nclass = 1", fillcolor="#399de5"] ;
53 -> 54 ;
55 [label="ksztalt <= 0.5\nentropy = 0.592\nsamples = 49\nvalue = [7, 42]\nclass = 1", fillcolor="#5aade9"] ;
53 -> 55 ;
56 [label="wielkosc <= 0.5\nentropy = 0.25\nsamples = 24\nvalue = [1, 23]\nclass = 1", fillcolor="#42a1e6"] ;
55 -> 56 ;
57 [label="entropy = 0.0\nsamples = 15\nvalue = [0, 15]\nclass = 1", fillcolor="#399de5"] ;
56 -> 57 ;
58 [label="kruchosc <= 0.5\nentropy = 0.503\nsamples = 9\nvalue = [1, 8]\nclass = 1", fillcolor="#52a9e8"] ;
56 -> 58 ;
59 [label="dolna <= 0.5\nentropy = 0.722\nsamples = 5\nvalue = [1, 4]\nclass = 1", fillcolor="#6ab6ec"] ;
58 -> 59 ;
60 [label="entropy = 0.0\nsamples = 2\nvalue = [0, 2]\nclass = 1", fillcolor="#399de5"] ;
59 -> 60 ;
61 [label="gorna <= 0.5\nentropy = 0.918\nsamples = 3\nvalue = [1, 2]\nclass = 1", fillcolor="#9ccef2"] ;
59 -> 61 ;
62 [label="priorytet <= 0.5\nentropy = 1.0\nsamples = 2\nvalue = [1, 1]\nclass = 0", fillcolor="#ffffff"] ;
61 -> 62 ;
63 [label="entropy = 0.0\nsamples = 1\nvalue = [0, 1]\nclass = 1", fillcolor="#399de5"] ;
62 -> 63 ;
64 [label="entropy = 0.0\nsamples = 1\nvalue = [1, 0]\nclass = 0", fillcolor="#e58139"] ;
62 -> 64 ;
65 [label="entropy = 0.0\nsamples = 1\nvalue = [0, 1]\nclass = 1", fillcolor="#399de5"] ;
61 -> 65 ;
66 [label="entropy = 0.0\nsamples = 4\nvalue = [0, 4]\nclass = 1", fillcolor="#399de5"] ;
58 -> 66 ;
67 [label="kruchosc <= 0.5\nentropy = 0.795\nsamples = 25\nvalue = [6, 19]\nclass = 1", fillcolor="#78bced"] ;
55 -> 67 ;
68 [label="priorytet <= 0.5\nentropy = 0.98\nsamples = 12\nvalue = [5, 7]\nclass = 1", fillcolor="#c6e3f8"] ;
67 -> 68 ;
69 [label="dolna <= 0.5\nentropy = 0.764\nsamples = 9\nvalue = [2, 7]\nclass = 1", fillcolor="#72b9ec"] ;
68 -> 69 ;
70 [label="entropy = 0.0\nsamples = 5\nvalue = [0, 5]\nclass = 1", fillcolor="#399de5"] ;
69 -> 70 ;
71 [label="gorna <= 0.5\nentropy = 1.0\nsamples = 4\nvalue = [2, 2]\nclass = 0", fillcolor="#ffffff"] ;
69 -> 71 ;
72 [label="entropy = 0.0\nsamples = 2\nvalue = [2, 0]\nclass = 0", fillcolor="#e58139"] ;
71 -> 72 ;
73 [label="entropy = 0.0\nsamples = 2\nvalue = [0, 2]\nclass = 1", fillcolor="#399de5"] ;
71 -> 73 ;
74 [label="entropy = 0.0\nsamples = 3\nvalue = [3, 0]\nclass = 0", fillcolor="#e58139"] ;
68 -> 74 ;
75 [label="dolna <= 0.5\nentropy = 0.391\nsamples = 13\nvalue = [1, 12]\nclass = 1", fillcolor="#49a5e7"] ;
67 -> 75 ;
76 [label="entropy = 0.0\nsamples = 7\nvalue = [0, 7]\nclass = 1", fillcolor="#399de5"] ;
75 -> 76 ;
77 [label="gorna <= 0.5\nentropy = 0.65\nsamples = 6\nvalue = [1, 5]\nclass = 1", fillcolor="#61b1ea"] ;
75 -> 77 ;
78 [label="priorytet <= 0.5\nentropy = 0.918\nsamples = 3\nvalue = [1, 2]\nclass = 1", fillcolor="#9ccef2"] ;
77 -> 78 ;
79 [label="entropy = 0.0\nsamples = 2\nvalue = [0, 2]\nclass = 1", fillcolor="#399de5"] ;
78 -> 79 ;
80 [label="entropy = 0.0\nsamples = 1\nvalue = [1, 0]\nclass = 0", fillcolor="#e58139"] ;
78 -> 80 ;
81 [label="entropy = 0.0\nsamples = 3\nvalue = [0, 3]\nclass = 1", fillcolor="#399de5"] ;
77 -> 81 ;
82 [label="gorna <= 0.5\nentropy = 0.974\nsamples = 32\nvalue = [13, 19]\nclass = 1", fillcolor="#c0e0f7"] ;
52 -> 82 ;
83 [label="kruchosc <= 0.5\nentropy = 0.65\nsamples = 12\nvalue = [10, 2]\nclass = 0", fillcolor="#ea9a61"] ;
82 -> 83 ;
84 [label="entropy = 0.0\nsamples = 7\nvalue = [7, 0]\nclass = 0", fillcolor="#e58139"] ;
83 -> 84 ;
85 [label="waga, <= 1.5\nentropy = 0.971\nsamples = 5\nvalue = [3, 2]\nclass = 0", fillcolor="#f6d5bd"] ;
83 -> 85 ;
86 [label="priorytet <= 0.5\nentropy = 0.918\nsamples = 3\nvalue = [1, 2]\nclass = 1", fillcolor="#9ccef2"] ;
85 -> 86 ;
87 [label="entropy = 0.0\nsamples = 2\nvalue = [0, 2]\nclass = 1", fillcolor="#399de5"] ;
86 -> 87 ;
88 [label="entropy = 0.0\nsamples = 1\nvalue = [1, 0]\nclass = 0", fillcolor="#e58139"] ;
86 -> 88 ;
89 [label="entropy = 0.0\nsamples = 2\nvalue = [2, 0]\nclass = 0", fillcolor="#e58139"] ;
85 -> 89 ;
90 [label="dolna <= 0.5\nentropy = 0.61\nsamples = 20\nvalue = [3, 17]\nclass = 1", fillcolor="#5caeea"] ;
82 -> 90 ;
91 [label="entropy = 0.0\nsamples = 11\nvalue = [0, 11]\nclass = 1", fillcolor="#399de5"] ;
90 -> 91 ;
92 [label="kruchosc <= 0.5\nentropy = 0.918\nsamples = 9\nvalue = [3, 6]\nclass = 1", fillcolor="#9ccef2"] ;
90 -> 92 ;
93 [label="waga, <= 0.5\nentropy = 0.811\nsamples = 4\nvalue = [3, 1]\nclass = 0", fillcolor="#eeab7b"] ;
92 -> 93 ;
94 [label="entropy = 0.0\nsamples = 1\nvalue = [0, 1]\nclass = 1", fillcolor="#399de5"] ;
93 -> 94 ;
95 [label="entropy = 0.0\nsamples = 3\nvalue = [3, 0]\nclass = 0", fillcolor="#e58139"] ;
93 -> 95 ;
96 [label="entropy = 0.0\nsamples = 5\nvalue = [0, 5]\nclass = 1", fillcolor="#399de5"] ;
92 -> 96 ;
}
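
The file above matches the output format of scikit-learn's export_graphviz. A minimal sketch, assuming scikit-learn and pandas, of how a tree in this style could be regenerated from the shelf CSV shown further down in this diff (the CSV filename here is hypothetical):

import pandas as pd
from sklearn.tree import DecisionTreeClassifier, export_graphviz

# hypothetical path; the header matches the CSV shown later in this diff
df = pd.read_csv('shelf_data.csv')
X, y = df.drop(columns=['polka']), df['polka']

# entropy splits, matching the "entropy = ..." node labels above
clf = DecisionTreeClassifier(criterion='entropy').fit(X, y)
export_graphviz(clf, out_file='tree.dot',
                feature_names=list(X.columns), class_names=['0', '1'],
                filled=True, rounded=True)  # filled, rounded boxes as in this file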

Binary file not shown.


@@ -1,201 +0,0 @@
wielkosc,"waga,",priorytet,ksztalt,kruchosc,dolna,gorna,g > d,polka
1,0,0,1,0,0,1,0,1
0,0,1,0,1,1,0,1,1
2,0,1,1,0,0,0,1,0
2,2,1,0,1,1,1,0,0
1,0,0,1,0,0,0,1,1
2,1,0,0,1,1,0,0,0
1,0,0,0,1,0,0,1,1
1,1,0,1,0,0,0,1,1
0,0,1,0,1,1,1,0,1
0,2,0,0,0,1,1,0,0
0,0,1,0,0,1,0,1,1
0,0,0,0,0,1,1,0,1
0,2,1,0,1,1,0,0,0
2,0,0,0,1,0,0,0,1
2,1,0,1,0,1,1,1,0
0,1,1,0,1,1,1,0,0
0,2,0,1,1,1,0,1,1
1,2,1,0,1,1,0,0,0
0,0,1,1,1,1,0,1,1
0,0,0,1,1,0,0,1,1
1,1,1,1,1,0,1,0,0
1,2,1,0,0,1,1,1,1
2,2,1,1,0,1,1,1,0
1,2,1,0,1,1,0,1,1
0,1,0,0,0,1,0,1,1
1,1,0,0,0,1,0,1,1
0,1,0,0,0,1,1,1,1
2,1,0,1,0,1,0,1,0
0,1,1,0,1,1,0,0,0
2,1,0,1,0,1,1,0,0
1,2,1,0,0,0,1,1,1
1,2,0,1,0,1,1,1,1
0,2,0,1,0,1,0,1,0
2,1,1,0,1,1,1,1,1
0,2,0,1,0,0,0,1,1
0,1,1,0,0,1,1,0,0
2,2,1,0,0,0,1,1,1
1,0,0,0,0,0,1,0,1
0,0,1,1,0,1,0,0,0
2,2,0,1,1,1,0,0,0
1,2,1,1,0,0,0,1,0
1,2,0,1,0,0,1,1,1
0,1,0,1,1,1,1,0,0
0,1,0,0,1,1,0,0,0
0,1,0,1,1,0,0,0,0
1,1,1,0,1,1,0,1,1
1,1,1,1,0,1,1,0,0
2,1,1,1,0,1,1,0,0
2,2,0,0,1,1,0,0,0
1,0,0,1,0,1,0,1,1
2,1,1,1,1,0,1,0,0
0,0,0,0,1,1,0,0,1
2,1,1,1,0,1,0,1,0
1,2,1,1,1,0,1,1,1
0,2,0,0,1,1,1,1,1
2,1,0,1,1,0,0,0,0
0,2,1,1,1,0,1,1,1
1,2,0,1,1,1,1,0,1
0,2,0,0,0,1,0,1,1
1,2,0,0,0,1,0,0,0
2,0,0,1,0,1,1,1,1
2,1,1,0,0,0,1,1,1
0,1,1,1,0,1,0,0,0
2,1,0,1,1,1,0,0,0
0,2,0,1,0,0,0,0,0
2,1,0,0,1,0,0,1,1
1,1,0,0,1,1,0,0,0
2,0,0,1,0,0,1,1,1
2,0,1,1,1,0,1,1,1
2,2,0,1,1,0,0,0,0
0,1,0,1,1,1,0,1,1
1,0,1,1,1,0,0,0,0
2,0,0,1,1,1,1,1,1
1,0,0,0,0,0,0,1,1
2,1,1,0,0,0,0,1,0
0,0,0,0,1,1,0,1,1
0,1,0,1,0,0,0,1,1
2,2,0,1,0,0,0,0,0
0,2,1,1,1,1,0,1,0
2,2,1,0,0,1,1,0,0
1,2,0,0,1,1,1,0,1
0,1,1,1,0,0,0,1,0
1,1,1,0,1,0,0,0,0
2,0,1,1,0,0,1,1,1
2,0,1,0,1,0,1,0,1
2,2,0,0,0,1,1,0,0
1,1,0,1,1,0,1,1,1
2,0,0,0,0,0,1,1,1
1,2,0,0,1,1,0,1,1
1,2,1,1,0,0,0,0,0
0,0,1,1,1,1,1,0,1
0,2,1,1,0,1,0,0,0
2,1,1,0,0,0,1,0,0
1,0,0,1,1,0,0,0,1
2,2,0,1,1,1,0,1,0
2,0,0,1,1,1,0,0,0
0,2,1,0,0,0,0,0,0
1,2,1,1,1,0,0,1,1
0,0,0,0,0,1,1,1,1
2,2,1,1,1,0,1,1,1
0,1,0,0,1,0,1,0,1
2,1,1,0,1,1,0,0,0
0,1,1,1,1,1,1,1,1
1,2,1,1,1,0,1,0,0
2,0,1,1,1,1,1,0,0
1,0,1,1,0,0,1,0,0
0,2,0,0,1,0,0,1,1
2,2,0,0,0,1,0,0,0
0,2,0,0,1,1,0,0,0
0,1,0,0,0,0,1,1,1
1,0,0,0,0,1,0,1,1
2,1,0,0,0,0,1,0,0
0,1,1,0,0,1,0,0,0
1,0,1,0,1,0,1,0,1
2,0,0,0,1,1,0,0,0
0,0,0,0,0,0,0,0,1
0,0,1,0,1,0,0,0,1
1,0,1,0,0,0,0,0,0
0,2,1,0,0,0,0,1,1
2,0,0,1,1,1,0,1,1
0,2,0,1,1,1,1,0,0
0,2,1,1,1,1,1,1,1
1,2,0,1,0,1,1,0,0
0,2,1,0,0,1,0,0,0
2,0,1,1,1,1,1,1,1
0,0,0,1,1,1,1,1,1
1,2,0,1,1,0,0,0,0
1,2,0,1,1,0,0,1,1
2,2,0,1,0,0,1,0,0
2,2,0,0,0,0,1,0,0
0,0,0,1,0,0,1,0,1
1,0,1,0,1,0,0,0,1
0,2,0,0,0,0,0,0,0
2,0,1,0,1,1,1,1,1
0,2,1,0,0,0,1,1,1
0,2,1,0,1,1,1,1,1
2,2,1,0,1,0,1,0,0
1,1,1,1,1,1,1,1,1
0,1,1,0,1,0,0,0,0
2,1,1,0,0,1,1,1,0
0,0,1,0,1,1,1,1,1
0,1,1,0,1,0,1,0,1
2,0,0,1,0,0,1,0,0
1,1,0,1,1,1,1,0,0
2,0,0,1,1,1,1,0,0
0,0,1,0,0,1,1,0,0
1,0,1,0,1,1,1,1,1
0,1,0,0,0,0,0,1,1
0,2,0,1,1,0,0,1,1
2,1,1,0,1,0,1,1,1
1,1,1,1,1,0,1,1,1
1,0,1,1,0,0,1,1,1
1,0,0,1,1,0,0,1,1
2,1,1,1,0,0,1,0,0
1,0,0,0,0,0,0,0,1
0,0,0,1,1,1,1,0,1
1,0,1,1,0,0,0,1,1
2,1,1,1,1,0,1,1,1
1,2,0,1,0,1,0,1,0
1,1,0,0,0,1,1,0,0
2,2,1,0,1,1,0,1,0
0,0,0,0,0,0,1,0,1
0,2,0,0,0,1,1,1,1
2,1,0,0,0,0,1,1,1
0,0,0,1,1,1,0,0,0
1,0,1,0,0,1,1,0,0
2,0,0,0,1,1,1,1,1
1,2,1,0,0,0,0,1,1
2,2,0,0,0,1,0,1,0
0,1,1,0,0,0,1,0,0
0,2,0,0,1,0,1,0,1
1,1,0,0,1,1,1,1,1
0,0,0,1,0,0,1,1,1
0,1,1,0,0,1,1,1,1
2,2,0,1,1,0,1,0,0
1,0,1,0,1,0,1,1,1
1,1,0,1,0,0,1,1,1
2,0,1,1,0,0,1,0,0
2,0,1,0,0,0,1,0,0
1,1,1,1,0,1,1,1,0
2,1,1,0,1,0,0,0,0
0,2,0,1,1,0,0,0,0
1,2,1,1,0,1,0,0,0
2,1,1,1,1,1,0,1,0
0,2,0,1,0,1,1,1,1
0,2,1,0,1,0,0,1,1
0,1,1,0,0,0,1,1,1
1,0,0,1,1,0,1,1,1
2,2,1,1,0,0,0,0,0
0,1,1,0,0,0,0,0,0
2,0,1,1,0,1,0,0,0
0,1,1,0,0,0,0,1,1
0,0,1,1,1,0,1,0,1
0,2,0,0,0,0,1,0,1
2,0,0,1,0,1,1,0,0
0,0,1,0,1,0,1,1,1
2,2,0,0,1,0,1,1,1
2,2,0,1,0,0,0,1,0
2,2,0,1,0,1,0,1,0
1,2,1,0,0,1,0,1,0


@@ -1,84 +0,0 @@
def read_text_document(file_path):  # read lines of 8 integers from a text file
    string_array = []
    try:
        with open(file_path, 'r') as file:
            # Read each line of the document
            for line in file:
                # Remove trailing newline characters and append to the array
                string_array.append(line.rstrip('\n'))
    except FileNotFoundError:
        print("File not found.")
    return string_array  # returns array of lines from the file


def gen_output(array, second_array):  # transcribes a line of integers into elemental shelf decisions, "g" (upper) or "d" (lower)
    if array[0] == 0 or array[0] == 1:
        second_array[0] = "g"
    else:
        second_array[0] = "d"
    if array[1] == 1 or array[1] == 2:
        second_array[1] = "d"
    else:
        second_array[1] = "g"
    if array[2] == 0:
        second_array[2] = "g"
    else:
        second_array[2] = "d"
    if array[3] == 0:
        second_array[3] = "g"
    else:
        second_array[3] = "d"
    if array[4] == 0:
        second_array[4] = "d"
    else:
        second_array[4] = "g"
    if array[5] == 0:
        second_array[5] = "g"
    else:
        second_array[5] = "d"
    if array[6] == 0:
        second_array[6] = "d"
    else:
        second_array[6] = "g"
    if array[7] == 0:
        second_array[7] = "d"
    else:
        second_array[7] = "g"


def count(array):  # count the "g" and "d" votes and decide; on a tie return 2
    d = 0
    g = 0
    for digit in array:
        if digit == "g":
            g += 1
        else:
            d += 1
    if d > g:
        return 0  # lower shelf
    elif g > d:
        return 1  # upper shelf
    else:
        return 2  # space optimisation: goes to whichever shelf is emptier overall


file_path = 'file/path/to/input/lines/of/integers'
examples = read_text_document(file_path)  # array of examples from the file
for example in examples:
    digit_array = []
    for char in example:
        # Convert the character to an integer and add it to the array
        digit_array.append(int(char))
    output_array = [None] * 8
    gen_output(digit_array, output_array)
    decision_output = count(output_array)
    if decision_output == 2:  # in case d == g, check which shelf is emptier
        if output_array[7] == "g":
            decision_output = 1
        elif output_array[7] == "d":
            decision_output = 0
    print(decision_output)  # final decision
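
A hypothetical worked check (not in the original file), tracing the first line of the 8-digit sample file earlier in this diff through the two helpers:

digits = [int(c) for c in "10010010"]   # first line of the sample input file
votes = [None] * 8
gen_output(digits, votes)
print(votes)         # ['g', 'g', 'g', 'd', 'd', 'g', 'g', 'd']
print(count(votes))  # 1 -> upper shelf, matching the first line of the expected-output file below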


@@ -1,200 +0,0 @@
1
1
0
0
1
0
1
1
1
0
1
1
0
1
0
0
1
0
1
1
0
1
0
1
1
1
1
0
0
0
1
1
0
1
1
0
1
1
0
0
0
1
0
0
0
1
0
0
0
1
0
1
0
1
1
0
1
1
1
0
1
1
0
0
0
1
0
1
1
0
1
0
1
1
0
1
1
0
0
0
1
0
0
1
1
0
1
1
1
0
1
0
0
1
0
0
0
1
1
1
1
0
1
0
0
0
1
0
0
1
1
0
0
1
0
1
1
0
1
1
0
1
0
0
1
1
0
1
0
0
1
1
0
1
1
1
0
1
0
0
1
1
0
0
0
0
1
1
1
1
1
1
1
0
1
1
1
1
0
0
0
1
1
1
0
0
1
1
0
0
1
1
1
1
0
1
1
0
0
0
0
0
0
0
1
1
1
1
0
0
0
1
1
1
0
1
1
0
0
0


@@ -1,200 +0,0 @@
1;0;0;1;0;0;1;0
0;0;1;0;1;1;0;1
2;0;1;1;0;0;0;1
2;2;1;0;1;1;1;0
1;0;0;1;0;0;0;1
2;1;0;0;1;1;0;0
1;0;0;0;1;0;0;1
1;1;0;1;0;0;0;1
0;0;1;0;1;1;1;0
0;2;0;0;0;1;1;0
0;0;1;0;0;1;0;1
0;0;0;0;0;1;1;0
0;2;1;0;1;1;0;0
2;0;0;0;1;0;0;0
2;1;0;1;0;1;1;1
0;1;1;0;1;1;1;0
0;2;0;1;1;1;0;1
1;2;1;0;1;1;0;0
0;0;1;1;1;1;0;1
0;0;0;1;1;0;0;1
1;1;1;1;1;0;1;0
1;2;1;0;0;1;1;1
2;2;1;1;0;1;1;1
1;2;1;0;1;1;0;1
0;1;0;0;0;1;0;1
1;1;0;0;0;1;0;1
0;1;0;0;0;1;1;1
2;1;0;1;0;1;0;1
0;1;1;0;1;1;0;0
2;1;0;1;0;1;1;0
1;2;1;0;0;0;1;1
1;2;0;1;0;1;1;1
0;2;0;1;0;1;0;1
2;1;1;0;1;1;1;1
0;2;0;1;0;0;0;1
0;1;1;0;0;1;1;0
2;2;1;0;0;0;1;1
1;0;0;0;0;0;1;0
0;0;1;1;0;1;0;0
2;2;0;1;1;1;0;0
1;2;1;1;0;0;0;1
1;2;0;1;0;0;1;1
0;1;0;1;1;1;1;0
0;1;0;0;1;1;0;0
0;1;0;1;1;0;0;0
1;1;1;0;1;1;0;1
1;1;1;1;0;1;1;0
2;1;1;1;0;1;1;0
2;2;0;0;1;1;0;0
1;0;0;1;0;1;0;1
2;1;1;1;1;0;1;0
0;0;0;0;1;1;0;0
2;1;1;1;0;1;0;1
1;2;1;1;1;0;1;1
0;2;0;0;1;1;1;1
2;1;0;1;1;0;0;0
0;2;1;1;1;0;1;1
1;2;0;1;1;1;1;0
0;2;0;0;0;1;0;1
1;2;0;0;0;1;0;0
2;0;0;1;0;1;1;1
2;1;1;0;0;0;1;1
0;1;1;1;0;1;0;0
2;1;0;1;1;1;0;0
0;2;0;1;0;0;0;0
2;1;0;0;1;0;0;1
1;1;0;0;1;1;0;0
2;0;0;1;0;0;1;1
2;0;1;1;1;0;1;1
2;2;0;1;1;0;0;0
0;1;0;1;1;1;0;1
1;0;1;1;1;0;0;0
2;0;0;1;1;1;1;1
1;0;0;0;0;0;0;1
2;1;1;0;0;0;0;1
0;0;0;0;1;1;0;1
0;1;0;1;0;0;0;1
2;2;0;1;0;0;0;0
0;2;1;1;1;1;0;1
2;2;1;0;0;1;1;0
1;2;0;0;1;1;1;0
0;1;1;1;0;0;0;1
1;1;1;0;1;0;0;0
2;0;1;1;0;0;1;1
2;0;1;0;1;0;1;0
2;2;0;0;0;1;1;0
1;1;0;1;1;0;1;1
2;0;0;0;0;0;1;1
1;2;0;0;1;1;0;1
1;2;1;1;0;0;0;0
0;0;1;1;1;1;1;0
0;2;1;1;0;1;0;0
2;1;1;0;0;0;1;0
1;0;0;1;1;0;0;0
2;2;0;1;1;1;0;1
2;0;0;1;1;1;0;0
0;2;1;0;0;0;0;0
1;2;1;1;1;0;0;1
0;0;0;0;0;1;1;1
2;2;1;1;1;0;1;1
0;1;0;0;1;0;1;0
2;1;1;0;1;1;0;0
0;1;1;1;1;1;1;1
1;2;1;1;1;0;1;0
2;0;1;1;1;1;1;0
1;0;1;1;0;0;1;0
0;2;0;0;1;0;0;1
2;2;0;0;0;1;0;0
0;2;0;0;1;1;0;0
0;1;0;0;0;0;1;1
1;0;0;0;0;1;0;1
2;1;0;0;0;0;1;0
0;1;1;0;0;1;0;0
1;0;1;0;1;0;1;0
2;0;0;0;1;1;0;0
0;0;0;0;0;0;0;0
0;0;1;0;1;0;0;0
1;0;1;0;0;0;0;0
0;2;1;0;0;0;0;1
2;0;0;1;1;1;0;1
0;2;0;1;1;1;1;0
0;2;1;1;1;1;1;1
1;2;0;1;0;1;1;0
0;2;1;0;0;1;0;0
2;0;1;1;1;1;1;1
0;0;0;1;1;1;1;1
1;2;0;1;1;0;0;0
1;2;0;1;1;0;0;1
2;2;0;1;0;0;1;0
2;2;0;0;0;0;1;0
0;0;0;1;0;0;1;0
1;0;1;0;1;0;0;0
0;2;0;0;0;0;0;0
2;0;1;0;1;1;1;1
0;2;1;0;0;0;1;1
0;2;1;0;1;1;1;1
2;2;1;0;1;0;1;0
1;1;1;1;1;1;1;1
0;1;1;0;1;0;0;0
2;1;1;0;0;1;1;1
0;0;1;0;1;1;1;1
0;1;1;0;1;0;1;0
2;0;0;1;0;0;1;0
1;1;0;1;1;1;1;0
2;0;0;1;1;1;1;0
0;0;1;0;0;1;1;0
1;0;1;0;1;1;1;1
0;1;0;0;0;0;0;1
0;2;0;1;1;0;0;1
2;1;1;0;1;0;1;1
1;1;1;1;1;0;1;1
1;0;1;1;0;0;1;1
1;0;0;1;1;0;0;1
2;1;1;1;0;0;1;0
1;0;0;0;0;0;0;0
0;0;0;1;1;1;1;0
1;0;1;1;0;0;0;1
2;1;1;1;1;0;1;1
1;2;0;1;0;1;0;1
1;1;0;0;0;1;1;0
2;2;1;0;1;1;0;1
0;0;0;0;0;0;1;0
0;2;0;0;0;1;1;1
2;1;0;0;0;0;1;1
0;0;0;1;1;1;0;0
1;0;1;0;0;1;1;0
2;0;0;0;1;1;1;1
1;2;1;0;0;0;0;1
2;2;0;0;0;1;0;1
0;1;1;0;0;0;1;0
0;2;0;0;1;0;1;0
1;1;0;0;1;1;1;1
0;0;0;1;0;0;1;1
0;1;1;0;0;1;1;1
2;2;0;1;1;0;1;0
1;0;1;0;1;0;1;1
1;1;0;1;0;0;1;1
2;0;1;1;0;0;1;0
2;0;1;0;0;0;1;0
1;1;1;1;0;1;1;1
2;1;1;0;1;0;0;0
0;2;0;1;1;0;0;0
1;2;1;1;0;1;0;0
2;1;1;1;1;1;0;1
0;2;0;1;0;1;1;1
0;2;1;0;1;0;0;1
0;1;1;0;0;0;1;1
1;0;0;1;1;0;1;1
2;2;1;1;0;0;0;0
0;1;1;0;0;0;0;0
2;0;1;1;0;1;0;0
0;1;1;0;0;0;0;1
0;0;1;1;1;0;1;0
0;2;0;0;0;0;1;0
2;0;0;1;0;1;1;0
0;0;1;0;1;0;1;1
2;2;0;0;1;0;1;1
2;2;0;1;0;0;0;1
2;2;0;1;0;1;0;1
1;2;1;0;0;1;0;1

Binary file not shown.

Binary file not shown.


@@ -1,31 +0,0 @@
Epoch: 1 Train Loss: 65 Train Accuracy: 0.5754245754245755
Epoch: 2 Train Loss: 25 Train Accuracy: 0.7457542457542458
Epoch: 3 Train Loss: 8 Train Accuracy: 0.8431568431568431
Epoch: 4 Train Loss: 2 Train Accuracy: 0.9010989010989011
Epoch: 5 Train Loss: 1 Train Accuracy: 0.9335664335664335
Epoch: 6 Train Loss: 0 Train Accuracy: 0.9545454545454546
Epoch: 7 Train Loss: 0 Train Accuracy: 0.972027972027972
Epoch: 8 Train Loss: 0 Train Accuracy: 0.9820179820179821
Epoch: 9 Train Loss: 0 Train Accuracy: 0.994005994005994
Epoch: 10 Train Loss: 0 Train Accuracy: 0.9945054945054945
Epoch: 1 Train Loss: 42 Train Accuracy: 0.6428571428571429
Epoch: 2 Train Loss: 11 Train Accuracy: 0.8306693306693307
Epoch: 3 Train Loss: 3 Train Accuracy: 0.8921078921078921
Epoch: 4 Train Loss: 2 Train Accuracy: 0.8891108891108891
Epoch: 5 Train Loss: 1 Train Accuracy: 0.9335664335664335
Epoch: 6 Train Loss: 0 Train Accuracy: 0.952047952047952
Epoch: 7 Train Loss: 0 Train Accuracy: 0.9545454545454546
Epoch: 8 Train Loss: 0 Train Accuracy: 0.9655344655344655
Epoch: 9 Train Loss: 0 Train Accuracy: 0.9815184815184815
Epoch: 10 Train Loss: 0 Train Accuracy: 0.9805194805194806
Epoch: 11 Train Loss: 0 Train Accuracy: 0.9855144855144855
Epoch: 12 Train Loss: 0 Train Accuracy: 0.989010989010989
Epoch: 13 Train Loss: 0 Train Accuracy: 0.9925074925074925
Epoch: 14 Train Loss: 0 Train Accuracy: 0.9915084915084915
Epoch: 15 Train Loss: 0 Train Accuracy: 0.9885114885114885
Epoch: 16 Train Loss: 0 Train Accuracy: 0.994005994005994
Epoch: 17 Train Loss: 0 Train Accuracy: 0.997002997002997
Epoch: 18 Train Loss: 0 Train Accuracy: 0.9965034965034965
Epoch: 19 Train Loss: 0 Train Accuracy: 0.999000999000999
Epoch: 20 Train Loss: 0 Train Accuracy: 1.0


@@ -1,60 +0,0 @@
import glob
from src.torchvision_resize_dataset import combined_dataset, images_path, classes
import src.data_model
from torch.optim import Adam
import torch
import torch.nn as nn
from torch.utils.data import DataLoader

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

train_loader = DataLoader(
    combined_dataset,  # dataset of images
    batch_size=256,    # samples per batch
    shuffle=True       # random order
)

model = src.data_model.DataModel(num_objects=2).to(device)
# optimizer
optimizer = Adam(model.parameters(), lr=0.001, weight_decay=0.0001)
# loss function
criterion = nn.CrossEntropyLoss()

num_epochs = 20
# train_size = len(glob.glob(images_path+'*.jpg'))
train_size = 2002
go_to_accuracy = 0.0
for epoch in range(num_epochs):
    # training on the dataset
    model.train()
    train_accuracy = 0.0
    train_loss = 0.0
    for i, (images, labels) in enumerate(train_loader):
        # move the batch to the same device as the model
        images = images.to(device)
        labels = labels.to(device)
        # clear the optimizer gradients
        optimizer.zero_grad()
        outputs = model(images)            # prediction
        loss = criterion(outputs, labels)  # loss calculation
        loss.backward()
        optimizer.step()
        train_loss += loss.item() * images.size(0)
        _, prediction = torch.max(outputs.data, 1)
        train_accuracy += int(torch.sum(prediction == labels.data))
    train_accuracy = train_accuracy / train_size
    train_loss = train_loss / train_size
    model.eval()
    print('Epoch: ' + str(epoch+1) + ' Train Loss: ' + str(int(train_loss)) + ' Train Accuracy: ' + str(train_accuracy))
    if train_accuracy > go_to_accuracy:
        go_to_accuracy = train_accuracy
        torch.save(model.state_dict(), "best_model.pth")
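
A small sketch of an alternative (an assumption, not in the original script): train_size could be derived from the dataset itself instead of being hardcoded to 2002, which also removes the need for the commented-out glob count:

train_size = len(combined_dataset)  # ImageFolder datasets report their sample count via len()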

Binary file not shown.


@@ -1,147 +0,0 @@
import torch
import torch.nn as nn
from torchvision.transforms import transforms
import numpy as np
from torch.autograd import Variable
from torchvision.models import squeezenet1_1
import torch.nn.functional as F
import os
from PIL import Image, ImageTk
import pathlib
import glob
from tkinter import Tk, Label

absolute_path = os.path.abspath('NeuralNetwork/src/train_images')
train_path = absolute_path
absolute_path = os.path.abspath('Images/Items_test')
pred_path = absolute_path

root = pathlib.Path(train_path)
classes = sorted([j.name.split('/')[-1] for j in root.iterdir()])

class DataModel(nn.Module):
    def __init__(self, num_classes):
        super(DataModel, self).__init__()
        # input (batch=256, number of RGB channels=3, size=224x224)
        # convolution
        self.conv1 = nn.Conv2d(in_channels=3, out_channels=12, kernel_size=3, stride=1, padding=1)
        # shape (256, 12, 224x224)
        # batch normalization
        self.bn1 = nn.BatchNorm2d(num_features=12)
        # shape (256, 12, 224x224)
        self.relu1 = nn.ReLU()
        self.pool = nn.MaxPool2d(kernel_size=2, stride=2)
        # reduce image size by a factor of 2:
        # the pooling window moves by 2 pixels at a time instead of 1
        # shape (256, 12, 112x112)
        self.conv2 = nn.Conv2d(in_channels=12, out_channels=24, kernel_size=3, stride=1, padding=1)
        self.bn2 = nn.BatchNorm2d(num_features=24)
        self.relu2 = nn.ReLU()
        # shape (256, 24, 112x112)
        self.conv3 = nn.Conv2d(in_channels=24, out_channels=48, kernel_size=3, stride=1, padding=1)
        # shape (256, 48, 112x112)
        self.bn3 = nn.BatchNorm2d(num_features=48)
        # shape (256, 48, 112x112)
        self.relu3 = nn.ReLU()
        # fully connected layer
        self.fc = nn.Linear(in_features=48*112*112, out_features=num_classes)

    def forward(self, input):
        output = self.conv1(input)
        output = self.bn1(output)
        output = self.relu1(output)
        output = self.pool(output)
        output = self.conv2(output)
        output = self.bn2(output)
        output = self.relu2(output)
        output = self.conv3(output)
        output = self.bn3(output)
        output = self.relu3(output)
        # output shape (256, 48, 112x112), flattened for the linear layer
        output = output.view(-1, 48*112*112)
        output = self.fc(output)
        return output

script_dir = os.path.dirname(os.path.abspath(__file__))
file_path = os.path.join(script_dir, 'best_model.pth')
checkpoint = torch.load(file_path)
model = DataModel(num_classes=2)
model.load_state_dict(checkpoint)
model.eval()

transformer = transforms.Compose([
    transforms.Resize((224, 224)),  # resize images to (224, 224)
    transforms.ToTensor(),          # convert images to tensors, 0-255 to 0-1
    # transforms.RandomHorizontalFlip(),  # 0.5 chance to flip the image
    transforms.Normalize([0.5, 0.5, 0.5], [0.5, 0.5, 0.5])
])

def prediction(img_path, transformer):
    image = Image.open(img_path)
    image_tensor = transformer(image).float()
    image_tensor = image_tensor.unsqueeze_(0)
    # the model is loaded on the CPU, so the tensor stays on the CPU as well
    input = Variable(image_tensor)
    output = model(input)
    index = output.data.numpy().argmax()
    pred = classes[index]
    return pred

def prediction_keys():
    # returns the path of every file in the folder as a list
    images_path = glob.glob(pred_path + '/*.jpg')
    pred_list = []
    for i in images_path:
        pred_list.append(i)
    return pred_list

def predict_one(path):
    # display the image after each picked-up item
    root = Tk()
    root.title("Okno z obrazkiem")
    image = Image.open(path)
    photo = ImageTk.PhotoImage(image)
    label = Label(root, image=photo)
    label.pack()
    root.mainloop()
    # run the check whether the image shows a package or a letter
    pred_print = prediction(path, transformer)
    print('Zdjecie jest: ' + pred_print)
    return pred_print
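
A hypothetical driver loop (not part of the original file) showing how the two helpers compose:

# classify every test image, popping up its preview window one at a time
for img_path in prediction_keys():
    predict_one(img_path)  # shows the image, then prints and returns the predicted class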


@@ -1,61 +0,0 @@
import torch.nn as nn
import torch

class DataModel(nn.Module):
    def __init__(self, num_objects):
        super(DataModel, self).__init__()
        # input (batch=256, number of RGB channels=3, size=224x224)
        # convolution
        self.conv1 = nn.Conv2d(in_channels=3, out_channels=12, kernel_size=3, stride=1, padding=1)
        # shape (256, 12, 224x224)
        # batch normalization
        self.bn1 = nn.BatchNorm2d(num_features=12)
        # shape (256, 12, 224x224)
        self.relu1 = nn.ReLU()
        self.pool = nn.MaxPool2d(kernel_size=2, stride=2)
        # reduce image size by a factor of 2:
        # the pooling window moves by 2 pixels at a time instead of 1
        # shape (256, 12, 112x112)
        self.conv2 = nn.Conv2d(in_channels=12, out_channels=24, kernel_size=3, stride=1, padding=1)
        self.bn2 = nn.BatchNorm2d(num_features=24)
        self.relu2 = nn.ReLU()
        # shape (256, 24, 112x112)
        self.conv3 = nn.Conv2d(in_channels=24, out_channels=48, kernel_size=3, stride=1, padding=1)
        # shape (256, 48, 112x112)
        self.bn3 = nn.BatchNorm2d(num_features=48)
        # shape (256, 48, 112x112)
        self.relu3 = nn.ReLU()
        # fully connected layer
        self.fc = nn.Linear(in_features=48*112*112, out_features=num_objects)

    def forward(self, input):
        output = self.conv1(input)
        output = self.bn1(output)
        output = self.relu1(output)
        output = self.pool(output)
        output = self.conv2(output)
        output = self.bn2(output)
        output = self.conv3(output)
        output = self.bn3(output)
        output = self.relu3(output)
        # output shape (256, 48, 112x112), flattened for the linear layer
        output = output.view(-1, 48*112*112)
        output = self.fc(output)
        return output
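
A minimal shape check (an assumption, not part of the repo) that confirms the hardcoded flattened size 48*112*112 = 602112: a 3x224x224 input is halved once by the pooling layer (224 -> 112) and comes out as num_objects logits:

import torch

model = DataModel(num_objects=2)
x = torch.randn(1, 3, 224, 224)  # a single RGB image
print(model(x).shape)            # expected: torch.Size([1, 2])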


@@ -1,31 +0,0 @@
import glob
import pathlib
import torchvision.transforms as transforms
from torchvision.datasets import ImageFolder
from torch.utils.data import ConcatDataset

# images have to be the same size for the algorithm to work
transform = transforms.Compose([
    transforms.Resize((224, 224)),  # resize images to (224, 224)
    transforms.ToTensor(),          # convert images to tensors, 0-255 to 0-1
    # transforms.RandomHorizontalFlip(),  # 0.5 chance to flip the image
    transforms.Normalize([0.5, 0.5, 0.5], [0.5, 0.5, 0.5])
])

letters_path = 'C:/Users/wojmed/Documents/VS repositories/Inteligentny_Wozek/NeuralNetwork/src/train_images/letters'
package_path = 'C:/Users/wojmed/Documents/VS repositories/Inteligentny_Wozek/NeuralNetwork/src/train_images/package'
images_path = 'C:/Users/wojmed/Documents/VS repositories/Inteligentny_Wozek/NeuralNetwork/src/train_images'

# # Load images from folders
# letter_folder = ImageFolder(letters_path, transform=transform)
# package_folder = ImageFolder(package_path, transform=transform)
# Combine both datasets into a single dataset
# combined_dataset = ConcatDataset([letter_folder, package_folder])
combined_dataset = ImageFolder(images_path, transform=transform)

# image classes
path = pathlib.Path(images_path)
classes = sorted([i.name.split("/")[-1] for i in path.iterdir()])
# print(classes)
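
The hardcoded C:/Users/... paths tie this module to one machine; a portable sketch (an assumption, not in the original) resolves them relative to the file itself:

import os

base = os.path.dirname(os.path.abspath(__file__))
images_path = os.path.join(base, 'train_images')
letters_path = os.path.join(images_path, 'letters')
package_path = os.path.join(images_path, 'package')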

[Dozens of deleted binary image files; previews not shown. Sizes range from 976 B to 7.7 MiB.]

Some files were not shown because too many files have changed in this diff.