Compare commits


3 Commits

Author SHA1 Message Date
3f65be482c Merge pull request 'drzewo_wiki' (#19) from drzewo_wiki into master
Reviewed-on: #19
2023-05-30 18:48:46 +02:00
d1ba1659c1 tree generated outside of main 2023-05-30 18:47:23 +02:00
7bf768e6af update 2023-05-29 13:29:04 +02:00
14 changed files with 285 additions and 143 deletions

5 .vscode/settings.json vendored Normal file

@@ -0,0 +1,5 @@
{
    "python.analysis.extraPaths": [
        "./DecisionTree"
    ]
}


@@ -1,87 +0,0 @@
import graphviz
import pandas as pd
from sklearn.tree import DecisionTreeClassifier
from sklearn.tree import export_graphviz
# def make_tree():
#     plikZPrzecinkami = open("training_data.txt", 'w')
#     with open('DecisionTree/200permutations_table.txt', 'r') as plik:
#         for linia in plik:
#             liczby = linia.strip()
#             wiersz = ""
#             licznik = 0
#             for liczba in liczby:
#                 wiersz += liczba
#                 wiersz += ";"
#             wiersz = wiersz[:-1]
#             wiersz += '\n'
#             plikZPrzecinkami.write(wiersz)
#     plikZPrzecinkami.close()
#     x = pd.read_csv('DecisionTree/training_data.txt', delimiter=';',
#                     names=['wielkosc', 'waga,', 'priorytet', 'ksztalt', 'kruchosc', 'dolna', 'gorna', 'g > d'])
#     y = pd.read_csv('DecisionTree/decisions.txt', names=['polka'])
#     # X_train, X_test, y_train, y_test = train_test_split(x, y, test_size=0.3, random_state=1)  # 70% training and 30% test
#     # Create an instance of the ID3 classifier
#     clf = DecisionTreeClassifier(criterion='entropy')
#     # Train the classifier
#     clf.fit(x.values, y.values)
#     # clf.fit(X_train, y_train)
#     return clf
plikZPrzecinkami = open("training_data.txt", 'w')
with open('200permutations_table.txt', 'r') as plik:
    for linia in plik:
        liczby = linia.strip()
        wiersz = ""
        licznik = 0
        for liczba in liczby:
            wiersz += liczba
            wiersz += ";"
        wiersz = wiersz[:-1]
        wiersz += '\n'
        plikZPrzecinkami.write(wiersz)
plikZPrzecinkami.close()
x = pd.read_csv('training_data.txt', delimiter=';',
                names=['wielkosc', 'waga,', 'priorytet', 'ksztalt', 'kruchosc', 'dolna', 'gorna', 'g > d'])
y = pd.read_csv('decisions.txt', names=['polka'])
# X_train, X_test, y_train, y_test = train_test_split(x, y, test_size=0.3, random_state=1)  # 70% training and 30% test
# Create an instance of the ID3 classifier
clf = DecisionTreeClassifier(criterion='entropy')
# Train the classifier
clf.fit(x.values, y.values)
# clf.fit(X_train, y_train)
# Prediction on new data
new_data = [[2, 2, 1, 0, 1, 1, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0]]
predictions = clf.predict(new_data)
# y_pred = clf.predict(X_test)
print(predictions)
# print("Accuracy:", clf.score(new_data, predictions))
# print("Accuracy:", metrics.accuracy_score(y_test, y_pred))
# Generate the .dot representation of the tree
dot_data = export_graphviz(clf, out_file=None, feature_names=list(x.columns), class_names=['0', '1'], filled=True,
                           rounded=True)
# Create a graphviz object from the .dot data
graph = graphviz.Source(dot_data)
# Display the tree
graph.view()
z = pd.concat([x, y], axis=1)
z.to_csv('dane.csv', index=False)
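
The character loop above rebuilds each row by appending every digit plus a ';' and trimming the trailing separator; a minimal equivalent sketch (not part of this commit, assuming each input line is a string of single-character feature values):

with open('200permutations_table.txt', 'r') as plik, \
        open('training_data.txt', 'w') as plikZPrzecinkami:
    for linia in plik:
        # join the characters of the stripped line with ';' in one step
        plikZPrzecinkami.write(";".join(linia.strip()) + "\n")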

197 Source.gv Normal file

@@ -0,0 +1,197 @@
digraph Tree {
node [shape=box, style="filled, rounded", color="black", fontname="helvetica"] ;
edge [fontname="helvetica"] ;
0 [label="g > d <= 0.5\nentropy = 0.997\nsamples = 200\nvalue = [94, 106]\nclass = 1", fillcolor="#e9f4fc"] ;
1 [label="waga, <= 0.5\nentropy = 0.803\nsamples = 98\nvalue = [74, 24]\nclass = 0", fillcolor="#edaa79"] ;
0 -> 1 [labeldistance=2.5, labelangle=45, headlabel="True"] ;
2 [label="wielkosc <= 1.5\nentropy = 0.998\nsamples = 34\nvalue = [16, 18]\nclass = 1", fillcolor="#e9f4fc"] ;
1 -> 2 ;
3 [label="priorytet <= 0.5\nentropy = 0.887\nsamples = 23\nvalue = [7, 16]\nclass = 1", fillcolor="#90c8f0"] ;
2 -> 3 ;
4 [label="kruchosc <= 0.5\nentropy = 0.439\nsamples = 11\nvalue = [1, 10]\nclass = 1", fillcolor="#4da7e8"] ;
3 -> 4 ;
5 [label="entropy = 0.0\nsamples = 7\nvalue = [0, 7]\nclass = 1", fillcolor="#399de5"] ;
4 -> 5 ;
6 [label="ksztalt <= 0.5\nentropy = 0.811\nsamples = 4\nvalue = [1, 3]\nclass = 1", fillcolor="#7bbeee"] ;
4 -> 6 ;
7 [label="entropy = 0.0\nsamples = 1\nvalue = [0, 1]\nclass = 1", fillcolor="#399de5"] ;
6 -> 7 ;
8 [label="dolna <= 0.5\nentropy = 0.918\nsamples = 3\nvalue = [1, 2]\nclass = 1", fillcolor="#9ccef2"] ;
6 -> 8 ;
9 [label="entropy = 0.0\nsamples = 1\nvalue = [0, 1]\nclass = 1", fillcolor="#399de5"] ;
8 -> 9 ;
10 [label="gorna <= 0.5\nentropy = 1.0\nsamples = 2\nvalue = [1, 1]\nclass = 0", fillcolor="#ffffff"] ;
8 -> 10 ;
11 [label="entropy = 0.0\nsamples = 1\nvalue = [1, 0]\nclass = 0", fillcolor="#e58139"] ;
10 -> 11 ;
12 [label="entropy = 0.0\nsamples = 1\nvalue = [0, 1]\nclass = 1", fillcolor="#399de5"] ;
10 -> 12 ;
13 [label="kruchosc <= 0.5\nentropy = 1.0\nsamples = 12\nvalue = [6, 6]\nclass = 0", fillcolor="#ffffff"] ;
3 -> 13 ;
14 [label="entropy = 0.0\nsamples = 5\nvalue = [5, 0]\nclass = 0", fillcolor="#e58139"] ;
13 -> 14 ;
15 [label="wielkosc <= 0.5\nentropy = 0.592\nsamples = 7\nvalue = [1, 6]\nclass = 1", fillcolor="#5aade9"] ;
13 -> 15 ;
16 [label="entropy = 0.0\nsamples = 4\nvalue = [0, 4]\nclass = 1", fillcolor="#399de5"] ;
15 -> 16 ;
17 [label="ksztalt <= 0.5\nentropy = 0.918\nsamples = 3\nvalue = [1, 2]\nclass = 1", fillcolor="#9ccef2"] ;
15 -> 17 ;
18 [label="entropy = 0.0\nsamples = 2\nvalue = [0, 2]\nclass = 1", fillcolor="#399de5"] ;
17 -> 18 ;
19 [label="entropy = 0.0\nsamples = 1\nvalue = [1, 0]\nclass = 0", fillcolor="#e58139"] ;
17 -> 19 ;
20 [label="ksztalt <= 0.5\nentropy = 0.684\nsamples = 11\nvalue = [9, 2]\nclass = 0", fillcolor="#eb9d65"] ;
2 -> 20 ;
21 [label="kruchosc <= 0.5\nentropy = 1.0\nsamples = 4\nvalue = [2, 2]\nclass = 0", fillcolor="#ffffff"] ;
20 -> 21 ;
22 [label="entropy = 0.0\nsamples = 1\nvalue = [1, 0]\nclass = 0", fillcolor="#e58139"] ;
21 -> 22 ;
23 [label="dolna <= 0.5\nentropy = 0.918\nsamples = 3\nvalue = [1, 2]\nclass = 1", fillcolor="#9ccef2"] ;
21 -> 23 ;
24 [label="entropy = 0.0\nsamples = 2\nvalue = [0, 2]\nclass = 1", fillcolor="#399de5"] ;
23 -> 24 ;
25 [label="entropy = 0.0\nsamples = 1\nvalue = [1, 0]\nclass = 0", fillcolor="#e58139"] ;
23 -> 25 ;
26 [label="entropy = 0.0\nsamples = 7\nvalue = [7, 0]\nclass = 0", fillcolor="#e58139"] ;
20 -> 26 ;
27 [label="gorna <= 0.5\nentropy = 0.449\nsamples = 64\nvalue = [58, 6]\nclass = 0", fillcolor="#e88e4d"] ;
1 -> 27 ;
28 [label="entropy = 0.0\nsamples = 33\nvalue = [33, 0]\nclass = 0", fillcolor="#e58139"] ;
27 -> 28 ;
29 [label="wielkosc <= 1.5\nentropy = 0.709\nsamples = 31\nvalue = [25, 6]\nclass = 0", fillcolor="#eb9f69"] ;
27 -> 29 ;
30 [label="ksztalt <= 0.5\nentropy = 0.918\nsamples = 18\nvalue = [12, 6]\nclass = 0", fillcolor="#f2c09c"] ;
29 -> 30 ;
31 [label="kruchosc <= 0.5\nentropy = 1.0\nsamples = 10\nvalue = [5, 5]\nclass = 0", fillcolor="#ffffff"] ;
30 -> 31 ;
32 [label="waga, <= 1.5\nentropy = 0.722\nsamples = 5\nvalue = [4, 1]\nclass = 0", fillcolor="#eca06a"] ;
31 -> 32 ;
33 [label="entropy = 0.0\nsamples = 3\nvalue = [3, 0]\nclass = 0", fillcolor="#e58139"] ;
32 -> 33 ;
34 [label="dolna <= 0.5\nentropy = 1.0\nsamples = 2\nvalue = [1, 1]\nclass = 0", fillcolor="#ffffff"] ;
32 -> 34 ;
35 [label="entropy = 0.0\nsamples = 1\nvalue = [0, 1]\nclass = 1", fillcolor="#399de5"] ;
34 -> 35 ;
36 [label="entropy = 0.0\nsamples = 1\nvalue = [1, 0]\nclass = 0", fillcolor="#e58139"] ;
34 -> 36 ;
37 [label="priorytet <= 0.5\nentropy = 0.722\nsamples = 5\nvalue = [1, 4]\nclass = 1", fillcolor="#6ab6ec"] ;
31 -> 37 ;
38 [label="entropy = 0.0\nsamples = 3\nvalue = [0, 3]\nclass = 1", fillcolor="#399de5"] ;
37 -> 38 ;
39 [label="dolna <= 0.5\nentropy = 1.0\nsamples = 2\nvalue = [1, 1]\nclass = 0", fillcolor="#ffffff"] ;
37 -> 39 ;
40 [label="entropy = 0.0\nsamples = 1\nvalue = [0, 1]\nclass = 1", fillcolor="#399de5"] ;
39 -> 40 ;
41 [label="entropy = 0.0\nsamples = 1\nvalue = [1, 0]\nclass = 0", fillcolor="#e58139"] ;
39 -> 41 ;
42 [label="waga, <= 1.5\nentropy = 0.544\nsamples = 8\nvalue = [7, 1]\nclass = 0", fillcolor="#e99355"] ;
30 -> 42 ;
43 [label="entropy = 0.0\nsamples = 4\nvalue = [4, 0]\nclass = 0", fillcolor="#e58139"] ;
42 -> 43 ;
44 [label="wielkosc <= 0.5\nentropy = 0.811\nsamples = 4\nvalue = [3, 1]\nclass = 0", fillcolor="#eeab7b"] ;
42 -> 44 ;
45 [label="entropy = 0.0\nsamples = 1\nvalue = [1, 0]\nclass = 0", fillcolor="#e58139"] ;
44 -> 45 ;
46 [label="dolna <= 0.5\nentropy = 0.918\nsamples = 3\nvalue = [2, 1]\nclass = 0", fillcolor="#f2c09c"] ;
44 -> 46 ;
47 [label="entropy = 0.0\nsamples = 1\nvalue = [1, 0]\nclass = 0", fillcolor="#e58139"] ;
46 -> 47 ;
48 [label="kruchosc <= 0.5\nentropy = 1.0\nsamples = 2\nvalue = [1, 1]\nclass = 0", fillcolor="#ffffff"] ;
46 -> 48 ;
49 [label="entropy = 0.0\nsamples = 1\nvalue = [1, 0]\nclass = 0", fillcolor="#e58139"] ;
48 -> 49 ;
50 [label="entropy = 0.0\nsamples = 1\nvalue = [0, 1]\nclass = 1", fillcolor="#399de5"] ;
48 -> 50 ;
51 [label="entropy = 0.0\nsamples = 13\nvalue = [13, 0]\nclass = 0", fillcolor="#e58139"] ;
29 -> 51 ;
52 [label="wielkosc <= 1.5\nentropy = 0.714\nsamples = 102\nvalue = [20, 82]\nclass = 1", fillcolor="#69b5eb"] ;
0 -> 52 [labeldistance=2.5, labelangle=-45, headlabel="False"] ;
53 [label="waga, <= 0.5\nentropy = 0.469\nsamples = 70\nvalue = [7, 63]\nclass = 1", fillcolor="#4fa8e8"] ;
52 -> 53 ;
54 [label="entropy = 0.0\nsamples = 21\nvalue = [0, 21]\nclass = 1", fillcolor="#399de5"] ;
53 -> 54 ;
55 [label="ksztalt <= 0.5\nentropy = 0.592\nsamples = 49\nvalue = [7, 42]\nclass = 1", fillcolor="#5aade9"] ;
53 -> 55 ;
56 [label="wielkosc <= 0.5\nentropy = 0.25\nsamples = 24\nvalue = [1, 23]\nclass = 1", fillcolor="#42a1e6"] ;
55 -> 56 ;
57 [label="entropy = 0.0\nsamples = 15\nvalue = [0, 15]\nclass = 1", fillcolor="#399de5"] ;
56 -> 57 ;
58 [label="kruchosc <= 0.5\nentropy = 0.503\nsamples = 9\nvalue = [1, 8]\nclass = 1", fillcolor="#52a9e8"] ;
56 -> 58 ;
59 [label="gorna <= 0.5\nentropy = 0.722\nsamples = 5\nvalue = [1, 4]\nclass = 1", fillcolor="#6ab6ec"] ;
58 -> 59 ;
60 [label="dolna <= 0.5\nentropy = 0.918\nsamples = 3\nvalue = [1, 2]\nclass = 1", fillcolor="#9ccef2"] ;
59 -> 60 ;
61 [label="entropy = 0.0\nsamples = 1\nvalue = [0, 1]\nclass = 1", fillcolor="#399de5"] ;
60 -> 61 ;
62 [label="waga, <= 1.5\nentropy = 1.0\nsamples = 2\nvalue = [1, 1]\nclass = 0", fillcolor="#ffffff"] ;
60 -> 62 ;
63 [label="entropy = 0.0\nsamples = 1\nvalue = [0, 1]\nclass = 1", fillcolor="#399de5"] ;
62 -> 63 ;
64 [label="entropy = 0.0\nsamples = 1\nvalue = [1, 0]\nclass = 0", fillcolor="#e58139"] ;
62 -> 64 ;
65 [label="entropy = 0.0\nsamples = 2\nvalue = [0, 2]\nclass = 1", fillcolor="#399de5"] ;
59 -> 65 ;
66 [label="entropy = 0.0\nsamples = 4\nvalue = [0, 4]\nclass = 1", fillcolor="#399de5"] ;
58 -> 66 ;
67 [label="kruchosc <= 0.5\nentropy = 0.795\nsamples = 25\nvalue = [6, 19]\nclass = 1", fillcolor="#78bced"] ;
55 -> 67 ;
68 [label="priorytet <= 0.5\nentropy = 0.98\nsamples = 12\nvalue = [5, 7]\nclass = 1", fillcolor="#c6e3f8"] ;
67 -> 68 ;
69 [label="dolna <= 0.5\nentropy = 0.764\nsamples = 9\nvalue = [2, 7]\nclass = 1", fillcolor="#72b9ec"] ;
68 -> 69 ;
70 [label="entropy = 0.0\nsamples = 5\nvalue = [0, 5]\nclass = 1", fillcolor="#399de5"] ;
69 -> 70 ;
71 [label="gorna <= 0.5\nentropy = 1.0\nsamples = 4\nvalue = [2, 2]\nclass = 0", fillcolor="#ffffff"] ;
69 -> 71 ;
72 [label="entropy = 0.0\nsamples = 2\nvalue = [2, 0]\nclass = 0", fillcolor="#e58139"] ;
71 -> 72 ;
73 [label="entropy = 0.0\nsamples = 2\nvalue = [0, 2]\nclass = 1", fillcolor="#399de5"] ;
71 -> 73 ;
74 [label="entropy = 0.0\nsamples = 3\nvalue = [3, 0]\nclass = 0", fillcolor="#e58139"] ;
68 -> 74 ;
75 [label="gorna <= 0.5\nentropy = 0.391\nsamples = 13\nvalue = [1, 12]\nclass = 1", fillcolor="#49a5e7"] ;
67 -> 75 ;
76 [label="priorytet <= 0.5\nentropy = 0.65\nsamples = 6\nvalue = [1, 5]\nclass = 1", fillcolor="#61b1ea"] ;
75 -> 76 ;
77 [label="entropy = 0.0\nsamples = 4\nvalue = [0, 4]\nclass = 1", fillcolor="#399de5"] ;
76 -> 77 ;
78 [label="wielkosc <= 0.5\nentropy = 1.0\nsamples = 2\nvalue = [1, 1]\nclass = 0", fillcolor="#ffffff"] ;
76 -> 78 ;
79 [label="entropy = 0.0\nsamples = 1\nvalue = [1, 0]\nclass = 0", fillcolor="#e58139"] ;
78 -> 79 ;
80 [label="entropy = 0.0\nsamples = 1\nvalue = [0, 1]\nclass = 1", fillcolor="#399de5"] ;
78 -> 80 ;
81 [label="entropy = 0.0\nsamples = 7\nvalue = [0, 7]\nclass = 1", fillcolor="#399de5"] ;
75 -> 81 ;
82 [label="gorna <= 0.5\nentropy = 0.974\nsamples = 32\nvalue = [13, 19]\nclass = 1", fillcolor="#c0e0f7"] ;
52 -> 82 ;
83 [label="kruchosc <= 0.5\nentropy = 0.65\nsamples = 12\nvalue = [10, 2]\nclass = 0", fillcolor="#ea9a61"] ;
82 -> 83 ;
84 [label="entropy = 0.0\nsamples = 7\nvalue = [7, 0]\nclass = 0", fillcolor="#e58139"] ;
83 -> 84 ;
85 [label="priorytet <= 0.5\nentropy = 0.971\nsamples = 5\nvalue = [3, 2]\nclass = 0", fillcolor="#f6d5bd"] ;
83 -> 85 ;
86 [label="waga, <= 1.5\nentropy = 0.918\nsamples = 3\nvalue = [1, 2]\nclass = 1", fillcolor="#9ccef2"] ;
85 -> 86 ;
87 [label="entropy = 0.0\nsamples = 2\nvalue = [0, 2]\nclass = 1", fillcolor="#399de5"] ;
86 -> 87 ;
88 [label="entropy = 0.0\nsamples = 1\nvalue = [1, 0]\nclass = 0", fillcolor="#e58139"] ;
86 -> 88 ;
89 [label="entropy = 0.0\nsamples = 2\nvalue = [2, 0]\nclass = 0", fillcolor="#e58139"] ;
85 -> 89 ;
90 [label="dolna <= 0.5\nentropy = 0.61\nsamples = 20\nvalue = [3, 17]\nclass = 1", fillcolor="#5caeea"] ;
82 -> 90 ;
91 [label="entropy = 0.0\nsamples = 11\nvalue = [0, 11]\nclass = 1", fillcolor="#399de5"] ;
90 -> 91 ;
92 [label="kruchosc <= 0.5\nentropy = 0.918\nsamples = 9\nvalue = [3, 6]\nclass = 1", fillcolor="#9ccef2"] ;
90 -> 92 ;
93 [label="waga, <= 0.5\nentropy = 0.811\nsamples = 4\nvalue = [3, 1]\nclass = 0", fillcolor="#eeab7b"] ;
92 -> 93 ;
94 [label="entropy = 0.0\nsamples = 1\nvalue = [0, 1]\nclass = 1", fillcolor="#399de5"] ;
93 -> 94 ;
95 [label="entropy = 0.0\nsamples = 3\nvalue = [3, 0]\nclass = 0", fillcolor="#e58139"] ;
93 -> 95 ;
96 [label="entropy = 0.0\nsamples = 5\nvalue = [0, 5]\nclass = 1", fillcolor="#399de5"] ;
92 -> 96 ;
}
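
Source.gv above is the Graphviz DOT export of the trained tree (the output of export_graphviz in the training script). A minimal sketch for rendering the committed file on its own, assuming the graphviz Python package and the Graphviz binaries are installed:

import graphviz

# Load the committed DOT file and open the rendered output
# (view() writes Source.gv.pdf by default and opens it in the default viewer).
graph = graphviz.Source.from_file('Source.gv')
graph.view()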

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

58 drzewo_decyzyjne.py Normal file

@@ -0,0 +1,58 @@
import graphviz
import pandas as pd
from sklearn.tree import DecisionTreeClassifier
from sklearn.tree import export_graphviz
def make_tree():
    plikZPrzecinkami = open("training_data.txt", 'w')
    with open('DecisionTree/200permutations_table.txt', 'r') as plik:
        for linia in plik:
            liczby = linia.strip()
            wiersz = ""
            licznik = 0
            for liczba in liczby:
                wiersz += liczba
                wiersz += ";"
            wiersz = wiersz[:-1]
            wiersz += '\n'
            plikZPrzecinkami.write(wiersz)
    plikZPrzecinkami.close()
    x = pd.read_csv('DecisionTree/training_data.txt', delimiter=';',
                    names=['wielkosc', 'waga,', 'priorytet', 'ksztalt', 'kruchosc', 'dolna', 'gorna', 'g > d'])
    y = pd.read_csv('DecisionTree/decisions.txt', names=['polka'])
    # X_train, X_test, y_train, y_test = train_test_split(x, y, test_size=0.3, random_state=1)  # 70% training and 30% test
    # Create an instance of the ID3 classifier
    clf = DecisionTreeClassifier(criterion='entropy')
    # Train the classifier
    clf.fit(x.values, y.values)
    # clf.fit(X_train, y_train)
    return clf
# # Prediction on new data
# new_data = [[2, 2, 1, 0, 1, 1, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0]]
# predictions = clf.predict(new_data)
# # y_pred = clf.predict(X_test)
# print(predictions)
# # print("Accuracy:", clf.score(new_data, predictions))
# # print("Accuracy:", metrics.accuracy_score(y_test, y_pred))
# Generate the .dot representation of the tree
# dot_data = export_graphviz(clf, out_file=None, feature_names=list(x.columns), class_names=['0', '1'], filled=True,
#                            rounded=True)
# # Create a graphviz object from the .dot data
# graph = graphviz.Source(dot_data)
# # Display the tree
# graph.view()
# z = pd.concat([x, y], axis=1)
# z.to_csv('dane.csv', index=False)
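
A minimal usage sketch of the new module, mirroring the call added in main.py below (the feature values are hypothetical; the column order comes from the read_csv names above). Note that make_tree() writes training_data.txt to the working directory but reads it back from DecisionTree/, so a previously generated copy must already exist under DecisionTree/ for the paths to line up.

import drzewo_decyzyjne

# Train the classifier once at startup and reuse it for every package.
drzewo = drzewo_decyzyjne.make_tree()

# Hypothetical 8-feature encoding, ordered like the training columns:
# [wielkosc, waga, priorytet, ksztalt, kruchosc, dolna, gorna, g > d]
array = [2, 2, 1, 0, 1, 1, 0, 0]
print(drzewo.predict([array]))  # 0 -> lower shelf, 1 -> upper shelf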


@@ -1,10 +1,7 @@
import pygame
from plansza import x1, y1, x2, y2, x3, y3, x4, y4, a_pix, b_pix
from regal import Regal, obliczPixeleNaPodstawieKratek
# import wozek
from packageList import *
# from paczka import Paczka
EKRAN_SZEROKOSC = 770
EKRAN_WYSOKOSC = 770
@@ -15,9 +12,6 @@ pygame.display.set_caption("Inteligentny wozek")
icon = pygame.image.load('images/icon.png')
pygame.display.set_icon(icon)
# lista_paczek = packageList.zainicjuj_liste_paczek(a_pix, b_pix)
#lista_paczek = listOfPackages()
lista_paczek = []
lista_paczek_na_regalach = []
@@ -52,11 +46,6 @@ def zwroc_regaly_kategoria(kategoria):
            lista_reg.append(reg)
    return lista_reg
# def zwroc_regaly_wspolrzedne(x_reg, y_reg):
#     for regal in lista_regalow:
#         if regal.wiersz == obliczPixeleNaPodstawieKratek(x_reg) and regal.kolumna == obliczPixeleNaPodstawieKratek(y_reg):
#             return regal
def narysuj_siatke():
    blockSize = 70  # Set the size of the grid block
    WHITE = (200, 200, 200)
@@ -94,13 +83,16 @@ def sprawdz_ktora_kolumna(y):
def narysuj_paczki(wozek):
    #if wozek.ln == 0:
    #for paczka in lista_paczek.list:
    #if len(lista_paczek_na_regalach) == 0:
    for paczka in lista_paczek:
        paczka.narysuj(paczka.x, paczka.y, screen)
def narysuj_paczke_na_regale():
    for paczka in lista_paczek_na_regalach:
        if paczka.is_in_move is False:
            paczka.narysuj(paczka.x, paczka.y, screen)
        paczka.narysuj(paczka.x, paczka.y, screen)
def dodaj_paczki_na_rampe(p1, p2):
    lista_paczek.append(p1)
    lista_paczek.append(p2)
    p1.update_position(a_pix, b_pix)
    p2.update_position(a_pix, b_pix)

33 main.py

@@ -7,6 +7,7 @@ import ekran
from grid import GridCellType, SearchGrid
from sklearn.tree import DecisionTreeClassifier
import pandas as pd
import drzewo_decyzyjne
from plansza import a_pix, b_pix
pygame.init()
@@ -14,15 +15,11 @@ pygame.init()
def main():
    wozek = Wozek()
    p1 = Paczka('duzy', 12, 'narzedzia', False, True, False, any, any, any, any, any)
    p2 = Paczka('maly', 1, 'ogród', False, True, False, any, any, any, any, any)
    ekran.lista_paczek.append(p1)
    ekran.lista_paczek.append(p2)
    p1.update_position(a_pix, b_pix)
    p2.update_position(a_pix, b_pix)
    ekran.dodaj_paczki_na_rampe(p1, p2)
    grid_points = SearchGrid()
    drzewo = drzewo_decyzyjne.make_tree()
    while True:
        for event in pygame.event.get():
@@ -52,34 +49,14 @@ def main():
                array, reg = przenoszona_paczka.tablica_do_drzewa(przenoszona_paczka.kategoria)
                plikZPrzecinkami = open("DecisionTree/training_data.txt", 'w')
                predictions = drzewo.predict([array])
                with open('DecisionTree/200permutations_table.txt', 'r') as plik:
                    for linia in plik:
                        liczby = linia.strip()
                        wiersz = ""
                        licznik = 0
                        for liczba in liczby:
                            wiersz += liczba
                            wiersz += ";"
                        wiersz = wiersz[:-1]
                        wiersz += '\n'
                        plikZPrzecinkami.write(wiersz)
                plikZPrzecinkami.close()
                x = pd.read_csv('DecisionTree/training_data.txt', delimiter=';',
                                names=['wielkosc', 'waga,', 'priorytet', 'ksztalt', 'kruchosc', 'dolna', 'gorna', 'g > d'])
                y = pd.read_csv('DecisionTree/decisions.txt', names=['polka'])
                clf = DecisionTreeClassifier(criterion='entropy')
                clf.fit(x.values, y.values)
                predictions = clf.predict([array])
                if predictions == 0:
                    print('odklada na dolna polke!')
                else:
                    print('odklada na gorna polke!')
                docelowy_stan = wyszukiwanie.Stan(reg.numerWiersza * 70, reg.numerKolumny * 70, 1)  # x1 and y1 because these are the shelves of the 'ogród' category
                docelowy_stan = wyszukiwanie.Stan(reg.numerWiersza * 70, reg.numerKolumny * 70, 1)
                wezel = wyszukiwanie.wyszukiwanie_a_star(wozek.obecnyStan, docelowy_stan, grid_points)
                sciezka = wyszukiwanie.znajdz_sciezke(wezel)
                wozek.przemiesc_wozek_po_sciezce(sciezka)


@@ -89,36 +89,36 @@ class Paczka(pygame.sprite.Sprite):
        # priorytet (priority)
        if self.priorytet is True:
            tablica.append(0)
        else: tablica.append(1)
            tablica.append(1)
        else: tablica.append(0)
        # ksztalt (shape)
        if self.ksztalt is True:
            tablica.append(0)
        else: tablica.append(1)
            tablica.append(1)
        else: tablica.append(0)
        # kruchosc (fragility)
        if self.kruchosc is True:
            tablica.append(0)
        else: tablica.append(1)
            tablica.append(1)
        else: tablica.append(0)
        reg = ekran.zwroc_regaly_kategoria(kategoria)
        # is the lower shelf (dolna) free
        if reg[0].is_dolna_free() is True:
            tablica.append(0)
        else:
            tablica.append(1)
        else:
            tablica.append(0)
        # is the upper shelf (gorna) free
        if reg[0].is_dolna_free() is True:
            tablica.append(0)
        else:
            tablica.append(1)
        else:
            tablica.append(0)
        # is there more space on the upper shelf
        if reg[0].is_dolna_free() is True:
            tablica.append(0)
        else:
            if reg[0].czy_na_gornej_wiecej_miejsca() is True:
                tablica.append(1)
            else:
                tablica.append(0)
        return tablica, reg[0]
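
The repeated if/else pairs in this hunk implement a True -> 1 / False -> 0 encoding after the change; a hedged, equivalent sketch of the three attribute appends (not part of the commit, assuming that encoding is the intent):

# Equivalent to the three if/else pairs above (True -> 1, False -> 0):
tablica.append(int(self.priorytet is True))
tablica.append(int(self.ksztalt is True))
tablica.append(int(self.kruchosc is True))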