automatyczny_kelner/TreeConcept.py

from sklearn import tree
import graphviz
import os
# Graphviz binaries are assumed to sit in the default Windows install location;
# append that directory to PATH so the 'dot' executable can be found.
os.environ["PATH"] += os.pathsep + 'C:/Program Files (x86)/Graphviz/bin/'
# Define the training dataset with 8 attributes and corresponding labels
training_data = [
[0, 0, 0, 1, 0, 1, 1, 0, 'A'],
[1, 0, 0, 0, 1, 1, 1, 1, 'A'],
[0, 1, 0, 1, 0, 1, 1, 1, 'B'],
[1, 0, 0, 1, 1, 0, 1, 0, 'B'],
[1, 1, 1, 0, 1, 0, 0, 1, 'B'],
[0, 0, 0, 0, 1, 1, 1, 0, 'A'],
[0, 0, 0, 1, 0, 0, 0, 0, 'B'],
[1, 1, 0, 1, 1, 1, 0, 1, 'A'],
[0, 0, 0, 0, 0, 0, 1, 1, 'B'],
[1, 0, 1, 0, 0, 1, 0, 0, 'B']
]
# Separate the attributes and labels
X_train = [data[:-1] for data in training_data]
y_train = [data[-1] for data in training_data]
# Create the decision tree classifier with the entropy (information gain) criterion.
# Note: scikit-learn grows an optimized CART tree; criterion='entropy' makes the
# splits ID3-like, but it is not a literal ID3 implementation.
clf = tree.DecisionTreeClassifier(criterion='entropy')
# Train the decision tree on the training data
clf.fit(X_train, y_train)
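
# Optional sanity check, an addition not in the original file: accuracy on the
# training data itself (with only 10 rows this mostly confirms the fit succeeded).
print("Training accuracy:", clf.score(X_train, y_train))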
# Visualize the trained decision tree
feature_names = ['Attr1', 'Attr2', 'Attr3', 'Attr4', 'Attr5', 'Attr6', 'Attr7', 'Attr8']
dot_data = tree.export_graphviz(clf, out_file=None, feature_names=feature_names,
                                class_names=['A', 'B'], filled=True)
graph = graphviz.Source(dot_data)
graph.render("decision_tree") # Save the visualization as a PDF file
# Test the decision tree with a new example
new_example = [1, 0, 0, 1, 1, 0, 0, 1] # Example with 8 attributes
predicted_label = clf.predict([new_example])
print("Predicted Label:", predicted_label[0])