from sklearn import tree
import graphviz
import os

# Make the Graphviz executables visible to the graphviz package
# (Windows-specific; adjust the path to your local Graphviz installation)
os.environ["PATH"] += os.pathsep + 'C:/Program Files (x86)/Graphviz/bin/'

# Define the training dataset with 8 attributes and corresponding labels
training_data = [
    [0, 0, 0, 1, 0, 1, 1, 0, 'A'],
    [1, 0, 0, 0, 1, 1, 1, 1, 'A'],
    [0, 1, 0, 1, 0, 1, 1, 1, 'B'],
    [1, 0, 0, 1, 1, 0, 1, 0, 'B'],
    [1, 1, 1, 0, 1, 0, 0, 1, 'B'],
    [0, 0, 0, 0, 1, 1, 1, 0, 'A'],
    [0, 0, 0, 1, 0, 0, 0, 0, 'B'],
    [1, 1, 0, 1, 1, 1, 0, 1, 'A'],
    [0, 0, 0, 0, 0, 0, 1, 1, 'B'],
    [1, 0, 1, 0, 0, 1, 0, 0, 'B']
]

# Separate the attributes and labels
X_train = [data[:-1] for data in training_data]
y_train = [data[-1] for data in training_data]

# Create the decision tree classifier; criterion='entropy' selects splits by
# information gain (ID3-style), though scikit-learn's underlying algorithm is an optimized CART
clf = tree.DecisionTreeClassifier(criterion='entropy')

# Train the decision tree on the training data
clf.fit(X_train, y_train)

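# Optional sanity check (not part of the original script): resubstitution accuracy on the
# training data; an unconstrained tree on 10 examples will usually fit them perfectly.
print("Training accuracy:", clf.score(X_train, y_train))
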
# Visualize the trained decision tree
dot_data = tree.export_graphviz(clf, out_file=None,
                                feature_names=['Attr1', 'Attr2', 'Attr3', 'Attr4',
                                               'Attr5', 'Attr6', 'Attr7', 'Attr8'],
                                class_names=['A', 'B'], filled=True)
graph = graphviz.Source(dot_data)
graph.render("decision_tree")  # Save the visualization as a PDF file (decision_tree.pdf)

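# Alternative sketch, assuming Graphviz may not be installed: scikit-learn's
# export_text helper prints the same tree as plain text, with no external dependency.
from sklearn.tree import export_text
print(export_text(clf, feature_names=['Attr1', 'Attr2', 'Attr3', 'Attr4',
                                      'Attr5', 'Attr6', 'Attr7', 'Attr8']))
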
# Test the decision tree with a new example
new_example = [1, 0, 0, 1, 1, 0, 0, 1]  # Example with 8 attribute values
predicted_label = clf.predict([new_example])  # predict expects a 2D array-like, hence the outer brackets
print("Predicted Label:", predicted_label[0])
