diff --git a/TreeConcept.py b/TreeConcept.py
index 2335209..b53512e 100644
--- a/TreeConcept.py
+++ b/TreeConcept.py
@@ -1,17 +1,20 @@
 from sklearn import tree
+import graphviz
+import os
+os.environ["PATH"] += os.pathsep + 'C:/Program Files (x86)/Graphviz/bin/'
 
 # Define the training dataset with 8 attributes and corresponding labels
 training_data = [
-    [1, 0, 0, 1, 0, 1, 1, 'A'],
-    [1, 0, 0, 0, 1, 1, 1, 'A'],
-    [0, 1, 0, 1, 0, 1, 1, 'B'],
-    [0, 0, 0, 1, 0, 0, 1, 'B'],
-    [0, 1, 1, 0, 1, 0, 0, 'B'],
-    [1, 0, 0, 0, 1, 0, 1, 'A'],
-    [0, 0, 0, 1, 0, 0, 0, 'B'],
-    [1, 1, 0, 1, 1, 1, 0, 'A'],
-    [0, 0, 0, 0, 0, 0, 1, 'B'],
-    [0, 0, 1, 0, 0, 1, 0, 'B']
+    [0, 0, 0, 1, 0, 1, 1, 0, 'A'],
+    [1, 0, 0, 0, 1, 1, 1, 1, 'A'],
+    [0, 1, 0, 1, 0, 1, 1, 1, 'B'],
+    [1, 0, 0, 1, 1, 0, 1, 0, 'B'],
+    [1, 1, 1, 0, 1, 0, 0, 1, 'B'],
+    [0, 0, 0, 0, 1, 1, 1, 0, 'A'],
+    [0, 0, 0, 1, 0, 0, 0, 0, 'B'],
+    [1, 1, 0, 1, 1, 1, 0, 1, 'A'],
+    [0, 0, 0, 0, 0, 0, 1, 1, 'B'],
+    [1, 0, 1, 0, 0, 1, 0, 0, 'B']
 ]
 
 # Separate the attributes and labels
@@ -24,7 +27,12 @@ clf = tree.DecisionTreeClassifier(criterion='entropy')
 # Train the decision tree on the training data
 clf.fit(X_train, y_train)
 
+# Visualize the trained decision tree
+dot_data = tree.export_graphviz(clf, out_file=None, feature_names=['Attr1', 'Attr2', 'Attr3', 'Attr4', 'Attr5', 'Attr6', 'Attr7', 'Attr8'], class_names=['A', 'B'], filled=True)
+graph = graphviz.Source(dot_data)
+graph.render("decision_tree") # Save the visualization as a PDF file
+
 # Test the decision tree with a new example
-new_example = [1, 0, 0, 1, 1, 0, 0] # Example with 8 attributes
+new_example = [1, 0, 0, 1, 1, 0, 0, 1] # Example with 8 attributes
 predicted_label = clf.predict([new_example])
 print("Predicted Label:", predicted_label[0])
\ No newline at end of file
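
Note: the added visualization relies on the external Graphviz binaries and a hard-coded Windows path. Where Graphviz is not installed, the same tree can be drawn with scikit-learn's built-in, matplotlib-based tree.plot_tree. The sketch below is not part of the patch; it reuses clf and the feature/class names from TreeConcept.py, assumes matplotlib is available, and the output filename decision_tree.png is an illustrative choice, not the PDF produced by graph.render.

# Rough alternative sketch (assumes matplotlib is installed); not part of the patch above.
import matplotlib.pyplot as plt
from sklearn import tree

fig, ax = plt.subplots(figsize=(10, 6))
tree.plot_tree(
    clf,  # the DecisionTreeClassifier trained in TreeConcept.py
    feature_names=['Attr1', 'Attr2', 'Attr3', 'Attr4', 'Attr5', 'Attr6', 'Attr7', 'Attr8'],
    class_names=['A', 'B'],
    filled=True,
    ax=ax,
)
fig.savefig("decision_tree.png")  # hypothetical output path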