diff --git a/1.py b/1.py
deleted file mode 100644
index eecf7d7..0000000
--- a/1.py
+++ /dev/null
@@ -1,25 +0,0 @@
-from sklearn.datasets import load_iris
-from sklearn.tree import DecisionTreeClassifier
-from sklearn.model_selection import train_test_split
-from sklearn import metrics
-
-# Load the Iris dataset (or you can use your own dataset)
-iris = load_iris()
-X = iris.data
-y = iris.target
-
-# Split the dataset into training and testing sets
-X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)
-
-# Create an instance of the DecisionTreeClassifier
-clf = DecisionTreeClassifier()
-
-# Train the decision tree classifier
-clf.fit(X_train, y_train)
-
-# Make predictions on the testing set
-y_pred = clf.predict(X_test)
-
-# Evaluate the accuracy of the model
-accuracy = metrics.accuracy_score(y_test, y_pred)
-print("Accuracy:", accuracy)
\ No newline at end of file
diff --git a/TreeConcept.py b/TreeConcept.py
new file mode 100644
index 0000000..2335209
--- /dev/null
+++ b/TreeConcept.py
@@ -0,0 +1,30 @@
+from sklearn import tree
+
+# Define the training dataset: each row holds 7 binary attributes followed by a class label
+training_data = [
+    [1, 0, 0, 1, 0, 1, 1, 'A'],
+    [1, 0, 0, 0, 1, 1, 1, 'A'],
+    [0, 1, 0, 1, 0, 1, 1, 'B'],
+    [0, 0, 0, 1, 0, 0, 1, 'B'],
+    [0, 1, 1, 0, 1, 0, 0, 'B'],
+    [1, 0, 0, 0, 1, 0, 1, 'A'],
+    [0, 0, 0, 1, 0, 0, 0, 'B'],
+    [1, 1, 0, 1, 1, 1, 0, 'A'],
+    [0, 0, 0, 0, 0, 0, 1, 'B'],
+    [0, 0, 1, 0, 0, 1, 0, 'B']
+]
+
+# Separate the attributes and labels
+X_train = [data[:-1] for data in training_data]
+y_train = [data[-1] for data in training_data]
+
+# Create the decision tree classifier using entropy (information gain) as the split criterion
+clf = tree.DecisionTreeClassifier(criterion='entropy')
+
+# Train the decision tree on the training data
+clf.fit(X_train, y_train)
+
+# Test the decision tree with a new, unseen example
+new_example = [1, 0, 0, 1, 1, 0, 0]  # Example with 7 attributes
+predicted_label = clf.predict([new_example])
+print("Predicted Label:", predicted_label[0])
\ No newline at end of file