diff --git a/your-code/main.ipynb b/your-code/main.ipynb
index 406a5e2..ac3d3eb 100644
--- a/your-code/main.ipynb
+++ b/your-code/main.ipynb
@@ -80,18 +80,25 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "#Create and Train a Decision Tree Classifier and print the train and test accuracy\n",
+    "# Create and Train a Decision Tree Classifier and print the train and test accuracy\n",
     "\n",
     "from sklearn.tree import DecisionTreeClassifier\n",
     "from sklearn.metrics import accuracy_score, mean_squared_error\n",
     "\n",
     "# Train Decision Tree\n",
-    "\n",
+    "dt_clf = DecisionTreeClassifier(random_state=0)\n",
+    "dt_clf.fit(X_train_scaled, y_train)\n",
     "\n",
     "# Predictions and evaluation\n",
+    "y_train_pred = dt_clf.predict(X_train_scaled)\n",
+    "y_test_pred = dt_clf.predict(X_test_scaled)\n",
     "\n",
+    "# Evaluate performance\n",
+    "train_accuracy = accuracy_score(y_train, y_train_pred)\n",
+    "test_accuracy = accuracy_score(y_test, y_test_pred)\n",
     "\n",
-    "# Evaluate performance\n"
+    "print(f\"Train Accuracy: {train_accuracy}\")\n",
+    "print(f\"Test Accuracy: {test_accuracy}\")"
    ]
   },
   {
@@ -147,12 +154,19 @@
     "from sklearn.ensemble import BaggingClassifier\n",
     "\n",
     "# Train BaggingClassifier\n",
-    "\n",
+    "weak_bagging_clf = BaggingClassifier(base_estimator=DecisionTreeClassifier(max_depth=1), n_estimators=100, random_state=0)\n",
+    "weak_bagging_clf.fit(X_train_scaled, y_train)\n",
     "\n",
     "# Predictions and evaluation\n",
+    "y_train_weak_bagging_pred = weak_bagging_clf.predict(X_train_scaled)\n",
+    "y_test_weak_bagging_pred = weak_bagging_clf.predict(X_test_scaled)\n",
     "\n",
+    "# Evaluate performance\n",
+    "train_weak_bagging_accuracy = accuracy_score(y_train, y_train_weak_bagging_pred)\n",
+    "test_weak_bagging_accuracy = accuracy_score(y_test, y_test_weak_bagging_pred)\n",
     "\n",
-    "# Evaluate performance\n"
+    "print(f\"Weak Bagging Train Accuracy: {train_weak_bagging_accuracy}\")\n",
+    "print(f\"Weak Bagging Test Accuracy: {test_weak_bagging_accuracy}\")"
    ]
   },
   {
@@ -204,12 +218,19 @@
     "from sklearn.ensemble import AdaBoostClassifier\n",
     "\n",
     "# Train AdaBoost\n",
-    "\n",
+    "adaboost_clf = AdaBoostClassifier(base_estimator=DecisionTreeClassifier(max_depth=1), n_estimators=100, random_state=0)\n",
+    "adaboost_clf.fit(X_train_scaled, y_train)\n",
     "\n",
     "# Predictions and evaluation\n",
+    "y_train_adaboost_pred = adaboost_clf.predict(X_train_scaled)\n",
+    "y_test_adaboost_pred = adaboost_clf.predict(X_test_scaled)\n",
     "\n",
+    "# Evaluate performance\n",
+    "train_adaboost_accuracy = accuracy_score(y_train, y_train_adaboost_pred)\n",
+    "test_adaboost_accuracy = accuracy_score(y_test, y_test_adaboost_pred)\n",
     "\n",
-    "# Evaluate performance\n"
+    "print(f\"AdaBoost Train Accuracy: {train_adaboost_accuracy}\")\n",
+    "print(f\"AdaBoost Test Accuracy: {test_adaboost_accuracy}\")"
    ]
   },
   {
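
Note on the API used above: `BaggingClassifier(base_estimator=...)` and `AdaBoostClassifier(base_estimator=...)` rely on the older scikit-learn keyword; the argument was renamed to `estimator` in scikit-learn 1.2 and later removed, so these cells fail on current releases. Below is a minimal sketch of the equivalent calls for a recent scikit-learn install, assuming the notebook's `X_train_scaled`, `X_test_scaled`, `y_train`, and `y_test` are already defined in earlier cells.

```python
# Sketch for scikit-learn >= 1.2, where `base_estimator` is spelled `estimator`.
# Assumes X_train_scaled, X_test_scaled, y_train, y_test come from earlier notebook cells.
from sklearn.ensemble import AdaBoostClassifier, BaggingClassifier
from sklearn.metrics import accuracy_score
from sklearn.tree import DecisionTreeClassifier

stump = DecisionTreeClassifier(max_depth=1)  # weak learner; scikit-learn clones it per ensemble

weak_bagging_clf = BaggingClassifier(estimator=stump, n_estimators=100, random_state=0)
adaboost_clf = AdaBoostClassifier(estimator=stump, n_estimators=100, random_state=0)

for name, clf in [("Weak Bagging", weak_bagging_clf), ("AdaBoost", adaboost_clf)]:
    clf.fit(X_train_scaled, y_train)
    print(f"{name} Train Accuracy: {accuracy_score(y_train, clf.predict(X_train_scaled))}")
    print(f"{name} Test Accuracy: {accuracy_score(y_test, clf.predict(X_test_scaled))}")
```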