This commit is contained in:
2025-06-18 12:17:59 +00:00
parent f5b7f3d19c
commit ecb0c722fa

View File

@@ -9240,7 +9240,7 @@ class DataProcessor:
# Feature-Liste speichern (bleibt unverändert)
self._expected_features = feature_columns_ml
try:
-patterns_data = {"feature_columns": self._expected_features, "target_classes": list(dt_classifier.classes_)}
+patterns_data = {"feature_columns": self._expected_features, "target_classes": list(rf_classifier.classes_)} # << KORRIGIERT
patterns_dir = os.path.dirname(patterns_out)
if patterns_dir and not os.path.exists(patterns_dir):
os.makedirs(patterns_dir, exist_ok=True)
@@ -9256,26 +9256,26 @@ class DataProcessor:
# 5. Evaluation (Optional, aber empfohlen, um die Modellleistung zu bewerten)
self.logger.info("Starte Modellevaluation...")
-y_pred = dt_classifier.predict(X_test_imputed)
+y_pred = rf_classifier.predict(X_test_imputed) # << KORRIGIERT
accuracy = accuracy_score(y_test, y_pred)
self.logger.info(f"Modell Genauigkeit auf dem Testset: {accuracy:.4f}")
-class_report_labels = list(dt_classifier.classes_) # Sicherstellen, dass es eine Liste ist
+class_report_labels = list(rf_classifier.classes_) # << KORRIGIERT
class_report = classification_report(y_test, y_pred, zero_division=0, labels=class_report_labels, target_names=[str(c) for c in class_report_labels])
self.logger.info(f"Klassifikationsbericht auf dem Testset:\n{class_report}")
cm = confusion_matrix(y_test, y_pred, labels=class_report_labels)
self.logger.info(f"Konfusionsmatrix auf dem Testset (Zeilen=Wahr, Spalten=Vorhersage):\n{cm}")
# Block für Feature Importance
try:
self.logger.info("Feature Importance (Top 15):")
-importances = rf_classifier.feature_importances_
+importances = rf_classifier.feature_importances_ # << Hier war es schon korrekt
feature_importance_df = pd.DataFrame({
'Feature': feature_columns_ml,
'Importance': importances
}).sort_values(by='Importance', ascending=False)
# Ausgabe der Top 15 Features
self.logger.info(f"\n{feature_importance_df.head(15).to_string(index=False)}")
except Exception as e_feat_imp:
self.logger.warning(f"FEHLER beim Berechnen/Anzeigen der Feature Importance: {e_feat_imp}")