From d8482988ff9e4445172998fce795d98167c1b531 Mon Sep 17 00:00:00 2001
From: nuluh
Date: Sun, 31 Aug 2025 13:01:04 +0700
Subject: [PATCH] feat(ml): add classification report generation to model
 evaluation to show all metrics during training

---
 code/src/ml/model_selection.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/code/src/ml/model_selection.py b/code/src/ml/model_selection.py
index 2c10474..2e28573 100644
--- a/code/src/ml/model_selection.py
+++ b/code/src/ml/model_selection.py
@@ -2,7 +2,7 @@ import numpy as np
 import pandas as pd
 import os
 import matplotlib.pyplot as plt
-from sklearn.metrics import confusion_matrix, ConfusionMatrixDisplay
+from sklearn.metrics import confusion_matrix, ConfusionMatrixDisplay, classification_report
 from joblib import load
 
 def create_ready_data(
@@ -159,6 +159,7 @@ def train_and_evaluate_model(
                 # Continue despite export error
 
         result["success"] = True
+        result["classification_report"] = classification_report(y_test, y_pred, output_dict=True)
         return result
 
     except Exception as e: