diff --git a/machine_learning/classification_models.py b/machine_learning/classification_models.py
index 852a36c..6a2bb1a 100644
--- a/machine_learning/classification_models.py
+++ b/machine_learning/classification_models.py
@@ -60,36 +60,60 @@ class ClassificationModels:
             print("\n************************************\n")
             print("Current model:", model_title, end="\n")
             print("Acc:", model["metrics"][0] / n_clusters)
-            scores_df.append(
-                {
-                    "method": model_title,
-                    "metric": "test_accuracy",
-                    "mean": model["metrics"][0] / n_clusters,
-                }
+            scores_df = pd.concat(
+                [
+                    scores_df,
+                    pd.DataFrame([
+                        {
+                            "method": model_title,
+                            "metric": "test_accuracy",
+                            "mean": model["metrics"][0] / n_clusters,
+                        }
+                    ]),
+                ],
+                ignore_index=True,
             )
             print("Precision:", model["metrics"][1] / n_clusters)
-            scores_df.append(
-                {
-                    "method": model_title,
-                    "metric": "test_precision",
-                    "mean": model["metrics"][1] / n_clusters,
-                }
+            scores_df = pd.concat(
+                [
+                    scores_df,
+                    pd.DataFrame([
+                        {
+                            "method": model_title,
+                            "metric": "test_precision",
+                            "mean": model["metrics"][1] / n_clusters,
+                        }
+                    ]),
+                ],
+                ignore_index=True,
             )
             print("Recall:", model["metrics"][2] / n_clusters)
-            scores_df.append(
-                {
-                    "method": model_title,
-                    "metric": "test_recall",
-                    "mean": model["metrics"][2] / n_clusters,
-                }
+            scores_df = pd.concat(
+                [
+                    scores_df,
+                    pd.DataFrame([
+                        {
+                            "method": model_title,
+                            "metric": "test_recall",
+                            "mean": model["metrics"][2] / n_clusters,
+                        }
+                    ]),
+                ],
+                ignore_index=True,
             )
             print("F1:", model["metrics"][3] / n_clusters)
-            scores_df.append(
-                {
-                    "method": model_title,
-                    "metric": "test_f1",
-                    "mean": model["metrics"][3] / n_clusters,
-                }
+            scores_df = pd.concat(
+                [
+                    scores_df,
+                    pd.DataFrame([
+                        {
+                            "method": model_title,
+                            "metric": "test_f1",
+                            "mean": model["metrics"][3] / n_clusters,
+                        }
+                    ]),
+                ],
+                ignore_index=True,
             )
         scores = pd.concat([scores, scores_df])
         return scores