mpkpts_evaluate.py
import argparse
import pickle
from pathlib import Path

import pandas as pd

from src import constants
from src.modelevaluator import ModelEvaluator

# Parse the folder containing the extracted features and training artifacts.
parser = argparse.ArgumentParser()
parser.add_argument(
    "--folder",
    type=str,
    required=True,
    help="Folder containing the extracted features.",
)
args = parser.parse_args()

path_data = Path(args.folder)

# Load the held-out test labels and the per-classifier training results.
y_test = pd.read_pickle(path_data / "y_test.pkl")
with open(path_data / "training_results.pkl", "rb") as handle:
    training_results = pickle.load(handle)

# Keep only the positive-class probabilities (column 1) for each classifier.
y_proba_dict = {
    classifier_name: model_details.get("y_proba_test")[:, 1]
    for classifier_name, model_details in training_results.items()
}

# Create the "report" folder if it doesn't exist.
report_folder = Path("report") / constants.MODEL_NAME
report_folder.mkdir(parents=True, exist_ok=True)

# Evaluate performance on the testing data and export metrics and plots.
evaluator = ModelEvaluator(y_true=y_test, y_proba_dict=y_proba_dict, threshold=0.5)
evaluator.get_metrics(export="all", filename=str(report_folder / "stats"))
evaluator.plot_roc_curve(export="save", filename=str(report_folder / "roc"))
evaluator.plot_precision_recall_curve(
    export="save", filename=str(report_folder / "precision_recall")
)
evaluator.plot_confusion_matrix(
    export="save", filename=str(report_folder / "confusion_matrix")
)
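
The script implies a data contract for its input folder: `y_test.pkl` holds the test labels, and `training_results.pkl` is a dict mapping each classifier name to a details dict whose `y_proba_test` entry is an `(n_samples, 2)` probability array (as produced by a scikit-learn-style `predict_proba`). Below is a minimal sketch of writing compatible input files; the folder name, classifier name, and random data are made up purely for illustration.

```python
import pickle
from pathlib import Path

import numpy as np
import pandas as pd

# Hypothetical example: fabricate inputs in the shape mpkpts_evaluate.py expects.
folder = Path("demo_features")
folder.mkdir(exist_ok=True)

n = 100
rng = np.random.default_rng(0)

# Binary ground-truth labels for the test split.
pd.to_pickle(pd.Series(rng.integers(0, 2, size=n)), folder / "y_test.pkl")

# Per-classifier results; each y_proba_test row is [P(class 0), P(class 1)].
p1 = rng.random(n)
training_results = {
    "random_forest": {"y_proba_test": np.column_stack([1 - p1, p1])},
}
with open(folder / "training_results.pkl", "wb") as handle:
    pickle.dump(training_results, handle)
```

With files like these in place, the evaluation would run as `python mpkpts_evaluate.py --folder demo_features`, writing its metrics and plots under `report/<MODEL_NAME>/`.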