-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathutils.py
More file actions
126 lines (110 loc) · 7 KB
/
utils.py
File metadata and controls
126 lines (110 loc) · 7 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
import pandas as pd
from lightning.pytorch.callbacks import Callback, ModelCheckpoint, RichProgressBar
from lightning.pytorch.callbacks.early_stopping import EarlyStopping
class CustomCallback:
    """Factory for the checkpointing and early-stopping callbacks of a run.

    The monitored metric and the checkpoint filename template depend on the
    task (``flores`` or ``bmlama``) and on whether ``args.method`` is plain
    fine-tuning or an unlearning method.
    """

    def __init__(self, args):
        self.output_dir = args.output_dir
        self.max_tolerance = args.max_tolerance
        self.method = args.method
        self.save_top_k = 3
        # Every monitored quantity below is "lower is better".
        self.mode = "min"

        finetuning = args.method == "finetune"
        if args.task == "flores":
            if finetuning:
                self.monitor = "val/xppl"
                self.filename = "xppl={val/xppl:.2f}"
            else:
                self.monitor = "val/forget_xma"
                self.filename = "fxma={val/forget_xma:.4f}-xppl={val/xppl:.2f}-fxppl={val/forget_xppl:.2f}"
        elif args.task == "bmlama":
            if finetuning:
                self.monitor = "val/sent_ppl"
                self.filename = "sent_ppl={val/sent_ppl:.2f}"
            else:
                self.monitor = "val/forget_xpa"
                # NOTE(review): this template reads val/sent_xppl and
                # val/forget_sent_xppl, while the metric tracker below handles
                # *sent_ppl* keys — confirm the module really logs these names.
                self.filename = "fxpa={val/forget_xpa:.4f}-xppl={val/sent_xppl:.2f}-fxppl={val/forget_sent_xppl:.2f}"
        else:
            raise ValueError(f"Task {args.task} not supported.")

    def load_checkpoint_callback(self):
        """Return a ModelCheckpoint that keeps the top-k checkpoints by the monitored metric."""
        return ModelCheckpoint(
            dirpath=self.output_dir,
            filename=self.filename,
            monitor=self.monitor,
            mode=self.mode,
            save_top_k=self.save_top_k,
            save_last=False,
            save_weights_only=True,
            verbose=True,
            auto_insert_metric_name=False,
        )

    def load_early_stopping_callback(self):
        """Return an EarlyStopping callback with patience ``max_tolerance``."""
        return EarlyStopping(
            monitor=self.monitor,
            mode=self.mode,
            patience=self.max_tolerance,
            verbose=True,
        )
class CustomMetricTracker(Callback):
    """Dump logged validation/test metrics to CSV files under ``output_dir``.

    Metrics are selected from ``trainer.logged_metrics`` by substring filters
    on the metric key (``forget``, ``ppl``, ``ma``, ``pa``, ``sent``) and
    written as one-row DataFrames with the ``val/``/``test/`` prefix stripped
    from column names.
    """

    def __init__(self, output_dir):
        # Directory that receives every CSV dump.
        self.output_dir = output_dir

    def _dump(self, trainer, stage, keep, fname, append=False):
        """Write metrics whose key satisfies ``keep`` to ``<output_dir>/<fname>.csv``.

        stage: ``"val"`` or ``"test"`` — this prefix is stripped from columns.
        append: use append mode for the per-epoch validation dumps.
            NOTE(review): pandas re-writes the header on every ``mode="a"``
            call, so the val CSVs contain one header row per validation pass —
            preserved here because downstream parsing may rely on it.
        """
        df = pd.DataFrame(
            {k: [v.item()] for k, v in trainer.logged_metrics.items() if keep(k)}
        )
        df.rename(columns=lambda c: c.replace(f"{stage}/", ""), inplace=True)
        df.to_csv(f"{self.output_dir}/{fname}.csv", index=False, mode="a" if append else "w")

    def on_validation_end(self, trainer, pl_module):
        """Append this epoch's retain/forget metrics to the validation CSVs."""
        # Token-level perplexities, retain vs. forget split ("sent" excluded).
        self._dump(trainer, "val",
                   lambda k: "forget" not in k and "ppl" in k and "sent" not in k,
                   "val_ppl", append=True)
        self._dump(trainer, "val",
                   lambda k: "forget" in k and "ppl" in k and "sent" not in k,
                   "val_forget_ppl", append=True)
        if pl_module.hparams.task == "flores":
            #! validation MA not measured for fast validation
            self._dump(trainer, "val",
                       lambda k: "forget" in k and "ma" in k,
                       "val_forget_ma", append=True)
        elif pl_module.hparams.task == "bmlama":
            #! validation PA not measured for fast validation
            self._dump(trainer, "val",
                       lambda k: "forget" in k and "pa" in k,
                       "val_forget_pa", append=True)
            # Sentence-level perplexities only exist for bmlama.
            self._dump(trainer, "val",
                       lambda k: "forget" in k and "sent_ppl" in k,
                       "val_forget_sent_ppl", append=True)
            self._dump(trainer, "val",
                       lambda k: "forget" not in k and "sent_ppl" in k,
                       "val_sent_ppl", append=True)

    def on_test_end(self, trainer, pl_module):
        """Write the final test metrics, one CSV per metric family (overwrite)."""
        self._dump(trainer, "test",
                   lambda k: "forget" not in k and "ppl" in k and "sent" not in k,
                   "test_ppl")
        self._dump(trainer, "test",
                   lambda k: "forget" in k and "ppl" in k and "sent" not in k,
                   "test_forget_ppl")
        if pl_module.hparams.task == "flores":
            self._dump(trainer, "test",
                       lambda k: "forget" in k and "ma" in k,
                       "test_forget_ma")
            self._dump(trainer, "test",
                       lambda k: "forget" not in k and "ma" in k,
                       "test_ma")
        elif pl_module.hparams.task == "bmlama":
            self._dump(trainer, "test",
                       lambda k: "forget" in k and "pa" in k,
                       "test_forget_pa")
            self._dump(trainer, "test",
                       lambda k: "forget" in k and "sent_ppl" in k,
                       "test_forget_sent_ppl")
            self._dump(trainer, "test",
                       lambda k: "forget" not in k and "pa" in k,
                       "test_pa")
            self._dump(trainer, "test",
                       lambda k: "forget" not in k and "sent_ppl" in k,
                       "test_sent_ppl")
class CustomRichProgressBar(RichProgressBar):
    """RichProgressBar that hides Lightning's version number from the bar.

    The redundant ``__init__`` override (which only delegated to ``super``)
    has been removed; the parent initializer is used directly.
    """

    def get_metrics(self, trainer, model):
        """Return the progress-bar metrics without the ``v_num`` entry."""
        items = super().get_metrics(trainer, model)
        items.pop("v_num", None)  # don't show the version number
        return items