forked from caodoanh2001/COSFormer
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathcontinual_trainer.py
More file actions
38 lines (31 loc) · 1.12 KB
/
continual_trainer.py
File metadata and controls
38 lines (31 loc) · 1.12 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
# All commands
#
# Continual-learning driver: runs train.py once per config file found in
# CONFIG_DIR (in sorted order), chaining each task's final checkpoint into
# the next task via --model_path.
import os
import glob
import time

CONFIG_DIR = "./configs/"
# Root where each task's logs/checkpoints land; the layout mirrors
# logs_<seq>/configs/<seq>/fold<fold>/  (see latest_checkpoint below).
CKPT_ROOT = "./logs_dir_continual_learning_past_logits/"  # logs_camel/configs/camel/fold0"
FOLD = 2
GPU = 0


def build_command(config_file, fold, gpu, model_path=None):
    """Return the shell command string for one training task.

    model_path, when given, makes the task resume from a previous task's
    checkpoint (this is what chains the continual-learning sequence).
    NOTE(review): CUDA_VISIBLE_DEVICES is hard-coded to 3 while --gpu is
    passed separately as 0 — confirm train.py interprets --gpu as an index
    into the visible-device mask, otherwise these two settings conflict.
    """
    args = [
        "CUDA_VISIBLE_DEVICES=3 python train.py",
        "--config", config_file,
        "--stage", "train",
        "--fold", str(fold),
        "--gpu", str(gpu),
    ]
    if model_path is not None:
        args += ["--model_path", model_path]
    return " ".join(args)


def latest_checkpoint(ckpt_root, config_file, fold):
    """Return the last (lexicographically greatest) .ckpt file for this task.

    Raises FileNotFoundError with an explicit message when no checkpoint
    exists (the original code raised an opaque IndexError from `[-1]`).
    """
    # e.g. "./configs/camel.yaml" -> "camel"
    data_seq = config_file.split("/")[-1].split(".")[0]
    ckpt_dir = (
        ckpt_root + "logs_" + data_seq + "/" + "configs" + "/"
        + data_seq + "/" + "fold" + str(fold) + "/"
    )
    # sorted() fixes the original bug: glob order is arbitrary, so taking
    # [-1] of an unsorted list could chain in any checkpoint, not the latest.
    ckpts = sorted(glob.glob(ckpt_dir + "/*.ckpt"))
    if not ckpts:
        raise FileNotFoundError("no .ckpt files found under " + ckpt_dir)
    return ckpts[-1]


def main():
    """Run every config in order, threading checkpoints between tasks."""
    all_continual_configs = sorted(glob.glob(CONFIG_DIR + "/*"))
    prev_ckpt = None  # first task starts from scratch (no --model_path)
    for config_file in all_continual_configs:
        os.system(build_command(config_file, FOLD, GPU, model_path=prev_ckpt))
        prev_ckpt = latest_checkpoint(CKPT_ROOT, config_file, FOLD)


if __name__ == "__main__":
    main()