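"""Perform out-of-distribution (OOD) evaluation with a trained VQ-VAE + transformer.

Parses command-line arguments, builds a TransformerTrainer, and calls its
ood method on the folder(s) given by --ood_dir.
"""
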
import argparse
import ast
from src.trainers import TransformerTrainer


def parse_args():
    parser = argparse.ArgumentParser()
    parser.add_argument("--seed", type=int, default=2, help="Random seed to use.")
    parser.add_argument("--output_dir", help="Location for models.")
    parser.add_argument("--model_name", help="Name of model.")
    parser.add_argument("--training_dir", help="Location of folder with training niis.")
    parser.add_argument("--validation_dir", help="Location of folder with validation niis.")
    parser.add_argument(
        "--ood_dir", help="Location or list of locations of folder(s) to perform OOD on."
    )

    # model params
    parser.add_argument("--vqvae_checkpoint", help="Path to a VQ-VAE model checkpoint.")
    parser.add_argument(
        "--spatial_dimension", default=3, type=int, help="Dimension of images: 2d or 3d."
    )
    parser.add_argument("--image_size", default=None, help="Resize images.")
    parser.add_argument(
        "--image_roi",
        default=[176, 208, 176],
        help="Specify central ROI crop of inputs, as a tuple, with -1 to not crop a dimension.",
        type=ast.literal_eval,
    )
parser.add_argument("--transformer_checkpoint", help="Path to a VQ-VAE model checkpoint.")
parser.add_argument(
"--transformer_type",
default="transformer",
help="transformer or performer or memory-efficient",
)
parser.add_argument(
"--transformer_max_seq_length",
default=None,
help="Maximum sequence length for transformer.",
)

    # training params
    parser.add_argument("--batch_size", type=int, default=4, help="Training batch size.")
    parser.add_argument("--n_epochs", type=int, default=300, help="Number of epochs to train.")
    parser.add_argument(
        "--eval_freq",
        type=int,
        default=10,
        help="Number of epochs between evaluations.",
    )
    parser.add_argument("--num_workers", type=int, default=8, help="Number of loader workers.")
    parser.add_argument(
        "--cache_data",
        type=int,
        default=1,
        help="Whether or not to cache data in dataloaders.",
    )
    parser.add_argument(
        "--checkpoint_every",
        type=int,
        default=50,
        help="Save a checkpoint every checkpoint_every epochs.",
    )
    parser.add_argument(
        "--quick_test",
        default=0,
        type=int,
        help="If 1, runs through a single batch of the train and eval loops.",
    )

    args = parser.parse_args()
    return args
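

# Example single-process invocation (all paths below are placeholders):
#   python perform_ood.py \
#       --output_dir /path/to/models --model_name my_model \
#       --training_dir /path/to/train_niis --validation_dir /path/to/val_niis \
#       --ood_dir /path/to/ood_niis \
#       --vqvae_checkpoint /path/to/vqvae.pt \
#       --transformer_checkpoint /path/to/transformer.pt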
# To run using DDP:
#   torchrun --nproc_per_node=1 --nnodes=1 --node_rank=0 perform_ood.py --args
if __name__ == "__main__":
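    # Build the trainer from the parsed args and run OOD evaluation on --ood_dir.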
    args = parse_args()
    trainer = TransformerTrainer(args)
    trainer.ood(args)