# main_config.cfg
[args]
# BERT pre-trained model, selected from [bert-base-cased, roberta-base, albert-base-v1, albert-large-v1] (default = roberta-base)
bert_model = roberta-base
# The input data directory. Should contain the .tsv files for the chosen dataset (VUA18 / VUAverb / MOH-X/CLS / TroFi/CLS / VUA20)
data_dir = data/VUA20
# The name of the task to train: vua (1-fold) or trofi (10-fold)
task_name = vua
# The model type (BERT_BASE / BERT_SEQ / MELBERT_SPV / MELBERT_MIP / MELBERT) (default = MELBERT)
model_type = MELBERT
# The hidden dimension of the classifier (default = 768)
classifier_hidden = 768
# Learning rate scheduler (default = warmup_linear) (none / warmup_linear)
lr_schedule = warmup_linear
# Number of training epochs over which to linearly warm up the learning rate (default = 2)
warmup_epoch = 2
# Dropout ratio (default = 0.2)
drop_ratio = 0.2
# K-fold (default = 10)
kfold = 10
# Number of bagging runs (default = 0; 0 disables the bagging technique)
num_bagging = 0
# Index of the current bagging run, used only when bagging is enabled (default = 0)
bagging_index = 0
# Additional linguistic features
# Use the POS tag feature (default = True)
use_pos = True
# Use the local context feature (default = True)
use_local_context = True
# The maximum total input sequence length after WordPiece tokenization. (default = 200)
max_seq_length = 150
# Whether to run training (default = False)
do_train = True
# Whether to run eval on the test set (default = False)
do_test = True
# Whether to run eval on the dev set. (default = False)
do_eval = True
# Set this flag if you are using an uncased model. (default = False)
do_lower_case = False
# Weight of the metaphor class (default = 3.0)
class_weight = 3
# Total batch size for training. (default = 32)
train_batch_size = 32
# Total batch size for eval. (default = 8)
eval_batch_size = 8
# The initial learning rate for Adam (default = 3e-5)
learning_rate = 3e-5
# Total number of training epochs to perform. (default = 3.0)
num_train_epoch = 3
# Whether to disable CUDA even when it is available (default = False)
no_cuda = False
# Random seed for initialization (default = 42)
seed = 42
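
# Usage note (not part of the training arguments above): the [args] section follows
# standard INI syntax, so one way to load it is Python's built-in configparser. The
# snippet below is a minimal sketch under that assumption; the actual training script
# may read and convert these values differently.
#
#   import configparser
#
#   config = configparser.ConfigParser()
#   config.read("main_config.cfg")
#   args = config["args"]
#
#   # Typed accessors convert the string values stored in this file.
#   bert_model = args.get("bert_model")              # "roberta-base"
#   learning_rate = args.getfloat("learning_rate")   # 3e-05
#   max_seq_length = args.getint("max_seq_length")   # 150
#   do_train = args.getboolean("do_train")           # True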