Skip to content
This repository has been archived by the owner on Dec 1, 2021. It is now read-only.

Commit

Permalink
Change learning rate schedule text to be more readable
Browse files Browse the repository at this point in the history
  • Loading branch information
ruimashita committed Dec 26, 2018
1 parent e3a5675 commit ea8f63a
Show file tree
Hide file tree
Showing 11 changed files with 49 additions and 47 deletions.
15 changes: 8 additions & 7 deletions blueoil/blueoil_init.py
Original file line number Diff line number Diff line change
Expand Up @@ -102,12 +102,12 @@
# ]


learning_rate_schedule_map = OrderedDict({
"constant": "'constant' -> constant learning rate.",
"2-step-decay": "'2-step-decay' -> learning rate reduce to 1/10 on epochs/2 and epochs-1.",
"3-step-decay": "'3-step-decay' -> learning rate reduce to 1/10 on epochs/3 and epochs*2/3 and epochs-1",
"3-step-decay-with-warmup": "'3-step-decay-with-warmup' -> warmup learning rate 1/1000 in first epoch, then train same as '3-step-decay'",
})
learning_rate_schedule_map = OrderedDict([
("constant", "'constant' -> constant learning rate."),
("2-step-decay", "'2-step-decay' -> learning rate decrease by 1/10 on {epochs}/2 and {epochs}-1."),
("3-step-decay", "'3-step-decay' -> learning rate decrease by 1/10 on {epochs}/3 and {epochs}*2/3 and {epochs}-1"),
("3-step-decay-with-warmup", "'3-step-decay-with-warmup' -> warmup learning rate 1/1000 in first epoch, then train same as '3-step-decay'"),
])


def network_name_choices(task_type):
Expand Down Expand Up @@ -304,7 +304,8 @@ def ask_questions():
learning_rate_schedule_question = {
'type': 'rawlist',
'name': 'value',
'message': 'choose learning rate schedule:',
'message': 'choose learning rate schedule \
({epochs} is the number of training epochs you entered before):',
'choices': list(learning_rate_schedule_map.values()),
'default': learning_rate_schedule_map["constant"],
}
Expand Down
8 changes: 4 additions & 4 deletions blueoil/templates/blueoil-config.tpl.yml
Original file line number Diff line number Diff line change
Expand Up @@ -11,11 +11,11 @@ dataset:
trainer:
batch_size: {{ batch_size }}
epochs: {{ training_epochs }}
# supported 'learning_rate_schedule' is 'constant', '2-step-decay', '3-step-decay', '3-step-decay-with-warmup'.
# supported 'learning_rate_schedule' is 'constant', '2-step-decay', '3-step-decay', '3-step-decay-with-warmup' ({epochs} is the number of training epochs you entered before).
# 'constant' -> constant learning rate.
# '2-step-decay' -> learning rate reduce to 1/10 on epochs/2 and epochs-1.
# '3-step-decay' -> learning rate reduce to 1/10 on epochs/3 and epochs*2/3 and epochs-1
# '3-step-decay-with-warmup' -> warmup learning rate 1/1000 in first epoch, then train same as '3-step-decay'
# '2-step-decay' -> learning rate decrease by 1/10 on {epochs}/2 and {epochs}-1.
# '3-step-decay' -> learning rate decrease by 1/10 on {epochs}/3 and {epochs}*2/3 and {epochs}-1.
# '3-step-decay-with-warmup' -> warmup learning rate 1/1000 in first epoch, then train same as '3-step-decay'.
learning_rate_schedule: {{ learning_rate_schedule }}
initial_learning_rate: {{ initial_learning_rate_value }}

Expand Down
8 changes: 4 additions & 4 deletions tests/config/caltech101_classification.yml
Original file line number Diff line number Diff line change
Expand Up @@ -11,11 +11,11 @@ dataset:
trainer:
batch_size: 1
epochs: 1
# supported 'learning_rate_schedule' is 'constant', '2-step-decay', '3-step-decay', '3-step-decay-with-warmup'.
# supported 'learning_rate_schedule' is 'constant', '2-step-decay', '3-step-decay', '3-step-decay-with-warmup' ({epochs} is the number of training epochs you entered before).
# 'constant' -> constant learning rate.
# '2-step-decay' -> learning rate reduce to 1/10 on epochs/2 and epochs-1.
# '3-step-decay' -> learning rate reduce to 1/10 on epochs/3 and epochs*2/3 and epochs-1
# '3-step-decay-with-warmup' -> warmup learning rate 1/1000 in first epoch, then train same as '3-step-decay'
# '2-step-decay' -> learning rate decrease by 1/10 on {epochs}/2 and {epochs}-1.
# '3-step-decay' -> learning rate decrease by 1/10 on {epochs}/3 and {epochs}*2/3 and {epochs}-1.
# '3-step-decay-with-warmup' -> warmup learning rate 1/1000 in first epoch, then train same as '3-step-decay'.
learning_rate_schedule: constant
initial_learning_rate: 0.001

Expand Down
8 changes: 4 additions & 4 deletions tests/config/caltech101_classification_has_validation.yml
Original file line number Diff line number Diff line change
Expand Up @@ -11,11 +11,11 @@ dataset:
trainer:
batch_size: 1
epochs: 1
# supported 'learning_rate_schedule' is 'constant', '2-step-decay', '3-step-decay', '3-step-decay-with-warmup'.
# supported 'learning_rate_schedule' is 'constant', '2-step-decay', '3-step-decay', '3-step-decay-with-warmup' ({epochs} is the number of training epochs you entered before).
# 'constant' -> constant learning rate.
# '2-step-decay' -> learning rate reduce to 1/10 on epochs/2 and epochs-1.
# '3-step-decay' -> learning rate reduce to 1/10 on epochs/3 and epochs*2/3 and epochs-1
# '3-step-decay-with-warmup' -> warmup learning rate 1/1000 in first epoch, then train same as '3-step-decay'
# '2-step-decay' -> learning rate decrease by 1/10 on {epochs}/2 and {epochs}-1.
# '3-step-decay' -> learning rate decrease by 1/10 on {epochs}/3 and {epochs}*2/3 and {epochs}-1.
# '3-step-decay-with-warmup' -> warmup learning rate 1/1000 in first epoch, then train same as '3-step-decay'.
learning_rate_schedule: constant
initial_learning_rate: 0.001

Expand Down
8 changes: 4 additions & 4 deletions tests/config/delta_mark_classification.yml
Original file line number Diff line number Diff line change
Expand Up @@ -11,11 +11,11 @@ dataset:
trainer:
batch_size: 1
epochs: 1
# supported 'learning_rate_schedule' is 'constant', '2-step-decay', '3-step-decay', '3-step-decay-with-warmup'.
# supported 'learning_rate_schedule' is 'constant', '2-step-decay', '3-step-decay', '3-step-decay-with-warmup' ({epochs} is the number of training epochs you entered before).
# 'constant' -> constant learning rate.
# '2-step-decay' -> learning rate reduce to 1/10 on epochs/2 and epochs-1.
# '3-step-decay' -> learning rate reduce to 1/10 on epochs/3 and epochs*2/3 and epochs-1
# '3-step-decay-with-warmup' -> warmup learning rate 1/1000 in first epoch, then train same as '3-step-decay'
# '2-step-decay' -> learning rate decrease by 1/10 on {epochs}/2 and {epochs}-1.
# '3-step-decay' -> learning rate decrease by 1/10 on {epochs}/3 and {epochs}*2/3 and {epochs}-1.
# '3-step-decay-with-warmup' -> warmup learning rate 1/1000 in first epoch, then train same as '3-step-decay'.
learning_rate_schedule: constant
initial_learning_rate: 0.001

Expand Down
8 changes: 4 additions & 4 deletions tests/config/delta_mark_classification_has_validation.yml
Original file line number Diff line number Diff line change
Expand Up @@ -11,11 +11,11 @@ dataset:
trainer:
batch_size: 1
epochs: 1
# supported 'learning_rate_schedule' is 'constant', '2-step-decay', '3-step-decay', '3-step-decay-with-warmup'.
# supported 'learning_rate_schedule' is 'constant', '2-step-decay', '3-step-decay', '3-step-decay-with-warmup' ({epochs} is the number of training epochs you entered before).
# 'constant' -> constant learning rate.
# '2-step-decay' -> learning rate reduce to 1/10 on epochs/2 and epochs-1.
# '3-step-decay' -> learning rate reduce to 1/10 on epochs/3 and epochs*2/3 and epochs-1
# '3-step-decay-with-warmup' -> warmup learning rate 1/1000 in first epoch, then train same as '3-step-decay'
# '2-step-decay' -> learning rate decrease by 1/10 on {epochs}/2 and {epochs}-1.
# '3-step-decay' -> learning rate decrease by 1/10 on {epochs}/3 and {epochs}*2/3 and {epochs}-1.
# '3-step-decay-with-warmup' -> warmup learning rate 1/1000 in first epoch, then train same as '3-step-decay'.
learning_rate_schedule: constant
initial_learning_rate: 0.001

Expand Down
8 changes: 4 additions & 4 deletions tests/config/delta_mark_object_detection.yml
Original file line number Diff line number Diff line change
Expand Up @@ -11,11 +11,11 @@ dataset:
trainer:
batch_size: 1
epochs: 1
# supported 'learning_rate_schedule' is 'constant', '2-step-decay', '3-step-decay', '3-step-decay-with-warmup'.
# supported 'learning_rate_schedule' is 'constant', '2-step-decay', '3-step-decay', '3-step-decay-with-warmup' ({epochs} is the number of training epochs you entered before).
# 'constant' -> constant learning rate.
# '2-step-decay' -> learning rate reduce to 1/10 on epochs/2 and epochs-1.
# '3-step-decay' -> learning rate reduce to 1/10 on epochs/3 and epochs*2/3 and epochs-1
# '3-step-decay-with-warmup' -> warmup learning rate 1/1000 in first epoch, then train same as '3-step-decay'
# '2-step-decay' -> learning rate decrease by 1/10 on {epochs}/2 and {epochs}-1.
# '3-step-decay' -> learning rate decrease by 1/10 on {epochs}/3 and {epochs}*2/3 and {epochs}-1.
# '3-step-decay-with-warmup' -> warmup learning rate 1/1000 in first epoch, then train same as '3-step-decay'.
learning_rate_schedule: constant
initial_learning_rate: 0.001

Expand Down
8 changes: 4 additions & 4 deletions tests/config/delta_mark_object_detection_has_validation.yml
Original file line number Diff line number Diff line change
Expand Up @@ -11,11 +11,11 @@ dataset:
trainer:
batch_size: 1
epochs: 1
# supported 'learning_rate_schedule' is 'constant', '2-step-decay', '3-step-decay', '3-step-decay-with-warmup'.
# supported 'learning_rate_schedule' is 'constant', '2-step-decay', '3-step-decay', '3-step-decay-with-warmup' ({epochs} is the number of training epochs you entered before).
# 'constant' -> constant learning rate.
# '2-step-decay' -> learning rate reduce to 1/10 on epochs/2 and epochs-1.
# '3-step-decay' -> learning rate reduce to 1/10 on epochs/3 and epochs*2/3 and epochs-1
# '3-step-decay-with-warmup' -> warmup learning rate 1/1000 in first epoch, then train same as '3-step-decay'
# '2-step-decay' -> learning rate decrease by 1/10 on {epochs}/2 and {epochs}-1.
# '3-step-decay' -> learning rate decrease by 1/10 on {epochs}/3 and {epochs}*2/3 and {epochs}-1.
# '3-step-decay-with-warmup' -> warmup learning rate 1/1000 in first epoch, then train same as '3-step-decay'.
learning_rate_schedule: constant
initial_learning_rate: 0.001

Expand Down
9 changes: 5 additions & 4 deletions tests/config/make_yml_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -114,11 +114,12 @@
]

trainer_lr_schedule_comment = """\
# supported 'learning_rate_schedule' is 'constant', '2-step-decay', '3-step-decay', '3-step-decay-with-warmup'.
# supported 'learning_rate_schedule' is 'constant', '2-step-decay', '3-step-decay', '3-step-decay-with-warmup' \
({epochs} is the number of training epochs you entered before).
# 'constant' -> constant learning rate.
# '2-step-decay' -> learning rate reduce to 1/10 on epochs/2 and epochs-1.
# '3-step-decay' -> learning rate reduce to 1/10 on epochs/3 and epochs*2/3 and epochs-1
# '3-step-decay-with-warmup' -> warmup learning rate 1/1000 in first epoch, then train same as '3-step-decay'
# '2-step-decay' -> learning rate decrease by 1/10 on {epochs}/2 and {epochs}-1.
# '3-step-decay' -> learning rate decrease by 1/10 on {epochs}/3 and {epochs}*2/3 and {epochs}-1.
# '3-step-decay-with-warmup' -> warmup learning rate 1/1000 in first epoch, then train same as '3-step-decay'.
"""

trainer_lr_schedules = [
Expand Down
8 changes: 4 additions & 4 deletions tests/config/openimagesv4_object_detection.yml
Original file line number Diff line number Diff line change
Expand Up @@ -11,11 +11,11 @@ dataset:
trainer:
batch_size: 1
epochs: 1
# supported 'learning_rate_schedule' is 'constant', '2-step-decay', '3-step-decay', '3-step-decay-with-warmup'.
# supported 'learning_rate_schedule' is 'constant', '2-step-decay', '3-step-decay', '3-step-decay-with-warmup' ({epochs} is the number of training epochs you entered before).
# 'constant' -> constant learning rate.
# '2-step-decay' -> learning rate reduce to 1/10 on epochs/2 and epochs-1.
# '3-step-decay' -> learning rate reduce to 1/10 on epochs/3 and epochs*2/3 and epochs-1
# '3-step-decay-with-warmup' -> warmup learning rate 1/1000 in first epoch, then train same as '3-step-decay'
# '2-step-decay' -> learning rate decrease by 1/10 on {epochs}/2 and {epochs}-1.
# '3-step-decay' -> learning rate decrease by 1/10 on {epochs}/3 and {epochs}*2/3 and {epochs}-1.
# '3-step-decay-with-warmup' -> warmup learning rate 1/1000 in first epoch, then train same as '3-step-decay'.
learning_rate_schedule: constant
initial_learning_rate: 0.001

Expand Down
8 changes: 4 additions & 4 deletions tests/config/openimagesv4_object_detection_has_validation.yml
Original file line number Diff line number Diff line change
Expand Up @@ -11,11 +11,11 @@ dataset:
trainer:
batch_size: 1
epochs: 1
# supported 'learning_rate_schedule' is 'constant', '2-step-decay', '3-step-decay', '3-step-decay-with-warmup'.
# supported 'learning_rate_schedule' is 'constant', '2-step-decay', '3-step-decay', '3-step-decay-with-warmup' ({epochs} is the number of training epochs you entered before).
# 'constant' -> constant learning rate.
# '2-step-decay' -> learning rate reduce to 1/10 on epochs/2 and epochs-1.
# '3-step-decay' -> learning rate reduce to 1/10 on epochs/3 and epochs*2/3 and epochs-1
# '3-step-decay-with-warmup' -> warmup learning rate 1/1000 in first epoch, then train same as '3-step-decay'
# '2-step-decay' -> learning rate decrease by 1/10 on {epochs}/2 and {epochs}-1.
# '3-step-decay' -> learning rate decrease by 1/10 on {epochs}/3 and {epochs}*2/3 and {epochs}-1.
# '3-step-decay-with-warmup' -> warmup learning rate 1/1000 in first epoch, then train same as '3-step-decay'.
learning_rate_schedule: constant
initial_learning_rate: 0.001

Expand Down

0 comments on commit ea8f63a

Please sign in to comment.