Skip to content

Commit

Permalink
Add missing load_weights_only to example yamls (#776)
Browse files Browse the repository at this point in the history
  • Loading branch information
dakinggg committed Dec 5, 2023
1 parent 61cd110 commit 6ff3f27
Show file tree
Hide file tree
Showing 6 changed files with 6 additions and 7 deletions.
2 changes: 2 additions & 0 deletions mcli/mcli-llama2-finetune.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -140,6 +140,8 @@ parameters:
memory_monitor: {}
runtime_estimator: {}

load_weights_only: true # Only load the weights, not the optimizer state, LR schedule, etc

# loggers:
# wandb: {}

Expand Down
1 change: 1 addition & 0 deletions scripts/train/yamls/finetune/1b_local_data_sft.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -136,3 +136,4 @@ callbacks:
# Load from remote object store
# REPLACE THE BELOW with your own checkpoint!
load_path: oci://my-bucket/my-folder/mpt-1b/checkpoints/some_checkpoint.pt
load_weights_only: true # Only load the weights, not the optimizer state, LR schedule, etc
1 change: 1 addition & 0 deletions scripts/train/yamls/finetune/7b_dolly_sft.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -124,3 +124,4 @@ callbacks:
# Load from remote object store
# REPLACE THE BELOW with your own checkpoint!
load_path: oci://my-bucket/my-folder/mpt-7b/checkpoints/some_checkpoint.pt
load_weights_only: true # Only load the weights, not the optimizer state, LR schedule, etc
7 changes: 0 additions & 7 deletions scripts/train/yamls/finetune/mpt-30b-instruct.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -120,13 +120,6 @@ callbacks:
# save_interval: 3ep
# save_num_checkpoints_to_keep: 1

# need to use converted checkpoint with llm-foundry code
# load_path:
autoresume: false
load_weights_only: false
python_log_level: debug


icl_max_seq_len: 2048

# YOU MUST ADD YOUR OWN DATASET URIs
Expand Down
1 change: 1 addition & 0 deletions scripts/train/yamls/finetune/mpt-7b_domain_adapt.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -115,3 +115,4 @@ save_folder: ./{run_name}/checkpoints
# Load from local filesystem or remote object store
# load_path: ./gpt-7b/checkpoints/latest-rank{rank}.pt
# load_path: s3://my-bucket/my-folder/gpt-7b/checkpoints/latest-rank{rank}.pt
load_weights_only: true # Only load the weights, not the optimizer state, LR schedule, etc
1 change: 1 addition & 0 deletions scripts/train/yamls/finetune/t5-small_dolly_sft.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -101,3 +101,4 @@ callbacks:
# Load from remote object store
# REPLACE THE BELOW with your own checkpoint!
# load_path: oci://my-bucket/my-folder/checkpoints/some_checkpoint.pt
load_weights_only: true # Only load the weights, not the optimizer state, LR schedule, etc

0 comments on commit 6ff3f27

Please sign in to comment.