add todos, fix path bug
aspfohl committed Dec 6, 2023
1 parent 7af7383 commit 9337af0
Showing 2 changed files with 13 additions and 16 deletions.
3 changes: 2 additions & 1 deletion llmfoundry/callbacks/async_eval_callback.py
@@ -240,6 +240,7 @@ def close(self) -> None:
             return
         self.training_config
 
+        # TODO: enforce this exists before
         save_folder = self.training_config['save_folder']
         save_latest_filename = self.training_config.get('save_latest_filename',
                                                         None)
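
The TODO above flags an asymmetry: save_folder is indexed directly and raises KeyError at this point if the training config omits it, while save_latest_filename falls back to None. A minimal sketch of the up-front validation the TODO asks for; the helper name, config dict, and error message are hypothetical:

def validate_training_config(training_config: dict) -> None:
    # Fail fast if required keys are missing, instead of raising KeyError
    # later when close() indexes training_config['save_folder'].
    if 'save_folder' not in training_config:
        raise ValueError(
            'save_folder must be set to use the async eval callback')

validate_training_config({'save_folder': 's3://my-bucket/checkpoints'})  # ok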
@@ -295,7 +296,7 @@ def launch_run(self, checkpoint: str, current_interval: str) -> Run:
                 continue
 
             found_llm_foundry = True
-            if i['path']:
+            if i.get('path'):
                 installation_path = i['path']
 
         if not found_llm_foundry:
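
Context for the path fix: launch_run scans the run's integrations for the llm-foundry entry, and an integration dict may omit the path key entirely, so indexing with i['path'] raised KeyError instead of falling through. A minimal reproduction, with hypothetical integration entries:

# Two git_repo integrations; only the second specifies an explicit path.
integrations = [
    {'integration_type': 'git_repo', 'git_repo': 'mosaicml/llm-foundry'},
    {'integration_type': 'git_repo', 'git_repo': 'org/other',
     'path': '/workspace/llm-foundry'},
]

installation_path = None
for i in integrations:
    # Before the fix, i['path'] raised KeyError on the first entry.
    # i.get('path') returns None for a missing key, so the check is safe.
    if i.get('path'):
        installation_path = i['path']

print(installation_path)  # /workspace/llm-foundry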
26 changes: 11 additions & 15 deletions scripts/train/train.py
@@ -529,21 +529,17 @@ def main(cfg: DictConfig) -> Trainer:
         mosaicml_logger.log_metrics({'data_validated': time.time()})
 
     ## Evaluation
-    if use_async_eval:
-        print('Using async eval, skipping eval loader')
-        evaluators, eval_gauntlet_callback = [], None
-    else:
-        print('Building eval loader...')
-        eval_icl_seq_len: int = icl_seq_len if icl_seq_len else max_seq_len
-        evaluators, _, eval_gauntlet_callback = build_evaluators(
-            eval_loader_config,
-            icl_tasks_config,
-            eval_gauntlet_config,
-            tokenizer=tokenizer,
-            device_eval_batch_size=device_eval_batch_size,
-            icl_seq_len=eval_icl_seq_len,
-            icl_subset_num_batches=icl_subset_num_batches,
-        )
+    print('Building eval loader...')
+    eval_icl_seq_len: int = icl_seq_len if icl_seq_len else max_seq_len
+    evaluators, _, eval_gauntlet_callback = build_evaluators(
+        eval_loader_config,  # TODO: async eval should not even call eval loader
+        icl_tasks_config if not use_async_eval else None,
+        eval_gauntlet_config if not use_async_eval else None,
+        tokenizer=tokenizer,
+        device_eval_batch_size=device_eval_batch_size,
+        icl_seq_len=eval_icl_seq_len,
+        icl_subset_num_batches=icl_subset_num_batches,
+    )
 
     if eval_gauntlet_callback is not None and not use_async_eval:
         callbacks.append(eval_gauntlet_callback)
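
The train.py change collapses the two eval-setup branches into a single build_evaluators call: under async eval the ICL tasks and gauntlet configs are passed as None, so only the plain eval-loader evaluators are built (and per the TODO, even that call could eventually be skipped). A sketch of the conditional-argument pattern; the stub below stands in for llm-foundry's real build_evaluators and does not reflect its actual signature:

from typing import List, Optional, Tuple

def build_evaluators(
    eval_loader_config: Optional[dict],
    icl_tasks_config: Optional[dict],
    eval_gauntlet_config: Optional[dict],
) -> Tuple[List[str], List[str], Optional[object]]:
    # Stub: the real builder returns (evaluators, logger_keys,
    # gauntlet_callback); passing None for a config skips that piece.
    evaluators = ['eval_loader'] if eval_loader_config else []
    if icl_tasks_config:
        evaluators.append('icl_tasks')
    gauntlet_callback = object() if eval_gauntlet_config else None
    return evaluators, [], gauntlet_callback

use_async_eval = True
evaluators, _, eval_gauntlet_callback = build_evaluators(
    {'name': 'my_eval_loader'},                     # hypothetical config
    None if use_async_eval else {'icl_tasks': []},  # skipped under async eval
    None if use_async_eval else {'weighting': {}},  # skipped under async eval
)
assert eval_gauntlet_callback is None  # so it is never appended to callbacks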
