Update wandb requirement from <0.18,>=0.13.2 to >=0.13.2,<0.19 (#3615)
Signed-off-by: dependabot[bot] <[email protected]>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Saaketh Narayan <[email protected]>
Co-authored-by: Mihir Patel <[email protected]>
3 people committed Sep 19, 2024
1 parent 893f398 commit 129dcbe
Showing 2 changed files with 3 additions and 3 deletions.
setup.py (2 changes: 1 addition & 1 deletion)
@@ -160,7 +160,7 @@ def package_files(prefix: str, directory: str, extension: str):
 ]
 
 extra_deps['wandb'] = [
-    'wandb>=0.13.2,<0.18',
+    'wandb>=0.13.2,<0.19',
 ]
 
 extra_deps['comet_ml'] = [
tests/loggers/test_wandb_logger.py (4 changes: 2 additions & 2 deletions)
@@ -269,10 +269,10 @@ def test_wandb_log_metrics(test_wandb_logger):
     eval_metrics_cross_entropy_count = all_run_text.count('metrics/eval/CrossEntropy')
     train_loss_count = all_run_text.count('loss/train/total')
 
-    expected_number_train_loss_count = (dataset_size / batch_size) + 1  # wandb includes it in the file one extra time
+    expected_number_train_loss_count = (dataset_size / batch_size) * 2  # wandb includes it twice per step
     expected_number_train_metrics_count = (
         dataset_size / batch_size
-    ) + 2  # wandb includes it in the file two extra times
+    ) * 2 + 2  # wandb includes it twice per step plus two extra times
     expected_number_eval_metrics_count = 2  # wandb includes it in the file twice
     assert train_metrics_accuracy_count == expected_number_train_metrics_count
     assert train_loss_count == expected_number_train_loss_count
