Skip to content

Commit

Permalink
Updated black version and ran black.
Browse files — browse the repository at this point in the history
  • Loading branch information
tmills committed Sep 27, 2024
1 parent 03d46b3 commit 99c0a2f
Show file tree
Hide file tree
Showing 2 changed files with 10 additions and 10 deletions.
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/psf/black-pre-commit-mirror
rev: 23.7.0
rev: 24.8.0
hooks:
- id: black
- repo: https://github.com/pycqa/flake8
Expand Down
18 changes: 9 additions & 9 deletions src/cnlpt/train_system.py
Original file line number Diff line number Diff line change
Expand Up @@ -545,16 +545,16 @@ def main(
# steps per epoch factors in gradient accumulation steps (as compared to batches_per_epoch above which doesn't)
steps_per_epoch = int(total_steps // training_args.num_train_epochs)
training_args.eval_steps = steps_per_epoch // training_args.evals_per_epoch
training_args.evaluation_strategy = (
training_args.eval_strategy
) = IntervalStrategy.STEPS
training_args.evaluation_strategy = training_args.eval_strategy = (
IntervalStrategy.STEPS
)
# This will save model per epoch
# training_args.save_strategy = IntervalStrategy.EPOCH
elif training_args.do_eval:
logger.info("Evaluation strategy not specified so evaluating every epoch")
training_args.evaluation_strategy = (
training_args.eval_strategy
) = IntervalStrategy.EPOCH
training_args.evaluation_strategy = training_args.eval_strategy = (
IntervalStrategy.EPOCH
)

current_prediction_packet = deque()

Expand Down Expand Up @@ -662,9 +662,9 @@ def compute_metrics_fn(p: EvalPrediction):
"w",
) as f:
config_dict = model_args.to_dict()
config_dict[
"label_dictionary"
] = dataset.get_labels()
config_dict["label_dictionary"] = (
dataset.get_labels()
)
config_dict["task_names"] = task_names
json.dump(config_dict, f)
for task_ind, task_name in enumerate(metrics):
Expand Down

0 comments on commit 99c0a2f

Please sign in to comment.