Commit

fix: flake8 rule E231
Signed-off-by: win5923 <[email protected]>
win5923 committed Oct 19, 2024
1 parent f860b74 commit cc2bafb
Showing 3 changed files with 5 additions and 5 deletions.
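
For reference (not part of this commit), flake8's E231 check flags missing whitespace after a comma; every hunk below applies the same fix by adding a single space after the offending comma. A minimal, self-contained sketch of the before/after pattern, using an illustrative example modeled on the scheduler test change:

import random

# Before: flake8 reports "E231 missing whitespace after ','"
node_id = f"node{random.randint(0,5)}"

# After: compliant, a single space follows the comma
node_id = f"node{random.randint(0, 5)}"
print(node_id)

Assuming flake8 is installed locally, the rule can be re-checked in isolation with, for example, flake8 --select=E231 python/ray.
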
6 changes: 3 additions & 3 deletions python/ray/data/tests/test_stats.py
@@ -354,7 +354,7 @@ def test_large_args_scheduling_strategy(
f"Dataset throughput:\n"
f" * Ray Data throughput: N rows/s\n"
f" * Estimated single node throughput: N rows/s\n"
- f"{gen_runtime_metrics_str(['ReadRange','MapBatches(dummy_map_batches)'], verbose_stats_logs)}" # noqa: E501
+ f"{gen_runtime_metrics_str(['ReadRange', 'MapBatches(dummy_map_batches)'], verbose_stats_logs)}" # noqa: E501
)
assert canonicalize(stats) == expected_stats

@@ -421,7 +421,7 @@ def test_dataset_stats_basic(
f"Dataset throughput:\n"
f" * Ray Data throughput: N rows/s\n"
f" * Estimated single node throughput: N rows/s\n"
- f"{gen_runtime_metrics_str(['ReadRange->MapBatches(dummy_map_batches)','Map(dummy_map_batches)'], verbose_stats_logs)}" # noqa: E501
+ f"{gen_runtime_metrics_str(['ReadRange->MapBatches(dummy_map_batches)', 'Map(dummy_map_batches)'], verbose_stats_logs)}" # noqa: E501
)

for batch in ds.iter_batches():
@@ -473,7 +473,7 @@ def test_dataset_stats_basic(
f"Dataset throughput:\n"
f" * Ray Data throughput: N rows/s\n"
f" * Estimated single node throughput: N rows/s\n"
- f"{gen_runtime_metrics_str(['ReadRange->MapBatches(dummy_map_batches)','Map(dummy_map_batches)'], verbose_stats_logs)}" # noqa: E501
+ f"{gen_runtime_metrics_str(['ReadRange->MapBatches(dummy_map_batches)', 'Map(dummy_map_batches)'], verbose_stats_logs)}" # noqa: E501
)


2 changes: 1 addition & 1 deletion python/ray/serve/tests/unit/test_deployment_scheduler.py
@@ -304,7 +304,7 @@ def test_get_node_to_running_replicas():
# Test random case
node_to_running_replicas = defaultdict(set)
for i in range(40):
- node_id = f"node{random.randint(0,5)}"
+ node_id = f"node{random.randint(0, 5)}"
r_id = ReplicaID(f"r{i}", d_id)
node_to_running_replicas[node_id].add(r_id)
scheduler.on_replica_running(r_id, node_id)
2 changes: 1 addition & 1 deletion python/ray/train/torch/xla/config.py
@@ -98,7 +98,7 @@ def _neuron_compile_extracted_graphs():
logger.info("Compiling extracted graphs on local rank0 worker")

parallel_compile_workdir = (
- f"/tmp/{os.environ.get('USER','no-user')}/parallel_compile_workdir/"
+ f"/tmp/{os.environ.get('USER', 'no-user')}/parallel_compile_workdir/"
)
if os.path.exists(parallel_compile_workdir):
shutil.rmtree(parallel_compile_workdir)
