Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fix some type checking errors after removing type hint ignores #567

Merged
merged 5 commits into from
Sep 3, 2024
Merged
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion src/taskgraph/actions/cancel.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ def cancel_action(parameters, graph_config, input, task_group_id, task_id):
try:
cancel_task(task_id, use_proxy=True)
except requests.HTTPError as e:
if e.response.status_code == 409: # type: ignore
if e.response.status_code == 409:
# A 409 response indicates that this task is past its deadline. It
# cannot be cancelled at this time, but it's also not running
# anymore, so we can ignore this error.
Expand Down
2 changes: 1 addition & 1 deletion src/taskgraph/actions/cancel_all.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ def do_cancel_task(task_id):
try:
cancel_task(task_id, use_proxy=True)
except requests.HTTPError as e:
if e.response.status_code == 409: # type: ignore
if e.response.status_code == 409:
# A 409 response indicates that this task is past its deadline. It
# cannot be cancelled at this time, but it's also not running
# anymore, so we can ignore this error.
Expand Down
2 changes: 1 addition & 1 deletion src/taskgraph/actions/rebuild_cached_tasks.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ def rebuild_cached_tasks_action(
)
cached_tasks = [
label
for label, task in full_task_graph.tasks.items() # type: ignore
for label, task in full_task_graph.tasks.items()
if task.attributes.get("cached_task", False)
]
if cached_tasks:
Expand Down
6 changes: 3 additions & 3 deletions src/taskgraph/actions/util.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,7 @@ def fetch_action(task_id):
run_label_to_id = get_artifact(task_id, "public/label-to-taskid.json")
label_to_taskid.update(run_label_to_id)
except HTTPError as e:
if e.response.status_code != 404: # type: ignore
if e.response.status_code != 404:
raise
logger.debug(f"No label-to-taskid.json found for {task_id}: {e}")

Expand All @@ -79,7 +79,7 @@ def fetch_cron(task_id):
run_label_to_id = get_artifact(task_id, "public/label-to-taskid.json")
label_to_taskid.update(run_label_to_id)
except HTTPError as e:
if e.response.status_code != 404: # type: ignore
if e.response.status_code != 404:
raise
logger.debug(f"No label-to-taskid.json found for {task_id}: {e}")

Expand Down Expand Up @@ -161,7 +161,7 @@ def create_tasks(
target_graph = full_task_graph.graph.transitive_closure(to_run)
target_task_graph = TaskGraph(
{l: modifier(full_task_graph[l]) for l in target_graph.nodes},
target_graph, # type: ignore
target_graph,
)
target_task_graph.for_each_task(update_parent)
if decision_task_id and decision_task_id != os.environ.get("TASK_ID"):
Expand Down
12 changes: 7 additions & 5 deletions src/taskgraph/generator.py
Original file line number Diff line number Diff line change
Expand Up @@ -295,7 +295,7 @@ def _run(self):
for kind in kinds.values():
for dep in kind.config.get("kind-dependencies", []):
edges.add((kind.name, dep, "kind-dependency"))
kind_graph = Graph(set(kinds), edges) # type: ignore
kind_graph = Graph(frozenset(kinds), frozenset(edges))

if target_kinds:
kind_graph = kind_graph.transitive_closure(
Expand All @@ -321,7 +321,7 @@ def _run(self):
raise Exception("duplicate tasks with label " + task.label)
all_tasks[task.label] = task
logger.info(f"Generated {len(new_tasks)} tasks for kind {kind_name}")
full_task_set = TaskGraph(all_tasks, Graph(set(all_tasks), set())) # type: ignore
full_task_set = TaskGraph(all_tasks, Graph(frozenset(all_tasks), frozenset()))
yield self.verify("full_task_set", full_task_set, graph_config, parameters)

logger.info("Generating full task graph")
Expand All @@ -334,7 +334,9 @@ def _run(self):
)
edges.add((t.label, dep, depname))

full_task_graph = TaskGraph(all_tasks, Graph(full_task_set.graph.nodes, edges)) # type: ignore
full_task_graph = TaskGraph(
all_tasks, Graph(frozenset(full_task_set.graph.nodes), frozenset(edges))
)
logger.info(
"Full task graph contains %d tasks and %d dependencies"
% (len(full_task_set.graph.nodes), len(edges))
Expand All @@ -344,14 +346,14 @@ def _run(self):
logger.info("Generating target task set")
target_task_set = TaskGraph(
dict(all_tasks),
Graph(set(all_tasks.keys()), set()), # type: ignore
Graph(frozenset(all_tasks.keys()), frozenset()),
)
for fltr in filters:
old_len = len(target_task_set.graph.nodes)
target_tasks = set(fltr(target_task_set, parameters, graph_config))
target_task_set = TaskGraph(
{l: all_tasks[l] for l in target_tasks},
Graph(target_tasks, set()), # type: ignore
Graph(frozenset(target_tasks), frozenset()),
)
logger.info(
"Filter %s pruned %d tasks (%d remain)"
Expand Down
13 changes: 9 additions & 4 deletions src/taskgraph/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -88,10 +88,12 @@ def get_filtered_taskgraph(taskgraph, tasksregex, exclude_keys):
if regexprogram.match(dep):
filterededges.add((key, dep, depname))

taskgraph = TaskGraph(filteredtasks, Graph(set(filteredtasks), filterededges)) # type: ignore
taskgraph = TaskGraph(
filteredtasks, Graph(frozenset(filteredtasks), frozenset(filterededges))
)

if exclude_keys:
for label, task in taskgraph.tasks.items(): # type: ignore
for label, task in taskgraph.tasks.items():
bhearsum marked this conversation as resolved.
Show resolved Hide resolved
task = task.to_json()
for key in exclude_keys:
obj = task
Expand Down Expand Up @@ -862,7 +864,10 @@ def init_taskgraph(options):
context = {"project_name": root.name, "taskgraph_version": taskgraph.__version__}

try:
repo_url = repo.get_url(remote=repo.remote_name) # type: ignore
if isinstance(repo.remote_name, str):
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This check looks like it ought to be unnecessary. remote_name appears to be a string in all cases that don't result in an Exception. I think if you update the properties and the helper function they end up calling, this check can go away.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Fixed (I think). Please take a look at the new changes

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Looks good to me - thank you!

repo_url = repo.get_url(remote=repo.remote_name)
else:
repo_url = ""
except RuntimeError:
repo_url = ""

Expand Down Expand Up @@ -895,7 +900,7 @@ def init_taskgraph(options):
directory="template",
extra_context=context,
no_input=options["no_input"],
output_dir=root.parent, # type: ignore
output_dir=str(root.parent),
overwrite_if_exists=True,
)

Expand Down
4 changes: 2 additions & 2 deletions src/taskgraph/parameters.py
Original file line number Diff line number Diff line change
Expand Up @@ -95,8 +95,8 @@ def _get_defaults(repo_root=None):
project = parsed_url.repo_name
except (
CalledProcessError,
mozilla_repo_urls.errors.InvalidRepoUrlError, # type: ignore
mozilla_repo_urls.errors.UnsupportedPlatformError, # type: ignore
mozilla_repo_urls.InvalidRepoUrlError,
mozilla_repo_urls.UnsupportedPlatformError,
):
repo_url = ""
project = ""
Expand Down
17 changes: 10 additions & 7 deletions src/taskgraph/run-task/robustcheckout.py
Original file line number Diff line number Diff line change
Expand Up @@ -337,6 +337,7 @@ def callself():
@contextlib.contextmanager
def timeit(op, behavior):
behaviors.add(behavior)
start = 0
errored = False
try:
start = time.time()
Expand All @@ -345,7 +346,7 @@ def timeit(op, behavior):
errored = True
raise
finally:
elapsed = time.time() - start # type: ignore
elapsed = time.time() - start

if errored:
op += "_errored"
Expand Down Expand Up @@ -672,6 +673,7 @@ def handlepullerror(e):
# We only pull if we are using symbolic names or the requested revision
# doesn't exist.
havewantedrev = False
checkoutrevision = None
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Hooray, this fixes some cases where this is potentially undefined!


if revision:
try:
Expand Down Expand Up @@ -748,6 +750,7 @@ def handlepullerror(e):
# Mercurial 4.3 doesn't purge files outside the sparse checkout.
# See https://bz.mercurial-scm.org/show_bug.cgi?id=5626. Force
# purging by monkeypatching the sparse matcher.
old_sparse_fn = None
try:
old_sparse_fn = getattr(repo.dirstate, "_sparsematchfn", None)
if old_sparse_fn is not None:
Expand All @@ -765,8 +768,8 @@ def handlepullerror(e):
):
raise error.Abort(b"error purging")
finally:
if old_sparse_fn is not None: # type: ignore
repo.dirstate._sparsematchfn = old_sparse_fn # type: ignore
if old_sparse_fn is not None:
repo.dirstate._sparsematchfn = old_sparse_fn

# Update the working directory.

Expand All @@ -781,11 +784,11 @@ def handlepullerror(e):
# By default, Mercurial will ignore unknown sparse profiles. This could
# lead to a full checkout. Be more strict.
try:
repo.filectx(sparse_profile, changeid=checkoutrevision).data() # type: ignore
repo.filectx(sparse_profile, changeid=checkoutrevision).data()
except error.ManifestLookupError:
raise error.Abort(
b"sparse profile %s does not exist at revision "
b"%s" % (sparse_profile, checkoutrevision) # type: ignore
b"%s" % (sparse_profile, checkoutrevision)
)

old_config = sparsemod.parseconfig(
Expand Down Expand Up @@ -843,10 +846,10 @@ def parentchange(repo):
behavior = "update-sparse" if sparse_profile else "update"

with timeit(op, behavior):
if commands.update(ui, repo, rev=checkoutrevision, clean=True): # type: ignore
if commands.update(ui, repo, rev=checkoutrevision, clean=True):
raise error.Abort(b"error updating")

ui.write(b"updated to %s\n" % checkoutrevision) # type: ignore
ui.write(b"updated to %s\n" % checkoutrevision)

return None

Expand Down
61 changes: 42 additions & 19 deletions src/taskgraph/util/vcs.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
import os
import re
import subprocess
from abc import ABC, abstractmethod, abstractproperty
from abc import ABC, abstractmethod
from shutil import which

from taskgraph.util.path import ancestors
Expand Down Expand Up @@ -51,45 +51,57 @@ def run(self, *args: str, **kwargs):
return ""
raise

@abstractproperty
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

TIL that abstractproperty is deprecated. Thanks for updating this!

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

No problem. It seems datetime.utcnow is as well, but I have left it unchanged.

def tool(self) -> str: # type: ignore
@property
@abstractmethod
def tool(self) -> str:
"""Version control system being used, either 'hg' or 'git'."""

@abstractproperty
def head_rev(self) -> str: # type: ignore
@property
@abstractmethod
def head_rev(self) -> str:
"""Hash of HEAD revision."""

@abstractproperty
@property
@abstractmethod
def base_rev(self):
"""Hash of revision the current topic branch is based on."""

@abstractproperty
@property
@abstractmethod
def branch(self):
"""Current branch or bookmark the checkout has active."""

@abstractproperty
@property
@abstractmethod
def all_remote_names(self):
"""Name of all configured remote repositories."""

@abstractproperty
@property
@abstractmethod
def default_remote_name(self):
"""Name the VCS defines for the remote repository when cloning
it for the first time. This name may not exist anymore if users
changed the default configuration, for instance."""

@abstractproperty
@property
@abstractmethod
def remote_name(self):
"""Name of the remote repository."""

def _get_most_suitable_remote(self, remote_instructions):
remotes = self.all_remote_names
if len(remotes) == 1: # type: ignore
return remotes[0] # type: ignore

if self.default_remote_name in remotes: # type: ignore
# in case all_remote_names raised a RuntimeError
if remotes is None:
raise RuntimeError("No valid remotes found")

if len(remotes) == 1:
return remotes[0]

if self.default_remote_name in remotes:
return self.default_remote_name

first_remote = remotes[0] # type: ignore
first_remote = remotes[0]
logger.warning(
f"Unable to determine which remote repository to use between: {remotes}. "
f'Arbitrarily using the first one "{first_remote}". Please set an '
Expand All @@ -99,7 +111,8 @@ def _get_most_suitable_remote(self, remote_instructions):

return first_remote

@abstractproperty
@property
@abstractmethod
def default_branch(self):
"""Name of the default branch."""

Expand Down Expand Up @@ -181,8 +194,13 @@ def does_revision_exist_locally(self, revision):


class HgRepository(Repository):
tool = "hg" # type: ignore
default_remote_name = "default" # type: ignore
@property
def tool(self) -> str:
return "hg"

@property
def default_remote_name(self) -> str:
return "default"

def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
Expand Down Expand Up @@ -321,8 +339,13 @@ def does_revision_exist_locally(self, revision):


class GitRepository(Repository):
tool = "git" # type: ignore
default_remote_name = "origin" # type: ignore
@property
def tool(self) -> str:
return "git"

@property
def default_remote_name(self) -> str:
return "origin"

_LS_REMOTE_PATTERN = re.compile(r"ref:\s+refs/heads/(?P<branch_name>\S+)\s+HEAD")

Expand Down
Loading