Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Bioconda bot #581

Open
wants to merge 20 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
20 commits
Select commit Hold shift + click to select a range
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 12 additions & 2 deletions bioconda_utils/bot/commands.py
Original file line number Diff line number Diff line change
Expand Up @@ -117,8 +117,18 @@ async def command_bump(ghapi, issue_number, _user, *_args):


@command_routes.register("lint")
async def command_lint(ghapi, issue_number, _user, *_args):
"""Lint the current recipe"""
async def command_lint(ghapi, issue_number, _user, *args):
"""Lint the current recipe

If the additional argument ``fix`` is provided, an attempt to fix errors
will be scheduled instead of a new check run. This is identical to clicking
the "Fix Issues" button on the checks page.
"""
if args:
if args[0] == 'fix':
pr = await ghapi.get_prs(number=issue_number)
tasks.lint_fix.s(None, pr['head']['sha'], ghapi).apply_async()
return f"Scheduled fixing lints in #{issue_number}"
(
tasks.get_latest_pr_commit.s(issue_number, ghapi) |
tasks.create_check_run.s(ghapi)
Expand Down
2 changes: 1 addition & 1 deletion bioconda_utils/bot/events.py
Original file line number Diff line number Diff line change
Expand Up @@ -109,7 +109,7 @@ async def handle_check_run(event, ghapi):
else:
logger.error("Unknown requested action in check run: %s", requested_action)
else:
logger.error("Unknown action %s", action)
logger.info("Action '%s' unhandled", action)

@event_routes.register("pull_request")
async def handle_pull_request(event, ghapi):
Expand Down
32 changes: 20 additions & 12 deletions bioconda_utils/bot/tasks.py
Original file line number Diff line number Diff line change
Expand Up @@ -92,7 +92,7 @@ async def __aenter__(self):
elif self.ref:
fork_user = None
fork_repo = None
branch_name = "unknown"
branch_name = None
ref = self.ref
elif self.branch_name:
fork_user = None
Expand Down Expand Up @@ -442,23 +442,23 @@ async def merge_pr(self, pr_number: int, comment_id: int, ghapi) -> Tuple[bool,
head_ref = pr['head']['ref']
head_sha = pr['head']['sha']
head_user = pr['head']['repo']['owner']['login']
# get path for Circle

# get artifacts from circleci
if head_user == ghapi.user:
branch = head_ref
circle_path = head_ref
else:
branch = "pull/{}".format(pr_number)
circle_path = "pull/{}".format(pr_number)
capi = AsyncCircleAPI(ghapi.session, token=CIRCLE_TOKEN)
artifacts = await capi.get_artifacts(circle_path, head_sha)

lines = []

capi = AsyncCircleAPI(ghapi.session, token=CIRCLE_TOKEN)
artifacts = await capi.get_artifacts(branch, head_sha)
files = []
images = []
packages = []
for path, url, buildno in artifacts:
for path, url, _buildno in artifacts:
match = PACKAGE_RE.match(url)
if match:
repo_url, arch, fname = match.groups()
_repo_url, arch, fname = match.groups()
fpath = os.path.join(arch, fname)
files.append((url, fpath))
packages.append(fpath)
Expand All @@ -483,7 +483,7 @@ async def merge_pr(self, pr_number: int, comment_id: int, ghapi) -> Tuple[bool,
try:
fds = []
urls = []
for url,path in files:
for url, path in files:
fpath = os.path.join(tmpdir, path)
fdir = os.path.dirname(fpath)
if not os.path.exists(fdir):
Expand Down Expand Up @@ -569,8 +569,16 @@ async def merge_pr(self, pr_number: int, comment_id: int, ghapi) -> Tuple[bool,
if not res:
return res, msg

if not branch.startswith('pull/'):
await ghapi.delete_branch(branch)
if head_user == ghapi.user:
# PR head branch is on same account as our install - it's a PR not
# from a fork. Go ahead and try to delete the branch.
if head_ref in ('master', 'bulk'):
# Safeguard - never delete master or bulk!
logger.error("Cowardly refusing to delete master or bulk")
else:
logger.info("Trying to delete branch %s", head_ref)
await ghapi.delete_branch(head_ref)

return res, msg


Expand Down
39 changes: 36 additions & 3 deletions bioconda_utils/githandler.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
import tempfile
import subprocess
from typing import List, Union
import shutil

import git
import yaml
Expand Down Expand Up @@ -218,6 +219,14 @@ def get_remote_branch(self, branch_name: str, try_fetch=False):
return None
for remote_ref in remote_refs:
if remote_ref.remote_ref_path == branch_name:
remote_heads = [head for head in self.fork_remote.refs
if head.commit.hexsha == branch_name]
if remote_heads:
if len(remote_heads) > 1:
logger.warning("Found multiple heads for %s - using %s",
branch_name, remote_heads[0])
logger.warning(" other options: %s", remote_heads[1:])
return remote_heads[0]
return remote_ref.ref

def get_latest_master(self):
Expand Down Expand Up @@ -249,6 +258,9 @@ def create_local_branch(self, branch_name: str, remote_branch: str = None):
remote_branch = self.get_remote_branch(remote_branch, try_fetch=False)
if remote_branch is None:
return None
if branch_name is None and hasattr(remote_branch, 'remote_head'):
branch_name = remote_branch.remote_head
logger.info("Resolved %s to %s", remote_branch, branch_name)
self.repo.create_head(branch_name, remote_branch)
return self.get_local_branch(branch_name)

Expand Down Expand Up @@ -532,8 +544,15 @@ class TempGitHandler(GitHandlerBase):
might be working in multiple threads and want to avoid blocking waits for
a single repo. It also improves robustness: If something goes wrong with this
repo, it will not break the entire process.
"""

This class uses a bare clone as a cache to speed up subsequent
instantiations of temporary git clones. This bare "caching" clone
will be created in a temporary directory. To save on download
times when debugging, you can use the environment variable
``BIOCONDA_REPO_CACHEDIR`` to specify a directory lasting across
instantiations of the Python interpreter.

"""
_local_mirror_tmpdir: Union[str, tempfile.TemporaryDirectory] = None

@classmethod
Expand Down Expand Up @@ -570,6 +589,7 @@ def _get_local_mirror(cls, url: str) -> git.Repo:

# Make location of repo in tmpdir from url
_, _, fname = url.rpartition('@')
fname = fname.lstrip('https://')
tmpname = getattr(cls._local_mirror_tmpdir, 'name', cls._local_mirror_tmpdir)
mirror_name = os.path.join(tmpname, fname)

Expand Down Expand Up @@ -612,6 +632,8 @@ def __init__(self,
fork_user=None,
fork_repo=None,
dry_run=False) -> None:
self._clean = True

userpass = ""
if password is not None and username is None:
username = "x-access-token"
Expand Down Expand Up @@ -645,16 +667,27 @@ def censor(string):
logger.info("Finished setting up repo in %s", self.tempdir)
super().__init__(repo, dry_run, home_url, fork_url)

def keep_tempdir(self):
    """Disable temp dir removal on `close`

    After calling this, `close` will keep a copy of the temporary
    working directory on disk instead of deleting it (useful when
    debugging a failed run).
    """
    self._clean = False

def close(self) -> None:
"""Remove temporary clone and cleanup resources"""
super().close()
logger.info("Removing repo from %s", self.tempdir.name)
self.tempdir.cleanup()
if self._clean:
logger.info("Removing repo from %s", self.tempdir.name)
self.tempdir.cleanup()
else:
logger.warning("Keeping repo in %s%s", self.tempdir.name, "_saved")
shutil.copytree(self.tempdir.name, self.tempdir.name + "_saved")


class BiocondaRepo(GitHandler, BiocondaRepoMixin):
    """Bioconda recipes repository helpers on top of a regular `GitHandler` checkout."""
    pass

class TempBiocondaRepo(TempGitHandler, BiocondaRepoMixin):
    """Bioconda recipes repository helpers on top of a temporary `TempGitHandler` clone."""
    pass


# Allow setting a pre-checkout from env
TempGitHandler.set_mirror_dir(os.environ.get('BIOCONDA_REPO_CACHEDIR'))
10 changes: 7 additions & 3 deletions bioconda_utils/githubhandler.py
Original file line number Diff line number Diff line change
Expand Up @@ -359,7 +359,6 @@ async def get_prs_from_sha(self, head_sha: str, only_open=False) -> List[int]:
closed=False if only_open else None)
for pull in result.get('items', []):
pr_number = int(pull['number'])
logger.error("checking %s", pr_number)
full_pr = await self.get_prs(number=pr_number)
if full_pr['head']['sha'].startswith(head_sha):
pr_numbers.append(pr_number)
Expand Down Expand Up @@ -894,15 +893,20 @@ async def get_contents(self, path: str, ref: str = None) -> str:
content = content_bytes.decode('utf-8')
return content

async def delete_branch(self, ref: str) -> None:
async def delete_branch(self, ref: str) -> bool:
"""Delete a branch (ref)

Arguments:
ref: Name of reference/branch
"""
var_data = copy(self.var_default)
var_data['ref'] = ref
await self.api.delete(self.GIT_REFERENCE, var_data)
try:
await self.api.delete(self.GIT_REFERENCE, var_data)
return True
except gidgethub.InvalidField as exc:
logger.info("Failed to delete branch %s: %s", ref, exc)
return False

def _deparse_card_pr_number(self, card: Dict[str, Any]) -> Dict[str, Any]:
"""Extracts the card's issue's number from the content_url
Expand Down
85 changes: 46 additions & 39 deletions bioconda_utils/lint/check_noarch.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,10 @@

from . import LintCheck, ERROR, WARNING, INFO

from logging import getLogger

logger = getLogger(__name__)


# Noarch or not checks:
#
Expand All @@ -22,6 +26,7 @@
# b) Not use ``- python [<>]3``,
# but use ``skip: True # [py[23]k]``


class should_be_noarch_python(LintCheck):
"""The recipe should be build as ``noarch``

Expand All @@ -36,17 +41,14 @@ class should_be_noarch_python(LintCheck):

"""
def check_deps(self, deps):
if 'python' not in deps:
return # not a python package
if all('build' not in loc for loc in deps['python']):
return # only uses python in run/host
if any(dep.startswith('compiler_') for dep in deps):
return # not compiled
if self.recipe.get('build/noarch', None) == 'python':
return # already marked noarch: python
self.message(section='build', data=True)
if not self.recipe.is_noarch(python=True) and \
self.recipe.has_dep('python', section='host') and \
not self.recipe.has_compiler() and \
not self.recipe.has_selector():
self.message(section='build', data=True)

def fix(self, _message, _data):
logger.warning("Lint fix: setting build/noarch=python")
self.recipe.set('build/noarch', 'python')
return True

Expand All @@ -66,13 +68,16 @@ class should_be_noarch_generic(LintCheck):
"""
requires = ['should_be_noarch_python']
def check_deps(self, deps):
    """Flag recipes that could be built as ``noarch: generic`` but are not.

    A recipe qualifies when it is not already marked noarch, has no
    ``python`` dependency in the host section, uses no compiler and has
    no ``# [cond]`` selector lines.
    """
    if not self.recipe.is_noarch(python=False) and \
       not self.recipe.has_dep('python', section='host') and \
       not self.recipe.has_compiler() and \
       not self.recipe.has_selector():
        # Removed leftover debug statements (logger.error("here") /
        # logger.error(self.recipe.has_selector())) that must not ship.
        self.message(section='build', data=True)

def fix(self, _message, _data):
logger.warning("Lint fix: setting build/noarch=generic")
self.recipe.set('build/noarch', 'generic')
return True

Expand All @@ -86,11 +91,8 @@ class should_not_be_noarch_compiler(LintCheck):

"""
def check_deps(self, deps):
if not any(dep.startswith('compiler_') for dep in deps):
return # not compiled
if self.recipe.get('build/noarch', False) is False:
return # no noarch, or noarch=False
self.message(section='build/noarch')
if self.recipe.is_noarch() and self.recipe.has_compiler():
self.message(section='build/noarch')


class should_not_be_noarch_skip(LintCheck):
Expand All @@ -100,11 +102,23 @@ class should_not_be_noarch_skip(LintCheck):

"""
def check_recipe(self, recipe):
if self.recipe.get('build/noarch', False) is False:
return # no noarch, or noarch=False
if self.recipe.get('build/skip', False) is False:
return # no skip or skip=False
self.message(section='build/noarch')
if self.recipe.is_noarch() and \
self.recipe.get('build/skip', False) is not False:
self.message(section='build/noarch')


class should_not_be_noarch_selector(LintCheck):
    """The recipe uses ``# [cond]`` but is marked noarch

    Recipes using conditional lines cannot be noarch.

    """
    # NOTE(review): presumably these checks must run/pass first — confirm
    # the semantics of LintCheck.requires.
    requires = [
        'should_use_compilers',
        'should_not_be_noarch_skip',
        'should_not_be_noarch_source',
    ]

    def check_recipe(self, recipe):
        # Only a recipe that is both noarch and contains selector lines
        # is an error; bail out early otherwise.
        if not self.recipe.is_noarch():
            return
        if self.recipe.has_selector():
            self.message(section='build/noarch')


class should_not_use_skip_python(LintCheck):
Expand All @@ -127,16 +141,12 @@ class should_not_use_skip_python(LintCheck):
bad_skip_terms = ('py2k', 'py3k', 'python')

def check_deps(self, deps):
    """Flag python recipes skipping python versions instead of using noarch."""
    if not self.recipe.has_dep('python'):
        return  # not a python package
    if self.recipe.has_compiler():
        return  # compiled - skipping python versions may be legitimate
    if self.recipe.get('build/skip', None) is None:
        return  # no build/skip section
    skip_line = self.recipe.get_raw('build/skip')
    # BUGFIX: iterate over the offending terms and substring-test each
    # against the skip line. The previous form iterated the *characters*
    # of the skip line and tested them against the term tuple, so the
    # check could never fire.
    if any(term in skip_line for term in self.bad_skip_terms):
        self.message(section='build/skip')


class should_not_be_noarch_source(LintCheck):
Expand All @@ -146,9 +156,6 @@ class should_not_be_noarch_source(LintCheck):
platform. Remove the noarch section or use just one source for all
platforms.
"""

_pat = re.compile(r'# +\[.*\]')
def check_source(self, source, section):
# just search the entire source entry for a comment
if self._pat.search(self.recipe.get_raw(f"{section}")):
self.message(section)
if self.recipe.is_noarch() and self.recipe.has_selector(section):
self.message(section)
Loading