From bd65698b4aacc0ce8b6eb72d574301c08cbf90d3 Mon Sep 17 00:00:00 2001 From: Nicholas Devenish Date: Tue, 15 Oct 2024 09:55:04 +0100 Subject: [PATCH] Switch dxtbx to Github Actions (#765) --- .azure-pipelines/azure-pipelines.yml | 135 --- .azure-pipelines/bootstrap.py | 888 ------------------ .azure-pipelines/lint-validation.py | 42 - .azure-pipelines/syntax-validation.py | 32 - .azure-pipelines/unix-build.yml | 102 -- .azure-pipelines/windows-build.yml | 111 --- .../ci-dependencies.yaml | 3 +- .../parse_dependencies.py | 0 .github/workflows/build_test.yml | 79 ++ 9 files changed, 80 insertions(+), 1312 deletions(-) delete mode 100644 .azure-pipelines/azure-pipelines.yml delete mode 100644 .azure-pipelines/bootstrap.py delete mode 100644 .azure-pipelines/lint-validation.py delete mode 100644 .azure-pipelines/syntax-validation.py delete mode 100644 .azure-pipelines/unix-build.yml delete mode 100644 .azure-pipelines/windows-build.yml rename {.azure-pipelines => .github}/ci-dependencies.yaml (62%) rename {.azure-pipelines => .github}/parse_dependencies.py (100%) create mode 100644 .github/workflows/build_test.yml diff --git a/.azure-pipelines/azure-pipelines.yml b/.azure-pipelines/azure-pipelines.yml deleted file mode 100644 index b10f478af..000000000 --- a/.azure-pipelines/azure-pipelines.yml +++ /dev/null @@ -1,135 +0,0 @@ -variables: - CACHE_VERSION: 20210424 - isPullRequest: $[startsWith(variables['Build.SourceBranch'], 'refs/pull/')] - -stages: -- stage: prepare - displayName: Check - jobs: - - job: setup - displayName: static analysis - pool: - vmImage: ubuntu-20.04 - timeoutInMinutes: 10 - steps: - - checkout: none - - # Use Python >=3.9 for syntax validation - - task: UsePythonVersion@0 - displayName: Set up python - inputs: - versionSpec: 3.9 - - # Run syntax validation on a shallow clone - - bash: | - set -eux - mkdir repository - cd repository - echo Checking out $(Build.SourceBranch) from $(Build.Repository.Uri) - git init - git remote add origin $(Build.Repository.Uri) - git fetch --depth 1 --no-recurse-submodules origin $(Build.SourceBranch) - git checkout FETCH_HEAD - python .azure-pipelines/syntax-validation.py - displayName: Syntax validation - - - bash: | - set -eux - pip install ruff - cd repository - python .azure-pipelines/lint-validation.py - displayName: Ruff validation - - # Set up constants for further build steps - - bash: | - echo "##vso[task.setvariable variable=CURRENT_WEEK;isOutput=true]$(date +W%W)" - echo "##vso[task.setvariable variable=TODAY_ISO;isOutput=true]$(date +%Y%m%d)" - echo - echo "Checking for waiting newsfragments:" - cd repository/newsfragments - \ls -1 --color=never | grep -v "^\([0-9]\+\|README\)\." 
&& { - echo "##vso[task.setvariable variable=NEWSFRAGMENT_WAITING;isOutput=true]true" - } || { - echo "none found" - echo "##vso[task.setvariable variable=NEWSFRAGMENT_WAITING;isOutput=true]false" - } - displayName: Set up build constants - name: constants - -- stage: build - ${{ if not(startsWith(variables['Build.SourceBranch'], 'refs/pull/')) }}: - displayName: Build - ${{ if startsWith(variables['Build.SourceBranch'], 'refs/pull/') }}: - displayName: PR - condition: and(eq(dependencies.prepare.outputs['setup.constants.NEWSFRAGMENT_WAITING'], 'false'), - succeeded()) - dependsOn: - - prepare - variables: - CURRENT_WEEK: $[ stageDependencies.prepare.setup.outputs['constants.CURRENT_WEEK'] ] - TODAY_ISO: $[ stageDependencies.prepare.setup.outputs['constants.TODAY_ISO'] ] - - jobs: - - ${{ if eq(variables['Build.Reason'], 'Schedule') }}: - # A single build job is sufficient to regenerate caches, no need to waste build time - - job: linux - displayName: linux python38 (scheduled) - pool: - vmImage: ubuntu-20.04 - timeoutInMinutes: 60 - variables: - PYTHON_VERSION: 3.9 - steps: - - template: unix-build.yml - - - ${{ if ne(variables['Build.Reason'], 'Schedule') }}: - - job: linux - pool: - vmImage: ubuntu-20.04 - strategy: - matrix: - python39: - PYTHON_VERSION: 3.9 - python312: - PYTHON_VERSION: 3.12 - timeoutInMinutes: 60 - steps: - - template: unix-build.yml - - - job: macos - pool: - vmImage: macOS-latest - strategy: - matrix: - python39: - PYTHON_VERSION: 3.9 - python312: - PYTHON_VERSION: 3.12 - timeoutInMinutes: 60 - steps: - - template: unix-build.yml - - - job: windows - pool: - vmImage: windows-2019 - strategy: - matrix: - python39: - PYTHON_VERSION: 3.9 - python312: - PYTHON_VERSION: 3.12 - timeoutInMinutes: 20 - steps: - - template: windows-build.yml - - - ${{ if startsWith(variables['Build.SourceBranch'], 'refs/pull/') }}: - - job: pr - displayName: ready to merge - dependsOn: - - linux - - macos - condition: and(eq(variables.isPullRequest, true), succeeded()) - pool: - vmImage: ubuntu-20.04 - steps: - - checkout: none diff --git a/.azure-pipelines/bootstrap.py b/.azure-pipelines/bootstrap.py deleted file mode 100644 index a384e7973..000000000 --- a/.azure-pipelines/bootstrap.py +++ /dev/null @@ -1,888 +0,0 @@ -from __future__ import annotations - -import argparse -import multiprocessing.pool -import os -import re -import shutil -import socket as pysocket -import stat -import subprocess -import sys -import tarfile -import threading -import time -import zipfile -from urllib.error import HTTPError, URLError -from urllib.request import Request, urlopen - -if sys.hexversion < 0x3070000: - exit("Python 3.7+ required") - -# Clean environment for subprocesses -clean_env = { - key: value - for key, value in os.environ.items() - if key not in ("PYTHONPATH", "LD_LIBRARY_PATH", "DYLD_LIBRARY_PATH") -} - -devnull = open(os.devnull, "wb") # to redirect unwanted subprocess output -allowed_ssh_connections = {} -concurrent_git_connection_limit = threading.Semaphore(5) - - -def make_executable(filepath): - if os.name == "posix": - mode = os.stat(filepath).st_mode - mode |= (mode & 0o444) >> 2 # copy R bits to X - # r--r--r-- => 0o444 - os.chmod(filepath, mode) - - -def install_micromamba(python): - """Download and install Micromamba""" - if sys.platform.startswith("linux"): - member = "bin/micromamba" - url = "https://micro.mamba.pm/api/micromamba/linux-64/1.5.10" - elif sys.platform == "darwin": - member = "bin/micromamba" - url = "https://micro.mamba.pm/api/micromamba/osx-64/1.5.10" - elif 
os.name == "nt": - member = "Library/bin/micromamba.exe" - url = "https://micro.mamba.pm/api/micromamba/win-64/1.5.10" - else: - raise NotImplementedError(f"Unsupported platform {os.name} / {sys.platform}") - mamba_prefix = os.path.realpath("micromamba") - clean_env["MAMBA_ROOT_PREFIX"] = mamba_prefix - mamba = os.path.join(mamba_prefix, member.split("/")[-1]) - print(f"Downloading {url}:", end=" ") - result = download_to_file(url, os.path.join(mamba_prefix, "micromamba.tar.bz2")) - if result in (0, -1): - sys.exit("Micromamba download failed") - with tarfile.open( - os.path.join(mamba_prefix, "micromamba.tar.bz2"), "r:bz2" - ) as tar, open(mamba, "wb") as fh: - fh.write(tar.extractfile(member).read()) - make_executable(mamba) - - # verify micromamba works and check version - conda_info = subprocess.check_output([mamba, "--version"], env=clean_env) - if sys.version_info.major > 2: - conda_info = conda_info.decode("latin-1") - print("Using Micromamba version", conda_info.strip()) - - # identify packages required for environment - filename = os.path.join("modules", "dxtbx", ".azure-pipelines", "ci-conda-env.txt") - if not os.path.isfile(filename): - raise RuntimeError(f"The environment file {filename} is not available") - - # install a new environment or update an existing one - prefix = os.path.realpath("conda_base") - if os.path.exists(prefix): - command = "install" - text_messages = ["Updating", "update of"] - else: - command = "create" - text_messages = ["Installing", "installation into"] - - command_list = [ - mamba, - "--no-env", - "--no-rc", - "--prefix", - prefix, - "--root-prefix", - mamba_prefix, - command, - "--file", - filename, - "--yes", - "--channel", - "conda-forge", - "--override-channels", - "python=%s" % python, - ] - - print( - "{text} dials environment from {filename} with Python {python}".format( - text=text_messages[0], filename=filename, python=python - ) - ) - for retry in range(5): - retry += 1 - try: - run_command( - command=command_list, - workdir=".", - ) - except Exception: - print( - """ -******************************************************************************* -There was a failure in constructing the conda environment. -Attempt {retry} of 5 will start {retry} minute(s) from {t}. -******************************************************************************* -""".format(retry=retry, t=time.asctime()) - ) - time.sleep(retry * 60) - else: - break - else: - sys.exit( - """ -The conda environment could not be constructed. Please check that there is a -working network connection for downloading conda packages. 
-""" - ) - print("Completed {text}:\n {prefix}".format(text=text_messages[1], prefix=prefix)) - with open(os.path.join(prefix, ".condarc"), "w") as fh: - fh.write( - """ -changeps1: False -channels: - - conda-forge -""".lstrip() - ) - - -def run_command(command, workdir): - print("Running {} (in {})".format(" ".join(command), workdir)) - workdir = os.path.abspath(workdir) - try: - os.makedirs(workdir) - except OSError: - pass - try: - p = subprocess.Popen(args=command, cwd=workdir, env=clean_env) - except Exception as e: - if isinstance(e, OSError): - if e.errno == 2: - executable = os.path.normpath(os.path.join(workdir, command[0])) - raise RuntimeError("Could not run %s: File not found" % executable) - if "child_traceback" in dir(e): - print("Calling subprocess resulted in error; ", e.child_traceback) - raise e - - try: - p.wait() - except KeyboardInterrupt: - print("\nReceived CTRL+C, trying to stop subprocess...\n") - p.terminate() - raise - if p.returncode: - sys.exit("Process failed with return code %s" % p.returncode) - - -def run_indirect_command(command, args): - print("(via conda environment) " + command) - if os.name == "nt": - filename = os.path.join("build", "indirection.cmd") - with open(filename, "w") as fh: - fh.write("call %s\\conda_base\\condabin\\activate.bat\r\n" % os.getcwd()) - fh.write("shift\r\n") - fh.write("%*\r\n") - command = command + ".bat" - indirection = ["cmd.exe", "/C", "indirection.cmd"] - else: - filename = os.path.join("build", "indirection.sh") - with open(filename, "w") as fh: - fh.write("#!/bin/bash\n") - fh.write("source %s/conda_base/etc/profile.d/conda.sh\n" % os.getcwd()) - fh.write("conda activate %s/conda_base\n" % os.getcwd()) - fh.write('"$@"\n') - make_executable(filename) - indirection = ["./indirection.sh"] - run_command( - command=indirection + [command] + args, - workdir="build", - ) - - -def download_to_file(url, file, quiet=False): - """Downloads a URL to file. Returns the file size. - Returns -1 if the downloaded file size does not match the expected file - size - Returns -2 if the download is skipped due to the file at the URL not - being newer than the local copy (identified by matching timestamp and - size) - """ - - # Create directory structure if necessary - if os.path.dirname(file): - try: - os.makedirs(os.path.dirname(file)) - except Exception: - pass - - localcopy = os.path.isfile(file) - - try: - from ssl import SSLError - except ImportError: - SSLError = None - - # Open connection to remote server - try: - url_request = Request(url) - if localcopy: - # Shorten timeout to 7 seconds if a copy of the file is already present - socket = urlopen(url_request, None, 7) - else: - socket = urlopen(url_request) - except SSLError as e: - # This could be a timeout - if localcopy: - # Download failed for some reason, but a valid local copy of - # the file exists, so use that one instead. - if not quiet: - print(str(e)) - return -2 - # otherwise pass on the error message - raise - except (pysocket.timeout, HTTPError) as e: - if localcopy: - # Download failed for some reason, but a valid local copy of - # the file exists, so use that one instead. - if not quiet: - print(str(e)) - return -2 - # otherwise pass on the error message - raise - except URLError as e: - if localcopy: - # Download failed for some reason, but a valid local copy of - # the file exists, so use that one instead. 
- if not quiet: - print(str(e)) - return -2 - # if url fails to open, try using curl - # temporary fix for old OpenSSL in system Python on macOS - # https://github.com/cctbx/cctbx_project/issues/33 - command = ["/usr/bin/curl", "--http1.0", "-fLo", file, "--retry", "5", url] - subprocess.call(command) - socket = None # prevent later socket code from being run - try: - received = os.path.getsize(file) - except OSError: - raise RuntimeError("Download failed") - - if socket is not None: - try: - file_size = int(socket.info().get("Content-Length")) - except Exception: - file_size = 0 - - remote_mtime = 0 - try: - remote_mtime = time.mktime(socket.info().getdate("last-modified")) - except Exception: - pass - - if file_size > 0: - if remote_mtime > 0: - # check if existing file matches remote size and timestamp - try: - ( - mode, - ino, - dev, - nlink, - uid, - gid, - size, - atime, - mtime, - ctime, - ) = os.stat(file) - if (size == file_size) and (remote_mtime == mtime): - if not quiet: - print("local copy is current") - socket.close() - return -2 - except Exception: - # proceed with download if timestamp/size check fails for any reason - pass - - hr_size = (file_size, "B") - if hr_size[0] > 500: - hr_size = (hr_size[0] / 1024, "kB") - if hr_size[0] > 500: - hr_size = (hr_size[0] / 1024, "MB") - if not quiet: - print("%.1f %s" % hr_size) - print(" [0%", end="") - sys.stdout.flush() # becomes print(flush=True) when we move to 3.3+ - - received = 0 - block_size = 8192 - progress = 1 - # Write to the file immediately so we can empty the buffer - tmpfile = file + ".tmp" - - with open(tmpfile, "wb") as fh: - while True: - block = socket.read(block_size) - received += len(block) - fh.write(block) - if file_size > 0 and not quiet: - while (100 * received / file_size) > progress: - progress += 1 - if (progress % 20) == 0: - print(progress, end="%") - sys.stdout.flush() # becomes print(flush=True) when we move to 3.3+ - elif (progress % 2) == 0: - print(".", end="") - sys.stdout.flush() # becomes print(flush=True) when we move to 3.3+ - if not block: - break - socket.close() - - if not quiet: - if file_size > 0: - print("]") - else: - print("%d kB" % (received / 1024)) - sys.stdout.flush() # becomes print(flush=True) when we move to 3.3+ - - # Do not overwrite file during the download. If a download temporarily fails we - # may still have a clean, working (yet older) copy of the file. 
- shutil.move(tmpfile, file) - - if (file_size > 0) and (file_size != received): - return -1 - - if remote_mtime > 0: - # set file timestamp if timestamp information is available - st = os.stat(file) - atime = st[stat.ST_ATIME] # current access time - os.utime(file, (atime, remote_mtime)) - - return received - - -def unzip(archive, directory, trim_directory=0): - """unzip a file into a directory.""" - if not zipfile.is_zipfile(archive): - raise Exception( - f"Cannot install {directory}: {archive} is not a valid .zip file" - ) - z = zipfile.ZipFile(archive, "r") - for member in z.infolist(): - is_directory = member.filename.endswith("/") - filename = os.path.join(*member.filename.split("/")[trim_directory:]) - if filename != "": - filename = os.path.normpath(filename) - if "../" in filename: - raise Exception( - f"Archive {archive} contains invalid filename {filename}" - ) - filename = os.path.join(directory, filename) - upperdirs = os.path.dirname(filename) - try: - if is_directory and not os.path.exists(filename): - os.makedirs(filename) - elif upperdirs and not os.path.exists(upperdirs): - os.makedirs(upperdirs) - except Exception: - pass - if not is_directory: - with z.open(member) as source: - with open(filename, "wb") as target: - shutil.copyfileobj(source, target) - - # Preserve executable permission, if set - unix_executable = member.external_attr >> 16 & 0o111 - # rwxrwxrwx => --x--x--x => 0o111 - if unix_executable: - make_executable(filename) - z.close() - - -def set_git_repository_config_to_rebase(config): - with open(config) as fh: - cfg = fh.readlines() - - branch, remote, rebase = False, False, False - insertions = [] - for n, line in enumerate(cfg): - if line.startswith("["): - if branch and remote and not rebase: - insertions.insert(0, (n, branch)) - if line.startswith("[branch"): - branch = line.split('"')[1] - else: - branch = False - remote, rebase = False, False - if re.match(r"remote\s*=", line.strip()): - remote = True - if re.match(r"rebase\s*=", line.strip()): - rebase = True - if branch and remote and not rebase: - insertions.insert(0, (n + 1, branch)) - for n, branch in insertions: - cfg.insert(n, "\trebase = true\n") - with open(config, "w") as fh: - fh.write("".join(cfg)) - - -def git(module, git_available, ssh_available, settings): - """Retrieve a git repository, either by running git directly - or by downloading and unpacking an archive. - """ - destination = os.path.join("modules", module) - - if os.path.exists(destination): - if not os.path.exists(os.path.join(destination, ".git")): - return module, "WARNING", "Existing non-git directory -- skipping" - if not git_available: - return module, "WARNING", "Cannot update module, git command not found" - - with open(os.path.join(destination, ".git", "HEAD")) as fh: - if fh.read(4) != "ref:": - return ( - module, - "WARNING", - "Cannot update existing git repository! You are not on a branch.\n" - "This may be legitimate when run eg. via Jenkins, but be aware that you cannot commit any changes", - ) - - with concurrent_git_connection_limit: - p = subprocess.Popen( - args=["git", "pull", "--rebase"], - cwd=destination, - env=clean_env, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - ) - # This may fail for unclean trees and merge problems. In this case manual - # user intervention will be required. 
- # For the record, you can clean up the tree and *discard ALL changes* with - # git reset --hard origin/master - # git clean -dffx - try: - output, _ = p.communicate() - output = output.decode("latin-1") - except KeyboardInterrupt: - print("\nReceived CTRL+C, trying to terminate subprocess...\n") - p.terminate() - raise - if p.returncode: - return ( - module, - "WARNING", - "Cannot update existing git repository! Unclean tree or merge problems.\n" - + output, - ) - # Show the hash for the checked out commit for debugging purposes - p = subprocess.Popen( - args=["git", "rev-parse", "HEAD", "--abbrev-ref", "HEAD"], - cwd=destination, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - ) - output, _ = p.communicate() - output = output.decode("latin-1") - if p.returncode: - return module, "WARNING", "Cannot get git repository revision\n" + output - output = output.split() - if len(output) == 2: - return ( - module, - "OK", - "Checked out revision {} ({})".format(output[0], output[1]), - ) - return module, "OK", "Checked out revision " + output[0].strip() - - try: - os.makedirs("modules") - except OSError: - pass - - remote_branch = settings.get("branch-remote", settings["branch-local"]) - - if not git_available: - # Fall back to downloading a static archive - url = "https://github.com/{}/archive/{}.zip".format( - settings.get("effective-repository", settings.get("base-repository")), - remote_branch, - ) - filename = os.path.join("modules", f"{module}-{remote_branch}.zip") - try: - download_to_file(url, filename, quiet=True) - except Exception: - print("Error downloading", url) - raise - unzip(filename, destination, trim_directory=1) - return module, "OK", "Downloaded branch %s from static archive" % remote_branch - - if ssh_available: - remote_pattern = "git@github.com:%s.git" - else: - remote_pattern = "https://github.com/%s.git" - - secondary_remote = settings.get("effective-repository") and ( - settings["effective-repository"] != settings.get("base-repository") - ) - direct_branch_checkout = [] - if not secondary_remote and remote_branch == settings["branch-local"]: - direct_branch_checkout = ["-b", remote_branch] - - with concurrent_git_connection_limit: - p = subprocess.Popen( - args=["git", "clone", "--recursive"] - + direct_branch_checkout - + [ - remote_pattern - % settings.get("base-repository", settings.get("effective-repository")), - module, - ], - cwd="modules", - env=clean_env, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - ) - try: - output, _ = p.communicate() - output = output.decode("latin-1") - except KeyboardInterrupt: - print("\nReceived CTRL+C, trying to terminate subprocess...\n") - p.terminate() - raise - if p.returncode: - return (module, "ERROR", "Cannot checkout git repository\n" + output) - - if secondary_remote: - returncode = subprocess.call( - [ - "git", - "remote", - "add", - "upstream", - remote_pattern % settings["effective-repository"], - ], - cwd=destination, - env=clean_env, - stdout=devnull, - stderr=devnull, - ) - if returncode: - return ( - module, - "ERROR", - "Could not add upstream remote to repository. Repository may be in invalid state!", - ) - with concurrent_git_connection_limit: - returncode = subprocess.call( - ["git", "fetch", "upstream"], - cwd=destination, - env=clean_env, - stdout=devnull, - stderr=devnull, - ) - if returncode: - return ( - module, - "ERROR", - "Could not fetch upstream repository %s. Repository may be in invalid state!" 
- % settings["effective-repository"], - ) - - set_git_repository_config_to_rebase(os.path.join(destination, ".git", "config")) - - if not direct_branch_checkout: - # set up the local branch with tracking - returncode = subprocess.call( - [ - "git", - "checkout", - "-B", - settings["branch-local"], - "--track", - "{}/{}".format( - "upstream" if secondary_remote else "origin", remote_branch - ), - ], - cwd=destination, - env=clean_env, - stdout=devnull, - stderr=devnull, - ) - if returncode: - return ( - module, - "ERROR", - "Could not check out alternate branch %s. Repository may be in invalid state!" - % remote_branch, - ) - - # Show the hash for the checked out commit for debugging purposes - p = subprocess.Popen( - args=["git", "rev-parse", "HEAD"], - cwd=destination, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - ) - output, _ = p.communicate() - output = output.decode("latin-1") - if p.returncode: - return ( - module, - "WARNING", - "Cannot get git repository revision\n" + output, - ) - git_status = settings["branch-local"] - if settings["branch-local"] != remote_branch: - git_status += " tracking " + remote_branch - if secondary_remote: - git_status += " at " + settings["effective-repository"] - return module, "OK", f"Checked out revision {output.strip()} ({git_status})" - - -def update_sources(options): - try: - git_available = not subprocess.call( - ["git", "--version"], stdout=devnull, stderr=devnull - ) - except OSError: - git_available = False - ssh_available = False - if git_available: - try: - returncode = subprocess.call( - [ - "ssh", - "-oBatchMode=yes", - "-oStrictHostKeyChecking=no", - "-T", - "git@github.com", - ], - stdout=devnull, - stderr=devnull, - ) - # SSH errors lead to 255 - ssh_available = returncode in (0, 1) - except OSError: - pass - - repositories = { - source.split("/")[1]: {"base-repository": source, "branch-local": "master"} - for source in ( - "cctbx/cctbx_project", - "dials/cbflib", - ) - } - repositories["dxtbx"] = { - "base-repository": "cctbx/dxtbx", - "branch-local": "main", - } - - for source, setting in options.branch: - if source not in repositories: - sys.exit("Unknown repository %s" % source) - setting = re.match( - r"^(?:(\w+/\w+)@?)?([a-zA-Z0-9._\-]+)?(?::([a-zA-Z0-9._\-]+))?$", setting - ) - if not setting: - sys.exit("Could not parse the branch setting for repository %s" % source) - _repository, _branch_remote, _branch_local = setting.groups() - if _repository: - repositories[source] = { - "base-repository": _repository, - "branch-remote": _branch_remote or "master", - "branch-local": _branch_local or _branch_remote or "master", - } - elif _branch_remote: - repositories[source]["branch-remote"] = _branch_remote - repositories[source]["branch-local"] = _branch_local or _branch_remote - elif _branch_local: - repositories[source]["branch-local"] = _branch_local - - def _git_fn(repository): - return git(repository, git_available, ssh_available, repositories[repository]) - - success = True - update_pool = multiprocessing.pool.ThreadPool(20) - try: - for result in update_pool.imap_unordered(_git_fn, repositories): - module, result, output = result - output = (result + " - " + output).replace( - "\n", "\n" + " " * (len(module + result) + 5) - ) - if os.name == "posix" and sys.stdout.isatty(): - if result == "OK": - output = "\x1b[32m" + output + "\x1b[0m" - elif result == "WARNING": - output = "\x1b[33m" + output + "\x1b[0m" - elif result == "ERROR": - output = "\x1b[31m" + output + "\x1b[0m" - success = False - print(module + ": " + 
output) - except KeyboardInterrupt: - update_pool.terminate() - sys.exit("\naborted with Ctrl+C") - except Exception: - update_pool.terminate() - raise - update_pool.close() - update_pool.join() - if not success: - sys.exit("\nFailed to update one or more repositories") - - -def refresh_build(): - print("Running libtbx.refresh") - dispatch_extension = ".bat" if os.name == "nt" else "" - run_command( - [os.path.join("build", "bin", "libtbx.refresh" + dispatch_extension)], - workdir=".", - ) - - -def configure_build(options): - if os.name == "nt": - conda_python = os.path.join(os.getcwd(), "conda_base", "python.exe") - else: - conda_python = os.path.join("..", "conda_base", "bin", "python") - - with open("dials", "w") as fh: - # ensure we write a new-style environment setup script - fh.write( - """ -#!/bin/bash - -if [ -n "$BASH_SOURCE" ]; then - LIBTBX_BUILD="$(dirname -- "${BASH_SOURCE[0]}")/build" -else - LIBTBX_BUILD="%s" -fi - -# make path absolute and resolve symlinks -LIBTBX_BUILD=$(cd -P -- "${LIBTBX_BUILD}" && pwd -P) - -# enable conda environment -source ${LIBTBX_BUILD}/../conda_base/etc/profile.d/conda.sh -conda activate $(dirname -- "${LIBTBX_BUILD}")/conda_base - -# prepend cctbx /build/bin directory to PATH -PATH="${LIBTBX_BUILD}/bin:${PATH}" -export PATH - -unset LIBTBX_BUILD -""".lstrip() - % os.path.join(os.getcwd(), "build") - ) - print("Setting up build directory") - run_command( - command=[ - conda_python, - os.path.join("..", "modules", "cctbx_project", "libtbx", "configure.py"), - "--skip_phenix_dispatchers", - "cctbx", - "cbflib", - "dxtbx", - "scitbx", - "libtbx", - "iotbx", - "--compiler=conda", - "--enable_cxx11", - "--use_conda", - ], - workdir="build", - ) - - -def make_build(): - try: - nproc = len(os.sched_getaffinity(0)) - except AttributeError: - nproc = multiprocessing.cpu_count() - run_indirect_command(os.path.join("bin", "libtbx.scons"), args=["-j", str(nproc)]) - # run build again to make sure everything is built - run_indirect_command(os.path.join("bin", "libtbx.scons"), args=["-j", str(nproc)]) - - -def repository_at_tag(string): - try: - repository, tag = string.split("@", 1) - return (repository, tag) - except ValueError: - raise argparse.ArgumentTypeError( - "%s does not follow the repository@branch format" % string - ) - - -class Choices(tuple): - # Python bug https://bugs.python.org/issue27227, https://bugs.python.org/issue9625 - def __new__(cls, *args, **kwargs): - x = tuple.__new__(cls, *args, **kwargs) - Choices.__init__(x, *args, **kwargs) - return x - - def __init__(self, *args, **kwargs): - self.default = [] - - def __contains__(self, item): - return tuple.__contains__(self, item) or item is self.default - - -def run(): - description = "A bootstrap helper for dxtbx CI builds" - - parser = argparse.ArgumentParser( - description=description, - formatter_class=argparse.RawDescriptionHelpFormatter, - ) - action_choices = Choices(("update", "base", "build")) - action_choices.default = ["update", "base", "build"] - parser.add_argument( - "actions", - nargs="*", - help="actions for building", - choices=action_choices, - default=action_choices.default, - ) - parser.add_argument( - "--python", - help="Install this minor version of Python (default: %(default)s)", - default="3.9", - choices=("3.9", "3.10", "3.11", "3.12"), - ) - parser.add_argument( - "--branch", - type=repository_at_tag, - action="append", - default=[], - help=( - "during 'update' step when a repository is newly cloned set it to a given " - "branch. Specify as repository@branch, eg. 
'dials@dials-next'" - ), - ) - parser.add_argument( - "--clean", - help="Remove temporary conda environments and package caches after installation", - default=False, - action="store_true", - ) - options = parser.parse_args() - - # Add sources - if "update" in options.actions: - update_sources(options) - - # Build base packages - if "base" in options.actions: - install_micromamba(options.python) - if options.clean: - shutil.rmtree(os.path.realpath("micromamba")) - - # Configure, make - if "build" in options.actions: - configure_build(options) - make_build() - refresh_build() - - print("\nBootstrap success: %s" % ", ".join(options.actions)) - - -if __name__ == "__main__": - run() diff --git a/.azure-pipelines/lint-validation.py b/.azure-pipelines/lint-validation.py deleted file mode 100644 index ad57bc75b..000000000 --- a/.azure-pipelines/lint-validation.py +++ /dev/null @@ -1,42 +0,0 @@ -from __future__ import annotations - -import os -import subprocess - -failures = 0 -try: - process = subprocess.run( - [ - "ruff", - "check", - "--exit-zero", - ], - capture_output=True, - check=True, - encoding="latin-1", - timeout=300, - ) -except (subprocess.CalledProcessError, subprocess.TimeoutExpired) as e: - print( - "##vso[task.logissue type=error;]Ruff validation failed with", - str(e.__class__.__name__), - ) - print(e.stdout) - print(e.stderr) - print("##vso[task.complete result=Failed;]Ruff validation failed") - exit() -for line in process.stdout.split("\n"): - if ":" not in line: - continue - filename, lineno, column, error = line.split(":", maxsplit=3) - errcode, error = error.strip().split(" ", maxsplit=1) - filename = os.path.normpath(filename) - failures += 1 - print( - f"##vso[task.logissue type=error;sourcepath={filename};" - f"linenumber={lineno};columnnumber={column};code={errcode};]" + error - ) - -if failures: - print(f"##vso[task.logissue type=warning]Found {failures} Ruff violation(s)") - print(f"##vso[task.complete result=Failed;]Found {failures} Ruff violation(s)") diff --git a/.azure-pipelines/syntax-validation.py b/.azure-pipelines/syntax-validation.py deleted file mode 100644 index 2d74948ac..000000000 --- a/.azure-pipelines/syntax-validation.py +++ /dev/null @@ -1,32 +0,0 @@ -from __future__ import annotations - -import ast -import os -import sys - -print("Python", sys.version, "\n") - -failures = 0 - -for base, _, files in os.walk("."): - for f in files: - if not f.endswith(".py"): - continue - filename = os.path.normpath(os.path.join(base, f)) - try: - with open(filename) as fh: - ast.parse(fh.read()) - except SyntaxError as se: - failures += 1 - print( - f"##vso[task.logissue type=error;sourcepath={filename};" - f"linenumber={se.lineno};columnnumber={se.offset};]" - f"SyntaxError: {se.msg}" - ) - print(" " + se.text + " " * se.offset + "^") - print(f"SyntaxError: {se.msg} in {filename} line {se.lineno}") - print() - -if failures: - print(f"##vso[task.logissue type=warning]Found {failures} syntax error(s)") - print(f"##vso[task.complete result=Failed;]Found {failures} syntax error(s)") diff --git a/.azure-pipelines/unix-build.yml b/.azure-pipelines/unix-build.yml deleted file mode 100644 index 93060f759..000000000 --- a/.azure-pipelines/unix-build.yml +++ /dev/null @@ -1,102 +0,0 @@ -# Variables: -# CACHE_VERSION: unique cache identifier -# CURRENT_WEEK: weekly changing cache identifier -# PYTHON_VERSION: string in the form of "3.x" -# TODAY_ISO: today's date in ISO format, eg. "20200531" - -steps: - -# Obtain a shallow clone of the DXTBX repository. 
-# DXTBX will not be able to report proper version numbers -- checkout: self - path: ./modules/dxtbx - fetchDepth: 1 - displayName: Checkout $(Build.SourceBranch) - -# Create a new conda environment using the bootstrap script -# Extract the dials-data version so we can correctly cache regression data. -- bash: | - set -eux - python3 modules/dxtbx/.azure-pipelines/parse_dependencies.py \ - modules/dxtbx/dependencies.yaml \ - modules/dxtbx/.azure-pipelines/ci-dependencies.yaml \ - --prebuilt-cctbx \ - > modules/dxtbx/.azure-pipelines/ci-conda-env.txt - python3 modules/dxtbx/.azure-pipelines/bootstrap.py base --clean --python $(PYTHON_VERSION) - - set +ux - . conda_base/bin/activate - set -ux - - dials.data info -v - echo "##vso[task.setvariable variable=DIALS_DATA_VERSION_FULL]$(dials.data info -v | grep version.full)" - echo "##vso[task.setvariable variable=DIALS_DATA_VERSION]$(dials.data info -v | grep version.major_minor)" - mkdir -p data - displayName: Create python $(PYTHON_VERSION) environment - workingDirectory: $(Pipeline.Workspace) - -# Build dxtbx -- bash: | - set -e - . conda_base/bin/activate - set -ux - mkdir build - cd build - cmake ../modules/dxtbx -DCMAKE_UNITY_BUILD=true - cmake --build . --target install - pip install ../modules/dxtbx - displayName: Build dxtbx - workingDirectory: $(Pipeline.Workspace) - -# Retrieve the regression data from cache if possible -# The cache allows day-to-day incremental updates, which is relevant only if -# tests are added that refer to datasets in dials-data that were not previously -# referred to. -# New versions of dials-data also lead to cache updates, kick-started from the -# previous cache version. -# The cache is shared across operating systems and python versions, and flushed -# once a week and for dials-data major and minor releases (eg. 2.0->2.1). -- task: Cache@2 - inputs: - key: '"data" | "$(CACHE_VERSION)-$(CURRENT_WEEK)" | "$(DIALS_DATA_VERSION)" | "$(TODAY_ISO)" | "$(DIALS_DATA_VERSION_FULL)"' - restoreKeys: | - "data" | "$(CACHE_VERSION)-$(CURRENT_WEEK)" | "$(DIALS_DATA_VERSION)" | "$(TODAY_ISO)" - "data" | "$(CACHE_VERSION)-$(CURRENT_WEEK)" | "$(DIALS_DATA_VERSION)" - path: $(Pipeline.Workspace)/data - cacheHitVar: DATA_CACHED - displayName: Restore regression data cache - -# Finally, run the full regression test suite -- bash: | - set -e - . 
conda_base/bin/activate - set -ux - export DIALS_DATA=${PWD}/data - cd modules/dxtbx - export PYTHONDEVMODE=1 - pytest -v -ra -n auto --basetemp="$(Pipeline.Workspace)/tests" --durations=10 \ - --cov=dxtbx --cov-report=html --cov-report=xml --cov-branch \ - --timeout=5400 --regression || echo "##vso[task.complete result=Failed;]Some tests failed" - displayName: Run tests - workingDirectory: $(Pipeline.Workspace) - -- script: | - bash <(curl -s https://codecov.io/bash) -v -n "Python $(PYTHON_VERSION) $(Agent.OS)" - displayName: Publish coverage stats - continueOnError: True - timeoutInMinutes: 2 - workingDirectory: $(Pipeline.Workspace)/modules/dxtbx - -# Recover disk space after testing -# This is only relevant if we had cache misses, as free disk space is required to create cache archives -- bash: | - echo Disk space usage: - df -h - du -sh * - echo - echo Test artefacts: - du -h tests - rm -rf tests - displayName: Recover disk space - workingDirectory: $(Pipeline.Workspace) - condition: ne(variables.DATA_CACHED, 'true') diff --git a/.azure-pipelines/windows-build.yml b/.azure-pipelines/windows-build.yml deleted file mode 100644 index b47ef31d1..000000000 --- a/.azure-pipelines/windows-build.yml +++ /dev/null @@ -1,111 +0,0 @@ -# This is work in progress. -# Currently the windows build only proceeds up to the base installation stage. - -# Variables: -# CACHE_VERSION: unique cache identifier -# CURRENT_WEEK: weekly changing cache identifier -# PYTHON_VERSION: string in the form of "3.x" -# TODAY_ISO: today's date in ISO format, eg. "20200531" - -steps: - -# Obtain a shallow clone of the DXTBX repository. -# DXTBX will not be able to report proper version numbers -- checkout: self - path: ./modules/dxtbx - fetchDepth: 1 - displayName: Checkout $(Build.SourceBranch) - -- powershell: Write-Host "##vso[task.prependpath]$env:CONDA\Scripts" - displayName: Add conda to PATH - -# Create a new conda environment using the bootstrap script -- bash: | - set -eux - # Remove compilers from conda-env, as prebuilt cctbx appears to use - # the system configured compilers and so the conda-forge settings conflict - python3 modules/dxtbx/.azure-pipelines/parse_dependencies.py \ - modules/dxtbx/dependencies.yaml \ - modules/dxtbx/.azure-pipelines/ci-dependencies.yaml \ - --prebuilt-cctbx \ - | grep -v compiler \ - > modules/dxtbx/.azure-pipelines/ci-conda-env.txt - - python3 modules/dxtbx/.azure-pipelines/bootstrap.py base --clean --python $(PYTHON_VERSION) - - displayName: Create python $(PYTHON_VERSION) environment - workingDirectory: $(Pipeline.Workspace) - -# Extract the dials-data version so we can correctly cache regression data. -- bash: | - # Note: Running directly avoids having to deal with cross bash/cmd conda activation - conda_base/Scripts/dials.data-script.py info -v - - echo "##vso[task.setvariable variable=DIALS_DATA_VERSION_FULL]$(conda_base/Scripts/dials.data-script.py info -v | grep version.full)" - echo "##vso[task.setvariable variable=DIALS_DATA_VERSION]$(conda_base/Scripts/dials.data-script.py info -v | grep version.major_minor)" - mkdir -p data - displayName: Determine dials.data version - workingDirectory: $(Pipeline.Workspace) - -# Build dxtbx -- script: | - call activate conda_base/ - - mkdir build - cd build - cmake ../modules/dxtbx -DCMAKE_UNITY_BUILD=true - if %errorlevel% neq 0 exit /b %errorlevel% - - cmake --build . --config Release - if %errorlevel% neq 0 exit /b %errorlevel% - - cmake --install . 
--config Release - if %errorlevel% neq 0 exit /b %errorlevel% - - pip install ../modules/dxtbx - if %errorlevel% neq 0 exit /b %errorlevel% - - displayName: Build dxtbx - workingDirectory: $(Pipeline.Workspace) - -# Retrieve the regression data from cache if possible -# The cache allows day-to-day incremental updates, which is relevant only if -# tests are added that refer to datasets in dials-data that were not previously -# referred to. -# New versions of dials-data also lead to cache updates, kick-started from the -# previous cache version. -# The cache is shared across operating systems and python versions, and flushed -# once a week and for dials-data major and minor releases (eg. 2.0->2.1). -- task: Cache@2 - inputs: - key: '"data" | "$(CACHE_VERSION)-$(CURRENT_WEEK)" | "$(DIALS_DATA_VERSION)" | "$(TODAY_ISO)" | "$(DIALS_DATA_VERSION_FULL)"' - restoreKeys: | - "data" | "$(CACHE_VERSION)-$(CURRENT_WEEK)" | "$(DIALS_DATA_VERSION)" | "$(TODAY_ISO)" - "data" | "$(CACHE_VERSION)-$(CURRENT_WEEK)" | "$(DIALS_DATA_VERSION)" - path: $(Pipeline.Workspace)/data - cacheHitVar: DATA_CACHED - displayName: Restore regression data cache - -# Run the dxtbx regression suite -- script: | - call activate conda_base/ - SET PYTHONDEVMODE=1 - SET DIALS_DATA=$(Pipeline.Workspace)/data - pytest -v -ra modules/dxtbx/tests --regression --basetemp="$(Pipeline.Workspace)/tests" --durations=10 - if %errorlevel% neq 0 exit /b %errorlevel% - displayName: Run Tests - workingDirectory: $(Pipeline.Workspace) - -# Recover disk space after testing -# This is only relevant if we had cache misses, as free disk space is required to create cache archives -- bash: | - echo Disk space usage: - df -h - du -sh * - echo - echo Test artefacts: - du -h tests - rm -rf tests - displayName: Recover disk space - workingDirectory: $(Pipeline.Workspace) - condition: ne(variables.DATA_CACHED, 'true') diff --git a/.azure-pipelines/ci-dependencies.yaml b/.github/ci-dependencies.yaml similarity index 62% rename from .azure-pipelines/ci-dependencies.yaml rename to .github/ci-dependencies.yaml index 787bc0ddb..ea553efba 100644 --- a/.azure-pipelines/ci-dependencies.yaml +++ b/.github/ci-dependencies.yaml @@ -1,7 +1,6 @@ # Extra dependendencies used in addition for this CI testing test: - - pytest-azurepipelines - pytest-cov - pytest-forked - pytest-timeout - - conda # This adds easy activation scripts \ No newline at end of file + - pytest-md diff --git a/.azure-pipelines/parse_dependencies.py b/.github/parse_dependencies.py similarity index 100% rename from .azure-pipelines/parse_dependencies.py rename to .github/parse_dependencies.py diff --git a/.github/workflows/build_test.yml b/.github/workflows/build_test.yml new file mode 100644 index 000000000..7450a10f3 --- /dev/null +++ b/.github/workflows/build_test.yml @@ -0,0 +1,79 @@ +on: + push: + pull_request: + types: + - synchronize + - opened + +jobs: + build_test: + name: Build/Test + strategy: + fail-fast: false + matrix: + os: [ubuntu, macOS, windows] + python: ["3.9", "3.12"] + runs-on: ${{ matrix.os }}-latest + # Micromamba needs a login shell to activate + defaults: + run: + shell: bash -leo pipefail {0} + steps: + - name: Checkout Repository + uses: actions/checkout@v4 + with: + path: dxtbx + - uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python }} + - name: Prepare Base Environment + run: | + python3 dxtbx/.github/parse_dependencies.py \ + dxtbx/dependencies.yaml \ + dxtbx/.github/ci-dependencies.yaml \ + --prebuilt-cctbx \ + > ci-conda-env.txt + echo 
"python=${{ matrix.python }}" >> ci-conda-env.txt + - uses: mamba-org/setup-micromamba@v1 + with: + environment-file: ci-conda-env.txt + environment-name: conda_base + init-shell: bash + - if: runner.os != 'Windows' + run: echo "CMAKE_GENERATOR=Ninja" >> $GITHUB_ENV + - name: Build + run: | + mkdir build + cd build + cmake ../dxtbx -DCMAKE_UNITY_BUILD=true -DPython_ROOT_DIR="${CONDA_PREFIX}" + cmake --build . --config Release + cmake --install . --config Release + which pip3 + pip3 install ../dxtbx + cd .. + rm -rf ./build + - name: Prepare for cache restoration + run: | + set -x + find . -name "dials.data*" + echo "DIALS_DATA_VERSION_FULL=$(dials.data info -v | grep version.full)" >> $GITHUB_ENV + echo "DIALS_DATA_VERSION=$(dials.data info -v | grep version.major_minor)" >> $GITHUB_ENV + echo "DIALS_DATA=${PWD}/data" >> $GITHUB_ENV + echo "CURRENT_WEEK=$(date +W%W)" >> $GITHUB_ENV + echo "TODAY_ISO=$(date +%Y%m%d)" >> $GITHUB_ENV + - name: Restore Cache + uses: actions/cache@v4 + with: + key: "${{ env.CURRENT_WEEK }}-${{ env.DIALS_DATA_VERSION }}-${{ env.TODAY_ISO }}-${{ env.DIALS_DATA_VERSION_FULL }}" + restore-keys: | + ${{ env.CURRENT_WEEK }}-${{ env.DIALS_DATA_VERSION }}-${{ env.TODAY_ISO }}- + ${{ env.CURRENT_WEEK }}-${{ env.DIALS_DATA_VERSION }}- + path: ${{ github.workspace }}/data + - name: Run pytest + uses: pavelzw/pytest-action@510c5e90c360a185039bea56ce8b3e7e51a16507 # v2.2.0 + with: + verbose: true + emoji: false + job-summary: true + custom-arguments: dxtbx --regression + click-to-expand: true \ No newline at end of file