diff --git a/.VERSION b/.VERSION new file mode 100644 index 00000000000..008a20e06c0 --- /dev/null +++ b/.VERSION @@ -0,0 +1,3 @@ +refs=$Format:%D$ +commit=$Format:%H$ +abbrev_commit=$Format:%H$ diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 9efedae0995..d66d0e7ee09 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,6 +1,5 @@ [bumpversion] -current_version = 21.01.02 +current_version = 21.2.9 files = setup.cfg src/scancode_config.py commit = False tag = False - diff --git a/.gitattributes b/.gitattributes index c446d380f74..0d0f448a6a8 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1,2 +1,4 @@ # Ignore all Git auto CR/LF line endings conversions * binary +# save the version details for git tarballs +.VERSION export-subst \ No newline at end of file diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 9f354365713..9502eda7f69 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -6,15 +6,21 @@ v21.x (next) -v21.1.21 --------- +v21.2.9 +------- + +Security: + + - Update vulnerable LXML to version 4.6.2 to fix + https://nvd.nist.gov/vuln/detail/CVE-2020-27783 + This was detected thanks to https://github.com/nexb/vulnerablecode Operating system support: - Drop support for Python 2 #295 - Drop support for 32 bits on Windows #335 - Add support for Python 64 bits on Windows 64 bits #335 - - Add support for Python 3.6 to Python 3.9 on Linux, Windows and macOS. + - Add support for Python 3.6, 3.7, 3.8 and 3.9 on Linux, Windows and macOS. These are now tested on Azure. - Add deprecation message for native Windows support #2366 @@ -39,7 +45,7 @@ Copyright scanning: - Improve detection with minor grammar fixes -Misc. +Misc.: - Adopt a new calendar date-based versioning for scancode-toolkit version numbers - Update thirdparty dependencies and built-in plugins diff --git a/INSTALL.rst b/INSTALL.rst index 20438ee3286..a4ea89b4cb4 100644 --- a/INSTALL.rst +++ b/INSTALL.rst @@ -2,80 +2,94 @@ Installation ============ -There are 4 main ways you can `install ScanCode `_. +There are a few ways you can `install ScanCode `_. + +- Recommended standard install for everyone: Use a release download and install as an application + +- Advanced installation options: + - pip install a Python PyPI package + - from source code using a git clone + - using Docker -- Installation as an Application: Downloading Releases (Recommended) -- Docker Installation -- Installation as a library: via pip -- Installation from Source Code: Git Clone Prerequisites ------------- -Before installing ScanCode make sure you've installed the prerequisites properly. This mainly -refers to installing the required Python interpreter (Python 3.6 is recommended). +Before installing ScanCode make sure you have installed these prerequisites. +The main one is a Python interpreter. +Python 3.6 is required for the standard installation. - For Linux(Ubuntu): ``sudo apt install python3.6-dev bzip2 xz-utils zlib1g libxml2-dev libxslt1-dev`` -- For MacOS: Install Python 3.6.8 from https://www.python.org/ftp/python/3.6.8/python-3.6.8-macosx10.6.pkg -- For Windows: Install Python 3.6.8 from https://www.python.org/ftp/python/3.6.8/python-3.6.8.exe +- For MacOS: Install Python 3.6.8 from https://www.python.org/ftp/python/3.6.8/python-3.6.8-macosx10.9.pkg +- For Windows: Install Python 3.6.8 from https://www.python.org/ftp/python/3.6.8/python-3.6.8-amd64.exe +- For FreeBSD: (this needs to be documented) -Refer `Prerequisites `_ for detailed information on all different platforms and Python Versions.
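With the ``export-subst`` attribute set on ``.VERSION``, ``git archive`` tarballs ship that file with the ``$Format:...$`` placeholders replaced by the actual refs and commit hash, while a plain git checkout keeps the raw placeholders. A minimal sketch of reading such a file (the helper name is hypothetical and not part of this change)::

    def read_dot_version(path='.VERSION'):
        """Return a mapping built from the key=value lines of a .VERSION file."""
        data = {}
        with open(path) as lines:
            for line in lines:
                line = line.strip()
                if not line or '=' not in line:
                    continue
                key, _, value = line.partition('=')
                # unsubstituted placeholders (plain checkout) still read $Format:...$
                data[key] = None if value.startswith('$Format') else value
        return data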
+Refer to `Prerequisites `_ +for detailed information on all different operating systems and Python versions. -Installation as an Application : Downloading Releases ----------------------------------------------------- -#. Download and extract the latest ScanCode release from https://github.com/nexB/scancode-toolkit/releases/ +Use a release download and install as an application +---------------------------------------------------- -#. Open a terminal window and then `cd` to the extracted ScanCode directory. +- Download and extract the latest ScanCode release from + https://github.com/nexB/scancode-toolkit/releases/ -#. Run this command to self-configure and display the help-text. +- Open a terminal window (or command prompt on Windows) and then `cd` to the + extracted ScanCode directory. + +- Run this command to self-configure and display the initial command line help: - Linux/Mac : ``./scancode --help`` - Windows : ``scancode --help`` -Docker Installation ------------------- -#. Download the Source Code as an archive from the `GitHub releases `_ and unzip it, or via `git clone`. +Advanced installation: pip install a Python PyPI package +-------------------------------------------------------- -#. Build the docker image from the `scancode-toolkit` directory.:: +- Create a virtual environment for Python 3.6 (or higher) and activate it:: - docker build -t scancode-toolkit . + virtualenv -p /usr/bin/python3.6 venv-scancode && source venv-scancode/bin/activate -#. Mount current working directory and run scan on mounted folder:: +- Run ``pip install scancode-toolkit[full]`` - docker run -v $PWD/:/project scancode-toolkit -clpeui --json-pp /project/result.json /project +Note that the ``[full]`` extra option is required to get a working installation +except in some advanced use cases. -Note that the parameters *before* ``scancode-toolkit`` are used for docker, -those after will be forwarded to scancode. -Installation as a library: via pip ----------------------------------- -#. Create a Python 3.6 Virtual Environment and activate the same:: +Advanced installation: using Docker +----------------------------------- - virtualenv -p /usr/bin/python3.6 venv-scancode && source venv-scancode/bin/activate +- Download the Source Code as an archive from the `GitHub releases + `_ and unzip it, or via + `git clone`. -#. Run ``pip install scancode-toolkit[full]`` +- Build the docker image from the `scancode-toolkit` directory:: -Installation from Source Code: Git Clone ----------------------------------------- + docker build -t scancode-toolkit . -#. Download the Source Code or Use Git Clone:: +- Mount current working directory and run a scan on the mounted folder:: - git clone https://github.com/nexB/scancode-toolkit.git - cd scancode-toolkit + docker run -v $PWD/:/project scancode-toolkit -clpeui --json-pp /project/result.json /project + +Note that the parameters *before* ``scancode-toolkit`` are used by docker and +those after will be forwarded to scancode. -#. You can jump to any checkpoint/Branch/Commit using the following command:: - git checkout master +Advanced installation: from source code using a git clone +--------------------------------------------------------- -#.
Run the Configure Script +- Download the Source Code or Use Git Clone:: + + git clone https://github.com/nexB/scancode-toolkit.git + cd scancode-toolkit - - On Linux/Mac: ``./configure`` - - On Windows: ``configure`` +- Run the configure script for development usage: + - On Linux/Mac: ``./configure --dev`` + - On Windows: ``configure --dev`` -Note the `Commands will vary `_ across different Installation methods and Platforms. -If this displays the `Help Text `_, you are all set to start using ScanCode. \ No newline at end of file +If this displays the `help text +`_, +you are all set to start using ScanCode. diff --git a/MANIFEST.in b/MANIFEST.in index eec3fd7b161..114ec213651 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,18 +1,25 @@ graft src graft thirdparty recursive-include etc configure.py +graft etc/thirdparty +recursive-include etc/thirdparty * include *.LICENSE include NOTICE include *.ABOUT -include *.toml -include *.rst + include setup.* include configure* +include requirements* +include *.toml + include extractcode* include scancode* + + +include *.rst include Dockerfile -include requirements* +include .VERSION global-exclude *.py[co] __pycache__ *.*~ diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 5b33125c10a..5f89ba0aa7c 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -205,7 +205,7 @@ jobs: ######################################################################## # RELEASE on 3.6. Also check that we can pip install ######################################################################## - +# # - job: Build_release_archive_py3 # pool: # vmImage: ubuntu-16.04 @@ -218,9 +218,6 @@ jobs: # versionSpec: '3.6' # displayName: 'Install Python 3.6' # -# - script: ./etc/release/scancode-create-release.sh -# displayName: 'Build installable releases' -# # - script: ./etc/release/scancode-test-pip-install.sh # displayName: 'Test pip wheel installation' # diff --git a/etc/release/README.rst b/etc/release/README.rst index da7fb16bd6f..6ad6e1c3e8c 100755 --- a/etc/release/README.rst +++ b/etc/release/README.rst @@ -151,7 +151,7 @@ etc/thirdparty and save to thirdparty and update the ABOUT and LICENSE files as needed. * This virtualenv app contains also bundled pip, wheel and setuptools that are - essential for the correct operations to work. + essential for the installation to work. 
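Whichever installation route is used, a quick way to confirm that a pip or source install worked is to import the top-level ``scancode_config`` module, which setup.cfg now lists under ``py_modules`` (a minimal check, assuming the installation completed)::

    import scancode_config

    # the installed version string, e.g. '21.2.9'
    print(scancode_config.__version__)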
Other files diff --git a/etc/release/fetch_requirements.py b/etc/release/fetch_requirements.py index 4b76f5f29e7..6cb73ed0b2c 100755 --- a/etc/release/fetch_requirements.py +++ b/etc/release/fetch_requirements.py @@ -59,6 +59,10 @@ is_flag=True, help='Allow requirements without pinned versions.', ) +@click.option('-s', '--only-sources', + is_flag=True, + help='Fetch only the corresponding source distributions.', +) @click.help_option('-h', '--help') def fetch_requirements( requirements_file, @@ -68,6 +72,7 @@ def fetch_requirements( with_sources, with_about, allow_unpinned, + only_sources, ): """ Fetch and save to THIRDPARTY_DIR all the required wheels for pinned @@ -85,21 +90,21 @@ def fetch_requirements( operating_systems = operating_system requirements_files = requirements_file - envs = itertools.product(python_versions, operating_systems) - envs = (utils_thirdparty.Environment.from_pyver_and_os(pyv, os) for pyv, os in envs) - - for env, reqf in itertools.product(envs, requirements_files): - for package, error in utils_thirdparty.fetch_wheels( - environment=env, - requirements_file=reqf, - allow_unpinned=allow_unpinned, - dest_dir=thirdparty_dir, - ): - if error: - print('Failed to fetch wheel:', package, ':', error) + if not only_sources: + envs = itertools.product(python_versions, operating_systems) + envs = (utils_thirdparty.Environment.from_pyver_and_os(pyv, os) for pyv, os in envs) + for env, reqf in itertools.product(envs, requirements_files): + for package, error in utils_thirdparty.fetch_wheels( + environment=env, + requirements_file=reqf, + allow_unpinned=allow_unpinned, + dest_dir=thirdparty_dir, + ): + if error: + print('Failed to fetch wheel:', package, ':', error) # optionally fetch sources - if with_sources: + if with_sources or only_sources: for reqf in requirements_files: for package, error in utils_thirdparty.fetch_sources( requirements_file=reqf, @@ -111,7 +116,11 @@ def fetch_requirements( if with_about: utils_thirdparty.add_fetch_or_update_about_and_license_files(dest_dir=thirdparty_dir) - utils_thirdparty.find_problems(dest_dir=thirdparty_dir) + utils_thirdparty.find_problems( + dest_dir=thirdparty_dir, + report_missing_sources=with_sources or only_sources, + report_missing_wheels=not only_sources, + ) if __name__ == '__main__': diff --git a/etc/release/scancode-create-release.sh b/etc/release/scancode-create-release.sh index 060d12f0a97..54f572a383c 100755 --- a/etc/release/scancode-create-release.sh +++ b/etc/release/scancode-create-release.sh @@ -10,14 +10,18 @@ # ################################################################################ -# Supported Python versions and OS combos -# one archive or installer is built for each combo -PYTHON_VERSIONS="36" +# Supported current app Python version and OS +# one archive or installer is built for each python x OS combo +PYTHON_APP_VERSION="36" +PYTHON_APP_DOT_VERSION="3.6" + +PYTHON_PYPI_TESTS_DOT_VERSIONS="3.6 3.7 3.8 3.9" + OPERATING_SYSTEMS="linux macos windows" -#QUIET="" +QUIET="" +#QUIET="--quiet" -QUIET="--quiet" ################################################################################ @@ -27,32 +31,22 @@ set -e #set -x -#if [ "$(uname -s)" != "Linux" ]; then -# echo "Building is only supported on Linux. Aborting" -# exit 1 -#fi +if [ "$(uname -s)" != "Linux" ]; then + echo "Building is only supported on Linux. 
Aborting" + exit 1 +fi CLI_ARGS=$1 - echo "##########################################################################" -echo "### BUILDING for Python: $PYTHON_VERSIONS on operating systems: $OPERATING_SYSTEMS" +echo "### BUILDING App for Python: $PYTHON_APP_VERSIONS on OS: $OPERATING_SYSTEMS" ################################ # Setup ################################ -echo "## RELEASE: Setup environment" - -echo "## RELEASE: Clean and configure, then regen license index" -./configure --clean -./configure -source bin/activate -scancode --reindex-licenses - - echo "## RELEASE: Backup previous releases" function backup_previous_release { @@ -75,6 +69,15 @@ backup_previous_release clean_build mkdir release +echo "## RELEASE: Setup environment" + +echo "## RELEASE: Clean and configure, then regen license index" +./configure --clean +./configure +source bin/activate +scancode --reindex-licenses + + echo "## RELEASE: Install release requirements" # We do not need a full env for releasing @@ -82,114 +85,220 @@ bin/pip install $QUIET -r etc/release/requirements.txt ################################ -# PyPI wheels and sdist +# PyPI wheels and sdist: these are not Python version- or OS-dependent ################################ +echo " " echo "## RELEASE: Building a wheel and a source distribution" -bin/python setup.py $QUIET $QUIET $QUIET sdist bdist_wheel - +bin/python setup.py $QUIET sdist bdist_wheel -echo "## RELEASE: Building a wheel and a source distribution" -bin/python setup.py $QUIET $QUIET $QUIET sdist bdist_wheel mv dist release/pypi echo "## RELEASE: wheel and source distribution built and ready for PyPI upload" -find release/pypi -ls +find release -ls ################################ # Build OSes and Pythons-specific release archives ################################ -function build_archives { - # Build scancode release archives (zip and tarbal) for one target python - # and operating_system +function build_app_archive { + # Build scancode release archives (zip and tarbal) for the current app + # python and a provided operating_system argument # Arguments: - # python_version: only include wheels for this Python version. Example: 36 - # operating_system: only include wheels for this operating_system. One of windows, linux or mac + # operating_system: only include wheels for this operating_system. + # One of windows, linux or macos + + operating_system=$1 - python_version=$1 - operating_system=$2 + echo " " + echo "## RELEASE: Building archive for Python $PYTHON_APP_VERSION on operating system: $operating_system" - echo "## RELEASE: Building archive for Python $python_version on operating system: $operating_system" + if [ "$operating_system" == "windows" ]; then + # create a zip only on Windows + formats=zip + else + formats=xztar + fi clean_build mkdir -p thirdparty - # collect thirdparty deps only for the subset for this Python/operating_system - bin/python etc/release/fetch_required_wheels.py \ - --requirement=requirements.txt \ + # 1. Collect thirdparty deps only for the subset for this Python/operating_system + bin/python etc/release/fetch_requirements.py \ + --requirements-file=requirements.txt \ --thirdparty-dir=thirdparty \ - --python-version=$python_version \ - --operating_system=$operating_system + --python-version=$PYTHON_APP_VERSION \ + --operating-system=$operating_system \ + --with-about - # Create tarball and zip. + # 2. Create tarball or zip. # For now as a shortcut we use the Python setup.py sdist to create a tarball. 
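As the surrounding comment notes, using ``setup.py sdist`` to produce the app archives is a shortcut. A self-contained alternative could be built on ``shutil.make_archive`` along these lines (purely illustrative, not part of this change)::

    import shutil

    def make_release_archive(source_dir, base_name, operating_system):
        # zip for Windows, LZMA tarball for Linux/macOS, matching build_app_archive
        archive_format = 'zip' if operating_system == 'windows' else 'xztar'
        return shutil.make_archive(base_name, archive_format, root_dir=source_dir)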
# This is hackish and we should instead use our own archiving code that # would take a distutils manifest-like input - bin/python setup.py $QUIET $QUIET $QUIET sdist --formats=xztar,zip - bin/python etc/release/scancode_rename_archives.py dist/ $python_version $operating_system + bin/python setup.py $QUIET sdist --formats=$formats + bin/python etc/release/scancode_rename_archives.py dist/ _py$PYTHON_APP_VERSION-$operating_system mkdir -p release/archives mv dist/* release/archives/ } -function build_archives_with_sources { - # Build scancode release archives (zip and tarbal) for one target python - # and operating_system, including all thirdparty source code. - # Arguments: - # python_version: only include wheels for this Python version. Example: 36 - # operating_system: only include wheels for this operating_system. One of windows, linux or mac - - python_version=$1 - operating_system=$2 +function build_source_archive { + # Build scancode source archive tarball including only thirdparty source + # code, no wheels (and for any python and operating_system) - echo "## RELEASE: Building archive with sources for Python $python_version on operating system: $operating_system" + echo " " + echo "## RELEASE: Building archive with sources" clean_build mkdir -p thirdparty - # collect thirdparty deps only for the subset for this Python/operating_system - bin/python etc/release/fetch_required_sources.py \ - --requirement=requirements.txt \ - --thirdparty-dir=thirdparty + # 1. collect thirdparty deps sources + bin/python etc/release/fetch_requirements.py \ + --requirements-file=requirements.txt \ + --thirdparty-dir=thirdparty \ + --with-about \ + --only-sources - # Create tarball and zip. + # 2. Create tarball # For now as a shortcut we use the Python setup.py sdist to create a tarball. 
# This is hackish and we should instead use our own archiving code that # would take a distutils manifest-like input - bin/python setup.py $QUIET $QUIET $QUIET sdist --formats=xztar - - bin/python etc/release/scancode_rename_archives.py dist/ $python_version $operating_system-sources + bin/python setup.py $QUIET sdist --formats=xztar + bin/python etc/release/scancode_rename_archives.py dist/ $src _sources mkdir -p release/archives mv dist/* release/archives/ } -# build all the combos -for python_version in $PYTHON_VERSIONS +# build the app combos on the current App Python +for operating_system in $OPERATING_SYSTEMS do - for operating_system in $OPERATING_SYSTEMS - do - build_archives $python_version $operating_system - build_archives_with_sources $python_version $operating_system - done - done + build_app_archive $operating_system +done + +build_source_archive -echo "## RELEASE: archive built and ready for publishing" -find release/archives -ls +echo " " +echo "## RELEASE: archives built and ready for test and publishing" +find release -ls ################################ # Run optional smoke tests ################################ +function run_app_smoke_tests { + # Call romp with an archive to run the smoke tests on the selected + # operating system remotely using Azure + # + # Arguments: + # operating_system: One of windows, linux or macos + + operating_system=$1 + + echo " " + echo "### Testing app on OS: $operating_system" + archive_to_test=$(ls -1 -R release/archives/ | grep "$PYTHON_APP_VERSION-$operating_system") + + echo "#### Testing $archive_to_test with Python $PYTHON_APP_DOT_VERSION on OS: $operating_system" + + # Check checksum of archive and script since it transits through file.io + sha_arch=$(sha256sum release/archives/$archive_to_test | awk '{ print $1 }') + sha_py=$(sha256sum etc/release/scancode_release_tests.py | awk '{ print $1 }') + + echo "#### Creating a temp archive that contains the tested archive: $archive_file and the test script" + archive_file=input.tar.gz + tar -czf $archive_file \ + -C release/archives $archive_to_test \ + -C ../../etc/release scancode_release_tests.py + + tar -tvf $archive_file + + echo "#### Remote test command: python scancode_release_tests.py app $archive_to_test sha_arch:$sha_arch sha_py:$sha_py" + + romp \ + --interpreter cpython \ + --architecture x86_64 \ + --check-period 5 \ + --version $PYTHON_APP_DOT_VERSION \ + --platform $operating_system \ + --archive-file $archive_file \ + --command "python scancode_release_tests.py app $archive_to_test $sha_arch $sha_py" + + echo "#### RELEASE TEST: Completed App tests of $archive_to_test with Python $PYTHON_APP_DOT_VERSION on OS: $operating_system" +} + + +function run_pypi_smoke_tests { + # Call romp with a PyPI archive to pip install and run the smoke tests on + # the selected Python and operating system remotely using Azure + # + # Arguments: + # dist: One of whl for wheel, or tar.gz for sdist + # python_dot_versions: run with these Python version as in "3.6 3.7" + # operating_systems: run on these operating_systems as in "windows linux macos" + + dist=$1 + python_dot_versions=$2 + operating_systems=$3 + + archive_to_test=$(ls -1 -R release/pypi | grep "$dist") + echo " " + echo "### Testing $archive_to_test with Pythons: $python_dot_versions on OSses: $operating_systems" + + # Check checksum of archive and script since it transits through file.io + sha_arch=$(sha256sum release/pypi/$archive_to_test | awk '{ print $1 }') + sha_py=$(sha256sum etc/release/scancode_release_tests.py | awk '{ 
print $1 }') + + echo "#### Creating a temp archive that contains the tested archive: $archive_file and the test script" + archive_file=input.tar.gz + tar -czf $archive_file \ + -C release/pypi $archive_to_test \ + -C ../../etc/release scancode_release_tests.py + + tar -tvf $archive_file + + echo "#### Remote test command: python scancode_release_tests.py pypi $archive_to_test $sha_arch $sha_py" + + # build options for Python versions and OS + ver_opts=" " + for pdv in $python_dot_versions + do + ver_opts="$ver_opts --version $pdv" + done + + os_opts=" " + for os in $operating_systems + do + os_opts="$os_opts --platform $os" + done + + romp \ + --interpreter cpython \ + --architecture x86_64 \ + --check-period 5 \ + $ver_opts \ + $os_opts \ + --archive-file $archive_file \ + --command "python scancode_release_tests.py pypi $archive_to_test $sha_arch $sha_py" + + echo "#### RELEASE TEST: Completed PyPI tests of $archive_to_test with Pythons: $python_dot_versions on OSses: $operating_systems" + +} if [ "$CLI_ARGS" == "--test" ]; then - ./scancode-release-tests.sh + for operating_system in $OPERATING_SYSTEMS + do + run_app_smoke_tests $operating_system + done + + run_pypi_smoke_tests .whl "$PYTHON_PYPI_TESTS_DOT_VERSIONS" "$OPERATING_SYSTEMS" + run_pypi_smoke_tests .tar.gz "$PYTHON_PYPI_TESTS_DOT_VERSIONS" "$OPERATING_SYSTEMS" + else - echo " RELEASE: !!!!NOT Testing..." + echo " !!!! RELEASE is NOT Tested..." fi @@ -197,24 +306,21 @@ fi # Publish release ################################ +echo " " echo "### RELEASE is ready for publishing ###" # Upload wheels and sdist to PyPI # They are found in release/pypi -# Create and upload release archives to GitHub -# They are found in release/archives - - -# also upload wheels and sdist to GitHub -# They are found in release/pypi +# Create and upload release and pypi archives to GitHub +# They are found in release/archives and in release/pypi ################################ # Announce release ################################ -# ping on chat and tweeter +# ping on chat and twitter # send email diff --git a/etc/release/scancode-test-pip-install-editable.sh b/etc/release/scancode-test-pip-install-editable.sh index 5643bb4e738..bf209395fa7 100755 --- a/etc/release/scancode-test-pip-install-editable.sh +++ b/etc/release/scancode-test-pip-install-editable.sh @@ -13,12 +13,14 @@ set -x echo "### Installing ScanCode release with pip editable###" -mkdir -p tmp/pipe -python -m venv tmp/pipe -tmp/pipe/bin/pip install -e . 
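Both smoke-test helpers above pass SHA256 digests of the tested archive and of ``scancode_release_tests.py`` on the romp command line because those files transit through file.io; the remote side recomputes and asserts them. On the Python side the digest computation boils down to::

    import hashlib

    def sha256_of(path):
        # same value that `sha256sum <path>` prints in the release script
        with open(path, 'rb') as f:
            return hashlib.sha256(f.read()).hexdigest()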
+mkdir -p tmp/pipedit +wget -O tmp/pipedit/virtualenv.pyz https://bootstrap.pypa.io/virtualenv/virtualenv.pyz +python3 tmp/pipedit/virtualenv.pyz tmp/pipedit + +tmp/pipedit/bin/pip install -e .[full] # perform a minimal check of the results for https://github.com/nexB/scancode-toolkit/issues/2201 -if [ `tmp/pipe/bin/scancode -i --json-pp - NOTICE | grep -c "scan_timings"` == 1 ]; then +if [ `tmp/pipedit/bin/scancode -i --json-pp - NOTICE | grep -c "scan_timings"` == 1 ]; then echo "Failed scan that includes timings" exit 1 else diff --git a/etc/release/scancode-test-pip-install.sh b/etc/release/scancode-test-pip-install.sh index 37e8940e244..68ee76da33a 100755 --- a/etc/release/scancode-test-pip-install.sh +++ b/etc/release/scancode-test-pip-install.sh @@ -11,14 +11,23 @@ set -e # un-comment to trace execution set -x +echo "## Build a wheel" +./configure --dev +./scancode --reindex-licenses +bin/python setup.py bdist_wheel + echo "### Installing ScanCode release with pip ###" -mkdir -p tmp/pip -python -m venv tmp/pip -tmp/pip/bin/pip install release/pypi/scancode_toolkit*.whl +mkdir -p tmp/pipinst +wget -O tmp/pipinst/virtualenv.pyz https://bootstrap.pypa.io/virtualenv/virtualenv.pyz +python3 tmp/pipinst/virtualenv.pyz tmp/pipinst + +archive_to_test=$(find dist -type f -name "*.whl") + +tmp/pipinst/bin/pip install release/pypi/$archive_to_test[full] # perform a minimal check of the results for https://github.com/nexB/scancode-toolkit/issues/2201 -if [ `tmp/pip/bin/scancode -i --json-pp - NOTICE | grep -c "scan_timings"` == 1 ]; then +if [ `tmp/pipinst/bin/scancode -i --json-pp - NOTICE | grep -c "scan_timings"` == 1 ]; then echo "Failed scan that includes timings" exit 1 else diff --git a/etc/release/scancode_release_tests.py b/etc/release/scancode_release_tests.py new file mode 100755 index 00000000000..1d9d872c1f7 --- /dev/null +++ b/etc/release/scancode_release_tests.py @@ -0,0 +1,112 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright (c) nexB Inc. and others. All rights reserved. +# ScanCode is a trademark of nexB Inc. +# SPDX-License-Identifier: Apache-2.0 +# See http://www.apache.org/licenses/LICENSE-2.0 for the license text. +# See https://github.com/nexB/scancode-toolkit for support or download. +# See https://aboutcode.org for more information about nexB OSS projects. +# + +import hashlib +import os +import shutil +import subprocess +import sys + +# TODO: also test a pip install with a find-links option to our new PyPI repo + + +def run_pypi_smoke_tests(pypi_archive): + """ + Run basic install and "smoke" scancode tests for a PyPI archive. + """ + # archive is either a wheel or an sdist as in + # scancode_toolkit-21.1.21-py3-none-any.whl or scancode-toolkit-21.1.21.tar.gz + run_command(['pip', 'install', pypi_archive + '[full]']) + + with open('some.file', 'w') as sf: + sf.write('license: gpl-2.0') + + run_command(['scancode', '-clipeu', '--json-pp', '-', 'some.file']) + + +def run_app_smoke_tests(app_archive): + """ + Run basic "smoke" scancode tests for the app release archive `app_archive` + """ + # Extract app archive which has this namin pattern: + # scancode-toolki-21.1.21_py36-linux.tar.xz + # or scancode-toolkit-21.1.21_py36-windows.zip + # We split the name on "_" to extract the laft hand side which is name of + # the root directory inside the archive e.g. 
"scancode-toolkit-21.1.21" + # where the archive gest extracted + extract_dir, _, _py_ver_ext = app_archive.partition('_') + shutil.unpack_archive(app_archive) + print() + print('cwd:', os.getcwd()) + + extract_loc = os.path.normpath(os.path.abspath(os.path.expanduser(extract_dir))) + print('extract_loc:', extract_loc) + for f in os.listdir(extract_loc): + print(' ', f) + print() + + os.chdir(extract_loc) + + # minimal tests: update when new scans are available + args = [ + os.path.join(extract_loc, 'scancode'), + '-clipeu', + '--classify', + '--verbose', + '--json', 'test_scan.json', + '--csv', 'test_scan.csv', + '--html', 'test_scan.html', + '--spdx-tv', 'test_scan.spdx', + '--json-pp', '-', + os.path.join(extract_loc, 'apache-2.0.LICENSE'), + ] + + print(f'Testing scancode release: {app_archive}') + run_command(args) + + +def run_command(args): + """ + Run a command list of `args` in a subprocess. Print the output. Exit on + error. + """ + cmd = ' '.join(args) + print() + print(f'Running command: {cmd}') + try: + on_windows = 'win32' in str(sys.platform).lower() + output = subprocess.check_output(args, encoding='utf-8', shell=on_windows) + print(f'Success to run command: {cmd}') + print(output) + + except subprocess.CalledProcessError as cpe: + print(f'Failure to run command: {cmd}') + print(cpe.output) + sys.exit(128) + + +if __name__ == '__main__': + args = sys.argv[1:] + action, archive, sha_arch, sha_py = args + + with open(archive, 'rb') as arch: + current_sha_arch = hashlib.sha256(arch.read()).hexdigest() + assert current_sha_arch == sha_arch + + with open(__file__, 'rb') as py: + current_sha_py = hashlib.sha256(py.read()).hexdigest() + assert current_sha_py == sha_py + + if action == 'pypi': + run_pypi_smoke_tests(archive) + else: + # action =='app': + run_app_smoke_tests(archive) diff --git a/etc/release/scancode_release_tests.sh b/etc/release/scancode_release_tests.sh deleted file mode 100755 index c3e786813aa..00000000000 --- a/etc/release/scancode_release_tests.sh +++ /dev/null @@ -1,84 +0,0 @@ -#!/bin/bash -# -# Copyright (c) nexB Inc. http://www.nexb.com/ - All rights reserved. -# - -################################################################################ -# ScanCode release test script -################################################################################ - -set -e - -# Un-comment to trace execution -#set -x - - -function run_test_scan { - # Run a test scan for a given release archive - # Note that for now, these tests run only on Linux - # Arguments: - # file_extension: the file name suffix to consider for testing - # extract_command: the command to use to extract an archive - - file_extension=$1 - extract_command=$2 - for archive in *$file_extension; - do - echo " RELEASE: Testing release archive: $archive ... 
" - $($extract_command $archive) - extract_dir=$(ls -d */) - cd "$extract_dir" - - # this is needed for the zip - chmod o+x scancode extractcode - - # minimal tests: update when new scans are available - cmd="./scancode --quiet -lcip apache-2.0.LICENSE --json test_scan.json" - echo "RUNNING TEST: $cmd" - $cmd - echo "TEST PASSED" - - cmd="./scancode --quiet -clipeu apache-2.0.LICENSE --json-pp test_scan.json" - echo "RUNNING TEST: $cmd" - $cmd - echo "TEST PASSED" - - cmd="./scancode --quiet -clipeu apache-2.0.LICENSE --csv test_scan.csv" - echo "RUNNING TEST: $cmd" - $cmd - echo "TEST PASSED" - - cmd="./scancode --quiet -clipeu apache-2.0.LICENSE --html test_scan.html" - echo "RUNNING TEST: $cmd" - $cmd - echo "TEST PASSED" - - cmd="./scancode --quiet -clipeu apache-2.0.LICENSE --spdx-tv test_scan.spdx" - echo "RUNNING TEST: $cmd" - $cmd - echo "TEST PASSED" - - mkdir -p foo - touch foo/bar - tar -czf foo.tgz foo - cmd="./extractcode --quiet foo.tgz" - echo "RUNNING TEST: $cmd" - $cmd - echo "TEST PASSED" - - # cleanup - cd .. - rm -rf "$extract_dir" - echo " RELEASE: Success" - done -} - - -cd release/archives -echo " RELEASE: Testing built archives for LINUX only..." -run_test_scan "linux.tar.xz" "tar -xf" -cd ../.. - - -set +e -set +x diff --git a/etc/release/sancode_rename_archives.py b/etc/release/scancode_rename_archives.py similarity index 62% rename from etc/release/sancode_rename_archives.py rename to etc/release/scancode_rename_archives.py index 34fefef5cb6..791e60f14b4 100755 --- a/etc/release/sancode_rename_archives.py +++ b/etc/release/scancode_rename_archives.py @@ -13,14 +13,14 @@ import sys -def rename_archives(target_directory, python_version, operating_system): +def rename_archives(target_directory, suffix): """ Rename all the archives found in the `target_directory` to include a - python_version and operating_system name in their file names. + distinguishing `suffix` in their file names (typically a python version and + operating system name). 
- For example, if `target_directory` contains "foo.tar.gz" initially, and the - python_version="36 and operating_system="macos", then "foo.tar.gz" will be - renamed to "foo-py36-macos.tar.gz" + For example, if `target_directory` contains "foo.tar.gz" initially, with the + suffix="_py36-macos", then "foo.tar.gz" will be renamed to "foo_py36-macos.tar.gz" """ supported_extensions = '.tar.gz', '.tar.bz2', '.zip', '.tar.xz', renameable = [ @@ -35,18 +35,17 @@ def rename_archives(target_directory, python_version, operating_system): name, extension, compression = old_name.rpartition('.tar') extension = extension + compression - pyos = f'py{python_version}-{operating_system}' - new_name = f'{name}-{pyos}{extension}' - # do not rename twice - if not name.endswith(pyos): - os.rename( - os.path.join(target_directory, old_name), - os.path.join(target_directory, new_name), - ) + if name.endswith(suffix): + return + + os.rename( + os.path.join(target_directory, old_name), + os.path.join(target_directory, f'{name}{suffix}{extension}'), + ) if __name__ == '__main__': args = sys.argv[1:] - target_directory, python_version, operating_system = args - rename_archives(target_directory, python_version, operating_system) + target_directory, suffix = args + rename_archives(target_directory=target_directory, suffix=suffix) diff --git a/etc/release/utils_thirdparty.py b/etc/release/utils_thirdparty.py index 5c87b1f91ca..2898a15d4b8 100755 --- a/etc/release/utils_thirdparty.py +++ b/etc/release/utils_thirdparty.py @@ -678,7 +678,7 @@ def save_if_modified(location, content): if existing_content == content: return False - print(f'Saving ABOUT (and NOTICE) files for: {self}') + if TRACE: print(f'Saving ABOUT (and NOTICE) files for: {self}') with open(location, 'w') as fo: fo.write(content) return True @@ -829,7 +829,7 @@ def fetch_license_files(self, dest_dir=THIRDPARTY_DIR): path_or_url=lic_url, as_text=True, ) - print(f'Fetched license from {lic_url}') + if TRACE: print(f'Fetched license from remote: {lic_url}') except: try: @@ -841,7 +841,7 @@ def fetch_license_files(self, dest_dir=THIRDPARTY_DIR): path_or_url=lic_url, as_text=True, ) - print(f'Fetched license from {lic_url}') + if TRACE: print(f'Fetched license from licensedb: {lic_url}') except: msg = f'No text for license {filename} in expression "{self.license_expression}" from {self}' @@ -1399,14 +1399,14 @@ def fetch_sdist(self, dest_dir=THIRDPARTY_DIR): """ if self.sdist: assert self.sdist.filename - print('Fetching source for package:', self.name, self.version) + if TRACE: print('Fetching source for package:', self.name, self.version) fetch_and_save_path_or_url( filename=self.sdist.filename, dest_dir=dest_dir, path_or_url=self.sdist.path_or_url, as_text=False, ) - print(' --> file:', self.sdist.filename) + if TRACE: print(' --> file:', self.sdist.filename) return self.sdist.filename else: print(f'Missing sdist for: {self.name}=={self.version}') @@ -1819,7 +1819,7 @@ def get_file_content(path_or_url, as_text=True): return get_local_file_content(path=path_or_url, as_text=as_text) elif path_or_url.startswith('https://'): - print(f'Fetching: {path_or_url}') + if TRACE: print(f'Fetching: {path_or_url}') _headers, content = get_remote_file_content(url=path_or_url, as_text=as_text) return content @@ -2142,16 +2142,14 @@ def find_links_from_release_url(links_url=REMOTE_LINKS_URL): URL that starts with the `prefix` string and ends with any of the extension in the list of `extensions` strings. Use the `base_url` to prefix the links. 
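The renamed ``scancode_rename_archives.py`` now takes a single free-form suffix, and ``run_app_smoke_tests()`` above recovers the extraction directory by splitting the archive name on the first ``_``. A small sketch of both sides of that naming convention (the helper below is hypothetical; only the behavior mirrors the scripts above)::

    def add_suffix(filename, suffix):
        # splice a suffix such as '_py36-linux' before a possibly double extension
        if filename.endswith(('.tar.gz', '.tar.bz2', '.tar.xz')):
            name, dot_tar, compression = filename.rpartition('.tar')
            extension = dot_tar + compression
        else:
            name, dot, ext = filename.rpartition('.')
            extension = dot + ext
        if name.endswith(suffix):
            # do not rename twice
            return filename
        return f'{name}{suffix}{extension}'

    archive = add_suffix('scancode-toolkit-21.2.9.tar.xz', '_py36-linux')
    # archive == 'scancode-toolkit-21.2.9_py36-linux.tar.xz'

    # the app smoke test later splits on the first '_' to find the root directory
    extract_dir, _, _py_ver_ext = archive.partition('_')
    # extract_dir == 'scancode-toolkit-21.2.9'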
""" - if TRACE: - print(f'Finding links for {links_url}') + if TRACE: print(f'Finding links for {links_url}') plinks_url = urllib.parse.urlparse(links_url) base_url = urllib.parse.SplitResult( plinks_url.scheme, plinks_url.netloc, '', '', '').geturl() - if TRACE: - print(f'Base URL {base_url}') + if TRACE: print(f'Base URL {base_url}') _headers, text = get_remote_file_content(links_url) links = [] @@ -2173,12 +2171,11 @@ def find_links_from_release_url(links_url=REMOTE_LINKS_URL): # relative link url = f'{links_url}/{link}' - if TRACE: - print(f'Adding URL: {url}') + if TRACE: print(f'Adding URL: {url}') links.append(url) - print(f'Found {len(links)} links at {links_url}') + if TRACE: print(f'Found {len(links)} links at {links_url}') return links @@ -2187,8 +2184,7 @@ def find_pypi_links(name, simple_url=PYPI_SIMPLE_URL): Return a list of download link URLs found in a PyPI simple index for package name. with the list of `extensions` strings. Use the `simple_url` PyPI url. """ - if TRACE: - print(f'Finding links for {simple_url}') + if TRACE: print(f'Finding links for {simple_url}') name = name and NameVer.normalize_name(name) simple_url = simple_url.strip('/') @@ -2466,7 +2462,7 @@ def call(args): Call args in a subprocess and display output on the fly. Return or raise stdout, stderr, returncode """ - print('Calling:', ' '.join(args)) + if TRACE: print('Calling:', ' '.join(args)) with subprocess.Popen( args, stdout=subprocess.PIPE, @@ -2478,7 +2474,7 @@ def call(args): line = process.stdout.readline() if not line and process.poll() is not None: break - print(line.rstrip(), flush=True) + if TRACE: print(line.rstrip(), flush=True) stdout, stderr = process.communicate() returncode = process.returncode @@ -2631,27 +2627,8 @@ def build_wheels_remotely_on_multiple_platforms( combinations and save them back in `dest_dir` and return a list of built wheel file names. 
""" - # these environment variable must be set before - has_envt = ( - os.environ.get('ROMP_BUILD_REQUEST_URL') and - os.environ.get('ROMP_DEFINITION_ID') and - os.environ.get('ROMP_PERSONAL_ACCESS_TOKEN') and - os.environ.get('ROMP_USERNAME') - ) - - if not has_envt: - raise Exception( - 'ROMP_BUILD_REQUEST_URL, ROMP_DEFINITION_ID, ' - 'ROMP_PERSONAL_ACCESS_TOKEN and ROMP_USERNAME ' - 'are required enironment variables.') - - python_dot_versions = ['.'.join(pv) for pv in python_versions] - python_cli_options = sorted(set(itertools.chain.from_iterable( - ('--version', ver) for ver in python_dot_versions))) - - os_cli_options = sorted(set(itertools.chain.from_iterable( - ('--platform' , plat) for plat in operating_systems))) - + check_romp_is_configured() + pyos_options = get_romp_pyos_options(python_versions, operating_systems) deps = '' if with_deps else '--no-deps' verbose = '--verbose' if verbose else '' @@ -2661,7 +2638,7 @@ def build_wheels_remotely_on_multiple_platforms( '--architecture', 'x86_64', '--check-period', '5', # in seconds - ] + python_cli_options + os_cli_options + [ + ] + pyos_options + [ '--artifact-paths', '*.whl', '--artifact', 'artifacts.tar.gz', @@ -2685,6 +2662,36 @@ def build_wheels_remotely_on_multiple_platforms( return wheel_filenames +def get_romp_pyos_options( + python_versions=PYTHON_VERSIONS, + operating_systems=PLATFORMS_BY_OS, +): + python_dot_versions = ['.'.join(pv) for pv in python_versions] + pyos_options = sorted(set(itertools.chain.from_iterable( + ('--version', ver) for ver in python_dot_versions))) + + pyos_options += sorted(set(itertools.chain.from_iterable( + ('--platform' , plat) for plat in operating_systems))) + + return pyos_options + + +def check_romp_is_configured(): + # these environment variable must be set before + has_envt = ( + os.environ.get('ROMP_BUILD_REQUEST_URL') and + os.environ.get('ROMP_DEFINITION_ID') and + os.environ.get('ROMP_PERSONAL_ACCESS_TOKEN') and + os.environ.get('ROMP_USERNAME') + ) + + if not has_envt: + raise Exception( + 'ROMP_BUILD_REQUEST_URL, ROMP_DEFINITION_ID, ' + 'ROMP_PERSONAL_ACCESS_TOKEN and ROMP_USERNAME ' + 'are required enironment variables.') + + def build_wheels_locally_if_pure_python( requirements_specifier, with_deps=False, @@ -2736,7 +2743,7 @@ def build_wheels_locally_if_pure_python( return all_pure, pure_built - # TODO: +# TODO: Use me def optimize_wheel(wheel_filename, dest_dir=THIRDPARTY_DIR): """ Optimize a wheel named `wheel_filename` in `dest_dir` such as renaming its @@ -2835,7 +2842,11 @@ def check_about(dest_dir=THIRDPARTY_DIR): print(cpe.output.decode('utf-8', errors='replace')) -def find_problems(dest_dir=THIRDPARTY_DIR): +def find_problems( + dest_dir=THIRDPARTY_DIR, + report_missing_sources=False, + report_missing_wheels=False, +): """ Print the problems found in `dest_dir`. 
""" @@ -2843,9 +2854,9 @@ def find_problems(dest_dir=THIRDPARTY_DIR): local_packages = get_local_packages(directory=dest_dir) for package in local_packages: - if not package.sdist: + if report_missing_sources and not package.sdist: print(f'{package.name}=={package.version}: Missing source distribution.') - if not package.wheels: + if report_missing_wheels and not package.wheels: print(f'{package.name}=={package.version}: Missing wheels.') for dist in package.get_distributions(): diff --git a/etc/thirdparty/virtualenv.pyz b/etc/thirdparty/virtualenv.pyz index 0a05f8ad16f..6a813f85cfe 100644 Binary files a/etc/thirdparty/virtualenv.pyz and b/etc/thirdparty/virtualenv.pyz differ diff --git a/etc/thirdparty/virtualenv.pyz.ABOUT b/etc/thirdparty/virtualenv.pyz.ABOUT index e1965ac2670..0389f152fcd 100644 --- a/etc/thirdparty/virtualenv.pyz.ABOUT +++ b/etc/thirdparty/virtualenv.pyz.ABOUT @@ -1,7 +1,7 @@ about_resource: virtualenv.pyz name: get-virtualenv -version: 20.4.0 -download_url: https://github.com/pypa/get-virtualenv/raw/20.4.0/public/virtualenv.pyz +version: 20.4.2 +download_url: https://github.com/pypa/get-virtualenv/raw/20.4.2/public/virtualenv.pyz description: virtualenv is a tool to create isolated Python environments. homepage_url: https://github.com/pypa/virtualenv license_expression: lgpl-2.1-plus AND (bsd-new OR apache-2.0) AND mit AND python AND bsd-new @@ -10,9 +10,7 @@ copyright: Copyright (c) The Python Software Foundation and others redistribute: yes attribute: yes track_changes: yes -checksum_md5: 03de3fe6299c78ff0fa2368837b7190c -checksum_sha1: a55f54d50a64860f2d5bc78325b3404b5d63ecbe -package_url: pkg:github/pypa/get-virtualenv@20.4.0#public/virtualenv.pyz +package_url: pkg:github/pypa/get-virtualenv@20.4.2#public/virtualenv.pyz licenses: - key: apache-2.0 name: Apache License 2.0 diff --git a/setup.cfg b/setup.cfg index dae6218ea91..cf6c50d30a5 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = scancode-toolkit -version = 21.1.21 +version = 21.2.9 license = Apache-2.0 AND CC-BY-4.0 AND LicenseRef-scancode-other-permissive AND LicenseRef-scancode-other-copyleft description = ScanCode is a tool to scan code for license, copyright, package and their documented dependencies and other interesting facts. @@ -34,10 +34,10 @@ keywords = licensing license_files = - NOTICE - cc0-1.0.LICENSE apache-2.0.LICENSE - README.rst + NOTICE + cc-by-4.0.LICENSE + AUTHORS.rst CODE_OF_CONDUCT.rst @@ -45,6 +45,7 @@ license_files = packages = find: package_dir = =src py_modules = + scancode_config include_package_data = True zip_safe = False diff --git a/src/packagedcode/npm.py b/src/packagedcode/npm.py index 2cc8c269a68..16be9fdeb46 100644 --- a/src/packagedcode/npm.py +++ b/src/packagedcode/npm.py @@ -36,11 +36,19 @@ logger = logging.getLogger(__name__) + +def logger_debug(*args): + pass + + if TRACE: import sys logging.basicConfig(stream=sys.stdout) logger.setLevel(logging.DEBUG) + def logger_debug(*args): + return print(' '.join(isinstance(a, str) and a or repr(a) for a in args)) + # TODO: add os and engines from package.json?? 
# add lock files and yarn details @@ -54,7 +62,7 @@ class NpmPackage(models.Package): 'package-lock.json', 'yarn.lock', ) - filetypes = ('.tgz',) + extensions = ('.tgz',) mimetypes = ('application/x-tar',) default_type = 'npm' default_primary_language = 'JavaScript' diff --git a/src/packagedcode/phpcomposer.py b/src/packagedcode/phpcomposer.py index 1dcb60ff462..1b94ae70d94 100644 --- a/src/packagedcode/phpcomposer.py +++ b/src/packagedcode/phpcomposer.py @@ -52,8 +52,9 @@ class PHPComposerPackage(models.Package): 'composer.json', 'composer.lock', ) - filetypes = ('.json', '.lock') + extensions = ('.json', '.lock',) mimetypes = ('application/json',) + default_type = 'composer' default_primary_language = 'PHP' default_web_baseurl = 'https://packagist.org' diff --git a/src/scancode_config.py b/src/scancode_config.py index 8ef5148df68..553cbb33afa 100644 --- a/src/scancode_config.py +++ b/src/scancode_config.py @@ -79,7 +79,7 @@ def _create_dir(location): except (DistributionNotFound, ImportError): # package is not installed or we do not have setutools/pkg_resources # on hand - __version__ = '21.1.21' + __version__ = '21.2.9' system_temp_dir = tempfile.gettempdir() scancode_src_dir = dirname(__file__) diff --git a/tests/packagedcode/data/plugin/help.txt b/tests/packagedcode/data/plugin/help.txt index a0680ae0726..270ce410c5e 100644 --- a/tests/packagedcode/data/plugin/help.txt +++ b/tests/packagedcode/data/plugin/help.txt @@ -69,7 +69,7 @@ Package: chrome Package: composer class: packagedcode.phpcomposer:PHPComposerPackage metafiles: composer.json, composer.lock - filetypes: .json, .lock + extensions: .json, .lock -------------------------------------------- Package: conda @@ -190,7 +190,7 @@ Package: msi Package: npm class: packagedcode.npm:NpmPackage metafiles: package.json, npm-shrinkwrap.json, package-lock.json, yarn.lock - filetypes: .tgz + extensions: .tgz -------------------------------------------- Package: nsis
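The ``filetypes`` to ``extensions`` rename above changes which class attribute declares the file name extensions a package handler recognizes. A hypothetical handler following the same pattern (this class is made up purely for illustration)::

    from packagedcode import models

    class ExamplePackage(models.Package):
        metafiles = ('example.json',)
        extensions = ('.example',)
        mimetypes = ('application/json',)
        default_type = 'example'
        default_primary_language = 'JavaScript'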