Skip to content

Commit

Permalink
[GR-47508] Add support for merging layout dir distributions from multiple platforms.
Browse files Browse the repository at this point in the history

PullRequest: mx/1667
  • Loading branch information
gilles-duboscq committed Aug 29, 2023
2 parents cd5ccfd + 9cc1da7 commit 33839d8
Show file tree
Hide file tree
Showing 3 changed files with 275 additions and 15 deletions.
177 changes: 171 additions & 6 deletions mx.py
Original file line number Diff line number Diff line change
Expand Up @@ -629,6 +629,8 @@ def __init__(self, parents=None):
"be stored in the parent directory of the repository containing the primary suite. This option "
"can also be configured using the MX_COMPDB environment variable. Use --compdb none to disable.")
self.add_argument('--arch', action='store', dest='arch', help='force use of the specified architecture')
self.add_argument('--multi-platform-layout-directories', action='store', help="Causes platform-dependent layout dir distribution to contain the union of the files from their declared platforms. "
"Can be set to 'all' or to a comma-separated list of platforms.")

if not is_windows():
# Time outs are (currently) implemented with Unix specific functionality
Expand Down Expand Up @@ -4450,7 +4452,7 @@ def _collect_clean_dependencies():
return _dependencies_opt_limit_to_suites(res)

if args.dependencies is not None:
deps = [dependency(mx_subst.string_substitutions.substitute(name)) for name in args.dependencies.split(',')]
deps = resolve_targets(args.dependencies.split(','))
else:
deps = _collect_clean_dependencies()

Expand Down Expand Up @@ -6449,6 +6451,7 @@ def make_archive(self):
self._install_source(source, output, destination, arc)
self._persist_layout()
self._persist_linky_state()
self._persist_resource_entries_state()

def getArchivableResults(self, use_relpath=True, single=False):
for (p, n) in super(LayoutDistribution, self).getArchivableResults(use_relpath, single):
Expand Down Expand Up @@ -6502,6 +6505,8 @@ def needsUpdate(self, newestInput):
return "LINKY_LAYOUT has changed"
if not self._check_resources_file_list():
return "fileListPurpose has changed"
if not self._check_resource_entries():
return "hashEntry or fileListEntry has changed"
return None

def _persist_layout(self):
Expand Down Expand Up @@ -6537,7 +6542,7 @@ def _check_persisted_layout(self):
return False

def _linky_state_file(self):
return join(self.suite.get_mx_output_dir(), 'linkyState', self.name)
return join(self.suite.get_mx_output_dir(self.platformDependent), 'linkyState', self.name)

def _persist_linky_state(self):
linky_state_file = self._linky_state_file()
Expand All @@ -6561,6 +6566,37 @@ def _check_linky_state(self):
saved_pattern = fp.read()
return saved_pattern == LayoutDistribution._linky.pattern

def _resource_entries_state_file(self):
return join(self.suite.get_mx_output_dir(self.platformDependent), 'resource_entries', self.name)

def _resource_entries_state(self):
if self.hashEntry is None and self.fileListEntry is None:
return None
return f"{self.hashEntry}\n{self.fileListEntry}"

def _persist_resource_entries_state(self):
    """Record the current hashEntry/fileListEntry state on disk for needsUpdate checks."""
    state_file = self._resource_entries_state_file()
    state = self._resource_entries_state()
    if state is not None:
        ensure_dir_exists(dirname(state_file))
        with open(state_file, 'w') as fp:
            fp.write(state)
    elif exists(state_file):
        # no entries configured any more: drop the stale record so a later
        # re-introduction of the entries is detected as a change
        os.unlink(state_file)

def _check_resource_entries(self):
state_file = self._resource_entries_state_file()
current_state = self._resource_entries_state()
if not exists(state_file):
return current_state is None
if current_state is None:
return False
with open(state_file) as fp:
saved_state = fp.read()
return saved_state == current_state


def find_single_source_location(self, source, fatal_if_missing=True, abort_on_multiple=False):
locations = self.find_source_location(source, fatal_if_missing=fatal_if_missing)
unique_locations = set(locations)
Expand Down Expand Up @@ -6665,16 +6701,90 @@ def make_archive(self):
os.makedirs(os.path.abspath(os.path.dirname(sentinel)), exist_ok=True)
with open(sentinel, 'w'):
pass
self._persist_platforms_state()

def needsUpdate(self, newestInput):
    """Extend the superclass staleness check with the persisted
    --multi-platform-layout-directories state."""
    parent_reason = super().needsUpdate(newestInput)
    if parent_reason:
        return parent_reason
    if self._check_platforms():
        return None
    return "--multi-platform-layout-directories changed"

def _platforms_state_file(self):
return join(self.suite.get_mx_output_dir(self.platformDependent), 'platforms', self.name)

def _platforms_state(self):
    """Canonical (sorted, de-duplicated) platform list from the command line,
    or None when the option is unset or this distribution is platform-independent."""
    platforms_opt = _opts.multi_platform_layout_directories
    if platforms_opt is None or not self.platformDependent:
        return None
    return ','.join(sorted(set(platforms_opt.split(','))))

def _persist_platforms_state(self):
    """Record the current --multi-platform-layout-directories value on disk."""
    state_file = self._platforms_state_file()
    state = self._platforms_state()
    if state is not None:
        ensure_dir_exists(dirname(state_file))
        with open(state_file, 'w') as fp:
            fp.write(state)
    elif exists(state_file):
        # option no longer in effect for this distribution: drop the stale record
        os.unlink(state_file)

def _check_platforms(self):
state_file = self._platforms_state_file()
current_state = self._platforms_state()
if not exists(state_file):
return current_state is None
if current_state is None:
return False
with open(state_file) as fp:
saved_state = fp.read()
return saved_state == current_state

def getArchivableResults(self, use_relpath=True, single=False):
    """Yield (file_path, archive_path) pairs for every file in the output directory.

    When --multi-platform-layout-directories is in effect and this distribution is
    platform-dependent, the files of the other requested platforms' output
    directories are merged in as well. A path present for several platforms must
    have identical contents on all of them (verified with a SHA-1 digest);
    otherwise the build is aborted.

    Fix: corrected the misspelling "platfrom" in the abort message.
    """
    if single:
        raise ValueError("{} only produces multiple output".format(self))
    output_dir = self.get_output()
    contents = {}
    for dirpath, _, filenames in os.walk(output_dir):
        for filename in filenames:
            file_path = join(dirpath, filename)
            archive_path = relpath(file_path, output_dir) if use_relpath else basename(file_path)
            contents[archive_path] = file_path
            yield file_path, archive_path
    if _opts.multi_platform_layout_directories and self.platformDependent:
        if _opts.multi_platform_layout_directories == 'all':
            # 'all' means: merge every platform this distribution declares
            requested_platforms = None
        else:
            requested_platforms = _opts.multi_platform_layout_directories.split(',')
        local_os_arch = f"{get_os()}-{get_arch()}"
        # the foreign output dirs are derived by substituting the platform
        # component of the local output dir, so it must contain it
        assert local_os_arch in output_dir
        hashes = {}
        def _hash(path):
            # cache digests: the same local file may be compared against several platforms
            if path not in hashes:
                hashes[path] = digest_of_file(path, 'sha1')
            return hashes[path]
        for platform in self.platforms:
            if requested_platforms is not None and platform not in requested_platforms:
                continue
            if local_os_arch == platform:
                # the local platform was already yielded above
                continue
            foreign_output = output_dir.replace(local_os_arch, platform)
            if not isdir(foreign_output):
                raise abort(f"Missing {platform} output directory for {self.name} ({foreign_output})")
            for dirpath, _, filenames in os.walk(foreign_output):
                for filename in filenames:
                    file_path = join(dirpath, filename)
                    archive_path = relpath(file_path, foreign_output) if use_relpath else basename(file_path)
                    if archive_path in contents:
                        if _hash(file_path) != _hash(contents[archive_path]):
                            raise abort(f"""File from alternative platform is located in the same path but has different contents:
- {contents[archive_path]}
- {file_path}""")
                    else:
                        contents[archive_path] = file_path
                        yield file_path, archive_path

def remoteExtension(self):
    """Extension used for the remotely deployed artifact (a sentinel file)."""
    return 'sentinel'
Expand Down Expand Up @@ -8592,7 +8702,7 @@ def _is_sane_name(m):
def create(src):
    """Return an extractor matching the archive type of `src`.

    Tarballs (.tar, .tar.gz, .tgz) get a TarExtractor, zip-format archives
    (.zip, .jar) a ZipExtractor; any other extension aborts.
    """
    # str.endswith accepts a tuple of suffixes: one call per format family
    if src.endswith((".tar", ".tar.gz", ".tgz")):
        return TarExtractor(src)
    if src.endswith((".zip", ".jar")):
        return ZipExtractor(src)
    abort("Don't know how to extract the archive: " + src)

Expand Down Expand Up @@ -14791,6 +14901,60 @@ def _resolve_ecj_jar(jdk, java_project_compliance, spec):
'from within the plugins/ directory of an Eclipse IDE installation.')
return ecj


_special_build_targets = {}


def register_special_build_target(name, target_enumerator, with_argument=False):
if name in _special_build_targets:
raise abort(f"Special build target {name} already registered")
_special_build_targets[name] = target_enumerator, with_argument


def _platform_dependent_layout_dir_distributions():
    """Yield every platform-dependent LayoutDirDistribution across all suites."""
    return (dist for dist in distributions(True)
            if isinstance(dist, LayoutDirDistribution) and dist.platformDependent)


def _maven_tag_distributions(tag):
    """Yield every distribution that is deployed to maven and matches `tag`."""
    return (dist for dist in distributions(True)
            if getattr(dist, 'maven', False) and _match_tags(dist, [tag]))


register_special_build_target('PLATFORM_DEPENDENT_LAYOUT_DIR_DISTRIBUTIONS', _platform_dependent_layout_dir_distributions)
register_special_build_target('MAVEN_TAG_DISTRIBUTIONS', _maven_tag_distributions, with_argument=True)


def resolve_targets(names):
    """Resolve build target names to a list of dependencies.

    Each name is first run through mx_subst string substitution. A name of the
    form '{special}' or '{special:arg}' is expanded through the registered
    special build targets (see register_special_build_target); anything else is
    looked up as a regular dependency.

    :param names: iterable of target name strings
    :return: list of resolved dependencies
    """
    targets = []
    for name in names:
        expanded_name = mx_subst.string_substitutions.substitute(name)
        # startswith/endswith (rather than [0]/[-1]) so an empty name cannot
        # raise IndexError; it falls through to the regular dependency lookup
        if expanded_name.startswith('{') and expanded_name.endswith('}'):
            special_target, sep, arg = expanded_name[1:-1].partition(':')
            if not sep:
                arg = None
            if special_target not in _special_build_targets:
                raise abort(f"Unknown special build target: {special_target}")
            target_enumerator, with_arg = _special_build_targets[special_target]
            if with_arg and arg is None:
                raise abort(f"Special build target {special_target} requires an argument: {{{special_target}:argument}}")
            if not with_arg and arg is not None:
                raise abort(f"Special build target {special_target} doesn't accept an argument")
            if arg is not None:
                targets.extend(target_enumerator(arg))
            else:
                targets.extend(target_enumerator())
        else:
            targets.append(dependency(expanded_name))
    return targets


def build(cmd_args, parser=None):
"""builds the artifacts of one or more dependencies"""
global _gmake_cmd
Expand Down Expand Up @@ -14875,12 +15039,12 @@ def build(cmd_args, parser=None):
if args.only is not None:
# N.B. This build will not respect any dependencies (including annotation processor dependencies)
onlyDeps = set(args.only.split(','))
roots = [dependency(mx_subst.string_substitutions.substitute(name)) for name in onlyDeps]
roots = resolve_targets(onlyDeps)
elif args.dependencies is not None:
if len(args.dependencies) == 0:
abort('The value of the --dependencies argument cannot be the empty string')
names = args.dependencies.split(',')
roots = [dependency(mx_subst.string_substitutions.substitute(name)) for name in names]
roots = resolve_targets(names)
else:
# This is the normal case for build (e.g. `mx build`) so be
# clear about JDKs being used ...
Expand Down Expand Up @@ -18326,6 +18490,7 @@ def list_commands(l):
import mx_fetchjdk # pylint: disable=unused-import
import mx_bisect # pylint: disable=unused-import
import mx_gc # pylint: disable=unused-import
import mx_multiplatform # pylint: disable=unused-import

from mx_unittest import unittest
from mx_jackpot import jackpot
Expand Down Expand Up @@ -18618,7 +18783,7 @@ def alarm_handler(signum, frame):
abort(1, killsig=signal.SIGINT)

# The version must be updated for every PR (checked in CI) and the comment should reflect the PR's issue
version = VersionSpec("6.44.3") # GR-48250, Fix: disable -Werror for javac if lint overrides for given project are "none"
version = VersionSpec("6.45.0") # multi-arch layout dirs

_mx_start_datetime = datetime.utcnow()
_last_timestamp = _mx_start_datetime
Expand Down
17 changes: 8 additions & 9 deletions mx_jardistribution.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@
import re
import pickle

from os.path import join, exists, basename, dirname, isdir, islink, realpath
from os.path import join, exists, basename, dirname, isdir, islink
from argparse import ArgumentTypeError
from stat import S_IMODE

Expand Down Expand Up @@ -875,14 +875,13 @@ def add_jar(self, dep, jar_path, is_sources_jar=False):
if self.versioned_meta_inf_re.match(arcname):
mx.warn(f"META-INF resources can not be versioned ({arcname} from {jar_path}). The resulting JAR will be invalid.")

def add_file(self, dep, base_dir, relpath, archivePrefix, arcnameCheck=None, includeServices=False):
def add_file(self, dep, filepath, relpath, archivePrefix, arcnameCheck=None, includeServices=False):
"""
Adds the contents of the file `base_dir`/`relpath` to `self.bin_archive.staging_dir`
Adds the contents of the file `filepath` to `self.bin_archive.staging_dir`
under the path formed by concatenating `archivePrefix` with `relpath`.
:param Dependency dep: the Dependency owning the file
"""
filepath = join(base_dir, relpath)
arcname = join(archivePrefix, relpath).replace(os.sep, '/')
assert arcname[-1] != '/'
if arcnameCheck is not None and not arcnameCheck(arcname):
Expand Down Expand Up @@ -1039,7 +1038,8 @@ def add_classes(archivePrefix, includeServices):
self.bin_archive.entries[dirEntry] = dirEntry
else:
relpath = join(reldir, f)
self.add_file(dep, outputDir, relpath, archivePrefix, arcnameCheck=overlay_check, includeServices=includeServices)
filepath = join(root, f)
self.add_file(dep, filepath, relpath, archivePrefix, arcnameCheck=overlay_check, includeServices=includeServices)

add_classes(archivePrefix, includeServices=True)
sourceDirs = p.source_dirs()
Expand All @@ -1066,12 +1066,11 @@ def add_classes(archivePrefix, includeServices):
outputDir = dep.output_dir()
for f in dep.getResults():
relpath = dep.get_relpath(f, outputDir)
self.add_file(dep, outputDir, relpath, archivePrefix)
self.add_file(dep, f, relpath, archivePrefix)
elif dep.isLayoutDirDistribution():
mx.logv('[' + original_path + ': adding contents of layout dir distribution ' + dep.name + ']')
output = realpath(dep.get_output())
for _, p in dep.getArchivableResults():
self.add_file(dep, output, p, '')
for file_path, arc_name in dep.getArchivableResults():
self.add_file(dep, file_path, arc_name, '')
elif dep.isClasspathDependency():
mx.logv('[' + original_path + ': adding classpath ' + dep.name + ']')
jarPath = dep.classpath_repr(resolve=True)
Expand Down
Loading

0 comments on commit 33839d8

Please sign in to comment.