diff --git a/CHANGELOG.md b/CHANGELOG.md index 05a05dd1..3bc242b2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,6 +13,32 @@ and this project adheres to [Semantic Versioning][semver]. ### Fixed + +## [0.31.2] - 10/16/2024 + +### Added + +- Added a function for exporting `keys-description.json` ([550]) +- Added support for cloning a new dependency when adding it to `dependencies.json` if it is not on disk ([550]) +- Clean up authentication repository if an error occurs while running a cli command ([550]) + +### Changed + +- Return a non-zero exit code with `sys.exit` when updater fails ([550]) +- Rework addition of a new role and target repositories. Use `custom.json` files ([550]) + + +### Fixed + +- Minor `conf init` and detection of the authentication repository fixes ([550]) +- Replace `info` logging calls with `notice` in API functions ([550]) +- Use `mirrors.json` urls when cloning dependencies ([551]) + + +[551]: https://github.com/openlawlibrary/taf/pull/551 +[550]: https://github.com/openlawlibrary/taf/pull/550 + + ## [0.31.1] - 10/03/2024 ### Added @@ -1278,7 +1304,8 @@ and this project adheres to [Semantic Versioning][semver]. [keepachangelog]: https://keepachangelog.com/en/1.0.0/ [semver]: https://semver.org/spec/v2.0.0.html -[unreleased]: https://github.com/openlawlibrary/taf/compare/v0.31.1...HEAD +[unreleased]: https://github.com/openlawlibrary/taf/compare/v0.31.2...HEAD +[0.31.2]: https://github.com/openlawlibrary/taf/compare/v0.31.1...v0.31.2 [0.31.1]: https://github.com/openlawlibrary/taf/compare/v0.31.0...v0.31.1 [0.31.0]: https://github.com/openlawlibrary/taf/compare/v0.30.2...0.31.0 [0.30.2]: https://github.com/openlawlibrary/taf/compare/v0.30.1...v0.30.2 diff --git a/setup.py b/setup.py index 1c3941d5..539c359c 100644 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ import sys PACKAGE_NAME = "taf" -VERSION = "0.31.1" +VERSION = "0.31.2" AUTHOR = "Open Law Library" AUTHOR_EMAIL = "info@openlawlib.org" DESCRIPTION = "Implementation of archival authentication" diff --git a/taf/api/conf.py b/taf/api/conf.py index 0820c061..371983b2 100644 --- a/taf/api/conf.py +++ b/taf/api/conf.py @@ -1,8 +1,10 @@ from shutil import Error, copytree +import shutil from typing import Optional from pathlib import Path from taf.api.keystore import generate_keys from taf.log import taf_logger +from taf.utils import read_input_dict def init( @@ -17,11 +19,11 @@ def init( taf_directory = Path(".taf") if taf_directory.exists() and taf_directory.is_dir(): - taf_logger.info(".taf directory already exists.") + taf_logger.log("NOTICE", ".taf directory already exists.") else: # Create the .taf directory taf_directory.mkdir(exist_ok=True) - taf_logger.info("Generated .taf directory") + taf_logger.log("NOTICE", "Generated .taf directory") # Create the config.toml file config_file_path = taf_directory / "config.toml" @@ -32,7 +34,15 @@ def init( keystore_directory.mkdir(exist_ok=True) # If any of these parameters exist you can assume the user wants to generate keys - if not keystore and not roles_key_infos: + + # check if keystore already exists + roles_key_infos_dict = read_input_dict(roles_key_infos) + keystore = ( + keystore or (roles_key_infos and roles_key_infos_dict.get("keystore")) or None + ) + should_generate_keys = False + keystore_path = Path(keystore) if keystore else None + if not keystore: # Prompt the user if they want to run the generate_keys function while True: use_keystore = ( @@ -43,9 +53,9 @@ def init( if use_keystore in ["y", "n"]: should_generate_keys = use_keystore == "y" break 
- if should_generate_keys or (keystore and not roles_key_infos): - # First check if the user already specified keystore - if not keystore: + + if should_generate_keys: + # First check if the user already specified keystore copy_keystore = ( input( "Do you want to load an existing keystore from another location? [y/N]: " @@ -61,32 +71,33 @@ def init( keystore_path = Path(keystore_input) if keystore_path.exists() and keystore_path.is_dir(): keystore = keystore_input # Assign the string path to the keystore variable + should_generate_keys = ( + False # no need to generate keys, they will be copied + ) break else: taf_logger.error( f"Provided keystore path {keystore} is invalid." ) - # Check if keystore is specified now. If so copy the keys - if keystore: - try: - copytree(keystore, keystore_directory, dirs_exist_ok=True) - taf_logger.info( - f"Copied keystore from {keystore} to {keystore_directory}" - ) - except FileNotFoundError: - taf_logger.error(f"Provided keystore path {keystore} not found.") - except Error as e: - taf_logger.error(f"Error occurred while copying keystore: {e}") + # Check if keystore is specified now. If so copy the keys + if keystore and keystore_path and keystore_path.is_dir(): + try: + copytree(keystore, keystore_directory, dirs_exist_ok=True) + taf_logger.log( + "NOTICE", f"Copied keystore from {keystore} to {keystore_directory}" + ) + except FileNotFoundError: + taf_logger.error(f"Provided keystore path {keystore} not found.") + except Error as e: + taf_logger.error(f"Error occurred while copying keystore: {e}") - # If there is no keystore path specified, ask for keys description and generate keys - elif not roles_key_infos: - roles_key_infos = input( - "Enter the path to the keys description JSON file (can be left empty): " - ).strip() - if not roles_key_infos: - roles_key_infos = "." 
- if roles_key_infos: - generate_keys(taf_directory, str(keystore_directory), roles_key_infos) - taf_logger.info( - f"Successfully generated keys inside the {keystore_directory} directory" + if should_generate_keys: + generate_keys(keystore_directory, roles_key_infos) + taf_logger.log( + "NOTICE", + f"Successfully generated keys inside the {keystore_directory} directory", ) + + if roles_key_infos is not None and Path(roles_key_infos).is_file(): + infos_config_path = (taf_directory / Path(roles_key_infos).name).absolute() + shutil.copy(str(roles_key_infos), str(infos_config_path)) diff --git a/taf/api/dependencies.py b/taf/api/dependencies.py index d1793c08..7308655b 100644 --- a/taf/api/dependencies.py +++ b/taf/api/dependencies.py @@ -17,6 +17,8 @@ from taf.exceptions import TAFError from taf.git import GitRepository from taf.log import taf_logger +from taf.updater.updater import OperationType, clone_repository +import taf.updater.updater as updater @log_on_start( @@ -38,6 +40,7 @@ def add_dependency( out_of_band_commit: str, keystore: str, dependency_path: Optional[str] = None, + dependency_url: Optional[str] = None, library_dir: Optional[str] = None, scheme: Optional[str] = DEFAULT_RSA_SIGNATURE_SCHEME, custom: Optional[Dict] = None, @@ -82,7 +85,12 @@ def add_dependency( auth_repo = AuthenticationRepository(path=path) if not auth_repo.is_git_repository_root: - print(f"{path} is not a git repository!") + taf_logger.error(f"{path} is not a git repository!") + return + + dependencies_json = repositoriesdb.load_dependencies_json(auth_repo) + if dependencies_json is not None and dependency_name in dependencies_json: + taf_logger.log("NOTICE", f"{dependency_name} already added") return if library_dir is None: library_dir = str(auth_repo.path.parent.parent) @@ -92,6 +100,26 @@ def add_dependency( else: dependency = GitRepository(library_dir, dependency_name) + if not dependency.is_git_repository and dependency_url is not None: + taf_logger.log( + "NOTICE", f"{dependency.path} does not exist. 
Cloning from {dependency_url}" + ) + config = updater.UpdateConfig( + operation=OperationType.CLONE, + url=dependency_url, + path=Path(library_dir, dependency_name), + library_dir=library_dir, + strict=False, + bare=False, + no_deps=False, + ) # type: ignore + try: + clone_repository(config) + dependency.default_branch = dependency._determine_default_branch() + except Exception as e: + taf_logger.error(f"Dependency clone failed due to error {e}.") + return + if dependency.is_git_repository: branch_name, out_of_band_commit = _determine_out_of_band_data( dependency, branch_name, out_of_band_commit, no_prompt @@ -124,9 +152,9 @@ def add_dependency( dependencies[dependency_name]["custom"] = custom # update content of repositories.json before updating targets metadata - Path(auth_repo.path, repositoriesdb.DEPENDENCIES_JSON_PATH).write_text( - json.dumps(dependencies_json, indent=4) - ) + dependencies_path = Path(auth_repo.path, repositoriesdb.DEPENDENCIES_JSON_PATH) + dependencies_path.parent.mkdir(exist_ok=True) + Path(dependencies_path).write_text(json.dumps(dependencies_json, indent=4)) removed_targets_data: Dict = {} added_targets_data: Dict = {repositoriesdb.DEPENDENCIES_JSON_NAME: {}} diff --git a/taf/api/keystore.py b/taf/api/keystore.py index 2b8ad55c..86267a31 100644 --- a/taf/api/keystore.py +++ b/taf/api/keystore.py @@ -1,5 +1,5 @@ from logging import INFO -from typing import Optional +from typing import Optional, Union from logdecorator import log_on_start, log_on_end from pathlib import Path from taf.models.types import RolesKeysData @@ -15,6 +15,7 @@ from taf.log import taf_logger from taf.models.types import RolesIterator from taf.models.converter import from_dict +from taf.exceptions import KeystoreError @log_on_start(INFO, "Generating '{key_path:s}'", logger=taf_logger) @@ -40,14 +41,21 @@ def _generate_rsa_key(key_path: str, password: str, bits: Optional[int] = None) Returns: None """ - if password: - generate_and_write_rsa_keypair(filepath=key_path, bits=bits, password=password) - else: - generate_and_write_unencrypted_rsa_keypair(filepath=key_path, bits=bits) + try: + if password: + generate_and_write_rsa_keypair( + filepath=key_path, bits=bits, password=password + ) + else: + generate_and_write_unencrypted_rsa_keypair(filepath=key_path, bits=bits) + taf_logger.log("NOTICE", f"Generated key {key_path}") + except Exception: + taf_logger.error(f"An error occurred while generating rsa key {key_path}") + raise KeystoreError(f"An error occurred while generating rsa key {key_path}") def generate_keys( - auth_repo_path: Path, keystore: Optional[str], roles_key_infos: str + keystore: Optional[Union[str, Path]], roles_key_infos: Optional[str] ) -> None: """ Generate public and private keys and writes them to disk. 
Names of keys correspond to names @@ -68,15 +76,20 @@ def generate_keys( Returns: None + + Raises: + KeystoreError if an error occurs while initializing the keystore directory or generating a key """ if keystore is None: - taf_directory = find_taf_directory(auth_repo_path) + taf_directory = find_taf_directory(Path()) if taf_directory: keystore = str(taf_directory / "keystore") else: keystore = "./keystore" + + taf_logger.log("NOTICE", f"Generating keys in {str(Path(keystore).absolute())}") roles_key_infos_dict, keystore, _ = _initialize_roles_and_keystore( - roles_key_infos, keystore + roles_key_infos, str(keystore) ) roles_keys_data = from_dict(roles_key_infos_dict, RolesKeysData) @@ -86,7 +99,7 @@ def generate_keys( key_name = get_key_name(role.name, key_num, role.number) if keystore is not None: password = input( - "Enter keystore password and press ENTER (can be left empty)" + f"Enter {key_name} keystore password and press ENTER (can be left empty)" ) key_path = str(Path(keystore, key_name)) _generate_rsa_key(key_path, password, role.length) diff --git a/taf/api/repository.py b/taf/api/repository.py index 71413906..2957d86d 100644 --- a/taf/api/repository.py +++ b/taf/api/repository.py @@ -20,6 +20,7 @@ from taf.exceptions import TAFError from taf.keys import load_sorted_keys_of_new_roles import taf.repositoriesdb as repositoriesdb +from taf.api.utils._conf import find_keystore from taf.utils import ensure_pre_push_hook from tuf.repository_tool import create_new_repository from taf.log import taf_logger @@ -65,6 +66,10 @@ def create_repository( if not _check_if_can_create_repository(auth_repo): return + if not keystore and auth_repo.path is not None: + keystore_path = find_keystore(auth_repo.path) + if keystore_path is not None: + keystore = str(keystore_path) roles_key_infos_dict, keystore, skip_prompt = _initialize_roles_and_keystore( roles_key_infos, keystore ) diff --git a/taf/api/roles.py b/taf/api/roles.py index f1d3cbd1..c66781fb 100644 --- a/taf/api/roles.py +++ b/taf/api/roles.py @@ -13,12 +13,13 @@ from taf.api.utils._git import check_if_clean, commit_and_push from taf.exceptions import KeystoreError, TAFError from taf.models.converter import from_dict -from taf.models.types import RolesIterator, TargetsRole +from taf.models.types import RolesIterator, TargetsRole, compare_roles_data from taf.repositoriesdb import REPOSITORIES_JSON_PATH from tuf.repository_tool import TARGETS_DIRECTORY_NAME import tuf.roledb import taf.repositoriesdb as repositoriesdb from taf.keys import ( + find_keystore, get_key_name, get_metadata_key_info, load_signing_keys, @@ -101,7 +102,7 @@ def add_role( existing_roles = auth_repo.get_all_targets_roles() existing_roles.extend(MAIN_ROLES) if role in existing_roles: - taf_logger.info("All roles already set up") + taf_logger.log("NOTICE", "All roles already set up") return targets_parent_role = TargetsRole() @@ -141,7 +142,7 @@ def add_role( commit_msg = git_commit_message("add-role", role=role) commit_and_push(auth_repo, commit_msg=commit_msg, push=push) else: - taf_logger.info("\nPlease commit manually\n") + taf_logger.log("NOTICE", "\nPlease commit manually\n") @log_on_start(DEBUG, "Adding new paths to role {delegated_role:s}", logger=taf_logger) @@ -185,10 +186,18 @@ def add_role_paths( """ if auth_repo is None: auth_repo = AuthenticationRepository(path=auth_path) + if not auth_repo.check_if_role_exists(delegated_role): + raise TAFError(f"Role {delegated_role} does not exist") + parent_role = auth_repo.find_delegated_roles_parent(delegated_role) 
parent_role_obj = _role_obj(parent_role, auth_repo) if isinstance(parent_role_obj, Targets): - parent_role_obj.add_paths(paths, delegated_role) + try: + parent_role_obj.add_paths(paths, delegated_role) + except tuf.exceptions.InvalidNameError: + raise TAFError( + "All delegated paths should be relative to targets directory." + ) _update_role(auth_repo, parent_role, keystore, prompt_for_keys=prompt_for_keys) if commit: update_snapshot_and_timestamp( @@ -199,7 +208,7 @@ def add_role_paths( ) commit_and_push(auth_repo, commit_msg=commit_msg, push=push) else: - taf_logger.info("\nPlease commit manually\n") + taf_logger.log("NOTICE", "\nPlease commit manually\n") else: taf_logger.error( f"Could not find parent role of role {delegated_role}. Check if its name was misspelled" @@ -216,7 +225,7 @@ def add_role_paths( reraise=True, ) @check_if_clean -def add_roles( +def add_multiple_roles( path: str, keystore: Optional[str] = None, roles_key_infos: Optional[str] = None, @@ -227,7 +236,8 @@ def add_roles( ) -> None: """ Add new target roles and sign all metadata files given information stored in roles_key_infos - dictionary or .json file + dictionary or .json file. + Arguments: path: Path to the authentication repository. @@ -246,44 +256,38 @@ def add_roles( """ auth_repo = AuthenticationRepository(path=path) - roles_key_infos_dict, keystore, _ = _initialize_roles_and_keystore( - roles_key_infos, keystore + roles_keys_data_new = _initialize_roles_and_keystore_for_existing_repo( + path, roles_key_infos, keystore ) + roles_data = auth_repo.generate_roles_description() + roles_keys_data_current = from_dict(roles_data, RolesKeysData) - roles_keys_data = from_dict(roles_key_infos_dict, RolesKeysData) - - new_roles = [] - existing_roles = auth_repo.get_all_targets_roles() - main_roles = ["root", "snapshot", "timestamp"] - existing_roles.extend(main_roles) - - new_roles = [ - role - for role in RolesIterator(roles_keys_data.roles.targets, skip_top_role=True) - if role.name not in existing_roles - ] + new_roles, _ = compare_roles_data(roles_keys_data_current, roles_keys_data_new) parent_roles_names = {role.parent.name for role in new_roles} if not len(new_roles): - taf_logger.info("All roles already set up") + taf_logger.log("NOTICE", "All roles already set up") return repository = auth_repo._repository + existing_roles = [ + role.name for role in RolesIterator(roles_keys_data_current.roles) + ] signing_keys, verification_keys = load_sorted_keys_of_new_roles( auth_repo=auth_repo, - roles=roles_keys_data.roles, + roles=roles_keys_data_new.roles, keystore=keystore, - yubikeys_data=roles_keys_data.yubikeys, + yubikeys_data=roles_keys_data_new.yubikeys, existing_roles=existing_roles, ) create_delegations( - roles_keys_data.roles.targets, + roles_keys_data_new.roles.targets, repository, verification_keys, signing_keys, - existing_roles, + existing_roles=existing_roles, ) for parent_role_name in parent_roles_names: _update_role( @@ -301,7 +305,7 @@ def add_roles( commit_msg = git_commit_message("add-roles", roles=", ".join(roles_names)) commit_and_push(auth_repo, commit_msg=commit_msg, push=push) else: - taf_logger.info("\nPlease commit manually\n") + taf_logger.log("NOTICE", "\nPlease commit manually\n") @log_on_start(DEBUG, "Adding new signing key to roles", logger=taf_logger) @@ -350,6 +354,13 @@ def add_signing_key( None """ auth_repo = AuthenticationRepository(path=path) + non_existant_roles = [] + for role in roles: + if not auth_repo.check_if_role_exists(role): + non_existant_roles.append(role) + if 
len(non_existant_roles): + raise TAFError(f"Role(s) {', '.join(non_existant_roles)} do not exist") + _, keystore, _ = _initialize_roles_and_keystore( roles_key_infos, keystore, enter_info=False ) @@ -366,7 +377,9 @@ def add_signing_key( parent_roles = set() for role in roles: if auth_repo.is_valid_metadata_key(role, pub_key_pem): - taf_logger.info(f"Key already registered as signing key of role {role}") + taf_logger.log( + "NOTICE", f"Key already registered as signing key of role {role}" + ) continue auth_repo.add_metadata_key(role, pub_key_pem, scheme) @@ -403,7 +416,7 @@ def add_signing_key( # ) commit_and_push(auth_repo, commit_msg=commit_msg, push=push) else: - taf_logger.info("\nPlease commit manually\n") + taf_logger.log("NOTICE", "\nPlease commit manually\n") # TODO this is probably outdated, the format of the outputted roles_key_infos @@ -449,8 +462,9 @@ def _print_roles_key_infos(infos_json_str): path = Path(roles_key_infos) path.parent.mkdir(parents=True, exist_ok=True) Path(roles_key_infos).write_text(infos_json_str) - taf_logger.info( - f"Configuration json written to {Path(roles_key_infos).absolute()}" + taf_logger.log( + "NOTICE", + f"Configuration json written to {Path(roles_key_infos).absolute()}", ) except Exception as e: taf_logger.error(e) @@ -541,6 +555,25 @@ def _read_val(input_type, name, param=None, required=False): pass +def _initialize_roles_and_keystore_for_existing_repo( + path: str, + roles_key_infos: Optional[str], + keystore: Optional[str], + enter_info: Optional[bool] = True, +) -> RolesKeysData: + roles_key_infos_dict = read_input_dict(roles_key_infos) + + if not roles_key_infos_dict and enter_info: + roles_key_infos_dict = _enter_roles_infos(None, roles_key_infos) + roles_keys_data = from_dict(roles_key_infos_dict, RolesKeysData) + keystore = keystore or roles_keys_data.keystore + if keystore is None and path is not None: + keystore_path = find_keystore(Path(path)) + if keystore_path: + roles_keys_data.keystore = str(keystore_path) + return roles_keys_data + + def _initialize_roles_and_keystore( roles_key_infos: Optional[str], keystore: Optional[str], @@ -562,6 +595,7 @@ def _initialize_roles_and_keystore( enter_info (optional): Indicates if the user should be asked to enter information about the roles and keys if not specified. Set to True by default. + Side Effects: None @@ -600,8 +634,9 @@ def _initialize_roles_and_keystore( or "./keystore" ) else: - taf_logger.info( - "Keys will be entered and then printed from the command line..." 
+ taf_logger.log( + "NOTICE", + "Keys will be entered and then printed from the command line...", ) if keystore is not None: @@ -676,6 +711,9 @@ def list_keys_of_role( """ auth_repo = AuthenticationRepository(path=path) key_ids = auth_repo.get_role_keys(role=role) + if key_ids is None: + raise TAFError(f"Role {role} does not exist") + return [ str(get_metadata_key_info(auth_repo.certs_dir, key_id)) for key_id in key_ids ] @@ -806,7 +844,7 @@ def remove_role( commit_msg = git_commit_message("remove-role", role=role) commit_and_push(auth_repo, commit_msg=commit_msg, push=push) else: - taf_logger.info("Please commit manually") + taf_logger.log("NOTICE", "Please commit manually") @log_on_start(DEBUG, "Removing delegated paths", logger=taf_logger) @@ -867,7 +905,7 @@ def remove_paths( ) commit_and_push(auth_repo, commit_msg=commit_msg, push=push) elif delegation_existed: - taf_logger.info("\nPlease commit manually\n") + taf_logger.log("NOTICE", "\nPlease commit manually\n") return delegation_existed @@ -909,7 +947,7 @@ def _remove_path_from_role_info( delegations_paths.remove(path_to_remove) delegation_exists = True else: - taf_logger.info(f"{path_to_remove} not in delegated paths") + taf_logger.log("NOTICE", f"{path_to_remove} not in delegated paths") break if delegation_exists: tuf.roledb.update_roleinfo( diff --git a/taf/api/targets.py b/taf/api/targets.py index 6c1906e2..51e77f71 100644 --- a/taf/api/targets.py +++ b/taf/api/targets.py @@ -81,9 +81,7 @@ def add_target_repo( None """ auth_repo = AuthenticationRepository(path=path) - if not auth_repo.is_git_repository_root: - taf_logger.error(f"{path} is not a git repository!") - return + if library_dir is None: library_dir = str(auth_repo.path.parent.parent) @@ -131,7 +129,7 @@ def add_target_repo( # delegated role paths are not specified for the top-level targets role # the targets role is responsible for signing all paths not # delegated to another target role - taf_logger.info("Role already exists") + taf_logger.log("NOTICE", "Role already exists") add_role_paths( paths=[target_name], delegated_role=role, @@ -149,8 +147,9 @@ def add_target_repo( repositories_json = {"repositories": {}} repositories = repositories_json["repositories"] if target_repo.name in repositories: - taf_logger.info( - f"{target_repo.name} already added to repositories.json. Overwriting" + taf_logger.log( + "NOTICE", + f"{target_repo.name} already added to repositories.json. 
Overwriting", ) repositories[target_repo.name] = {} if custom: @@ -188,7 +187,7 @@ def add_target_repo( commit_msg = git_commit_message("add-target", target_name=target_name) commit_and_push(auth_repo, commit_msg=commit_msg, push=push) else: - print("\nPlease commit manually\n") + taf_logger.log("NOTICE", "\nPlease commit manually\n") def export_targets_history( @@ -222,8 +221,9 @@ def export_targets_history( if repositoriesdb.get_repository(auth_repo, target_repo) is None: invalid_targets.append(target_repo) if len(invalid_targets): - print( - f"The following target repositories are not defined: {', '.join(invalid_targets)}" + taf_logger.log( + "NOTICE", + f"The following target repositories are not defined: {', '.join(invalid_targets)}", ) return elif target_repos is not None: @@ -239,9 +239,9 @@ def export_targets_history( output_path = output_path.with_suffix(".json") output_path.parent.mkdir(parents=True, exist_ok=True) output_path.write_text(commits_json) - print(f"Result written to {output_path}") + taf_logger.log("NOTICE", f"Result written to {output_path}") else: - print(commits_json) + taf_logger.log("NOTICE", commits_json) def list_targets( @@ -265,7 +265,7 @@ def list_targets( auth_repo = AuthenticationRepository(path=path) head_commit = auth_repo.head_commit_sha() if head_commit is None: - print("Repository is empty") + taf_logger.log("NOTICE", "Repository is empty") return top_commit = [head_commit] repositoriesdb.load_repositories(auth_repo) @@ -306,7 +306,7 @@ def list_targets( ) repo_output["something-to-commit"] = repo.something_to_commit() - print(json.dumps(output, indent=4)) + taf_logger.log("NOTICE", json.dumps(output, indent=4)) @log_on_start(INFO, "Signing target files", logger=taf_logger) @@ -375,7 +375,7 @@ def register_target_files( commit_msg = git_commit_message("update-targets") commit_and_push(auth_repo, commit_msg=commit_msg, push=push) elif not no_commit_warning: - print("\nPlease commit manually\n") + taf_logger.log("NOTICE", "\nPlease commit manually\n") return updated @@ -417,13 +417,13 @@ def remove_target_repo( removed_targets_data: Dict = {} added_targets_data: Dict = {} if not auth_repo.is_git_repository_root: - taf_logger.info(f"{path} is not a git repository!") + taf_logger.error(f"{path} is not a git repository!") return repositories_json = repositoriesdb.load_repositories_json(auth_repo) if repositories_json is not None: repositories = repositories_json["repositories"] if target_name not in repositories: - taf_logger.info(f"{target_name} not in repositories.json") + taf_logger.log("NOTICE", f"{target_name} not in repositories.json") else: repositories.pop(target_name) # update content of repositories.json before updating targets metadata @@ -439,7 +439,7 @@ def remove_target_repo( os.unlink(str(target_file_path)) removed_targets_data[target_name] = {} else: - taf_logger.info(f"{target_file_path} target file does not exist") + taf_logger.log("NOTICE", f"{target_file_path} target file does not exist") changes_committed = False if len(added_targets_data) or len(removed_targets_data): @@ -476,7 +476,7 @@ def remove_target_repo( ) changes_committed = True else: - taf_logger.info(f"{target_name} not among delegated paths") + taf_logger.log("NOTICE", f"{target_name} not among delegated paths") # update snapshot and timestamp calls write_all, so targets updates will be saved too if changes_committed and push: auth_repo.push() @@ -625,7 +625,7 @@ def update_and_sign_targets( nonexistent_target_types.append(target_type) continue if 
len(nonexistent_target_types): - taf_logger.info( + taf_logger.error( f"Target types {'.'.join(nonexistent_target_types)} not in repositories.json. Targets not updated" ) return @@ -635,7 +635,7 @@ def update_and_sign_targets( _save_top_commit_of_repo_to_target( Path(library_dir), target_name, repo_path, True ) - taf_logger.info(f"Updated {target_name} target file") + taf_logger.log("NOTICE", f"Updated {target_name} target file") register_target_files( repo_path, keystore, @@ -669,4 +669,4 @@ def _update_target_repos( target_repo_name = target_repo_path.name path = targets_dir / target_repo_name path.write_text(json.dumps(data, indent=4)) - taf_logger.info(f"Updated {path}") + taf_logger.log("NOTICE", f"Updated {path}") diff --git a/taf/api/utils/_conf.py b/taf/api/utils/_conf.py index f13130a3..0023e355 100644 --- a/taf/api/utils/_conf.py +++ b/taf/api/utils/_conf.py @@ -29,7 +29,7 @@ def find_taf_directory(auth_repo_path: Path) -> Optional[Path]: return taf_directory current_dir = current_dir.parent - taf_logger.info(f"No .taf directory found starting from {auth_repo_path.parent}") + taf_logger.debug(f"No .taf directory found starting from {auth_repo_path.parent}") return None @@ -39,7 +39,7 @@ def find_keystore(path: Path) -> Optional[Path]: if taf_directory: keystore_path = taf_directory / "keystore" if keystore_path.exists() and keystore_path.is_dir(): - taf_logger.info(f"Found keystore at {keystore_path}") + taf_logger.debug(f"Found keystore at {keystore_path}") return keystore_path - taf_logger.info(f"No keystore found starting from {path}") + taf_logger.debug(f"No keystore found starting from {path}") return None diff --git a/taf/api/utils/_git.py b/taf/api/utils/_git.py index 6367e839..f13c3ab5 100644 --- a/taf/api/utils/_git.py +++ b/taf/api/utils/_git.py @@ -34,7 +34,7 @@ def commit_and_push( if push and auth_repo.has_remote(): try: auth_repo.push() - taf_logger.info("Successfully pushed to remote") + taf_logger.log("NOTICE", "Successfully pushed to remote") new_commit_branch = auth_repo.default_branch if new_commit_branch: diff --git a/taf/api/utils/_metadata.py b/taf/api/utils/_metadata.py index 781f9327..e2aa8fcc 100644 --- a/taf/api/utils/_metadata.py +++ b/taf/api/utils/_metadata.py @@ -108,7 +108,7 @@ def update_target_metadata( ) if not roles_targets: - taf_logger.info("No target files to sign") + taf_logger.log("NOTICE", "No target files to sign") return False # update targets diff --git a/taf/api/utils/_roles.py b/taf/api/utils/_roles.py index 86eb06a4..434dcc08 100644 --- a/taf/api/utils/_roles.py +++ b/taf/api/utils/_roles.py @@ -50,33 +50,36 @@ def create_delegations( if existing_roles is None: existing_roles = [] skip_top_role = role.name == "targets" - for delegated_role in RolesIterator(role, skip_top_role=skip_top_role): - parent_role_obj = _role_obj(delegated_role.parent.name, repository) - if not isinstance(parent_role_obj, Targets): - raise TAFError( - f"Could not find parent targets role of role {delegated_role}" + try: + for delegated_role in RolesIterator(role, skip_top_role=skip_top_role): + parent_role_obj = _role_obj(delegated_role.parent.name, repository) + if not isinstance(parent_role_obj, Targets): + raise TAFError( + f"Could not find parent targets role of role {delegated_role}" + ) + if delegated_role.name in existing_roles: + taf_logger.log("NOTICE", f"Role {delegated_role.name} already set up.") + continue + paths = delegated_role.paths + roles_verification_keys = verification_keys[delegated_role.name] + # if yubikeys are used for signing, 
signing keys are not loaded + roles_signing_keys = signing_keys.get(delegated_role.name) + parent_role_obj.delegate( + delegated_role.name, + roles_verification_keys, + paths, + threshold=delegated_role.threshold, + terminating=delegated_role.terminating, + ) + setup_role( + delegated_role, + repository, + roles_verification_keys, + roles_signing_keys, + parent=parent_role_obj, ) - if delegated_role.name in existing_roles: - taf_logger.info(f"Role {delegated_role.name} already set up.") - continue - paths = delegated_role.paths - roles_verification_keys = verification_keys[delegated_role.name] - # if yubikeys are used for signing, signing keys are not loaded - roles_signing_keys = signing_keys.get(delegated_role.name) - parent_role_obj.delegate( - delegated_role.name, - roles_verification_keys, - paths, - threshold=delegated_role.threshold, - terminating=delegated_role.terminating, - ) - setup_role( - delegated_role, - repository, - roles_verification_keys, - roles_signing_keys, - parent=parent_role_obj, - ) + except tuf.exceptions.InvalidNameError: + raise TAFError("All delegated paths should be relative to targets directory.") @log_on_start(DEBUG, "Finding roles of key", logger=taf_logger) @@ -131,9 +134,13 @@ def setup_role( # as previous_keys # this means that TUF expects at least one of those signing keys to be present # we are setting up this role, so there should be no previous keys - tuf.roledb._roledb_dict[repository._repository_name][role.name][ - "previous_keyids" - ] = [] + + try: + tuf.roledb._roledb_dict[repository._repository_name][role.name][ + "previous_keyids" + ] = [] + except Exception: # temporary quick fix, this will all be reworked + tuf.roledb._roledb_dict[repository.name][role.name]["previous_keyids"] = [] def _role_obj( diff --git a/taf/auth_repo.py b/taf/auth_repo.py index 42342ad0..242991ba 100644 --- a/taf/auth_repo.py +++ b/taf/auth_repo.py @@ -185,6 +185,9 @@ def get_info_json( else: return self.get_json(head_commit, INFO_JSON_PATH) + def get_metadata_path(self, role): + return self.path / METADATA_DIRECTORY_NAME / f"{role}.json" + def is_commit_authenticated(self, target_name: str, commit: str) -> bool: """Checks if passed commit is ever authenticated for given target name.""" for auth_commit in self.all_commits_on_branch(reverse=False): diff --git a/taf/exceptions.py b/taf/exceptions.py index 3685047b..1ca1051f 100644 --- a/taf/exceptions.py +++ b/taf/exceptions.py @@ -207,5 +207,5 @@ class ValidationFailedError(TAFError): pass -class YubikeyError(Exception): +class YubikeyError(TAFError): pass diff --git a/taf/git.py b/taf/git.py index 5dcf5888..40927e98 100644 --- a/taf/git.py +++ b/taf/git.py @@ -171,11 +171,6 @@ def is_git_repository(self) -> bool: """Check if the given path is the root of a Git repository.""" # This is used when instantiating a PyGitRepository repo, so do not use # it here - # Check for a .git directory or file (submodule or bare repo) - if (self.path / ".git").exists(): - return True - - # Use 'git rev-parse --is-inside-work-tree' to check if it's a git repository try: result = self._git("rev-parse --is-inside-work-tree", reraise_error=True) if result == "true": @@ -190,18 +185,21 @@ def is_git_repository(self) -> bool: @property def is_git_repository_root(self) -> bool: - if not self.is_git_repository: - return False - repo = self.pygit_repo - if repo is None: + try: + if not self.is_git_repository: + return False + repo = self.pygit_repo + if repo is None: + return False + if self.is_bare_repository: + return repo.is_bare and 
Path(repo.path).resolve() == self.path.resolve() + else: + git_path = self.path / ".git" + return Path(repo.path).resolve() == git_path.resolve() and ( + git_path.is_dir() or git_path.is_file() + ) + except PygitError: return False - if self.is_bare_repository: - return repo.is_bare and Path(repo.path).resolve() == self.path.resolve() - else: - git_path = self.path / ".git" - return Path(repo.path).resolve() == git_path.resolve() and ( - git_path.is_dir() or git_path.is_file() - ) @property def initial_commit(self) -> str: diff --git a/taf/keys.py b/taf/keys.py index e74c5ba8..d766c92c 100644 --- a/taf/keys.py +++ b/taf/keys.py @@ -383,7 +383,10 @@ def _setup_yubikey_roles_keys( for key_id in yubikey_ids: # Check the present value from the yubikeys dictionary - if not users_yubikeys_details[key_id].present: + if ( + key_id in users_yubikeys_details + and not users_yubikeys_details[key_id].present + ): continue public_key_text = None diff --git a/taf/models/types.py b/taf/models/types.py index 3fc8378e..bb9c9eaa 100644 --- a/taf/models/types.py +++ b/taf/models/types.py @@ -182,3 +182,26 @@ def _dfs_delegations(role, skip_top_role=False): for role in roles: yield from _dfs_delegations(role, self.skip_top_role) + + +def compare_roles_data(old_data: RolesKeysData, new_data: RolesKeysData): + added_roles = [] + removed_roles = [] + current_roles = { + role.name: role + for role in RolesIterator(old_data.roles.targets, skip_top_role=True) + } + new_roles = { + role.name: role + for role in RolesIterator(new_data.roles.targets, skip_top_role=True) + } + + for role_name, role in current_roles.items(): + if role_name not in new_roles: + removed_roles.append(role) + + for role_name, role in new_roles.items(): + if role_name not in current_roles: + added_roles.append(role) + + return added_roles, removed_roles diff --git a/taf/repositoriesdb.py b/taf/repositoriesdb.py index b8e0627f..15b5a899 100644 --- a/taf/repositoriesdb.py +++ b/taf/repositoriesdb.py @@ -467,14 +467,15 @@ def get_deduplicated_repositories( auth_repo: AuthenticationRepository, commits: Optional[List[str]] = None, excluded_target_globs: Optional[List[str]] = None, + library_dir: Optional[str] = None, ) -> Dict[str, GitRepository]: return _get_deduplicated_target_or_auth_repositories( - auth_repo, commits, False, excluded_target_globs + auth_repo, commits, False, excluded_target_globs, library_dir ) def _get_deduplicated_target_or_auth_repositories( - auth_repo, commits, load_auth=False, excluded_target_globs=None + auth_repo, commits, load_auth=False, excluded_target_globs=None, library_dir=None ): if commits is None: commits = [auth_repo.head_commit_sha()] @@ -499,6 +500,7 @@ def _get_deduplicated_target_or_auth_repositories( auth_repo=auth_repo, commits=commits, excluded_target_globs=excluded_target_globs, + library_dir=library_dir, ) } diff --git a/taf/repository_tool.py b/taf/repository_tool.py index e79d2543..be69cf9e 100644 --- a/taf/repository_tool.py +++ b/taf/repository_tool.py @@ -3,6 +3,7 @@ import operator import os import shutil +from cryptography.hazmat.primitives import serialization from fnmatch import fnmatch from functools import partial, reduce from pathlib import Path @@ -37,7 +38,12 @@ KeystoreError, ) from taf.git import GitRepository -from taf.utils import normalize_file_line_endings, on_rm_error, get_file_details +from taf.utils import ( + default_backend, + normalize_file_line_endings, + on_rm_error, + get_file_details, +) try: import taf.yubikey as yk @@ -332,7 +338,10 @@ def _role_obj(self, role): return 
self._repository.timestamp elif role == "root": return self._repository.root - return self._repository.targets(role) + try: + return self._repository.targets(role) + except tuf.exceptions.UnknownRoleError: + return def _try_load_metadata_key(self, role, key): """Check if given key can be used to sign given role and load it. @@ -606,6 +615,10 @@ def get_role_from_target_paths(self, target_paths): return common_role.pop() + def check_if_role_exists(self, role_name): + role = self._role_obj(role_name) + return role is not None + def check_roles_expiration_dates( self, interval=None, start_date=None, excluded_roles=None ): @@ -767,6 +780,68 @@ def find_associated_roles_of_key(self, public_key): roles.extend(self.find_keys_roles([public_key], check_threshold=False)) return roles + def get_key_length_and_scheme_from_metadata(self, parent_role, keyid): + try: + metadata = json.loads( + Path( + self.path, METADATA_DIRECTORY_NAME, f"{parent_role}.json" + ).read_text() + ) + metadata = metadata["signed"] + if "delegations" in metadata: + metadata = metadata["delegations"] + scheme = metadata["keys"][keyid]["scheme"] + pub_key_pem = metadata["keys"][keyid]["keyval"]["public"] + pub_key = serialization.load_pem_public_key( + pub_key_pem.encode(), backend=default_backend() + ) + return pub_key, scheme + except Exception: + return None, None + + def generate_roles_description(self) -> Dict: + roles_description = {} + + def _get_delegations(role_name): + delegations_info = {} + delegations = self.get_delegations_info(role_name) + if len(delegations): + for role_info in delegations.get("roles"): + delegations_info[role_info["name"]] = { + "threshold": role_info["threshold"], + "number": len(role_info["keyids"]), + "paths": role_info["paths"], + "terminating": role_info["terminating"], + } + pub_key, scheme = self.get_key_length_and_scheme_from_metadata( + role_name, role_info["keyids"][0] + ) + delegations_info[role_info["name"]]["scheme"] = scheme + delegations_info[role_info["name"]]["length"] = pub_key.key_size + inner_roles_data = _get_delegations(role_info["name"]) + if len(inner_roles_data): + delegations_info[role_info["name"]][ + "delegations" + ] = inner_roles_data + return delegations_info + + for role_name in MAIN_ROLES: + role_obj = self._role_obj(role_name) + roles_description[role_name] = { + "threshold": role_obj.threshold, + "number": len(role_obj.keys), + } + pub_key, scheme = self.get_key_length_and_scheme_from_metadata( + "root", role_obj.keys[0] + ) + roles_description[role_name]["scheme"] = scheme + roles_description[role_name]["length"] = pub_key.key_size + if role_name == "targets": + delegations_info = _get_delegations(role_name) + if len(delegations_info): + roles_description[role_name]["delegations"] = delegations_info + return {"roles": roles_description} + def get_all_targets_roles(self): """ Return a list containing names of all target roles diff --git a/taf/repository_utils.py b/taf/repository_utils.py index f9b077a4..aa2db948 100644 --- a/taf/repository_utils.py +++ b/taf/repository_utils.py @@ -5,6 +5,7 @@ from pathlib import Path from taf.exceptions import GitError, InvalidRepositoryError from taf.auth_repo import AuthenticationRepository +from taf.git import GitRepository from taf.utils import taf_logger @@ -26,9 +27,15 @@ def try_load_repository(repo_path: Path) -> bool: return False path = Path(path).resolve() + if not path.is_dir(): + raise InvalidRepositoryError(f"Directory {path} does not exist") # First, try to load the repository from the given path if 
try_load_repository(path): - return path + # find the git repository's root + current_path = path + while not GitRepository(path=current_path).is_git_repository_root: + current_path = current_path.parent + return current_path # Search subdirectories taf_logger.debug( @@ -37,15 +44,6 @@ def try_load_repository(repo_path: Path) -> bool: for subdir in path.iterdir(): if subdir.is_dir() and try_load_repository(subdir): return subdir - # Search parent directories - taf_logger.debug( - f"Current directory {path} is not a valid authentication repository. Searching parent directories..." - ) - current_path = path - while current_path != current_path.parent: - current_path = current_path.parent - if try_load_repository(current_path): - return current_path raise InvalidRepositoryError( f"Could not find a valid authentication repository in {path} or any of its subdirectories or parent directories." diff --git a/taf/tests/test_api/test_roles.py b/taf/tests/test_api/test_roles.py index c77695f8..e8d3c4d7 100644 --- a/taf/tests/test_api/test_roles.py +++ b/taf/tests/test_api/test_roles.py @@ -5,7 +5,7 @@ from taf.api.roles import ( add_role, add_role_paths, - add_roles, + add_multiple_roles, add_signing_key, list_keys_of_role, remove_paths, @@ -122,7 +122,7 @@ def test_add_multiple_roles( ): auth_repo = AuthenticationRepository(path=auth_repo_path) initial_commits_num = len(auth_repo.list_commits()) - add_roles( + add_multiple_roles( path=str(auth_repo_path), keystore=roles_keystore, roles_key_infos=with_delegations_no_yubikeys_path, diff --git a/taf/tests/test_updater/update_utils.py b/taf/tests/test_updater/update_utils.py index e864fb77..a4ef131f 100644 --- a/taf/tests/test_updater/update_utils.py +++ b/taf/tests/test_updater/update_utils.py @@ -126,7 +126,7 @@ def clone_repositories( shutil.rmtree(client_test_root) config = UpdateConfig( operation=OperationType.CLONE, - url=str(origin_auth_repo.path), + remote_url=str(origin_auth_repo.path), update_from_filesystem=True, path=None, library_dir=str(clients_dir), @@ -235,7 +235,7 @@ def update_and_check_commit_shas( config = UpdateConfig( operation=operation, - url=str(origin_auth_repo.path), + remote_url=str(origin_auth_repo.path), update_from_filesystem=True, path=str(clients_auth_repo_path) if auth_repo_name_exists else None, library_dir=str(clients_dir), @@ -299,7 +299,7 @@ def update_invalid_repos_and_check_if_repos_exist( config = UpdateConfig( operation=operation, - url=str(origin_auth_repo.path), + remote_url=str(origin_auth_repo.path), update_from_filesystem=True, path=str(clients_auth_repo_path) if auth_repo_name_exists else None, library_dir=str(clients_dir), diff --git a/taf/tools/cli/__init__.py b/taf/tools/cli/__init__.py index 3f40221d..e9a4bff6 100644 --- a/taf/tools/cli/__init__.py +++ b/taf/tools/cli/__init__.py @@ -1,30 +1,66 @@ +import shutil +import sys import click - from functools import partial, wraps from logging import ERROR from logdecorator import log_on_error -from taf.exceptions import TAFError +from taf.exceptions import InvalidRepositoryError, TAFError from taf.log import taf_logger -from taf.utils import is_run_from_python_executable +from taf.repository_utils import find_valid_repository +from taf.git import GitRepository +from taf.utils import is_run_from_python_executable, on_rm_error -def catch_cli_exception(func=None, *, handle, print_error=False): +def catch_cli_exception(func=None, *, handle=TAFError, print_error=False, remove_dir_on_error=False): if not func: - return partial(catch_cli_exception, handle=handle, 
print_error=print_error) + return partial( + catch_cli_exception, + handle=handle, + print_error=print_error, + remove_dir_on_error=remove_dir_on_error + ) + + handle = tuple(handle) if isinstance(handle, (list, tuple)) else (handle,) @wraps(func) def wrapper(*args, **kwargs): + successful = False try: - return func(*args, **kwargs) + result = func(*args, **kwargs) + successful = True + return result except handle as e: if print_error: click.echo(e) except Exception as e: if is_run_from_python_executable(): click.echo(f"An error occurred: {e}") + sys.exit(1) else: raise e + finally: + if not successful and "path" in kwargs: + path = kwargs["path"] + repo = GitRepository(path=path) + if repo.is_git_repository: + repo.clean_and_reset() + if remove_dir_on_error: + shutil.rmtree(path, onerror=on_rm_error) + + return wrapper + + +def find_repository(func): + @wraps(func) + def wrapper(*args, **kwargs): + try: + if "path" in kwargs: + kwargs["path"] = find_valid_repository(kwargs["path"]) + return func(*args, **kwargs) + except InvalidRepositoryError as e: + click.echo(f"An error occurred: {e}") + sys.exit(1) return wrapper diff --git a/taf/tools/dependencies/__init__.py b/taf/tools/dependencies/__init__.py index 39e165ab..e51af218 100644 --- a/taf/tools/dependencies/__init__.py +++ b/taf/tools/dependencies/__init__.py @@ -1,8 +1,7 @@ import click from taf.api.dependencies import add_dependency, remove_dependency from taf.exceptions import TAFError -from taf.repository_utils import find_valid_repository -from taf.tools.cli import catch_cli_exception, process_custom_command_line_args +from taf.tools.cli import catch_cli_exception, find_repository, process_custom_command_line_args def add_dependency_command(): @@ -38,23 +37,25 @@ def add_dependency_command(): library root directory as the authentication repository, in a directory whose name corresponds to its name. If dependency's parent authentication repository's path is `E:\\examples\\root\\namespace\\auth`, and the dependency's namespace prefixed name is `namespace1\\auth`, the target's path will be set to `E:\\examples\\root\\namespace1\\auth`.""") + @find_repository @catch_cli_exception(handle=TAFError) @click.argument("dependency_name") @click.option("--path", default=".", help="Authentication repository's location. 
If not specified, set to the current directory") @click.option("--branch-name", default=None, help="Name of the branch which contains the out-of-band commit") + @click.option("--dependency-url", default=None, help="URL from which the dependency should be cloned if not already on disk") @click.option("--out-of-band-commit", default=None, help="Out-of-band commit SHA") @click.option("--dependency-path", default=None, help="Dependency's filesystem path") @click.option("--keystore", default=None, help="Location of the keystore files") @click.option("--prompt-for-keys", is_flag=True, default=False, help="Whether to ask the user to enter their key if not located inside the keystore directory") @click.option("--no-commit", is_flag=True, default=False, help="Indicates that the changes should not be committed automatically") @click.pass_context - def add(ctx, dependency_name, path, branch_name, out_of_band_commit, dependency_path, keystore, prompt_for_keys, no_commit): - path = find_valid_repository(path) + def add(ctx, dependency_name, path, branch_name, dependency_url, out_of_band_commit, dependency_path, keystore, prompt_for_keys, no_commit): custom = process_custom_command_line_args(ctx) add_dependency( path=path, dependency_name=dependency_name, branch_name=branch_name, + dependency_url=dependency_url, out_of_band_commit=out_of_band_commit, dependency_path=dependency_path, keystore=keystore, @@ -76,6 +77,7 @@ def remove_dependency_command(): `taf dependencies remove dependency-name --keystore keystore-path` if inside an authentication repository""") + @find_repository @catch_cli_exception(handle=TAFError) @click.argument("dependency-name") @click.option("--path", default=".", help="Authentication repository's location. If not specified, set to the current directory") @@ -83,7 +85,6 @@ def remove_dependency_command(): @click.option("--prompt-for-keys", is_flag=True, default=False, help="Whether to ask the user to enter their key if not located inside the keystore directory") @click.option("--no-commit", is_flag=True, default=False, help="Indicates that the changes should not be committed automatically") def remove(dependency_name, path, keystore, prompt_for_keys, no_commit): - path = find_valid_repository(path) remove_dependency( path=path, dependency_name=dependency_name, diff --git a/taf/tools/keystore/__init__.py b/taf/tools/keystore/__init__.py index 50a68f68..880ef966 100644 --- a/taf/tools/keystore/__init__.py +++ b/taf/tools/keystore/__init__.py @@ -1,6 +1,7 @@ -from pathlib import Path import click from taf.api.keystore import generate_keys +from taf.exceptions import KeystoreError +from taf.tools.cli import catch_cli_exception def generate_keys_command(): @@ -31,11 +32,11 @@ def generate_keys_command(): Default number of keys and threshold are 1, length 3072 and password is an empty string. If keystore location is specified through the keystore input parameter and not listed in keys-description dictionary, keys will be saved to ./keystore""") + @catch_cli_exception(handle=KeystoreError) @click.option("--keystore", default=None, help="Location of the keystore directory. 
Can be specified in keys-description dictionary") @click.option("--keys-description", help="A dictionary containing information about the keys or a path to a json file which stores the needed information") def generate(keystore, keys_description): - auth_repo_path = Path.cwd() - generate_keys(auth_repo_path,keystore, keys_description) + generate_keys(keystore, keys_description) return generate diff --git a/taf/tools/metadata/__init__.py b/taf/tools/metadata/__init__.py index 9db16f2b..8f851caf 100644 --- a/taf/tools/metadata/__init__.py +++ b/taf/tools/metadata/__init__.py @@ -1,9 +1,8 @@ import click from taf.api.metadata import update_metadata_expiration_date, check_expiration_dates from taf.constants import DEFAULT_RSA_SIGNATURE_SCHEME -from taf.exceptions import SigningError -from taf.repository_utils import find_valid_repository -from taf.tools.cli import catch_cli_exception +from taf.exceptions import TAFError +from taf.tools.cli import catch_cli_exception, find_repository from taf.utils import ISO_DATE_PARAM_TYPE as ISO_DATE import datetime @@ -25,11 +24,11 @@ def check_expiration_dates_command(): timestamp will expire on 2022-07-22 snapshot will expire on 2022-07-28 root will expire on 2022-08-19""") + @find_repository @click.option("--path", default=".", help="Authentication repository's location. If not specified, set to the current directory") @click.option("--interval", default=30, type=int, help="Number of days added to the start date") @click.option("--start-date", default=datetime.datetime.now(), help="Date to which expiration interval is added", type=ISO_DATE) def checking_expiration_dates(path, interval, start_date): - path = find_valid_repository(path) check_expiration_dates(path=path, interval=interval, start_date=start_date) return checking_expiration_dates @@ -49,7 +48,8 @@ def update_expiration_dates_command(): sign the file using a yubikey. If targets or other delegated role is updated, automatically sign snapshot and timestamp.""") - @catch_cli_exception(handle=SigningError) + @find_repository + @catch_cli_exception(handle=TAFError) @click.option("--path", default=".", help="Authentication repository's location. 
If not specified, set to the current directory")
     @click.option("--role", multiple=True, help="A list of roles which expiration date should get updated")
     @click.option("--interval", default=None, type=int, help="Number of days added to the start date")
     @click.option("--keystore", default=None, help="Location of the keystore files")
     @click.option("--scheme", default=DEFAULT_RSA_SIGNATURE_SCHEME, help="A signature scheme used for signing")
     @click.option("--start-date", default=datetime.datetime.now(), help="Date to which expiration interval is added", type=ISO_DATE)
     @click.option("--no-commit", is_flag=True, default=False, help="Indicates that the changes should not be committed automatically")
     @click.option("--prompt-for-keys", is_flag=True, default=False, help="Whether to ask the user to enter their key if not located inside the keystore directory")
     def update_expiration_dates(path, role, interval, keystore, scheme, start_date, no_commit, prompt_for_keys):
-        path = find_valid_repository(path)
         if not len(role):
             print("Specify at least one role")
             return
diff --git a/taf/tools/repo/__init__.py b/taf/tools/repo/__init__.py
index d4faa3f0..1267ba7f 100644
--- a/taf/tools/repo/__init__.py
+++ b/taf/tools/repo/__init__.py
@@ -1,3 +1,4 @@
+import sys
 import click
 import json
@@ -5,9 +6,8 @@ from taf.api.repository import create_repository, taf_status
 from taf.auth_repo import AuthenticationRepository
 from taf.exceptions import TAFError, UpdateFailedError
-from taf.log import initialize_logger_handlers
-from taf.repository_utils import find_valid_repository
-from taf.tools.cli import catch_cli_exception
+from taf.log import initialize_logger_handlers, taf_logger
+from taf.tools.cli import catch_cli_exception, find_repository
 from taf.updater.types.update import UpdateType
 from taf.updater.updater import OperationType, UpdateConfig, clone_repository, update_repository, validate_repository
@@ -22,6 +22,36 @@ def common_update_options(f):
     return f
+def _call_updater(config, format_output):
+    """
+    A helper function which calls clone_repository or update_repository
+    """
+    try:
+        if config.operation == OperationType.CLONE:
+            updater_output = clone_repository(config)
+        else:
+            updater_output = update_repository(config)
+
+        successful = updater_output["event"] == "event/succeeded"
+        if format_output:
+            if successful:
+                print(json.dumps({'updateSuccessful': successful}))
+            else:
+                error = updater_output.get("error_msg", "")
+                print(json.dumps({'updateSuccessful': successful, "error": error}))
+
+        if not successful:
+            sys.exit(1)
+    except Exception as e:
+        if format_output:
+            error_data = {"updateSuccessful": False, "error": str(e)}
+            taf_logger.error(json.dumps(error_data))
+            sys.exit(1)
+        else:
+            taf_logger.error(str(e))
+            sys.exit(1)
+
+
 def start_profiling():
     import cProfile
     import atexit
@@ -82,7 +112,7 @@ def create_repo_command():
     If the test flag is set, a special target file will be created. This means that when calling the updater,
     it'll be necessary to use the --authenticate-test-repo flag.
""") - @catch_cli_exception(handle=TAFError) + @catch_cli_exception(handle=TAFError, remove_dir_on_error=True) @click.argument("path", type=click.Path(exists=False, file_okay=False, dir_okay=True, writable=True)) @click.option("--keys-description", help="A dictionary containing information about the " "keys or a path to a json file which stores the needed information") @@ -145,13 +175,16 @@ def clone_repo_command(): @click.option("--bare", is_flag=True, default=False, help="Clone repositories as bare repositories") @click.option("--no-deps", is_flag=True, default=False, help="Optionally disables updating of dependencies") @click.option("--upstream/--no-upstream", default=False, help="Skips comparison with remote repositories upstream") - def clone(path, url, library_dir, from_fs, expected_repo_type, scripts_root_dir, profile, format_output, exclude_target, strict, bare, upstream, no_deps): + @click.option("-v", "--verbosity", count=True, help="Displays varied levels of logging information based on verbosity level") + def clone(path, url, library_dir, from_fs, expected_repo_type, scripts_root_dir, profile, format_output, exclude_target, strict, bare, upstream, no_deps, verbosity): + settings.VERBOSITY = verbosity + initialize_logger_handlers() if profile: start_profiling() config = UpdateConfig( operation=OperationType.CLONE, - url=url, + remote_url=url, path=path, library_dir=library_dir, update_from_filesystem=from_fs, @@ -164,16 +197,8 @@ def clone(path, url, library_dir, from_fs, expected_repo_type, scripts_root_dir, no_deps=no_deps, ) - try: - clone_repository(config) - if format_output: - print(json.dumps({'updateSuccessful': True})) - except Exception as e: - if format_output: - error_data = {'updateSuccessful': False, 'error': str(e)} - print(json.dumps(error_data)) - else: - raise e + _call_updater(config, format_output) + return clone @@ -209,6 +234,7 @@ def update_repo_command(): --strict, which will raise errors during the update if any warnings are found. By default, --strict is disabled. """) + @find_repository @catch_cli_exception(handle=UpdateFailedError) @common_update_options @click.option("--path", default=".", help="Authentication repository's location. If not specified, set to the current directory") @@ -221,7 +247,6 @@ def update(path, library_dir, expected_repo_type, scripts_root_dir, profile, for settings.VERBOSITY = verbosity initialize_logger_handlers() - path = find_valid_repository(path) if profile: start_profiling() @@ -238,16 +263,7 @@ def update(path, library_dir, expected_repo_type, scripts_root_dir, profile, for no_deps=no_deps, ) - try: - update_repository(config) - if format_output: - print(json.dumps({'updateSuccessful': True})) - except Exception as e: - if format_output: - error_data = {'updateSuccessful': False, 'error': str(e)} - print(json.dumps(error_data)) - else: - raise e + _call_updater(config, format_output) return update @@ -260,6 +276,8 @@ def validate_repo_command(): Validation can be in strict or no-strict mode. Strict mode is set by specifying --strict, which will raise errors during validate if any/all warnings are found. By default, --strict is disabled. """) + @find_repository + @catch_cli_exception(handle=UpdateFailedError) @click.option("--path", default=".", help="Authentication repository's location. If not specified, set to the current directory") @click.option("--library-dir", default=None, help="Directory where target repositories and, " "optionally, authentication repository are located. 
If omitted it is " @@ -277,7 +295,6 @@ def validate_repo_command(): def validate(path, library_dir, from_commit, from_latest, exclude_target, strict, no_targets, no_deps, verbosity): settings.VERBOSITY = verbosity initialize_logger_handlers() - path = find_valid_repository(path) auth_repo = AuthenticationRepository(path=path) bare = auth_repo.is_bare_repository if from_latest: @@ -287,10 +304,12 @@ def validate(path, library_dir, from_commit, from_latest, exclude_target, strict def latest_commit_and_branch_command(): - @click.command(help="Fetch and print the last validated commit hash and the default branch.") + @click.command(help="""Fetch and print the last validated commit hash and the default branch. + Integrated into the pre-push git hook""") + @find_repository + @catch_cli_exception @click.option("--path", default=".", help="Authentication repository's location. If not specified, set to the current directory") def latest_commit_and_branch(path): - path = find_valid_repository(path) auth_repo = AuthenticationRepository(path=path) last_validated_commit = auth_repo.last_validated_commit or "" default_branch = auth_repo.default_branch @@ -300,10 +319,11 @@ def latest_commit_and_branch(path): def status_command(): @click.command(help="Prints the whole state of the library, including authentication repositories and its dependencies.") + @find_repository + @catch_cli_exception @click.option("--path", default=".", help="Authentication repository's location. If not specified, set to the current directory") @click.option("--library-dir", default=None, help="Path to the library's root directory. Determined based on the authentication repository's path if not provided.") def status(path, library_dir): - path = find_valid_repository(path) try: taf_status(path, library_dir) except TAFError as e: diff --git a/taf/tools/roles/__init__.py b/taf/tools/roles/__init__.py index 979b9de7..24de96c1 100644 --- a/taf/tools/roles/__init__.py +++ b/taf/tools/roles/__init__.py @@ -1,34 +1,69 @@ +import json +from pathlib import Path +import sys import click -from taf.api.roles import add_role, add_roles, list_keys_of_role, remove_role, add_signing_key +from taf.api.roles import add_multiple_roles, add_role, list_keys_of_role, add_signing_key, remove_role from taf.constants import DEFAULT_RSA_SIGNATURE_SCHEME from taf.exceptions import TAFError -from taf.repository_utils import find_valid_repository -from taf.tools.cli import catch_cli_exception +from taf.auth_repo import AuthenticationRepository +from taf.log import taf_logger +from taf.tools.cli import catch_cli_exception, find_repository from taf.api.roles import add_role_paths def add_role_command(): - @click.command(help="""Add a new delegated target role, specifying which paths are delegated to the new role. - Its parent role, number of signing keys and signatures threshold can also be defined. - Update and sign all metadata files and commit. - """) + @click.command(help=""" + Add a new delegated target role. Allows optional specification of the role's properties through a JSON configuration file. + If the configuration file is not provided or specific properties are omitted, default values are used. + Only a list of one or more delegated paths has to be provided. + + Configuration file (JSON) can specify: + - 'parent_role' (string): The parent role under which the new role will be delegated. Default is 'targets'. + - 'delegated_path' (array of strings): Paths to be delegated to the new role. Must specify at least one if using a config file. 
+ - 'keys_number' (integer): Number of signing keys. Default is 1. + - 'threshold' (integer): Number of keys required to sign. Default is 1. + - 'yubikey' (boolean): Whether to use a YubiKey for signing. Default is false. + - 'scheme' (string): Signature scheme, e.g., 'rsa-pkcs1v15-sha256'. Default is 'rsa-pkcs1v15-sha256'. + + Example JSON structure: + { + "parent_role": "targets", + "delegated_path": ["/delegated_path_inside_targets1", "/delegated_path_inside_targets2"], + "keys_number": 1, + "threshold": 1, + "yubikey": true, + "scheme": "rsa-pkcs1v15-sha256" + } + """) + @find_repository @catch_cli_exception(handle=TAFError) @click.argument("role") + @click.option("--config-file", type=click.Path(exists=True), help="Path to the JSON configuration file.") @click.option("--path", default=".", help="Authentication repository's location. If not specified, set to the current directory") - @click.option("--parent-role", default="targets", help="Parent targets role of this role. Defaults to targets") - @click.option("--delegated-path", multiple=True, help="Paths associated with the delegated role") @click.option("--keystore", default=None, help="Location of the keystore files") - @click.option("--keys-number", default=1, help="Number of signing keys. Defaults to 1") - @click.option("--threshold", default=1, help="An integer number of keys of that role whose signatures are required in order to consider a file as being properly signed by that role") - @click.option("--yubikey", is_flag=True, default=None, help="A flag determining if the new role should be signed using a Yubikey") - @click.option("--scheme", default=DEFAULT_RSA_SIGNATURE_SCHEME, help="A signature scheme used for signing") @click.option("--no-commit", is_flag=True, default=False, help="Indicates that the changes should not be committed automatically") @click.option("--prompt-for-keys", is_flag=True, default=False, help="Whether to ask the user to enter their key if not located inside the keystore directory") - def add(role, path, parent_role, delegated_path, keystore, keys_number, threshold, yubikey, scheme, no_commit, prompt_for_keys): - if not path: - print("Specify at least one path") - delegated_path = find_valid_repository(delegated_path) + def add(role, config_file, path, keystore, no_commit, prompt_for_keys): + + config_data = {} + if config_file: + try: + config_data = json.loads(Path(config_file).read_text()) + except json.JSONDecodeError: + click.echo("Invalid JSON provided. Please check your input.", err=True) + sys.exit(1) + + delegated_path = config_data.get("delegated_path", []) + if not delegated_path: + taf_logger.log("NOTICE", "Specify at least one delegated path through a configuration file.") + return + + parent_role = config_data.get("parent_role", "targets") + keys_number = config_data.get("keys_number", 1) + threshold = config_data.get("threshold", 1) + yubikey = config_data.get("yubikey", False) + scheme = config_data.get("scheme", DEFAULT_RSA_SIGNATURE_SCHEME) add_role( path=path, @@ -46,19 +81,46 @@ def add(role, path, parent_role, delegated_path, keystore, keys_number, threshol return add -def add_multiple_roles_command(): - @click.command(help="""Add one or more target roles. Information about the roles - can be provided through a dictionary - either specified directly or contained - by a .json file whose path is specified when calling this command. 
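As a usage note for the reworked `add` command above: only `delegated_path` is mandatory in the configuration file, while every other property falls back to the documented default. The sketch below writes such a file; the file name and the `taf roles add` spelling in the comment are assumptions based on how the command is registered later in this diff (`name='add'`).

```python
import json
from pathlib import Path

# Omitted properties fall back to the defaults documented above:
# parent_role="targets", keys_number=1, threshold=1, yubikey=False,
# scheme="rsa-pkcs1v15-sha256".
config = {
    "delegated_path": ["/delegated_path_inside_targets1"],
    "keys_number": 2,
    "threshold": 1,
}
Path("role-config.json").write_text(json.dumps(config, indent=4))

# Illustrative invocation (adjust group/command names to the actual CLI):
#   taf roles add new-role --config-file role-config.json --path <auth-repo-path>
```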
This allows - definition of: - total number of keys per role - threshold of signatures per role - should keys of a role be on Yubikeys or should keystore files be used - scheme (the default scheme is rsa-pkcs1v15-sha256) - keystore path, if not specified via keystore option +def export_roles_description_command(): + @click.command(help=""" + Export roles-description.json file based on the current state of the authentication repository's metadata. If no output file is specified, the description is printed to the console. + """) + @find_repository + @catch_cli_exception(handle=TAFError) + @click.option("--path", default=".", help="Authentication repository's location. If not specified, set to the current directory") + @click.option("--output", default=None, help="Output file path") + @click.option("--keystore", default=None, help="Location of the keystore files") + def export_roles_description(path, output, keystore): + auth_repo = AuthenticationRepository(path=path) + roles_description = auth_repo.generate_roles_description() + if keystore: + roles_description["keystore"] = keystore + if not output: + taf_logger.log("NOTICE", json.dumps(roles_description, indent=True)) + else: + output = Path(output) + output.parent.mkdir(exist_ok=True, parents=True) + output.write_text(json.dumps(roles_description, indent=True)) + + return export_roles_description + + +def add_multiple_command(): + @click.command(help="""Adds new roles based on the provided keys-description file by + comparing it with the current state of the repository. + + The current state can be exported using taf roles export-description and then + edited manually to add new roles. + + For each role, the following can be defined: - Total number of keys per role. - Threshold of required signatures per role. - Use of Yubikeys or keystore files for storing keys. - Signature scheme, with the default being 'rsa-pkcs1v15-sha256'. - Keystore path, if not specified via the keystore option. \b - For example: + Example of a JSON configuration: { "roles": { "root": { @@ -77,6 +139,7 @@ def add_multiple_roles_command(): "keystore": "keystore_path" } """) + @find_repository + @catch_cli_exception(handle=TAFError) + @click.option("--path", default=".", help="Authentication repository's location.
If not specified, set to the current directory") @@ -107,7 +170,6 @@ def add_role_paths_command(): @click.option("--no-commit", is_flag=True, default=False, help="Indicates that the changes should not be committed automatically") @click.option("--prompt-for-keys", is_flag=True, default=False, help="Whether to ask the user to enter their key if not located inside the keystore directory") def adding_role_paths(role, path, delegated_path, keystore, no_commit, prompt_for_keys): - path = find_valid_repository(path) if not delegated_path: print("Specify at least one path") return @@ -124,12 +186,16 @@ def adding_role_paths(role, path, delegated_path, keystore, no_commit, prompt_fo return adding_role_paths +# commenting out this command since its execution leads to an invalid state +# this is a TUF bug (or, more precisely, one caused by using a newer version of the updater with the old repository_tool) +# it will be addressed when we transition to the metadata API def remove_role_command(): @click.command(help="""Remove a delegated target role, and, optionally, its targets (depending on the remove-targets parameter). If targets should also be deleted, target files are removed and their corresponding entries are removed from repositories.json. If targets should not get removed, the target files are signed using the removed role's parent role """) + @find_repository @catch_cli_exception(handle=TAFError) @click.argument("role") @click.option("--path", default=".", help="Authentication repository's location. If not specified, set to the current directory") @@ -139,7 +205,6 @@ def remove_role_command(): @click.option("--no-commit", is_flag=True, default=False, help="Indicates that the changes should not be committed automatically") @click.option("--prompt-for-keys", is_flag=True, default=False, help="Whether to ask the user to enter their key if not located inside the keystore directory") def remove(role, path, keystore, scheme, remove_targets, no_commit, prompt_for_keys): - path = find_valid_repository(path) remove_role( path=path, role=role, @@ -162,6 +227,7 @@ def add_signing_key_command(): necessary to specify its path as the pub_key parameter's value. If this option is not used when calling this command, the key can be directly entered later. """) + @find_repository @catch_cli_exception(handle=TAFError) @click.option("--path", default=".", help="Authentication repository's location. If not specified, set to the current directory") @click.option("--role", multiple=True, help="A list of roles to whose list of signing keys the new key should be added") @@ -172,7 +238,6 @@ def add_signing_key_command(): @click.option("--no-commit", is_flag=True, default=False, help="Indicates that the changes should not be committed automatically") @click.option("--prompt-for-keys", is_flag=True, default=False, help="Whether to ask the user to enter their key if not located inside the keystore directory") def adding_signing_key(path, role, pub_key_path, keystore, keys_description, scheme, no_commit, prompt_for_keys): - path = find_valid_repository(path) if not role: print("Specify at least one role") return @@ -195,11 +260,11 @@ def list_keys_command(): List all keys of the specified role. If certs directory exists and contains certificates exported from YubiKeys, include additional information read from these certificates, like name or organization. """) + @find_repository @catch_cli_exception(handle=TAFError) @click.argument("role") @click.option("--path", default=".", help="Authentication repository's location.
If not specified, set to the current directory") def list_keys(role, path): - path = find_valid_repository(path) key_infos = list_keys_of_role( path=path, role=role, @@ -211,8 +276,9 @@ def list_keys(role, path): def attach_to_group(group): group.add_command(add_role_command(), name='add') - group.add_command(add_multiple_roles_command(), name='add-multiple') + group.add_command(add_multiple_command(), name='add-multiple') group.add_command(add_role_paths_command(), name='add-role-paths') - group.add_command(remove_role_command(), name='remove') + # group.add_command(remove_role_command(), name='remove') group.add_command(add_signing_key_command(), name='add-signing-key') group.add_command(list_keys_command(), name='list-keys') + group.add_command(export_roles_description_command(), name="export-description") diff --git a/taf/tools/targets/__init__.py b/taf/tools/targets/__init__.py index c6e88470..f9eebb3a 100644 --- a/taf/tools/targets/__init__.py +++ b/taf/tools/targets/__init__.py @@ -1,3 +1,6 @@ +import json +from pathlib import Path +import sys import click from taf.api.targets import ( list_targets, @@ -10,8 +13,8 @@ ) from taf.constants import DEFAULT_RSA_SIGNATURE_SCHEME from taf.exceptions import TAFError -from taf.repository_utils import find_valid_repository -from taf.tools.cli import catch_cli_exception, process_custom_command_line_args +from taf.tools.cli import catch_cli_exception, find_repository +from taf.log import taf_logger def add_repo_command(): @@ -21,18 +24,17 @@ def add_repo_command(): ), help="""Add a new repository by adding it to repositories.json, creating a delegation (if targets is not its signing role) and adding and signing initial target files if the repository is found on the filesystem. All additional information that should be saved as the repository's custom content in `repositories.json` - is specified by providing additional options. If the signing role does not exist, it will be created. + is specified by providing a json file containing this data E.g. - `taf targets add-repo --path auth-path --target-name namespace1/repo --serve latest --role role1` - - or - - `taf targets add-repo --target-name namespace1/repo --serve latest --role role1` + { + "custom-prop1": "custom-val1", + "custom-prop2": "custom-val2" + } if directly inside the authentication repository. - In this case, serve: latest will be added to the custom part of the target repository's entry in + In this case, custom-prop1 and custom-prop2 will be added to the custom part of the target repository's entry in repositories.json. If the repository does not exist, it is sufficient to provide its namespace prefixed name @@ -41,6 +43,7 @@ def add_repo_command(): in a directory whose name corresponds to its name. If authentication repository's path is `E:\\examples\\root\\namespace\\auth`, and the target's namespace prefixed name is `namespace1\\repo1`, the target's path will be set to `E:\\examples\\root\\namespace1\\repo1`.""") + @find_repository @catch_cli_exception(handle=TAFError) @click.option("--path", default=".", help="Authentication repository's location. 
If not specified, set to the current directory") @click.argument("target-name") @@ -49,10 +52,17 @@ def add_repo_command(): @click.option("--keystore", default=None, help="Location of the keystore files") @click.option("--prompt-for-keys", is_flag=True, default=False, help="Whether to ask the user to enter their key if not located inside the keystore directory") @click.option("--no-commit", is_flag=True, default=False, help="Indicates that the changes should not be committed automatically") - @click.pass_context - def add_repo(ctx, path, target_path, target_name, role, keystore, prompt_for_keys, no_commit): - path = find_valid_repository(path) - custom = process_custom_command_line_args(ctx) + @click.option("--custom-file", type=click.Path(exists=True), help="Path to the JSON file containing additional, custom targets data.") + def add_repo(path, target_path, target_name, role, keystore, prompt_for_keys, no_commit, custom_file): + + custom = {} + if custom_file: + try: + custom = json.loads(Path(custom_file).read_text()) + except json.JSONDecodeError: + taf_logger.error("Invalid custom JSON provided") + sys.exit(1) + add_target_repo( path=path, target_path=target_path, @@ -77,12 +87,13 @@ def export_history_command(): data will include all target repositories. to a file whose location is specified using the output option, or print it to console.""") + @find_repository + @catch_cli_exception(handle=TAFError) @click.option("--path", default=".", help="Authentication repository's location. If not specified, set to the current directory") @click.option("--commit", default=None, help="Starting authentication repository commit") @click.option("--output", default=None, help="File to which the resulting json will be written. If not provided, the output will be printed to console") @click.option("--repo", multiple=True, help="Target repository whose historical data should be collected") def export_history(path, commit, output, repo): - path = find_valid_repository(path) export_targets_history(path, commit, output, repo) return export_history @@ -99,23 +110,24 @@ def list_targets_command(): - if there are unsigned changes (commits not registered in the authentication repository) - if they are up-to-date with remote - if there are uncommitted changes""") + @find_repository + @catch_cli_exception(handle=TAFError) @click.option("--path", default=".", help="Authentication repository's location. If not specified, set to the current directory") @click.option("--library-dir", default=None, help="Directory where target repositories and, optionally, authentication repository are located. If omitted it is calculated based on authentication repository's path. Authentication repo is presumed to be at library-dir/namespace/auth-repo-name") def list(path, library_dir): - path = find_valid_repository(path) list_targets(path, library_dir) return list def remove_repo_command(): @click.command(help="Remove a target repository (from repsoitories.json and target file) and sign") + @find_repository @catch_cli_exception(handle=TAFError) @click.option("--path", default=".", help="Authentication repository's location. 
If not specified, set to the current directory") @click.argument("target-name") @click.option("--keystore", default=None, help="Location of the keystore files") @click.option("--prompt-for-keys", is_flag=True, default=False, help="Whether to ask the user to enter their key if not located inside the keystore directory") def remove_repo(path, target_name, keystore, prompt_for_keys): - path = find_valid_repository(path) remove_target_repo( path=path, target_name=target_name, @@ -132,6 +144,7 @@ def sign_targets_command(): keystore parameter is provided, keys stored in that directory will be used for signing. If a needed key is not in that directory, the file can either be signed by manually entering the key or by using a Yubikey.""") + @find_repository @catch_cli_exception(handle=TAFError) @click.option("--path", default=".", help="Authentication repository's location. If not specified, set to the current directory") @click.option("--keystore", default=None, help="Location of the keystore files") @@ -141,7 +154,6 @@ def sign_targets_command(): @click.option("--no-commit", is_flag=True, default=False, help="Indicates that the changes should not be committed automatically") def sign(path, keystore, keys_description, scheme, prompt_for_keys, no_commit): try: - path = find_valid_repository(path) register_target_files( path=path, keystore=keystore, @@ -178,6 +190,7 @@ def update_and_sign_command(): that the namespace of target repositories is equal to the authentication repository's namespace, determined based on the repository's path. E.g. Namespace of E:\\root\\namespace2\\auth-repo is namespace2.""") + @find_repository @catch_cli_exception(handle=TAFError) @click.option("--path", default=".", help="Authentication repository's location. If not specified, set to the current directory") @click.option("--library-dir", default=None, help="Directory where target repositories and, optionally, authentication repository are located. If omitted it is calculated based on authentication repository's path. Authentication repo is presumed to be at library-dir/namespace/auth-repo-name") @@ -189,7 +202,6 @@ def update_and_sign_command(): @click.option("--no-commit", is_flag=True, default=False, help="Indicates that the changes should not be committed automatically") def update_and_sign(path, library_dir, target_type, keystore, keys_description, scheme, prompt_for_keys, no_commit): try: - path = find_valid_repository(path) if len(target_type): update_and_sign_targets( path, diff --git a/taf/updater/handlers.py b/taf/updater/handlers.py index f60d7081..5ae870c4 100644 --- a/taf/updater/handlers.py +++ b/taf/updater/handlers.py @@ -76,7 +76,7 @@ def metadata_dir(self): def targets_dir(self): return str(self.validation_auth_repo.path / "targets") - def __init__(self, auth_url, repository_directory, repository_name): + def __init__(self, auth_urls, repository_directory, repository_name): """ Args: auth_url: repository url of the git repository which we want to clone. @@ -91,7 +91,7 @@ def __init__(self, auth_url, repository_directory, repository_name): validation_path = settings.validation_repo_path.get(repository_name) - self.set_validation_repo(validation_path, auth_url) + self.set_validation_repo(validation_path, auth_urls) self._init_commits() @@ -187,11 +187,11 @@ def wrapper(self): return wrapper - def set_validation_repo(self, path, url): + def set_validation_repo(self, path, urls): """ Used outside of GitUpdater to access validation auth repo. 
""" - self.validation_auth_repo = AuthenticationRepository(path=path, urls=[url]) + self.validation_auth_repo = AuthenticationRepository(path=path, urls=urls) def cleanup(self): """ diff --git a/taf/updater/updater.py b/taf/updater/updater.py index 1b9a990c..018af98d 100644 --- a/taf/updater/updater.py +++ b/taf/updater/updater.py @@ -139,8 +139,9 @@ def _reset_repository(repo, commits_data): @define class UpdateConfig: operation: OperationType = field(converter=OperationType) - url: str = field( - metadata={"docs": "URL of the remote authentication repository"}, default=None + remote_url: str = field( + metadata={"docs": "Remote URL of the remote authentication repository"}, + default=None, ) path: Path = field( default=None, @@ -203,6 +204,10 @@ class UpdateConfig: "docs": "Whether to checkout last validated commits after update. Optional." }, ) + clone_urls: list = field( + default=None, + metadata={"docs": "List of URLs to clone repositories from. Optional."}, + ) excluded_target_globs: list = field( default=None, metadata={ @@ -281,8 +286,10 @@ def clone_repository(config: UpdateConfig): """ settings.strict = config.strict - if config.url is None: - raise UpdateFailedError("URL has to be specified when cloning repositories") + if config.remote_url is None: + raise UpdateFailedError( + "Remote URL has to be specified when cloning repositories" + ) if config.path and is_non_empty_directory(config.path): raise UpdateFailedError( @@ -329,9 +336,9 @@ def update_repository(config: UpdateConfig): taf_logger.info(f"Updating repository {auth_repo.name}") - if config.url is None: - config.url = auth_repo.get_remote_url() - if config.url is None: + if config.remote_url is None: + config.remote_url = auth_repo.get_remote_url() + if config.remote_url is None: raise UpdateFailedError("URL cannot be determined. 
Please specify it") if auth_repo.is_bare_repository: @@ -419,9 +426,9 @@ def _process_repo_update( if visited is None: visited = [] # if there is a recursive dependency - if update_config.url in visited: + if update_config.remote_url in visited: return - visited.append(update_config.url) + visited.append(update_config.remote_url) # at the moment, we assume that the initial commit is valid and that it contains at least root.json update_status = update_output.event auth_repo = update_output.users_auth_repo @@ -481,7 +488,8 @@ def _process_repo_update( update_status = Event.FAILED repo = output.users_auth_repo child_config = copy.copy(update_config) - child_config.url = repo.urls[0] + child_config.remote_url = repo.urls[0] + child_config.clone_urls = repo.urls child_config.out_of_band_authentication = ( repo.out_of_band_authentication ) @@ -582,7 +590,8 @@ def _update_child_repo(updater_pipeline): child_config.operation = ( OperationType.UPDATE if repo.is_git_repository else OperationType.CLONE ) - child_config.url = repo.urls[0] + child_config.remote_url = repo.urls[0] + child_config.clone_urls = repo.urls child_config.out_of_band_authentication = repo.out_of_band_authentication child_config.path = repo.path pipeline = AuthenticationRepositoryUpdatePipeline(child_config) @@ -638,7 +647,7 @@ def validate_repository( try: config = UpdateConfig( operation=OperationType.UPDATE, - url=str(auth_path), + remote_url=str(auth_path), path=auth_path, library_dir=library_dir, validate_from_commit=validate_from_commit, diff --git a/taf/updater/updater_pipeline.py b/taf/updater/updater_pipeline.py index 3cebd02d..a3954bb6 100644 --- a/taf/updater/updater_pipeline.py +++ b/taf/updater/updater_pipeline.py @@ -87,6 +87,9 @@ class UpdateState: temp_root: TempPartition = field(default=None) update_handler: GitUpdater = field(default=None) clean_check_data: Dict[str, str] = field(factory=dict) + last_validated_data_per_repositories: Dict[str, Dict[str, str]] = field( + factory=dict + ) @attrs @@ -347,7 +350,7 @@ def __init__( ), ) self.operation = update_config.operation - self.url = update_config.url + self.urls = update_config.clone_urls or [update_config.remote_url] self.library_dir = update_config.library_dir self.auth_path = update_config.path self.update_from_filesystem = update_config.update_from_filesystem @@ -410,7 +413,7 @@ def set_users_auth_repo(self): self.state.existing_repo = False if self.auth_path: self.state.users_auth_repo = AuthenticationRepository( - path=self.auth_path, urls=[self.url] + path=self.auth_path, urls=self.urls ) self.state.auth_repo_name = self.state.users_auth_repo.name if self.state.users_auth_repo.is_git_repository_root: @@ -468,7 +471,7 @@ def check_if_local_repositories_clean(self): if not self.state.existing_repo: return UpdateStatus.SUCCESS - auth_repo = AuthenticationRepository(path=self.auth_path, urls=[self.url]) + auth_repo = AuthenticationRepository(path=self.auth_path, urls=self.urls) taf_logger.info( f"{auth_repo.name}: Checking if local repositories are clean..." 
) @@ -594,7 +597,7 @@ def clone_auth_to_temp(self): path = Path(self.state.temp_root.temp_dir, "auth_repo").absolute() self.state.validation_auth_repo = AuthenticationRepository( - path=path, urls=[self.url], alias="Validation repository" + path=path, urls=self.urls, alias="Validation repository" ) self.state.validation_auth_repo.clone(bare=True) self.state.validation_auth_repo.fetch(fetch_all=True) @@ -760,7 +763,7 @@ def run_tuf_updater(self): try: self.state.update_handler = GitUpdater( - self.url, self.library_dir, self.state.validation_auth_repo.name + self.urls, self.library_dir, self.state.validation_auth_repo.name ) last_validated_remote_commit, error = _run_tuf_updater( self.state.update_handler, self.state.auth_repo_name @@ -791,7 +794,7 @@ def run_tuf_updater(self): self.state.users_auth_repo = AuthenticationRepository( library_dir=self.library_dir, name=self.state.auth_repo_name, - urls=[self.url], + urls=self.urls, ) self._validate_operation_type() @@ -854,7 +857,7 @@ def run_tuf_updater(self): self.state.users_auth_repo = AuthenticationRepository( self.library_dir, self.state.auth_repo_name, - urls=[self.url], + urls=self.urls, ) def _validate_operation_type(self): @@ -952,6 +955,7 @@ def load_target_repositories(self): self.state.users_auth_repo, self.state.auth_commits_since_last_validated[-1::], excluded_target_globs=self.excluded_target_globs, + library_dir=self.library_dir, ) ) if self.only_validate: @@ -1293,7 +1297,7 @@ def validate_target_repositories(self): # need to be set to old head since that is the last validated target self.state.validated_commits_per_target_repos_branches = defaultdict(dict) - last_validated_data_per_repositories = defaultdict(dict) + self.last_validated_data_per_repositories = defaultdict(dict) self.state.validated_auth_commits = [] for auth_commit in self.state.auth_commits_since_last_validated: @@ -1313,7 +1317,9 @@ def validate_target_repositories(self): "branch", repository.default_branch ) current_commit = current_targets_data["commit"] - if not len(last_validated_data_per_repositories[repository.name]): + if not len( + self.last_validated_data_per_repositories[repository.name] + ): current_head_commit_and_branch = ( self.state.targets_data_by_auth_commits[ repository.name @@ -1324,10 +1330,10 @@ def validate_target_repositories(self): if previous_commit is not None and previous_branch is None: previous_branch = repository.default_branch else: - previous_branch = last_validated_data_per_repositories[ + previous_branch = self.last_validated_data_per_repositories[ repository.name ].get("branch") - previous_commit = last_validated_data_per_repositories[ + previous_commit = self.last_validated_data_per_repositories[ repository.name ]["commit"] @@ -1347,7 +1353,7 @@ def validate_target_repositories(self): auth_commit, ) - last_validated_data_per_repositories[repository.name] = { + self.last_validated_data_per_repositories[repository.name] = { "commit": validated_commit, "branch": current_branch, } @@ -1573,16 +1579,20 @@ def merge_commits(self): return self.state.update_status for repository in self.state.users_target_repositories.values(): # this will only include branches that were, at least partially, validated (up until a certain point) + last_branch = self.last_validated_data_per_repositories[ + repository.name + ]["branch"] for ( branch, validated_commits, ) in self.state.validated_commits_per_target_repos_branches[ repository.name ].items(): + is_last_branch = branch == last_branch last_validated_commit = validated_commits[-1] 
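# merge up to the newest validated commit of this branch; is_last_branch tells _merge_commit whether this branch should also be checked out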
commit_to_merge = last_validated_commit update_status = self._merge_commit( - repository, branch, commit_to_merge, force_revert=True + repository, branch, commit_to_merge, is_last_branch ) events_list.append(update_status) @@ -1623,7 +1633,7 @@ def merge_auth_commits(self): self.state.users_auth_repo, self.state.users_auth_repo.default_branch, last_commit, - False, + True, ) # update the last validated commit self.state.users_auth_repo.set_last_validated_commit(last_commit) @@ -1634,7 +1644,7 @@ def merge_auth_commits(self): self.state.event = Event.FAILED return UpdateStatus.FAILED - def _merge_commit(self, repository, branch, commit_to_merge, force_revert=True): + def _merge_commit(self, repository, branch, commit_to_merge, is_last_branch): """Merge the specified commit into the given branch and check out the branch. If the repository cannot contain unauthenticated commits, check out the merged commit. """ @@ -1647,9 +1657,9 @@ def _merge_commit(self, repository, branch, commit_to_merge, force_revert=True): # out the newest branch and print a warning # if run with --force, checkout the newest branch regardless of the repo's state - checkout_branch = True + checkout_branch = is_last_branch local_branch_exists = repository.branch_exists(branch, include_remotes=False) - if not self.force: + if is_last_branch and not self.force: try: if repository.is_detached_head: self.state.warnings.append( @@ -1685,7 +1695,7 @@ def _merge_commit(self, repository, branch, commit_to_merge, force_revert=True): f"Please update the repository at {repository.path} manually and try again." ) elif not local_branch_exists: - repository.create_local_branch(branch) + repository.create_local_branch_from_remote_tracking(branch) if repository.top_commit_of_branch(branch) == commit_to_merge: return Event.UNCHANGED diff --git a/taf/yubikey.py b/taf/yubikey.py index 1cac0212..3630803a 100644 --- a/taf/yubikey.py +++ b/taf/yubikey.py @@ -43,7 +43,8 @@ def add_key_id_mapping(serial_num: str, keyid: str) -> None: if "ids" not in _yks_data_dict: _yks_data_dict["ids"] = defaultdict(dict) - _yks_data_dict["ids"][keyid] = serial_num + if keyid not in _yks_data_dict["ids"]: + _yks_data_dict["ids"][keyid] = serial_num def add_key_pin(serial_num: str, pin: str) -> None: @@ -481,7 +482,10 @@ def _read_and_check_yubikey( if get_key_public_key(serial_num) is None and public_key is not None: add_key_public_key(serial_num, public_key) - add_key_id_mapping(serial_num, key_name) + + # when reusing the same yubikey, public key will already be in the public keys dictionary + # but the key name still needs to be added to the key id mapping dictionary + add_key_id_mapping(serial_num, key_name) if role is not None: if loaded_yubikeys is None:
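Pulling together the updater changes above (the `url` to `remote_url` rename on `UpdateConfig` and the new optional `clone_urls` list that is propagated to child repositories), the following is a minimal sketch of driving a clone programmatically. The URL and directory names are placeholders, and only the fields relevant to this flow are shown.

```python
from taf.updater.updater import OperationType, UpdateConfig, clone_repository

# remote_url replaces the old `url` field; clone_urls optionally lists
# additional URLs (e.g. mirrors) the repositories can be cloned from.
config = UpdateConfig(
    operation=OperationType.CLONE,
    remote_url="https://example.com/org/example-auth.git",           # placeholder
    clone_urls=["https://mirror.example.com/org/example-auth.git"],  # placeholder
    path="library/org/example-auth",
    library_dir="library",
)
clone_repository(config)
```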