diff --git a/CHANGELOG.md b/CHANGELOG.md
index 11f44f59..6129a7bb 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -8,9 +8,52 @@ is too old (i.e., must be updated).
 ## Upcoming
 
+## v0.5.0
+
 ### Major/Breaking changes
 
 * The configuration must be updated, see `CHANGELOG.toml` for details.
+  * The `compatibility` setting is now independent of the butido version and
+    will be used to output the required configuration changes
+* Butido now rejects unknown fields in its configuration source(s)
+  (`config.toml` files and environment variables) and package definitions
+  (`pkg.toml` files). This is done to detect typos/mistakes and requires
+  changes if such unknown/misspelled fields are present.
+* The "merging" of `patches` arrays in `pkg.toml` files has been fixed (the old
+  logic silently ignored a patch if a patch with the same name was declared in
+  an upper level/layer `pkg.toml` file)
+
+### Highlights
+
+* Lots of dependency updates (including security updates)
+  * All dependencies are up-to-date now (except for the `config` crate due to
+    breaking changes in `0.14.x` that currently prevent us from updating)
+  * Some deprecated/unmaintained crates have been replaced or dropped
+* CLI improvements and fixes (partly due to the clap update from version 3 to 4)
+  * Better / more consistent argument parsing
+  * Output tweaks/improvements
+  * Improved error messages (e.g., for dependency cycles and unsupported
+    version specifications)
+  * Support for short build image names
+  * More filtering options for DB commands
+  * Build dependencies are now marked with a star (`*`) in the `tree-of` output
+  * A new "release list" subcommand ("db releases" alias for convenience)
+* Fixed the default log level (info -> warn)
+* Source download improvements (warnings when downloading HTML files, no more
+  empty source files after failed downloads, a check for the HTTP status code
+  to avoid downloading "404 Not Found" HTML responses, and the User-Agent
+  header is now set)
+* The included example configuration and packages repo have been fixed (and new
+  CI tests should ensure that they remain valid)
+* It is now possible to depend on packages whose name starts with a number
+* The tracing has been improved (some spans and structured fields are now used)
+  and the `--tracing-chrome` flag has been added to generate `chrome://tracing`
+  compatible JSON files
+* Butido no longer crashes when `..` is used in paths of `patches` (`pkg.toml`)
+* Lots of cleanups, refactorings, new tests (+ CI improvements), and minor fixes
+* Various minor improvements (additional directory/filesystem checks, handling
+  of multiple release stores, etc.)
+* Minor documentation improvements (typos, fixes, additional comments, etc.)
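The "rejects unknown fields" entry above is easiest to see with a tiny example. The following is a minimal sketch of how unknown-field rejection typically looks with serde's `deny_unknown_fields` attribute; whether butido enables it exactly this way (rather than through the `config` crate's own mechanisms), as well as the struct and field names used here, are assumptions for illustration only.

```rust
// Sketch only: not butido's real config type. It shows how a misspelled key in
// a TOML source is rejected instead of being silently dropped.
use serde::Deserialize;

#[derive(Debug, Deserialize)]
#[serde(deny_unknown_fields)] // unknown keys become hard errors
struct ExampleConfig {
    log_dir: String,
}

fn main() {
    // "log_dirr" is a typo for "log_dir"; deserialization now fails loudly.
    let input = r#"
        log_dir = "/var/log/butido"
        log_dirr = "/tmp/oops"
    "#;
    match toml::from_str::<ExampleConfig>(input) {
        Ok(cfg) => println!("parsed: {cfg:?}"),
        Err(e) => eprintln!("rejected: {e}"),
    }
}
```

The same idea applies to `pkg.toml` package definitions: a field name with a typo now surfaces as a deserialization error instead of being ignored.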
## v0.4.0 diff --git a/Cargo.lock b/Cargo.lock index 48dbdc09..6f81bdf6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -214,7 +214,7 @@ checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" [[package]] name = "butido" -version = "0.4.0" +version = "0.5.0" dependencies = [ "anyhow", "aquamarine", diff --git a/Cargo.toml b/Cargo.toml index 96652004..3a853224 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "butido" -version = "0.4.0" +version = "0.5.0" authors = [ # Only for the current/active maintainers (sorted alphabetically by the surname) # All other authors are listed in the "Authors" section of README.md @@ -71,8 +71,8 @@ tokio = { version = "1", features = ["macros", "fs", "process", "io-util", "time tokio-stream = "0.1" toml = "0.8" tracing = "0.1" +tracing-chrome = "0.7" tracing-subscriber = { version = "0.3", features = ["env-filter"] } -tracing-chrome = "0.7.1" typed-builder = "0.18" unindent = "0.2" url = { version = "2", features = ["serde"] } diff --git a/src/cli.rs b/src/cli.rs index f3c9c300..7000773d 100644 --- a/src/cli.rs +++ b/src/cli.rs @@ -58,7 +58,7 @@ pub fn cli() -> Command { .after_help(indoc::indoc!(r#" The following environment variables can be passed to butido: - RUST_LOG - to enable logging, for exact usage see the rust cookbook + RUST_LOG - to enable logging, for exact usage see the Rust cookbook "#)) .arg(Arg::new("version") @@ -73,7 +73,7 @@ pub fn cli() -> Command { .action(ArgAction::SetTrue) .required(false) .long("tracing-chrome") - .help("Generate a chrome compatible trace file") + .help("Generate a Chrome compatible trace file (trace-*.json)") ) .arg(Arg::new("hide_bars") @@ -90,7 +90,7 @@ pub fn cli() -> Command { .help("Override the database host") .long_help(indoc::indoc!(r#" Override the database host set via configuration. - Can also be overriden via environment variable 'BUTIDO_DATABASE_HOST', but this setting has precedence. + Can also be overridden via environment variable 'BUTIDO_DATABASE_HOST', but this setting has precedence. "#)) ) .arg(Arg::new("database_port") @@ -100,7 +100,7 @@ pub fn cli() -> Command { .help("Override the database port") .long_help(indoc::indoc!(r#" Override the database port set via configuration. - Can also be overriden via environment 'BUTIDO_DATABASE_PORT', but this setting has precedence. + Can also be overridden via environment 'BUTIDO_DATABASE_PORT', but this setting has precedence. "#)) ) .arg(Arg::new("database_user") @@ -110,7 +110,7 @@ pub fn cli() -> Command { .help("Override the database user") .long_help(indoc::indoc!(r#" Override the database user set via configuration. - Can also be overriden via environment 'BUTIDO_DATABASE_USER', but this setting has precedence. + Can also be overridden via environment 'BUTIDO_DATABASE_USER', but this setting has precedence. "#)) ) .arg(Arg::new("database_password") @@ -121,7 +121,7 @@ pub fn cli() -> Command { .help("Override the database password") .long_help(indoc::indoc!(r#" Override the database password set via configuration. - Can also be overriden via environment 'BUTIDO_DATABASE_PASSWORD', but this setting has precedence. + Can also be overridden via environment 'BUTIDO_DATABASE_PASSWORD', but this setting has precedence. "#)) ) .arg(Arg::new("database_name") @@ -131,7 +131,7 @@ pub fn cli() -> Command { .help("Override the database name") .long_help(indoc::indoc!(r#" Override the database name set via configuration. - Can also be overriden via environment 'BUTIDO_DATABASE_NAME', but this setting has precedence. 
+ Can also be overridden via environment 'BUTIDO_DATABASE_NAME', but this setting has precedence. "#)) ) .arg(Arg::new("database_connection_timeout") @@ -141,7 +141,7 @@ pub fn cli() -> Command { .help("Override the database connection timeout") .long_help(indoc::indoc!(r#" Override the database connection timeout set via configuration. - Can also be overriden via environment 'BUTIDO_DATABASE_CONNECTION_TIMEOUT', but this setting has precedence. + Can also be overridden via environment 'BUTIDO_DATABASE_CONNECTION_TIMEOUT', but this setting has precedence. "#)) ) @@ -511,7 +511,7 @@ pub fn cli() -> Command { ) .subcommand(Command::new("dependencies-of") .alias("depsof") - .about("List the depenendcies of a package") + .about("List the dependencies of a package") .arg(Arg::new("package_name") .required(true) .index(1) @@ -522,7 +522,7 @@ pub fn cli() -> Command { .required(false) .index(2) .value_name("VERSION_CONSTRAINT") - .help("A version constraint to search for (optional), E.G. '=1.0.0'") + .help("A version constraint to search for (optional), e.g., '=1.0.0'") ) .arg(Arg::new("dependency_type") .required(false) @@ -564,7 +564,7 @@ pub fn cli() -> Command { .required(true) .index(2) .value_name("VERSION_CONSTRAINT") - .help("A version constraint to search for (optional), E.G. '=1.0.0'") + .help("A version constraint to search for (optional), e.g., '=1.0.0'") ) ) @@ -580,21 +580,21 @@ pub fn cli() -> Command { .required(false) .index(2) .value_name("VERSION_CONSTRAINT") - .help("A version constraint to search for (optional), E.G. '=1.0.0'") + .help("A version constraint to search for (optional), e.g., '=1.0.0'") ) .arg(Arg::new("no_script_filter") .action(ArgAction::SetTrue) .long("no-script-filter") .short('S') .required(false) - .help("Don't check for script equality. Can cause unexact results.") + .help("Don't check for script equality. Can cause inexact results.") ) .arg(Arg::new("staging_dir") .required(false) .long("staging-dir") .value_name("PATH") .value_parser(dir_exists_validator) - .help("Also consider this staging dir when searching for artifacts") + .help("Also consider this staging directory when searching for artifacts") ) .arg(Arg::new("env_filter") .required(false) @@ -626,7 +626,7 @@ pub fn cli() -> Command { .required(false) .index(2) .value_name("VERSION_CONSTRAINT") - .help("A version constraint to search for (optional), E.G. 
'=1.0.0'") + .help("A version constraint to search for (optional), e.g., '=1.0.0'") ) .arg(Arg::new("terse") @@ -825,18 +825,18 @@ pub fn cli() -> Command { ) ) .subcommand(Command::new("of") - .about("Get the pathes of the sources of a package") + .about("Get the paths of the sources of a package") .arg(Arg::new("package_name") .required(false) .index(1) .value_name("PKG") - .help("Get the source file pathes for this package") + .help("Get the source file paths for this package") ) .arg(Arg::new("package_version") .required(false) .index(2) .value_name("VERSION") - .help("Get the source file pathes for the package in this version") + .help("Get the source file paths for the package in this version") ) ) ) @@ -927,7 +927,7 @@ pub fn cli() -> Command { .action(ArgAction::SetTrue) .required(false) .long("non-interactive") - .help("Dont be interactive (only with --update at the moment)") + .help("Don't be interactive (only with --update at the moment)") .requires("package_do_update") ) .arg(Arg::new("quiet") @@ -935,7 +935,7 @@ pub fn cli() -> Command { .required(false) .long("quiet") .short('q') - .help("Don't print pathes to released filesfiles after releases are complete") + .help("Don't print the paths to released files after releases are complete") ) ) @@ -953,7 +953,7 @@ pub fn cli() -> Command { .required(false) .index(2) .value_name("VERSION_CONSTRAINT") - .help("A version constraint to search for (optional), E.G. '=1.0.0'") + .help("A version constraint to search for (optional), e.g., '=1.0.0'") ) ) @@ -969,7 +969,7 @@ pub fn cli() -> Command { .required(false) .index(2) .value_name("VERSION_CONSTRAINT") - .help("A version constraint to search for (optional), E.G. '=1.0.0'") + .help("A version constraint to search for (optional), e.g., '=1.0.0'") ) .arg(Arg::new("image") .required(false) @@ -1005,7 +1005,7 @@ pub fn cli() -> Command { ) .subcommand(Command::new("endpoint") - .about("Endpoint maintentance commands") + .about("Endpoint maintenance commands") .arg(Arg::new("endpoint_name") .required(false) .index(1) @@ -1251,7 +1251,7 @@ fn env_pass_validator(s: &str) -> Result { Err(s) } Ok((k, v)) => { - debug!("Env pass valiation: '{}={}'", k, v); + debug!("Env pass validation: '{}={}'", k, v); Ok(s.to_owned()) } } diff --git a/src/commands/build.rs b/src/commands/build.rs index ff7049cd..171fe689 100644 --- a/src/commands/build.rs +++ b/src/commands/build.rs @@ -116,7 +116,7 @@ pub async fn build( }) .collect::>(); { - // Because we're loading always sequencially, to have a bit more spread over the endpoints, + // Because we're loading always sequentially, to have a bit more spread over the endpoints, // shuffle the endpoints here. Not a perfect solution, but a working one. 
use rand::seq::SliceRandom; let mut rng = rand::thread_rng(); diff --git a/src/commands/db.rs b/src/commands/db.rs index a3a92b1b..8056316d 100644 --- a/src/commands/db.rs +++ b/src/commands/db.rs @@ -660,7 +660,7 @@ fn job( let parsed_log = crate::log::ParsedLog::from_str(&data.0.log_text)?; trace!("Parsed log = {:?}", parsed_log); let success = parsed_log.is_successfull(); - trace!("log successfull = {:?}", success); + trace!("log successful = {:?}", success); if csv { let hdrs = crate::commands::util::mk_header(vec![ diff --git a/src/commands/find_artifact.rs b/src/commands/find_artifact.rs index a48b8a1d..dd6d0724 100644 --- a/src/commands/find_artifact.rs +++ b/src/commands/find_artifact.rs @@ -163,7 +163,7 @@ pub async fn find_artifact( ((a, None), (b, None)) => a.cmp(b), } }) - .unique_by(|tpl| tpl.0.clone()) // TODO: Dont clone() + .unique_by(|tpl| tpl.0.clone()) // TODO: Don't clone() .try_for_each(|(path, releasetime)| { if let Some(time) = releasetime { writeln!(std::io::stdout(), "[{}] {}", time, path.display()) diff --git a/src/config/container_config.rs b/src/config/container_config.rs index 702d1009..094ce541 100644 --- a/src/config/container_config.rs +++ b/src/config/container_config.rs @@ -25,12 +25,12 @@ pub struct ContainerConfig { #[getset(get = "pub")] allowed_env: Vec, - /// Pass the current git author to the container - /// This can be used to the the "packager" name in a package, for example + /// Pass the current Git author to the container + /// This can be used for the "packager" name in a package, for example #[getset(get = "pub")] git_author: Option, - /// Pass the current git hash to the container + /// Pass the current Git hash to the container #[getset(get = "pub")] git_commit_hash: Option, } diff --git a/src/db/find_artifacts.rs b/src/db/find_artifacts.rs index 433570d6..d00bd5c8 100644 --- a/src/db/find_artifacts.rs +++ b/src/db/find_artifacts.rs @@ -50,7 +50,7 @@ use crate::util::EnvironmentVariableName; /// /// If the artifact was released, the return value contains a Some(NaiveDateTime), marking the date /// of the release. -/// Releases are returned prefferably, if multiple equal pathes for an artifact are found. +/// Releases are returned preferably, if multiple equal paths for an artifact are found. 
#[derive(typed_builder::TypedBuilder)] pub struct FindArtifacts<'a> { config: &'a Configuration, diff --git a/src/endpoint/configured.rs b/src/endpoint/configured.rs index 035da583..71330f27 100644 --- a/src/endpoint/configured.rs +++ b/src/endpoint/configured.rs @@ -181,7 +181,7 @@ impl Endpoint { .with_context(|| anyhow!("Getting API version of endpoint: {}", ep.name))?; if !v.contains(&avail.api_version) { - Err(anyhow!("Incompatible Docker API version on endpoint {}: Exepected: {}, Available: [{}]", + Err(anyhow!("Incompatible Docker API version on endpoint {}: Expected: {}, Available: [{}]", ep.name(), avail.api_version, v.join(", "))) } else { Ok(()) @@ -653,7 +653,7 @@ impl<'a> PreparedContainer<'a> { .copy_file_into(destination, &buf) .await .map_err(Error::from) - .inspect(|_| trace!("Copying patch {} successfull", patch.display())) + .inspect(|_| trace!("Copying patch {} successful", patch.display())) .with_context(|| { anyhow!( "Copying patch {} to container {}", diff --git a/src/filestore/path.rs b/src/filestore/path.rs index e8859594..9c3ff6ce 100644 --- a/src/filestore/path.rs +++ b/src/filestore/path.rs @@ -104,9 +104,9 @@ impl StoreRoot { /// Unpack a tar archive in this location /// /// This function unpacks the provided tar archive "butido-style" in the location pointed to by - /// `self` and returns the written pathes. + /// `self` and returns the written paths. /// - /// The function filteres out the "/output" directory (that's what is meant by "butido-style"). + /// The function filters out the "/output" directory (that's what is meant by "butido-style"). pub(in crate::filestore) fn unpack_archive_here( &self, mut ar: tar::Archive, diff --git a/src/job/runnable.rs b/src/job/runnable.rs index f36feffe..de06da4c 100644 --- a/src/job/runnable.rs +++ b/src/job/runnable.rs @@ -50,9 +50,6 @@ pub struct RunnableJob { } impl RunnableJob { - // TODO: Drop the following clippy exception once we don't need to check against rustc 1.75 - // anymore (a fix was already merged but it likely won't be backported to 1.75): - #[allow(clippy::map_identity)] pub fn build_from_job( job: &Job, source_cache: &SourceCache, diff --git a/src/main.rs b/src/main.rs index 166f63fd..cdcc6376 100644 --- a/src/main.rs +++ b/src/main.rs @@ -127,7 +127,7 @@ async fn main() -> Result<()> { let repo = git2::Repository::open(PathBuf::from(".")).map_err(|e| match e.code() { git2::ErrorCode::NotFound => { - eprintln!("Butido must be executed in the top-level of the git repository"); + eprintln!("Butido must be executed in the top-level of the Git repository"); std::process::exit(1) } _ => Error::from(e), diff --git a/src/orchestrator/orchestrator.rs b/src/orchestrator/orchestrator.rs index 03fda41c..8dd218f9 100644 --- a/src/orchestrator/orchestrator.rs +++ b/src/orchestrator/orchestrator.rs @@ -541,7 +541,7 @@ struct JobTask<'a> { /// /// This implementation is a bit of a hack. /// Because all `JobTask`s are `JobTask::run()` in parallel, but there is no IPC _between_ the -/// tasks (there is IPC between childs and parents, but not between all JobTask objects), we never +/// tasks (there is IPC between children and parents, but not between all JobTask objects), we never /// know whether any other task errored when the JobTask object is destructed. 
/// /// One way to implement this would be to add multi-cast IPC between all `JobTask` objects, with some @@ -560,7 +560,7 @@ impl<'a> Drop for JobTask<'a> { // If there are dependencies, the error is probably from another task // If there are no dependencies, the error was caused by something else let errmsg = if self.jobdef.dependencies.is_empty() { - "error occured" + "error occurred" } else { "error on other task" }; @@ -610,7 +610,7 @@ impl<'a> JobTask<'a> { /// Run the job /// - /// This function runs the job from this object on the scheduler as soon as all dependend jobs + /// This function runs the job from this object on the scheduler as soon as all dependent jobs /// returned successfully. async fn run(mut self) -> Result<()> { debug!(job_uuid = %self.jobdef.job.uuid(), "Running"); @@ -748,7 +748,7 @@ impl<'a> JobTask<'a> { // subcommand. In this case, there might be an artifact for this job in the // staging store. In this case, we want to use it as a replacement, of course. // - // The fact that released artifacts are returned prefferably from this function + // The fact that released artifacts are returned preferably from this function // call does not change anything, because if there is an artifact that's a released // one that matches this job, we should use it anyways. .staging_store(Some(&staging_store)) @@ -778,7 +778,7 @@ impl<'a> JobTask<'a> { }) // We don't need duplicates here, so remove them by making the iterator unique // If we have two artifacts that are the same, the one in the staging store will be - // preffered in the next step + // preferred in the next step .unique_by(|tpl| tpl.0.artifact_path().clone()) // Fetch the artifact from the staging store, if there is one. // If there is none, try the release store. @@ -914,7 +914,7 @@ impl<'a> JobTask<'a> { Ok(()) } - /// Performe a recv() call on the receiving side of the channel + /// Perform a recv() call on the receiving side of the channel /// /// Put the dependencies you received into the `received_dependencies`, the errors in the /// `received_errors` @@ -977,7 +977,7 @@ impl<'a> JobTask<'a> { if !missing_deps.is_empty() { let missing: Vec = missing_deps.iter().map(|u| u.to_string()).collect(); Err(anyhow!( - "Childs finished, but dependencies still missing: {:?}", + "Children finished, but dependencies still missing: {:?}", missing )) } else { diff --git a/src/repository/fs/path.rs b/src/repository/fs/path.rs index b5afa583..73fa44cd 100644 --- a/src/repository/fs/path.rs +++ b/src/repository/fs/path.rs @@ -13,7 +13,7 @@ use std::path::Component; use anyhow::anyhow; use anyhow::Result; -/// Helper type for filtering for pathes we need or dont need +/// Helper type for filtering for paths we need or don't need /// /// We either have a directory, which has a name, or we have a pkg.toml file, which is of interest. /// All other files can be ignored and thus are not represented by this type. 
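The doc comment above describes a small helper enum; purely to make that description concrete, here is a sketch of what such a type can look like. The names (`PathComponentOfInterest`, `from_path`) and the exact logic are invented for illustration and are not the real type in `src/repository/fs/path.rs`.

```rust
// Sketch only: an enum in the spirit of the helper type described above --
// either a named directory or a pkg.toml file; everything else is ignored.
use std::path::Path;

#[derive(Debug)]
enum PathComponentOfInterest {
    /// A directory we may need to descend into, identified by its name.
    Directory(String),
    /// A pkg.toml file, the thing we actually care about.
    PkgToml,
}

impl PathComponentOfInterest {
    /// Returns None for paths that are neither a directory nor a pkg.toml file.
    fn from_path(p: &Path) -> Option<Self> {
        if p.is_dir() {
            p.file_name()
                .map(|n| Self::Directory(n.to_string_lossy().into_owned()))
        } else if p.file_name() == Some(std::ffi::OsStr::new("pkg.toml")) {
            Some(Self::PkgToml)
        } else {
            None
        }
    }
}

fn main() {
    println!("{:?}", PathComponentOfInterest::from_path(Path::new("pkgs/foo/pkg.toml")));
}
```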
diff --git a/src/util/filters.rs b/src/util/filters.rs index 2139188c..ff32b288 100644 --- a/src/util/filters.rs +++ b/src/util/filters.rs @@ -27,7 +27,11 @@ pub fn build_package_filter_by_dependency_name( ) -> impl filters::failable::filter::FailableFilter { let n = name.clone(); // clone, so we can move into closure let filter_build_dep = move |p: &Package| -> Result { - trace!("Checking whether any build depenency of {:?} is '{}'", p, n); + trace!( + "Checking whether any build dependency of {:?} is '{}'", + p, + n + ); Ok({ check_build_dep && p.dependencies() @@ -46,7 +50,7 @@ pub fn build_package_filter_by_dependency_name( let n = name.clone(); // clone, so we can move into closure let filter_rt_dep = move |p: &Package| -> Result { trace!( - "Checking whether any runtime depenency of {:?} is '{}'", + "Checking whether any runtime dependency of {:?} is '{}'", p, n );
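As a closing illustration for the new `--tracing-chrome` flag and the `tracing-chrome = "0.7"` dependency added in this release: the crate is typically wired into a `tracing-subscriber` registry roughly as follows. This is a generic sketch of the crate's usual usage (assuming the `env-filter` feature of `tracing-subscriber`), not butido's actual `main.rs` setup.

```rust
// Sketch only: minimal wiring of tracing-chrome into a tracing-subscriber
// registry. Butido's real setup (behind its --tracing-chrome flag) may differ.
use tracing_subscriber::prelude::*;

fn main() {
    // build() returns the layer plus a guard; the trace file (trace-*.json by
    // default) is flushed when the guard is dropped at the end of main().
    let (chrome_layer, _guard) = tracing_chrome::ChromeLayerBuilder::new().build();

    tracing_subscriber::registry()
        .with(tracing_subscriber::EnvFilter::from_default_env())
        .with(chrome_layer)
        .init();

    tracing::info!("events and spans from here on end up in the Chrome trace");
}
```

The resulting JSON file can then be opened in `chrome://tracing` or a compatible trace viewer.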