diff --git a/.config/dictionaries/project.dic b/.config/dictionaries/project.dic
index 52715d4bb0..9c8611843c 100644
--- a/.config/dictionaries/project.dic
+++ b/.config/dictionaries/project.dic
@@ -189,6 +189,7 @@ Precertificate
 preprod
 projectcatalyst
 Prokhorenko
+proptest
 psql
 Ptarget
 pubkey
diff --git a/Earthfile b/Earthfile
index 306dcf3452..b41357596e 100644
--- a/Earthfile
+++ b/Earthfile
@@ -1,8 +1,8 @@
 VERSION 0.8
 
-IMPORT github.com/input-output-hk/catalyst-ci/earthly/mdlint:v3.2.07 AS mdlint-ci
-IMPORT github.com/input-output-hk/catalyst-ci/earthly/cspell:v3.2.07 AS cspell-ci
-IMPORT github.com/input-output-hk/catalyst-ci/earthly/postgresql:v3.2.07 AS postgresql-ci
+IMPORT github.com/input-output-hk/catalyst-ci/earthly/mdlint:v3.2.10 AS mdlint-ci
+IMPORT github.com/input-output-hk/catalyst-ci/earthly/cspell:v3.2.10 AS cspell-ci
+IMPORT github.com/input-output-hk/catalyst-ci/earthly/postgresql:v3.2.10 AS postgresql-ci
 
 FROM debian:stable-slim
diff --git a/catalyst-gateway/Earthfile b/catalyst-gateway/Earthfile
index 8ec606226d..16896b37f9 100644
--- a/catalyst-gateway/Earthfile
+++ b/catalyst-gateway/Earthfile
@@ -1,6 +1,6 @@
 VERSION 0.8
 
-IMPORT github.com/input-output-hk/catalyst-ci/earthly/rust:v3.2.07 AS rust-ci
+IMPORT github.com/input-output-hk/catalyst-ci/earthly/rust:v3.2.10 AS rust-ci
 
 #cspell: words rustfmt toolsets USERARCH stdcfgs
diff --git a/catalyst-gateway/bin/Cargo.toml b/catalyst-gateway/bin/Cargo.toml
index aed66bbb86..72cad17317 100644
--- a/catalyst-gateway/bin/Cargo.toml
+++ b/catalyst-gateway/bin/Cargo.toml
@@ -21,7 +21,7 @@ pallas = { version = "0.30.1", git = "https://github.com/input-output-hk/catalys
 pallas-traverse = { version = "0.30.1", git = "https://github.com/input-output-hk/catalyst-pallas.git", rev = "9b5183c8b90b90fe2cc319d986e933e9518957b3" }
 #pallas-crypto = { version = "0.30.1", git = "https://github.com/input-output-hk/catalyst-pallas.git", rev = "9b5183c8b90b90fe2cc319d986e933e9518957b3" }
 
-clap = { version = "4.5.17", features = ["derive", "env"] }
+clap = { version = "4.5.18", features = ["derive", "env"] }
 tracing = { version = "0.1.40", features = ["log"] }
 tracing-subscriber = { version = "0.3.18", features = [
     "fmt",
@@ -31,25 +31,25 @@ tracing-subscriber = { version = "0.3.18", features = [
     "time",
     "env-filter",
 ] }
-serde = { version = "1.0.204", features = ["derive"] }
+serde = { version = "1.0.210", features = ["derive"] }
 serde_json = "1.0.128"
-thiserror = "1.0.63"
+thiserror = "1.0.64"
 chrono = "0.4.38"
 # async-trait = "0.1.82"
 bb8 = "0.8.5"
 bb8-postgres = "0.8.1"
-tokio-postgres = { version = "0.7.11", features = [
+tokio-postgres = { version = "0.7.12", features = [
     "with-chrono-0_4",
     "with-serde_json-1",
     "with-time-0_3",
 ] }
-tokio = { version = "1.39.2", features = ["rt", "macros", "rt-multi-thread"] }
+tokio = { version = "1.40.0", features = ["rt", "macros", "rt-multi-thread"] }
 dotenvy = "0.15.7"
-local-ip-address = "0.6.2"
+local-ip-address = "0.6.3"
 gethostname = "0.5.0"
 hex = "0.4.3"
-handlebars = "6.0.0"
-anyhow = "1.0.86"
+handlebars = "6.1.0"
+anyhow = "1.0.89"
 #cddl = "0.9.4"
 #ciborium = "0.2.2"
 # stringzilla = "3.9.3"
@@ -69,8 +69,8 @@ rust_decimal = { version = "1.36.0", features = [
     "serde-with-float",
     "db-tokio-postgres",
 ] }
-poem = { version = "3.0.4", features = ["embed", "prometheus", "compression"] }
-poem-openapi = { version = "5.0.3", features = [
+poem = { version = "3.1.0", features = ["embed", "prometheus", "compression"] }
+poem-openapi = { version = "5.1.1", features = [
     "openapi-explorer",
     "rapidoc",
"redoc", @@ -81,7 +81,7 @@ poem-openapi = { version = "5.0.3", features = [ ] } uuid = { version = "1.10.0", features = ["v4", "serde"] } ulid = { version = "1.1.3", features = ["serde", "uuid"] } -cryptoxide = "0.4.4" # TODO: For blake2b replace with blake2b_simd. +blake2b_simd = "1.0.2" url = "2.5.2" panic-message = "0.3.0" cpu-time = "1.0.0" @@ -89,7 +89,10 @@ prometheus = "0.13.4" rust-embed = "8.5.0" num-traits = "0.2.19" base64 = "0.22.1" -dashmap = "6.0.1" +dashmap = "6.1.0" + +[dev-dependencies] +proptest = "1.5.0" [build-dependencies] build-info-build = "0.0.38" \ No newline at end of file diff --git a/catalyst-gateway/bin/src/cardano/util.rs b/catalyst-gateway/bin/src/cardano/util.rs index 9916797b7f..5383c36900 100644 --- a/catalyst-gateway/bin/src/cardano/util.rs +++ b/catalyst-gateway/bin/src/cardano/util.rs @@ -1,5 +1,4 @@ //! Block stream parsing and filtering utils -use cryptoxide::{blake2b::Blake2b, digest::Digest}; use pallas::ledger::{ primitives::conway::StakeCredential, traverse::{Era, MultiEraAsset, MultiEraCert, MultiEraPolicyAssets}, @@ -18,21 +17,6 @@ pub type StakeCredentialHash = String; /// Correct stake credential key in hex pub type StakeCredentialKey = String; -/// Hash size -#[allow(dead_code)] -pub(crate) const BLAKE_2B_256_HASH_SIZE: usize = 256 / 8; - -/// Helper function to generate the `blake2b_256` hash of a byte slice -#[allow(dead_code)] -pub(crate) fn hash(bytes: &[u8]) -> [u8; BLAKE_2B_256_HASH_SIZE] { - let mut digest = [0u8; BLAKE_2B_256_HASH_SIZE]; - let mut context = Blake2b::new(BLAKE_2B_256_HASH_SIZE); - context.input(bytes); - context.result(&mut digest); - - digest -} - #[derive(Default, Debug, Serialize)] /// Assets pub struct Asset { diff --git a/catalyst-gateway/bin/src/db/index/schema/cql/sync_status.cql b/catalyst-gateway/bin/src/db/index/schema/cql/sync_status.cql new file mode 100644 index 0000000000..7f82d255e0 --- /dev/null +++ b/catalyst-gateway/bin/src/db/index/schema/cql/sync_status.cql @@ -0,0 +1,11 @@ +-- Most recent completed +-- Can also be used to convert a known stake key hash back to a full stake address. +CREATE TABLE IF NOT EXISTS sync_status ( + -- Primary Key Data + end_slot varint, -- The slot that has been indexed up-to (inclusive). + start_slot varint, -- The slot the sync block started at (inclusive). + sync_time timestamp, -- The time we finished the sync. + node_id uuid, -- The node that synced this data. + + PRIMARY KEY (end_slot, start_slot, sync_time, node_id) +); diff --git a/catalyst-gateway/bin/src/db/index/schema/mod.rs b/catalyst-gateway/bin/src/db/index/schema/mod.rs index 89cba358da..61e2f2057b 100644 --- a/catalyst-gateway/bin/src/db/index/schema/mod.rs +++ b/catalyst-gateway/bin/src/db/index/schema/mod.rs @@ -8,17 +8,27 @@ use scylla::Session; use serde_json::json; use tracing::error; -use crate::settings::cassandra_db; +use crate::{settings::cassandra_db, utils::blake2b_hash::generate_uuid_string_from_data}; + +/// The version of the Index DB Schema we SHOULD BE using. +/// DO NOT change this unless you are intentionally changing the Schema. +/// +/// This constant is ONLY used by Unit tests to identify when the schema version will +/// change accidentally, and is NOT to be used directly to set the schema version of the +/// table namespaces. +#[allow(dead_code)] +const SCHEMA_VERSION: &str = "a0e54866-1f30-8ad2-9ac7-df1cfaf9c634"; /// Keyspace Create (Templated) const CREATE_NAMESPACE_CQL: &str = include_str!("./cql/namespace.cql"); -/// The version of the Schema we are using. 
-/// Must be incremented if there is a breaking change in any schema tables below.
-pub(crate) const SCHEMA_VERSION: u64 = 1;
-
 /// All Schema Creation Statements
 const SCHEMAS: &[(&str, &str)] = &[
+    (
+        // Sync Status Table Schema
+        include_str!("./cql/sync_status.cql"),
+        "Create Sync Status Table",
+    ),
     (
         // TXO by Stake Address Table Schema
         include_str!("./cql/txo_by_stake_table.cql"),
         "Create Table TXO By Stake Address",
     ),
@@ -66,10 +76,83 @@ const SCHEMAS: &[(&str, &str)] = &[
     ),
 ];
 
+/// Removes all comments from each line in the input query text and joins the remaining
+/// lines into a single string, reducing consecutive whitespace characters to a single
+/// space. Comments are defined as any text following `--` on a line.
+///
+/// # Arguments
+///
+/// * `text`: A string slice that holds the query to be cleaned.
+///
+/// # Returns
+///
+/// A new string with comments removed and whitespace reduced, where each remaining line
+/// from the original text is separated by a newline character.
+fn remove_comments_and_join_query_lines(text: &str) -> String {
+    // Split the input text into lines.
+    let raw_lines: Vec<&str> = text.lines().collect();
+    let mut clean_lines: Vec<String> = Vec::new();
+
+    // Filter out comments from each line
+    for line in raw_lines {
+        let mut clean_line = line.to_string();
+        if let Some(no_comment) = line.split_once("--") {
+            clean_line = no_comment.0.to_string();
+        }
+        clean_line = clean_line
+            .split_whitespace()
+            .collect::<Vec<&str>>()
+            .join(" ")
+            .trim()
+            .to_string();
+        if !clean_line.is_empty() {
+            clean_lines.push(clean_line);
+        }
+    }
+    clean_lines.join("\n")
+}
+
+/// Generates a unique schema version identifier based on the content of all CQL schemas.
+///
+/// This function processes each CQL schema, removes comments from its lines and joins
+/// them into a single string. It then sorts these processed strings to ensure consistency
+/// in schema versions regardless of their order in the list. Finally, it generates a UUID
+/// from a 128 bit hash of this sorted collection of schema contents, which serves as a
+/// unique identifier for the current version of all schemas.
+///
+/// # Returns
+///
+/// A string representing the UUID derived from the concatenated and cleaned CQL
+/// schema contents.
+fn generate_cql_schema_version() -> String {
+    // Where we will store the cleaned schema documents the UUID is derived from.
+    let mut clean_schemas: Vec<String> = Vec::new();
+
+    // Iterate through each CQL schema and add it to the list of clean schema documents.
+    for (schema, _) in SCHEMAS {
+        let schema = remove_comments_and_join_query_lines(schema);
+        if !schema.is_empty() {
+            clean_schemas.push(schema);
+        }
+    }
+
+    // Make sure any re-ordering of the schemas in the list does not affect the generated
+    // schema version.
+    clean_schemas.sort();
+
+    // Generate a unique hash of the clean schemas,
+    // and use it to form a UUID to identify the schema version.
+    generate_uuid_string_from_data("Catalyst-Gateway Index Database Schema", &clean_schemas)
+}
+
 /// Get the namespace for a particular db configuration
 pub(crate) fn namespace(cfg: &cassandra_db::EnvVars) -> String {
     // Build and set the Keyspace to use.
- format!("{}_V{}", cfg.namespace.as_str(), SCHEMA_VERSION) + format!( + "{}_{}", + cfg.namespace.as_str(), + generate_cql_schema_version().replace('-', "_") + ) } /// Create the namespace we will use for this session @@ -83,11 +166,19 @@ async fn create_namespace( // disable default `html_escape` function // which transforms `<`, `>` symbols to `<`, `>` reg.register_escape_fn(|s| s.into()); - let query = reg.render_template(CREATE_NAMESPACE_CQL, &json!({"keyspace": keyspace}))?; + let query = reg + .render_template(CREATE_NAMESPACE_CQL, &json!({"keyspace": keyspace})) + .context(format!("Keyspace: {keyspace}"))?; // Create the Keyspace if it doesn't exist already. - let stmt = session.prepare(query).await?; - session.execute_unpaged(&stmt, ()).await?; + let stmt = session + .prepare(query) + .await + .context(format!("Keyspace: {keyspace}"))?; + session + .execute_unpaged(&stmt, ()) + .await + .context(format!("Keyspace: {keyspace}"))?; // Wait for the Schema to be ready. session.await_schema_agreement().await?; @@ -104,22 +195,88 @@ async fn create_namespace( pub(crate) async fn create_schema( session: &mut Arc, cfg: &cassandra_db::EnvVars, ) -> anyhow::Result<()> { - create_namespace(session, cfg).await?; - - for schema in SCHEMAS { - let stmt = session - .prepare(schema.0) - .await - .context(format!("{} : Prepared", schema.1))?; - - session - .execute_unpaged(&stmt, ()) - .await - .context(format!("{} : Executed", schema.1))?; + create_namespace(session, cfg) + .await + .context("Creating Namespace")?; + + let mut failed = false; + + for (schema, schema_name) in SCHEMAS { + match session.prepare(*schema).await { + Ok(stmt) => { + if let Err(err) = session.execute_unpaged(&stmt, ()).await { + failed = true; + error!(schema=schema_name, error=%err, "Failed to Execute Create Schema Query"); + }; + }, + Err(err) => { + failed = true; + error!(schema=schema_name, error=%err, "Failed to Prepare Create Schema Query"); + }, + } } + anyhow::ensure!(!failed, "Failed to Create Schema"); + // Wait for the Schema to be ready. session.await_schema_agreement().await?; Ok(()) } + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + /// This test is designed to fail if the schema version has changed. + /// It is used to help detect inadvertent schema version changes. + /// If you did NOT intend to change the index db schema and this test fails, + /// then revert or fix your changes to the schema files. 
+    fn check_schema_version_has_not_changed() {
+        let calculated_version = generate_cql_schema_version();
+        assert_eq!(SCHEMA_VERSION, calculated_version);
+    }
+
+    #[test]
+    fn test_no_comments() {
+        let input = "SELECT * FROM table1;";
+        let expected_output = "SELECT * FROM table1;";
+        assert_eq!(remove_comments_and_join_query_lines(input), expected_output);
+    }
+
+    #[test]
+    fn test_single_line_comment() {
+        let input = "SELECT -- some comment * FROM table1;";
+        let expected_output = "SELECT";
+        assert_eq!(remove_comments_and_join_query_lines(input), expected_output);
+    }
+
+    #[test]
+    fn test_multi_line_comment() {
+        let input = "SELECT -- some comment\n* FROM table1;";
+        let expected_output = "SELECT\n* FROM table1;";
+        assert_eq!(remove_comments_and_join_query_lines(input), expected_output);
+    }
+
+    #[test]
+    fn test_multiple_lines() {
+        let input = "SELECT * FROM table1;\n-- another comment\nSELECT * FROM table2;";
+        let expected_output = "SELECT * FROM table1;\nSELECT * FROM table2;";
+        assert_eq!(remove_comments_and_join_query_lines(input), expected_output);
+    }
+
+    #[test]
+    fn test_empty_lines() {
+        let input = "\n\nSELECT * FROM table1;\n-- comment here\n\n";
+        let expected_output = "SELECT * FROM table1;";
+        assert_eq!(remove_comments_and_join_query_lines(input), expected_output);
+    }
+
+    #[test]
+    fn test_whitespace_only() {
+        let input = " \n -- comment here\n ";
+        let expected_output = "";
+        assert_eq!(remove_comments_and_join_query_lines(input), expected_output);
+    }
+}
diff --git a/catalyst-gateway/bin/src/main.rs b/catalyst-gateway/bin/src/main.rs
index 7331e373d3..b2b591e0f3 100644
--- a/catalyst-gateway/bin/src/main.rs
+++ b/catalyst-gateway/bin/src/main.rs
@@ -8,6 +8,7 @@ mod db;
 mod logger;
 mod service;
 mod settings;
+mod utils;
 
 #[tokio::main]
 async fn main() -> anyhow::Result<()> {
diff --git a/catalyst-gateway/bin/src/service/api/cardano/types.rs b/catalyst-gateway/bin/src/service/api/cardano/types.rs
index f12c4c649c..b1b4379e34 100644
--- a/catalyst-gateway/bin/src/service/api/cardano/types.rs
+++ b/catalyst-gateway/bin/src/service/api/cardano/types.rs
@@ -4,9 +4,10 @@
 //! Event DB logic for chain-sync. They should be replaced with proper types in a better
 //! place.
 
-use cryptoxide::{blake2b::Blake2b, digest::Digest};
 use serde::{Deserialize, Serialize};
 
+use crate::utils::blake2b_hash::blake2b_224;
+
 /// Pub key
 #[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]
 pub(crate) struct PubKey(Vec<u8>);
@@ -15,11 +16,7 @@ impl PubKey {
     /// Get credentials, a blake2b 28 bytes hash of the pub key
     #[allow(dead_code)]
     pub(crate) fn get_credentials(&self) -> [u8; 28] {
-        let mut digest = [0u8; 28];
-        let mut context = Blake2b::new(28);
-        context.input(&self.0);
-        context.result(&mut digest);
-        digest
+        blake2b_224(&self.0)
     }
 
     /// Get bytes
diff --git a/catalyst-gateway/bin/src/service/utilities/middleware/tracing_mw.rs b/catalyst-gateway/bin/src/service/utilities/middleware/tracing_mw.rs
index 7ac2cfbfce..385d247840 100644
--- a/catalyst-gateway/bin/src/service/utilities/middleware/tracing_mw.rs
+++ b/catalyst-gateway/bin/src/service/utilities/middleware/tracing_mw.rs
@@ -2,7 +2,6 @@
 use std::{sync::LazyLock, time::Instant};
 
 use cpu_time::ProcessTime; // ThreadTime doesn't work.
-use cryptoxide::{blake2b::Blake2b, digest::Digest};
 use poem::{
     http::{header, HeaderMap},
     web::RealIp,
@@ -17,7 +16,7 @@ use tracing::{error, field, Instrument, Level, Span};
 use ulid::Ulid;
 use uuid::Uuid;
 
-use crate::settings::Settings;
+use crate::{settings::Settings, utils::blake2b_hash::generate_uuid_string_from_data};
 
 /// Labels for the metrics
 const METRIC_LABELS: [&str; 3] = ["endpoint", "method", "status_code"];
@@ -124,20 +123,8 @@ pub(crate) struct TracingEndpoint {
 
 /// Given a Clients IP Address, return the anonymized version of it.
 fn anonymize_ip_address(remote_addr: &str) -> String {
-    // We are going to represent it as a UUID.
-    let mut b2b = Blake2b::new_keyed(16, Settings::client_id_key().as_bytes());
-    let mut out = [0; 16];
-
-    b2b.input_str(Settings::client_id_key());
-    b2b.input_str(remote_addr);
-    b2b.result(&mut out);
-
-    uuid::Builder::from_bytes(out)
-        .with_version(uuid::Version::Random)
-        .with_variant(uuid::Variant::RFC4122)
-        .into_uuid()
-        .hyphenated()
-        .to_string()
+    let addr: Vec<String> = vec![remote_addr.to_string()];
+    generate_uuid_string_from_data(Settings::client_id_key(), &addr)
 }
 
 /// Get an anonymized client ID from the request.
diff --git a/catalyst-gateway/bin/src/settings/mod.rs b/catalyst-gateway/bin/src/settings/mod.rs
index fa96587296..6d86cddefc 100644
--- a/catalyst-gateway/bin/src/settings/mod.rs
+++ b/catalyst-gateway/bin/src/settings/mod.rs
@@ -9,7 +9,6 @@ use std::{
 
 use anyhow::anyhow;
 use clap::Args;
-use cryptoxide::{blake2b::Blake2b, mac::Mac};
 use dotenvy::dotenv;
 use duration_string::DurationString;
 use str_env_var::StringEnvVar;
@@ -20,6 +19,7 @@ use crate::{
     build_info::{log_build_info, BUILD_INFO},
     logger::{self, LogLevel, LOG_LEVEL_DEFAULT},
     service::utilities::net::{get_public_ipv4, get_public_ipv6},
+    utils::blake2b_hash::generate_uuid_string_from_data,
 };
 
 pub(crate) mod cassandra_db;
@@ -58,21 +58,9 @@ const EVENT_DB_URL_DEFAULT: &str =
 /// Hash the Public IPv4 and IPv6 address of the machine, and convert to a 128 bit V4
 /// UUID.
 fn calculate_service_uuid() -> String {
-    let mut hasher = Blake2b::new_keyed(16, "Catalyst-Gateway-Machine-UID".as_bytes());
+    let ip_addr: Vec<String> = vec![get_public_ipv4().to_string(), get_public_ipv6().to_string()];
 
-    let ipv4 = get_public_ipv4().to_string();
-    let ipv6 = get_public_ipv6().to_string();
-
-    hasher.input(ipv4.as_bytes());
-    hasher.input(ipv6.as_bytes());
-
-    let mut hash = [0u8; 16];
-
-    hasher.raw_result(&mut hash);
-    uuid::Builder::from_custom_bytes(hash)
-        .into_uuid()
-        .hyphenated()
-        .to_string()
+    generate_uuid_string_from_data("Catalyst-Gateway-Machine-UID", &ip_addr)
 }
 
 /// Settings for the application.
diff --git a/catalyst-gateway/bin/src/utils/blake2b_hash.rs b/catalyst-gateway/bin/src/utils/blake2b_hash.rs
new file mode 100644
index 0000000000..de8485c625
--- /dev/null
+++ b/catalyst-gateway/bin/src/utils/blake2b_hash.rs
@@ -0,0 +1,134 @@
+//! Types of Blake-2b Hash
+
+use blake2b_simd::Params;
+
+/// Generates a UUID string from the provided key and data using `BLAKE2b` hashing.
+///
+/// # Arguments
+/// - `key`: A string slice that is used as part of the hash function input.
+/// - `data`: A vector of strings which will be included in the `BLAKE2b` hash
+///   computation.
+///
+/// # Returns
+/// A UUID string generated from the `BLAKE2b` hash of the concatenated data with the key.
+pub(crate) fn generate_uuid_string_from_data(key: &str, data: &[String]) -> String {
+    // Where we will actually store the bytes we derive the UUID from.
+    let mut bytes: uuid::Bytes = uuid::Bytes::default();
+
+    // Generate a unique hash of the data.
+    let mut hasher = Params::new()
+        .hash_length(bytes.len())
+        .key(key.as_bytes())
+        .personal(b"Project Catalyst")
+        .to_state();
+
+    for datum in data {
+        hasher.update(datum.as_bytes());
+    }
+
+    // Finalize the hash and get the digest as a byte array
+    let hash = hasher.finalize();
+
+    // Copy the 16 byte hash output into the UUID byte array.
+    bytes.copy_from_slice(hash.as_bytes());
+
+    // Convert the hash to a UUID
+    uuid::Builder::from_custom_bytes(bytes)
+        .as_uuid()
+        .as_hyphenated()
+        .to_string()
+}
+
+/// 224 Bit Blake2b Hash
+pub(crate) type Blake2b224 = [u8; 28];
+
+/// Computes a BLAKE2b-224 hash of the input bytes.
+///
+/// # Arguments
+/// - `input_bytes`: A slice of bytes to be hashed.
+///
+/// # Returns
+/// An array containing the BLAKE2b-224 hash of the input bytes.
+pub(crate) fn blake2b_224(input_bytes: &[u8]) -> Blake2b224 {
+    // Where we will actually store the hash output.
+    let mut bytes: Blake2b224 = Blake2b224::default();
+
+    // Generate a unique hash of the data.
+    let mut hasher = Params::new().hash_length(bytes.len()).to_state();
+
+    hasher.update(input_bytes);
+    let hash = hasher.finalize();
+
+    // Copy the full 28 byte hash output into the fixed size array.
+    bytes.copy_from_slice(hash.as_bytes());
+
+    bytes
+}
+
+/// 256 Bit Blake2b Hash
+pub(crate) type Blake2b256 = [u8; 32];
+
+/// Computes a BLAKE2b-256 hash of the input bytes.
+///
+/// # Arguments
+/// - `input_bytes`: A slice of bytes to be hashed.
+///
+/// # Returns
+/// An array containing the BLAKE2b-256 hash of the input bytes.
+#[allow(dead_code)]
+pub(crate) fn blake2b_256(input_bytes: &[u8]) -> Blake2b256 {
+    // Where we will actually store the hash output.
+    let mut bytes: Blake2b256 = Blake2b256::default();
+
+    // Generate a unique hash of the data.
+    let mut hasher = Params::new().hash_length(bytes.len()).to_state();
+
+    hasher.update(input_bytes);
+    let hash = hasher.finalize();
+
+    // Copy the full 32 byte hash output into the fixed size array.
+    bytes.copy_from_slice(hash.as_bytes());
+
+    bytes
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_generate_uuid_string_from_data() {
+        let key = "test key";
+        let data = vec!["test1".to_string(), "test2".to_string()];
+
+        // Call the function under test
+        let uuid_str = generate_uuid_string_from_data(key, &data);
+
+        // Verify that the output is a valid UUID string
+        assert!(uuid::Uuid::parse_str(&uuid_str).is_ok());
+    }
+
+    #[test]
+    fn test_generate_uuid_string_from_data_empty_data() {
+        let key = "test key";
+        let data: Vec<String> = vec![];
+
+        // Call the function under test
+        let uuid_str = generate_uuid_string_from_data(key, &data);
+
+        // Verify that the output is a valid UUID string
+        assert!(uuid::Uuid::parse_str(&uuid_str).is_ok());
+    }
+
+    #[test]
+    fn test_generate_uuid_string_from_data_empty_key() {
+        let key = "";
+        let data = vec!["test1".to_string(), "test2".to_string()];
+
+        // Call the function under test
+        let uuid_str = generate_uuid_string_from_data(key, &data);
+
+        // Verify that the output is a valid UUID string
+        assert!(uuid::Uuid::parse_str(&uuid_str).is_ok());
+    }
+}
diff --git a/catalyst-gateway/bin/src/utils/mod.rs b/catalyst-gateway/bin/src/utils/mod.rs
new file mode 100644
index 0000000000..dd92d18e20
--- /dev/null
+++ b/catalyst-gateway/bin/src/utils/mod.rs
@@ -0,0 +1,3 @@
+//! General Purpose utility functions
+
+pub(crate) mod blake2b_hash;
diff --git a/catalyst-gateway/event-db/Earthfile b/catalyst-gateway/event-db/Earthfile
index 79c7917b10..1d35538d73 100644
--- a/catalyst-gateway/event-db/Earthfile
+++ b/catalyst-gateway/event-db/Earthfile
@@ -3,7 +3,7 @@
 # the database and its associated software.
 VERSION 0.8
 
-IMPORT github.com/input-output-hk/catalyst-ci/earthly/postgresql:v3.2.07 AS postgresql-ci
+IMPORT github.com/input-output-hk/catalyst-ci/earthly/postgresql:v3.2.10 AS postgresql-ci
 
 # cspell: words
 
diff --git a/catalyst-gateway/rust-toolchain.toml b/catalyst-gateway/rust-toolchain.toml
index 20a42f2a9f..f01d02df3b 100644
--- a/catalyst-gateway/rust-toolchain.toml
+++ b/catalyst-gateway/rust-toolchain.toml
@@ -1,3 +1,3 @@
 [toolchain]
-channel = "1.80"
+channel = "1.81"
 profile = "default"
\ No newline at end of file
diff --git a/catalyst-gateway/tests/Earthfile b/catalyst-gateway/tests/Earthfile
index d3e0b7df81..5c12cc9fa0 100644
--- a/catalyst-gateway/tests/Earthfile
+++ b/catalyst-gateway/tests/Earthfile
@@ -1,5 +1,5 @@
 VERSION 0.8
-IMPORT github.com/input-output-hk/catalyst-ci/earthly/spectral:v3.2.07 AS spectral-ci
+IMPORT github.com/input-output-hk/catalyst-ci/earthly/spectral:v3.2.10 AS spectral-ci
 
 # test-lint-openapi - OpenAPI linting from an artifact
 # testing whether the OpenAPI generated during build stage follows good practice.
diff --git a/catalyst-gateway/tests/api_tests/Earthfile b/catalyst-gateway/tests/api_tests/Earthfile
index 3ebea8e0f8..ebbb339d6e 100644
--- a/catalyst-gateway/tests/api_tests/Earthfile
+++ b/catalyst-gateway/tests/api_tests/Earthfile
@@ -1,6 +1,6 @@
 VERSION 0.8
 
-IMPORT github.com/input-output-hk/catalyst-ci/earthly/python:v3.2.07 AS python-ci
+IMPORT github.com/input-output-hk/catalyst-ci/earthly/python:v3.2.10 AS python-ci
 
 builder:
     FROM python-ci+python-base
diff --git a/catalyst_voices/Earthfile b/catalyst_voices/Earthfile
index abbf261917..0339877018 100644
--- a/catalyst_voices/Earthfile
+++ b/catalyst_voices/Earthfile
@@ -1,7 +1,7 @@
 VERSION 0.8
 
 IMPORT ../catalyst-gateway AS catalyst-gateway
-IMPORT github.com/input-output-hk/catalyst-ci/earthly/flutter:v3.2.07 AS flutter-ci
+IMPORT github.com/input-output-hk/catalyst-ci/earthly/flutter:v3.2.10 AS flutter-ci
 
 # Copy all the necessary files and running bootstrap
 builder:
diff --git a/catalyst_voices/uikit_example/Earthfile b/catalyst_voices/uikit_example/Earthfile
index dd7eec8dda..eed673c6ce 100644
--- a/catalyst_voices/uikit_example/Earthfile
+++ b/catalyst_voices/uikit_example/Earthfile
@@ -1,7 +1,7 @@
 VERSION 0.8
 
 IMPORT ../ AS catalyst-voices
-IMPORT github.com/input-output-hk/catalyst-ci/earthly/flutter:v3.2.07 AS flutter-ci
+IMPORT github.com/input-output-hk/catalyst-ci/earthly/flutter:v3.2.10 AS flutter-ci
 
 # local-build-web - build web version of UIKit example.
# Prefixed by "local" to make sure it's not auto triggered, the target was diff --git a/catalyst_voices_packages/catalyst_cardano/catalyst_cardano/wallet-automation/Earthfile b/catalyst_voices_packages/catalyst_cardano/catalyst_cardano/wallet-automation/Earthfile index 68ae4dc003..6e91cc3d71 100644 --- a/catalyst_voices_packages/catalyst_cardano/catalyst_cardano/wallet-automation/Earthfile +++ b/catalyst_voices_packages/catalyst_cardano/catalyst_cardano/wallet-automation/Earthfile @@ -1,5 +1,5 @@ VERSION 0.8 -IMPORT github.com/input-output-hk/catalyst-ci/earthly/flutter:v3.2.07 AS flutter-ci +IMPORT github.com/input-output-hk/catalyst-ci/earthly/flutter:v3.2.10 AS flutter-ci deps: FROM mcr.microsoft.com/playwright:v1.45.2-jammy @@ -51,7 +51,7 @@ package-app: SAVE IMAGE test-app:$tag nightly-test: - FROM earthly/alpine-3.20-docker-26.1.5-r0 + FROM earthly/dind:alpine-3.20-docker-26.1.5-r0 COPY compose.yml . WITH DOCKER \ --compose compose.yml \ diff --git a/docs/Earthfile b/docs/Earthfile index 741d0f4867..a027fcdece 100644 --- a/docs/Earthfile +++ b/docs/Earthfile @@ -1,6 +1,6 @@ VERSION 0.8 -IMPORT github.com/input-output-hk/catalyst-ci/earthly/docs:v3.2.07 AS docs-ci +IMPORT github.com/input-output-hk/catalyst-ci/earthly/docs:v3.2.10 AS docs-ci IMPORT .. AS repo IMPORT ../catalyst-gateway AS catalyst-gateway