diff --git a/.config/dictionaries/project.dic b/.config/dictionaries/project.dic index 16219e6214..95c8675ac7 100644 --- a/.config/dictionaries/project.dic +++ b/.config/dictionaries/project.dic @@ -137,6 +137,7 @@ pytest rapidoc redoc Replayability +reloadable repr reqwest rfwtxt @@ -159,6 +160,7 @@ slotno sqlfluff Stefano stevenj +stringzilla Subkey subosito SYSROOT diff --git a/.github/workflows/generate-allure-report.yml b/.github/workflows/generate-allure-report.yml index fec4a0ca2d..89ec6926f5 100644 --- a/.github/workflows/generate-allure-report.yml +++ b/.github/workflows/generate-allure-report.yml @@ -53,7 +53,7 @@ jobs: if: always() continue-on-error: true with: - earthfile: ./catalyst-gateway/tests/ + earthfile: ./catalyst-gateway/tests/schemathesis_tests flags: --allow-privileged targets: test-fuzzer-api target_flags: diff --git a/.gitignore b/.gitignore index d6aba0034f..ee537b0c12 100644 --- a/.gitignore +++ b/.gitignore @@ -107,3 +107,6 @@ dev-catalyst-voice-9f78f27c6bc5.json # Specifically exclude it in the directory it appears, if its required. # More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html Cargo.lock + +#Hypothesis +.hypothesis diff --git a/catalyst-gateway/Cargo.toml b/catalyst-gateway/Cargo.toml index 7b85571ab8..e3bbad020c 100644 --- a/catalyst-gateway/Cargo.toml +++ b/catalyst-gateway/Cargo.toml @@ -50,6 +50,7 @@ cddl = "0.9.2" ciborium = "0.2" pallas = { git = "https://github.com/input-output-hk/catalyst-pallas.git", rev = "709acb19c52c6b789279ecc4bc8793b5d8b5abe9", version = "0.25.0" } cardano-chain-follower = { git = "https://github.com/input-output-hk/hermes.git", version="0.0.1" } +stringzilla = "3.8.4" [workspace.lints.rust] warnings = "deny" diff --git a/catalyst-gateway/bin/Cargo.toml b/catalyst-gateway/bin/Cargo.toml index f08b2b7a71..7d702c6e01 100644 --- a/catalyst-gateway/bin/Cargo.toml +++ b/catalyst-gateway/bin/Cargo.toml @@ -24,7 +24,7 @@ tokio-postgres = { workspace = true, features = [ ] } clap = { workspace = true, features = ["derive", "env"] } tracing = { workspace = true, features = ["log"] } -tracing-subscriber = { workspace = true, features = ["fmt", "json", "time"] } +tracing-subscriber = { workspace = true, features = ["fmt", "json", "registry", "std", "time"] } serde = { workspace = true, features = ["derive"] } serde_json = { workspace = true } tokio = { workspace = true, features = ["rt", "macros", "rt-multi-thread"] } @@ -68,3 +68,4 @@ handlebars = { workspace = true } cddl = { workspace = true } ciborium = { workspace = true } ed25519-dalek = "2.1.1" +stringzilla = { workspace = true } diff --git a/catalyst-gateway/bin/src/cli.rs b/catalyst-gateway/bin/src/cli.rs index aae2367fd3..502b626c0f 100644 --- a/catalyst-gateway/bin/src/cli.rs +++ b/catalyst-gateway/bin/src/cli.rs @@ -38,13 +38,16 @@ impl Cli { pub(crate) async fn exec(self) -> anyhow::Result<()> { match self { Self::Run(settings) => { - logger::init(settings.log_level)?; + let logger_handle = logger::init(settings.log_level); // Unique machine id let machine_id = settings.follower_settings.machine_uid; - let state = Arc::new(State::new(Some(settings.database_url)).await?); + let state = Arc::new(State::new(Some(settings.database_url), logger_handle).await?); let event_db = state.event_db(); + event_db + .modify_deep_query(settings.deep_query_inspection.into()) + .await; tokio::spawn(async move { match service::run(&settings.docs_settings, state.clone()).await { diff --git a/catalyst-gateway/bin/src/event_db/cardano/chain_state/mod.rs 
b/catalyst-gateway/bin/src/event_db/cardano/chain_state/mod.rs index d028ac2cf1..3127652ab7 100644 --- a/catalyst-gateway/bin/src/event_db/cardano/chain_state/mod.rs +++ b/catalyst-gateway/bin/src/event_db/cardano/chain_state/mod.rs @@ -58,23 +58,17 @@ impl SlotInfoQueryType { /// Get SQL query fn get_sql_query(&self) -> anyhow::Result { let tmpl_fields = match self { - SlotInfoQueryType::Previous => { - SlotInfoQueryTmplFields { - sign: "<", - ordering: Some("DESC"), - } + SlotInfoQueryType::Previous => SlotInfoQueryTmplFields { + sign: "<", + ordering: Some("DESC"), }, - SlotInfoQueryType::Current => { - SlotInfoQueryTmplFields { - sign: "=", - ordering: None, - } + SlotInfoQueryType::Current => SlotInfoQueryTmplFields { + sign: "=", + ordering: None, }, - SlotInfoQueryType::Next => { - SlotInfoQueryTmplFields { - sign: ">", - ordering: None, - } + SlotInfoQueryType::Next => SlotInfoQueryTmplFields { + sign: ">", + ordering: None, }, }; @@ -161,13 +155,16 @@ impl EventDB { let sink = tx .copy_in("COPY tmp_cardano_slot_index (slot_no, network, epoch_no, block_time, block_hash) FROM STDIN BINARY") .await?; - let writer = BinaryCopyInWriter::new(sink, &[ - Type::INT8, - Type::TEXT, - Type::INT8, - Type::TIMESTAMPTZ, - Type::BYTEA, - ]); + let writer = BinaryCopyInWriter::new( + sink, + &[ + Type::INT8, + Type::TEXT, + Type::INT8, + Type::TIMESTAMPTZ, + Type::BYTEA, + ], + ); tokio::pin!(writer); for params in values { @@ -197,13 +194,11 @@ impl EventDB { pub(crate) async fn get_slot_info( &self, date_time: DateTime, network: Network, query_type: SlotInfoQueryType, ) -> anyhow::Result<(SlotNumber, BlockHash, DateTime)> { - let conn = self.pool.get().await?; - - let rows = conn - .query(&query_type.get_sql_query()?, &[ - &network.to_string(), - &date_time, - ]) + let rows = self + .query( + &query_type.get_sql_query()?, + &[&network.to_string(), &date_time], + ) .await?; let row = rows.first().ok_or(NotFoundError)?; @@ -218,9 +213,7 @@ impl EventDB { pub(crate) async fn last_updated_state( &self, network: Network, ) -> anyhow::Result<(SlotNumber, BlockHash, DateTime)> { - let conn = self.pool.get().await?; - - let rows = conn + let rows = self .query(SELECT_UPDATE_STATE_SQL, &[&network.to_string()]) .await?; @@ -239,8 +232,6 @@ impl EventDB { &self, last_updated: DateTime, slot_no: SlotNumber, block_hash: BlockHash, network: Network, machine_id: &MachineId, ) -> anyhow::Result<()> { - let conn = self.pool.get().await?; - // Rollback or update let update = true; @@ -248,8 +239,9 @@ impl EventDB { // An insert only happens once when there is no update metadata available // All future additions are just updates on ended, slot_no and block_hash - let _rows = conn - .query(INSERT_UPDATE_STATE_SQL, &[ + self.modify( + INSERT_UPDATE_STATE_SQL, + &[ &i64::try_from(network_id)?, &last_updated, &last_updated, @@ -258,8 +250,9 @@ impl EventDB { &network.to_string(), &block_hash, &update, - ]) - .await?; + ], + ) + .await?; Ok(()) } diff --git a/catalyst-gateway/bin/src/event_db/cardano/cip36_registration/mod.rs b/catalyst-gateway/bin/src/event_db/cardano/cip36_registration/mod.rs index cbbfaf19d1..5cc3cf1367 100644 --- a/catalyst-gateway/bin/src/event_db/cardano/cip36_registration/mod.rs +++ b/catalyst-gateway/bin/src/event_db/cardano/cip36_registration/mod.rs @@ -90,7 +90,6 @@ impl IndexedVoterRegistrationParams { } else { (None, None, None, None) }; - let encoded_voting_key = if let Some(voting_info) = voting_info.as_ref() { let Ok(enc) = serde_json::to_string(voting_info) else { return None; @@ 
-156,16 +155,19 @@ impl EventDB { let sink = tx .copy_in("COPY tmp_cardano_voter_registration (tx_id, stake_credential, public_voting_key, payment_address, nonce, metadata_cip36, stats, valid) FROM STDIN BINARY") .await?; - let writer = BinaryCopyInWriter::new(sink, &[ - Type::BYTEA, - Type::BYTEA, - Type::BYTEA, - Type::BYTEA, - Type::INT8, - Type::BYTEA, - Type::JSONB, - Type::BOOL, - ]); + let writer = BinaryCopyInWriter::new( + sink, + &[ + Type::BYTEA, + Type::BYTEA, + Type::BYTEA, + Type::BYTEA, + Type::INT8, + Type::BYTEA, + Type::JSONB, + Type::BOOL, + ], + ); tokio::pin!(writer); for params in values { @@ -201,14 +203,11 @@ impl EventDB { pub(crate) async fn get_registration_info( &self, stake_credential: StakeCredential, network: Network, slot_num: SlotNumber, ) -> anyhow::Result<(TxId, PaymentAddress, PublicVotingInfo, Nonce)> { - let conn = self.pool.get().await?; - - let rows = conn - .query(SELECT_VOTER_REGISTRATION_SQL, &[ - &stake_credential, - &network.to_string(), - &slot_num, - ]) + let rows = self + .query( + SELECT_VOTER_REGISTRATION_SQL, + &[&stake_credential, &network.to_string(), &slot_num], + ) .await?; let row = rows.first().ok_or(NotFoundError)?; diff --git a/catalyst-gateway/bin/src/event_db/cardano/config/mod.rs b/catalyst-gateway/bin/src/event_db/cardano/config/mod.rs index e114e3eb98..bbf7b42ff7 100644 --- a/catalyst-gateway/bin/src/event_db/cardano/config/mod.rs +++ b/catalyst-gateway/bin/src/event_db/cardano/config/mod.rs @@ -55,12 +55,10 @@ const SELECT_CONFIG_SQL: &str = include_str!("select_config.sql"); impl EventDB { /// Config query pub(crate) async fn get_follower_config(&self) -> anyhow::Result> { - let conn = self.pool.get().await?; - let id = "cardano"; let id2 = "follower"; - let rows = conn.query(SELECT_CONFIG_SQL, &[&id, &id2]).await?; + let rows = self.query(SELECT_CONFIG_SQL, &[&id, &id2]).await?; let mut follower_configs = Vec::new(); for row in rows { diff --git a/catalyst-gateway/bin/src/event_db/cardano/utxo/mod.rs b/catalyst-gateway/bin/src/event_db/cardano/utxo/mod.rs index c4ccd8eff6..1441445e93 100644 --- a/catalyst-gateway/bin/src/event_db/cardano/utxo/mod.rs +++ b/catalyst-gateway/bin/src/event_db/cardano/utxo/mod.rs @@ -144,13 +144,16 @@ impl EventDB { let sink = tx .copy_in("COPY tmp_cardano_utxo (tx_id, index, asset, stake_credential, value) FROM STDIN BINARY") .await?; - let writer = BinaryCopyInWriter::new(sink, &[ - Type::BYTEA, - Type::INT4, - Type::JSONB, - Type::BYTEA, - Type::INT8, - ]); + let writer = BinaryCopyInWriter::new( + sink, + &[ + Type::BYTEA, + Type::INT4, + Type::JSONB, + Type::BYTEA, + Type::INT8, + ], + ); tokio::pin!(writer); for params in values { @@ -272,14 +275,11 @@ impl EventDB { pub(crate) async fn total_utxo_amount( &self, stake_credential: StakeCredential, network: Network, slot_num: SlotNumber, ) -> anyhow::Result<(StakeAmount, SlotNumber)> { - let conn = self.pool.get().await?; - - let row = conn - .query_one(SELECT_TOTAL_UTXO_AMOUNT_SQL, &[ - &stake_credential, - &network.to_string(), - &slot_num, - ]) + let row = self + .query_one( + SELECT_TOTAL_UTXO_AMOUNT_SQL, + &[&stake_credential, &network.to_string(), &slot_num], + ) .await?; // Aggregate functions as SUM and MAX return NULL if there are no rows, so we need to diff --git a/catalyst-gateway/bin/src/event_db/legacy/queries/event/ballot.rs b/catalyst-gateway/bin/src/event_db/legacy/queries/event/ballot.rs index 969209176f..87b0a5ed04 100644 --- a/catalyst-gateway/bin/src/event_db/legacy/queries/event/ballot.rs +++ 
b/catalyst-gateway/bin/src/event_db/legacy/queries/event/ballot.rs @@ -53,9 +53,7 @@ impl EventDB { pub(crate) async fn get_ballot( &self, event: EventId, objective: ObjectiveId, proposal: ProposalId, ) -> anyhow::Result { - let conn = self.pool.get().await?; - - let rows = conn + let rows = self .query(Self::BALLOT_VOTE_OPTIONS_QUERY, &[ &event.0, &objective.0, @@ -65,7 +63,7 @@ impl EventDB { let row = rows.first().ok_or(NotFoundError)?; let choices = row.try_get("objective")?; - let rows = conn + let rows = self .query(Self::BALLOT_VOTE_PLANS_QUERY, &[ &event.0, &objective.0, @@ -96,9 +94,7 @@ impl EventDB { pub(crate) async fn get_objective_ballots( &self, event: EventId, objective: ObjectiveId, ) -> anyhow::Result> { - let conn = self.pool.get().await?; - - let rows = conn + let rows = self .query(Self::BALLOTS_VOTE_OPTIONS_PER_OBJECTIVE_QUERY, &[ &event.0, &objective.0, @@ -110,7 +106,7 @@ impl EventDB { let choices = row.try_get("objective")?; let proposal_id = ProposalId(row.try_get("proposal_id")?); - let rows = conn + let rows = self .query(Self::BALLOT_VOTE_PLANS_QUERY, &[ &event.0, &objective.0, @@ -146,9 +142,7 @@ impl EventDB { pub(crate) async fn get_event_ballots( &self, event: EventId, ) -> anyhow::Result> { - let conn = self.pool.get().await?; - - let rows = conn + let rows = self .query(Self::BALLOTS_VOTE_OPTIONS_PER_EVENT_QUERY, &[&event.0]) .await?; let mut ballots = HashMap::>::new(); @@ -157,7 +151,7 @@ impl EventDB { let proposal_id = ProposalId(row.try_get("proposal_id")?); let objective_id = ObjectiveId(row.try_get("objective_id")?); - let rows = conn + let rows = self .query(Self::BALLOT_VOTE_PLANS_QUERY, &[ &event.0, &objective_id.0, diff --git a/catalyst-gateway/bin/src/event_db/legacy/queries/event/mod.rs b/catalyst-gateway/bin/src/event_db/legacy/queries/event/mod.rs index 9b0e5a1352..329426a151 100644 --- a/catalyst-gateway/bin/src/event_db/legacy/queries/event/mod.rs +++ b/catalyst-gateway/bin/src/event_db/legacy/queries/event/mod.rs @@ -45,9 +45,7 @@ impl EventDB { pub(crate) async fn get_events( &self, limit: Option, offset: Option, ) -> anyhow::Result> { - let conn = self.pool.get().await?; - - let rows = conn + let rows = self .query(Self::EVENTS_QUERY, &[&limit, &offset.unwrap_or(0)]) .await?; @@ -77,9 +75,7 @@ impl EventDB { /// Get event query #[allow(dead_code)] pub(crate) async fn get_event(&self, event: EventId) -> anyhow::Result { - let conn = self.pool.get().await?; - - let rows = conn.query(Self::EVENT_QUERY, &[&event.0]).await?; + let rows = self.query(Self::EVENT_QUERY, &[&event.0]).await?; let row = rows.first().ok_or(NotFoundError)?; let ends = row @@ -133,7 +129,7 @@ impl EventDB { .map(|val| val.and_local_timezone(Utc).unwrap()), }; - let rows = conn.query(Self::EVENT_GOALS_QUERY, &[&event.0]).await?; + let rows = self.query(Self::EVENT_GOALS_QUERY, &[&event.0]).await?; let mut goals = Vec::new(); for row in rows { goals.push(EventGoal { diff --git a/catalyst-gateway/bin/src/event_db/legacy/queries/event/objective.rs b/catalyst-gateway/bin/src/event_db/legacy/queries/event/objective.rs index aecacb2714..d702d2efb8 100644 --- a/catalyst-gateway/bin/src/event_db/legacy/queries/event/objective.rs +++ b/catalyst-gateway/bin/src/event_db/legacy/queries/event/objective.rs @@ -33,9 +33,7 @@ impl EventDB { pub(crate) async fn get_objectives( &self, event: EventId, limit: Option, offset: Option, ) -> anyhow::Result> { - let conn = self.pool.get().await?; - - let rows = conn + let rows = self .query(Self::OBJECTIVES_QUERY, &[ &event.0, &limit, @@ 
-64,7 +62,7 @@ impl EventDB { }; let mut groups = Vec::new(); - let rows = conn.query(Self::VOTING_GROUPS_QUERY, &[&row_id]).await?; + let rows = self.query(Self::VOTING_GROUPS_QUERY, &[&row_id]).await?; for row in rows { let group = row.try_get::<_, Option>("group")?.map(VoterGroupId); let voting_token: Option<_> = row.try_get("voting_token")?; diff --git a/catalyst-gateway/bin/src/event_db/legacy/queries/event/proposal.rs b/catalyst-gateway/bin/src/event_db/legacy/queries/event/proposal.rs index fb0340ceb0..f4465a4e7a 100644 --- a/catalyst-gateway/bin/src/event_db/legacy/queries/event/proposal.rs +++ b/catalyst-gateway/bin/src/event_db/legacy/queries/event/proposal.rs @@ -33,11 +33,7 @@ impl EventDB { pub(crate) async fn get_proposal( &self, event: EventId, objective: ObjectiveId, proposal: ProposalId, ) -> anyhow::Result { - let conn: bb8::PooledConnection< - bb8_postgres::PostgresConnectionManager, - > = self.pool.get().await?; - - let rows = conn + let rows = self .query(Self::PROPOSAL_QUERY, &[&event.0, &objective.0, &proposal.0]) .await?; let row = rows.first().ok_or(NotFoundError)?; @@ -72,9 +68,7 @@ impl EventDB { pub(crate) async fn get_proposals( &self, event: EventId, objective: ObjectiveId, limit: Option, offset: Option, ) -> anyhow::Result> { - let conn = self.pool.get().await?; - - let rows = conn + let rows = self .query(Self::PROPOSALS_QUERY, &[ &event.0, &objective.0, diff --git a/catalyst-gateway/bin/src/event_db/legacy/queries/event/review.rs b/catalyst-gateway/bin/src/event_db/legacy/queries/event/review.rs index 9ad6cb529b..7faa54c32d 100644 --- a/catalyst-gateway/bin/src/event_db/legacy/queries/event/review.rs +++ b/catalyst-gateway/bin/src/event_db/legacy/queries/event/review.rs @@ -41,9 +41,7 @@ impl EventDB { &self, event: EventId, objective: ObjectiveId, proposal: ProposalId, limit: Option, offset: Option, ) -> anyhow::Result> { - let conn = self.pool.get().await?; - - let rows = conn + let rows = self .query(Self::REVIEWS_QUERY, &[ &event.0, &objective.0, @@ -59,7 +57,7 @@ impl EventDB { let review_id: i32 = row.try_get("row_id")?; let mut ratings = Vec::new(); - let rows = conn + let rows = self .query(Self::RATINGS_PER_REVIEW_QUERY, &[&review_id]) .await?; for row in rows { @@ -81,9 +79,7 @@ impl EventDB { pub(crate) async fn get_review_types( &self, event: EventId, objective: ObjectiveId, limit: Option, offset: Option, ) -> anyhow::Result> { - let conn = self.pool.get().await?; - - let rows = conn + let rows = self .query(Self::REVIEW_TYPES_QUERY, &[ &event.0, &objective.0, diff --git a/catalyst-gateway/bin/src/event_db/legacy/queries/registration.rs b/catalyst-gateway/bin/src/event_db/legacy/queries/registration.rs index 07bd85c82b..805fe73222 100644 --- a/catalyst-gateway/bin/src/event_db/legacy/queries/registration.rs +++ b/catalyst-gateway/bin/src/event_db/legacy/queries/registration.rs @@ -76,13 +76,11 @@ impl EventDB { pub(crate) async fn get_voter( &self, event: &Option, voting_key: String, with_delegations: bool, ) -> anyhow::Result { - let conn = self.pool.get().await?; - let rows = if let Some(event) = event { - conn.query(Self::VOTER_BY_EVENT_QUERY, &[&voting_key, &event.0]) + self.query(Self::VOTER_BY_EVENT_QUERY, &[&voting_key, &event.0]) .await? } else { - conn.query(Self::VOTER_BY_LAST_EVENT_QUERY, &[&voting_key]) + self.query(Self::VOTER_BY_LAST_EVENT_QUERY, &[&voting_key]) .await? 
}; let voter = rows.first().ok_or(NotFoundError)?; @@ -91,13 +89,13 @@ impl EventDB { let voting_power = voter.try_get("voting_power")?; let rows = if let Some(event) = event { - conn.query(Self::TOTAL_BY_EVENT_VOTING_QUERY, &[ + self.query(Self::TOTAL_BY_EVENT_VOTING_QUERY, &[ &voting_group.0, &event.0, ]) .await? } else { - conn.query(Self::TOTAL_BY_LAST_EVENT_VOTING_QUERY, &[&voting_group.0]) + self.query(Self::TOTAL_BY_LAST_EVENT_VOTING_QUERY, &[&voting_group.0]) .await? }; @@ -120,10 +118,10 @@ impl EventDB { let delegator_addresses = if with_delegations { let rows = if let Some(event) = event { - conn.query(Self::VOTER_DELEGATORS_LIST_QUERY, &[&voting_key, &event.0]) + self.query(Self::VOTER_DELEGATORS_LIST_QUERY, &[&voting_key, &event.0]) .await? } else { - conn.query(Self::VOTER_DELEGATORS_LIST_QUERY, &[ + self.query(Self::VOTER_DELEGATORS_LIST_QUERY, &[ &voting_key, &voter.try_get::<_, i32>("event")?, ]) @@ -165,24 +163,23 @@ impl EventDB { pub(crate) async fn get_delegator( &self, event: &Option, stake_public_key: String, ) -> anyhow::Result { - let conn = self.pool.get().await?; let rows = if let Some(event) = event { - conn.query(Self::DELEGATOR_SNAPSHOT_INFO_BY_EVENT_QUERY, &[&event.0]) + self.query(Self::DELEGATOR_SNAPSHOT_INFO_BY_EVENT_QUERY, &[&event.0]) .await? } else { - conn.query(Self::DELEGATOR_SNAPSHOT_INFO_BY_LAST_EVENT_QUERY, &[]) + self.query(Self::DELEGATOR_SNAPSHOT_INFO_BY_LAST_EVENT_QUERY, &[]) .await? }; let delegator_snapshot_info = rows.first().ok_or(NotFoundError)?; let delegation_rows = if let Some(event) = event { - conn.query(Self::DELEGATIONS_BY_EVENT_QUERY, &[ + self.query(Self::DELEGATIONS_BY_EVENT_QUERY, &[ &stake_public_key, &event.0, ]) .await? } else { - conn.query(Self::DELEGATIONS_BY_EVENT_QUERY, &[ + self.query(Self::DELEGATIONS_BY_EVENT_QUERY, &[ &stake_public_key, &delegator_snapshot_info.try_get::<_, i32>("event")?, ]) @@ -203,10 +200,10 @@ impl EventDB { } let rows = if let Some(version) = event { - conn.query(Self::TOTAL_POWER_BY_EVENT_QUERY, &[&version.0]) + self.query(Self::TOTAL_POWER_BY_EVENT_QUERY, &[&version.0]) .await? } else { - conn.query(Self::TOTAL_POWER_BY_LAST_EVENT_QUERY, &[]) + self.query(Self::TOTAL_POWER_BY_LAST_EVENT_QUERY, &[]) .await? 
}; let total_power: i64 = rows diff --git a/catalyst-gateway/bin/src/event_db/legacy/queries/search.rs b/catalyst-gateway/bin/src/event_db/legacy/queries/search.rs index 4c0f0dfb3a..4ea8d650be 100644 --- a/catalyst-gateway/bin/src/event_db/legacy/queries/search.rs +++ b/catalyst-gateway/bin/src/event_db/legacy/queries/search.rs @@ -107,9 +107,7 @@ impl EventDB { async fn search_total( &self, search_query: SearchQuery, limit: Option, offset: Option, ) -> anyhow::Result { - let conn = self.pool.get().await?; - - let rows: Vec = conn + let rows: Vec = self .query(&Self::construct_count_query(&search_query), &[ &limit, &offset.unwrap_or(0), @@ -128,8 +126,7 @@ impl EventDB { async fn search_events( &self, search_query: SearchQuery, limit: Option, offset: Option, ) -> anyhow::Result { - let conn = self.pool.get().await?; - let rows: Vec = conn + let rows: Vec = self .query(&Self::construct_query(&search_query), &[ &limit, &offset.unwrap_or(0), @@ -169,8 +166,7 @@ impl EventDB { async fn search_objectives( &self, search_query: SearchQuery, limit: Option, offset: Option, ) -> anyhow::Result { - let conn = self.pool.get().await?; - let rows: Vec = conn + let rows: Vec = self .query(&Self::construct_query(&search_query), &[ &limit, &offset.unwrap_or(0), @@ -205,9 +201,7 @@ impl EventDB { async fn search_proposals( &self, search_query: SearchQuery, limit: Option, offset: Option, ) -> anyhow::Result { - let conn = self.pool.get().await?; - - let rows: Vec = conn + let rows: Vec = self .query(&Self::construct_query(&search_query), &[ &limit, &offset.unwrap_or(0), diff --git a/catalyst-gateway/bin/src/event_db/legacy/queries/vit_ss/fund.rs b/catalyst-gateway/bin/src/event_db/legacy/queries/vit_ss/fund.rs index 723abc42e6..844846ae26 100644 --- a/catalyst-gateway/bin/src/event_db/legacy/queries/vit_ss/fund.rs +++ b/catalyst-gateway/bin/src/event_db/legacy/queries/vit_ss/fund.rs @@ -109,9 +109,7 @@ impl EventDB { // TODO(stevenj): https://github.com/input-output-hk/catalyst-voices/issues/68 #[allow(dead_code, clippy::too_many_lines)] pub(crate) async fn get_fund(&self) -> anyhow::Result { - let conn = self.pool.get().await?; - - let rows = conn.query(Self::FUND_QUERY, &[]).await?; + let rows = self.query(Self::FUND_QUERY, &[]).await?; let row = rows.first().ok_or(NotFoundError)?; let fund_id = row.try_get("id")?; @@ -132,7 +130,7 @@ impl EventDB { .and_local_timezone(Utc) .unwrap(); - let rows = conn.query(Self::FUND_VOTE_PLANS_QUERY, &[&fund_id]).await?; + let rows = self.query(Self::FUND_VOTE_PLANS_QUERY, &[&fund_id]).await?; let mut chain_vote_plans = Vec::new(); for row in rows { chain_vote_plans.push(Voteplan { @@ -152,7 +150,7 @@ impl EventDB { }); } - let rows = conn.query(Self::FUND_CHALLENGES_QUERY, &[&fund_id]).await?; + let rows = self.query(Self::FUND_CHALLENGES_QUERY, &[&fund_id]).await?; let mut challenges = Vec::new(); for row in rows { challenges.push(Challenge { @@ -177,7 +175,7 @@ impl EventDB { }); } - let rows = conn.query(Self::FUND_GOALS_QUERY, &[&fund_id]).await?; + let rows = self.query(Self::FUND_GOALS_QUERY, &[&fund_id]).await?; let mut goals = Vec::new(); for row in rows { goals.push(Goal { @@ -187,7 +185,7 @@ impl EventDB { }); } - let rows = conn.query(Self::FUND_GROUPS_QUERY, &[&fund_id]).await?; + let rows = self.query(Self::FUND_GROUPS_QUERY, &[&fund_id]).await?; let mut groups = Vec::new(); for row in rows { groups.push(Group { diff --git a/catalyst-gateway/bin/src/event_db/mod.rs b/catalyst-gateway/bin/src/event_db/mod.rs index 1c1c2e25f2..9dcb83b9aa 100644 --- 
a/catalyst-gateway/bin/src/event_db/mod.rs +++ b/catalyst-gateway/bin/src/event_db/mod.rs @@ -1,10 +1,13 @@ //! Catalyst Election Database crate -use std::str::FromStr; +use std::{str::FromStr, sync::Arc}; use bb8::Pool; use bb8_postgres::PostgresConnectionManager; use dotenvy::dotenv; -use tokio_postgres::NoTls; +use stringzilla::StringZilla; +use tokio::sync::RwLock; +use tokio_postgres::{types::ToSql, NoTls, Row}; +use tracing::{debug, debug_span, Instrument}; pub(crate) mod cardano; pub(crate) mod error; @@ -19,6 +22,26 @@ const DATABASE_URL_ENVVAR: &str = "EVENT_DB_URL"; /// Must equal the last Migrations Version Number. pub(crate) const DATABASE_SCHEMA_VERSION: i32 = 9; +#[derive(Clone, Copy, Default, Debug, PartialEq, Eq)] +/// Settings for deep query inspection +pub(crate) enum DeepQueryInspectionFlag { + /// Enable deep query inspection + Enabled, + /// Disable deep query inspection + #[default] + Disabled, +} + +impl From for DeepQueryInspectionFlag { + fn from(b: bool) -> Self { + if b { + Self::Enabled + } else { + Self::Disabled + } + } +} + #[allow(unused)] /// Connection to the Election Database pub(crate) struct EventDB { @@ -26,12 +49,182 @@ pub(crate) struct EventDB { /// All database operations (queries, inserts, etc) should be constrained /// to this crate and should be exported with a clean data access api. pool: Pool>, + /// Deep query inspection flag. + deep_query_inspection_flag: Arc>, } -/// No DB URL was provided +/// `EventDB` Errors #[derive(thiserror::Error, Debug, PartialEq, Eq)] -#[error("DB URL is undefined")] -pub(crate) struct NoDatabaseUrlError; +pub(crate) enum Error { + /// Database statement is not a valid modify statement + #[error("Invalid Modify Statement")] + InvalidModifyStatement, + /// Database statement is not a valid query statement + #[error("Invalid Query Statement")] + InvalidQueryStatement, + /// No DB URL was provided + #[error("DB URL is undefined")] + NoDatabaseUrl, +} + +impl EventDB { + /// Determine if deep query inspection is enabled. + pub(crate) async fn is_deep_query_enabled(&self) -> bool { + *self.deep_query_inspection_flag.read().await == DeepQueryInspectionFlag::Enabled + } + + /// Modify the deep query inspection setting. + /// + /// # Arguments + /// + /// * `deep_query` - `DeepQueryInspection` setting. + pub(crate) async fn modify_deep_query( + &self, deep_query_inspection_flag: DeepQueryInspectionFlag, + ) { + let mut flag = self.deep_query_inspection_flag.write().await; + *flag = deep_query_inspection_flag; + } + + /// Query the database. + /// + /// If deep query inspection is enabled, this will log the query plan inside a + /// rolled-back transaction, before running the query. + /// + /// # Arguments + /// + /// * `stmt` - `&str` SQL statement. + /// * `params` - `&[&(dyn ToSql + Sync)]` SQL parameters. + /// + /// # Returns + /// + /// `Result, anyhow::Error>` + #[must_use = "ONLY use this function for SELECT type operations which return row data, otherwise use `modify()`"] + pub(crate) async fn query( + &self, stmt: &str, params: &[&(dyn ToSql + Sync)], + ) -> Result, anyhow::Error> { + if self.is_deep_query_enabled().await { + // Check if this is a query statement + // if is_query_stmt(stmt) { + // self.explain_analyze_rollback(stmt, params).await?; + // } else { + // return Err(Error::InvalidQueryStatement.into()); + // } + self.explain_analyze_rollback(stmt, params).await?; + } + let rows = self.pool.get().await?.query(stmt, params).await?; + Ok(rows) + } + + /// Query the database for a single row. 
+ /// + /// # Arguments + /// + /// * `stmt` - `&str` SQL statement. + /// * `params` - `&[&(dyn ToSql + Sync)]` SQL parameters. + /// + /// # Returns + /// + /// `Result` + #[must_use = "ONLY use this function for SELECT type operations which return row data, otherwise use `modify()`"] + pub(crate) async fn query_one( + &self, stmt: &str, params: &[&(dyn ToSql + Sync)], + ) -> Result { + if self.is_deep_query_enabled().await { + // Check if this is a query statement + // if is_query_stmt(stmt) { + // self.explain_analyze_rollback(stmt, params).await?; + // } else { + // return Err(Error::InvalidQueryStatement.into()); + // } + self.explain_analyze_rollback(stmt, params).await?; + } + let row = self.pool.get().await?.query_one(stmt, params).await?; + Ok(row) + } + + /// Modify the database. + /// + /// Use this for `UPDATE`, `DELETE`, and other DB statements that + /// don't return data. + /// + /// # Arguments + /// + /// * `stmt` - `&str` SQL statement. + /// * `params` - `&[&(dyn ToSql + Sync)]` SQL parameters. + /// + /// # Returns + /// + /// `anyhow::Result<()>` + pub(crate) async fn modify( + &self, stmt: &str, params: &[&(dyn ToSql + Sync)], + ) -> anyhow::Result<()> { + if self.is_deep_query_enabled().await { + // Check if this is a query statement + // if is_query_stmt(stmt) { + // return Err(Error::InvalidModifyStatement.into()); + // } + self.explain_analyze_commit(stmt, params).await?; + } else { + self.pool.get().await?.query(stmt, params).await?; + } + Ok(()) + } + + /// Prepend `EXPLAIN ANALYZE` to the query, and rollback the transaction. + async fn explain_analyze_rollback( + &self, stmt: &str, params: &[&(dyn ToSql + Sync)], + ) -> anyhow::Result<()> { + self.explain_analyze(stmt, params, true).await + } + + /// Prepend `EXPLAIN ANALYZE` to the query, and commit the transaction. + async fn explain_analyze_commit( + &self, stmt: &str, params: &[&(dyn ToSql + Sync)], + ) -> anyhow::Result<()> { + self.explain_analyze(stmt, params, false).await + } + + /// Prepend `EXPLAIN ANALYZE` to the query. + /// + /// Log the query plan inside a transaction that may be committed or rolled back. + /// + /// # Arguments + /// + /// * `stmt` - `&str` SQL statement. + /// * `params` - `&[&(dyn ToSql + Sync)]` SQL parameters. + /// * `rollback` - `bool` whether to roll back the transaction or not. + async fn explain_analyze( + &self, stmt: &str, params: &[&(dyn ToSql + Sync)], rollback: bool, + ) -> anyhow::Result<()> { + let span = debug_span!( + "query_plan", + query_statement = stmt, + params = format!("{:?}", params), + uuid = uuid::Uuid::new_v4().to_string() + ); + + async move { + let mut conn = self.pool.get().await?; + let transaction = conn.transaction().await?; + let explain_stmt = transaction + .prepare(format!("EXPLAIN ANALYZE {stmt}").as_str()) + .await?; + let rows = transaction.query(&explain_stmt, params).await?; + for r in rows { + let query_plan_str: String = r.get("QUERY PLAN"); + debug!("{}", query_plan_str); + } + if rollback { + transaction.rollback().await?; + } else { + transaction.commit().await?; + } + Ok(()) + } + .instrument(span) + .await + } +} /// Establish a connection to the database, and check the schema is up-to-date. /// @@ -62,7 +255,7 @@ pub(crate) async fn establish_connection(url: Option) -> anyhow::Result< let database_url = match url { Some(url) => url, // If the Database connection URL is not supplied, try and get from the env var. 
- None => std::env::var(DATABASE_URL_ENVVAR).map_err(|_| NoDatabaseUrlError)?, + None => std::env::var(DATABASE_URL_ENVVAR).map_err(|_| Error::NoDatabaseUrl)?, }; let config = tokio_postgres::config::Config::from_str(&database_url)?; @@ -71,5 +264,43 @@ pub(crate) async fn establish_connection(url: Option) -> anyhow::Result< let pool = Pool::builder().build(pg_mgr).await?; - Ok(EventDB { pool }) + Ok(EventDB { + pool, + deep_query_inspection_flag: Arc::default(), + }) +} + +/// Determine if the statement is a query statement. +/// +/// Returns true if the query statement starts with `SELECT` or contains `RETURNING`. +fn is_query_stmt(stmt: &str) -> bool { + // First, determine if the statement is a `SELECT` operation + if let Some(stmt) = &stmt.get(..6) { + if *stmt == "SELECT" { + return true; + } + } + // Otherwise, determine if the statement contains `RETURNING` + stmt.sz_rfind("RETURNING").is_some() +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_is_query_statement() { + let stmt = "SELECT * FROM dummy"; + assert!(is_query_stmt(stmt)); + let stmt = "UPDATE dummy SET foo = $1 WHERE bar = $2 RETURNING *"; + assert!(is_query_stmt(stmt)); + } + + #[test] + fn test_is_not_query_statement() { + let stmt = "UPDATE dummy SET foo_count = foo_count + 1 WHERE bar = (SELECT bar_id FROM foo WHERE name = 'FooBar')"; + assert!(!is_query_stmt(stmt)); + let stmt = "UPDATE dummy SET foo = $1 WHERE bar = $2"; + assert!(!is_query_stmt(stmt)); + } } diff --git a/catalyst-gateway/bin/src/event_db/schema_check/mod.rs b/catalyst-gateway/bin/src/event_db/schema_check/mod.rs index 13ad786645..2fe81bbef9 100644 --- a/catalyst-gateway/bin/src/event_db/schema_check/mod.rs +++ b/catalyst-gateway/bin/src/event_db/schema_check/mod.rs @@ -20,9 +20,7 @@ impl EventDB { /// return the current schema version if its current. /// Otherwise return an error. pub(crate) async fn schema_version_check(&self) -> anyhow::Result { - let conn = self.pool.get().await?; - - let schema_check = conn.query_one(SELECT_MAX_VERSION_SQL, &[]).await?; + let schema_check = self.query_one(SELECT_MAX_VERSION_SQL, &[]).await?; let current_ver = schema_check.try_get("max")?; diff --git a/catalyst-gateway/bin/src/logger.rs b/catalyst-gateway/bin/src/logger.rs index dde8bbea17..4fc0abcb6b 100644 --- a/catalyst-gateway/bin/src/logger.rs +++ b/catalyst-gateway/bin/src/logger.rs @@ -1,16 +1,19 @@ //! Setup for logging for the service.
+ use clap::ValueEnum; use tracing::level_filters::LevelFilter; use tracing_subscriber::{ - fmt::{format::FmtSpan, time}, - FmtSubscriber, + fmt::{self, format::FmtSpan, time}, + prelude::*, + reload::{self, Handle}, + Registry, }; /// Default log level pub(crate) const LOG_LEVEL_DEFAULT: &str = "info"; /// All valid logging levels -#[derive(ValueEnum, Clone, Copy)] +#[derive(ValueEnum, Clone, Copy, Debug)] pub(crate) enum LogLevel { /// Debug messages Debug, @@ -45,10 +48,10 @@ impl From for tracing::log::LevelFilter { } /// Initialize the tracing subscriber -pub(crate) fn init(log_level: LogLevel) -> anyhow::Result<()> { - let subscriber = FmtSubscriber::builder() +pub(crate) fn init(log_level: LogLevel) -> Handle { + // Create the formatting layer + let layer = fmt::layer() .json() - .with_max_level(LevelFilter::from_level(log_level.into())) .with_timer(time::UtcTime::rfc_3339()) .with_span_events(FmtSpan::CLOSE) .with_target(true) @@ -58,12 +61,17 @@ pub(crate) fn init(log_level: LogLevel) -> anyhow::Result<()> { .with_thread_names(true) .with_thread_ids(true) .with_current_span(true) - .with_span_list(true) - .finish(); + .with_span_list(true); + // Create a reloadable layer with the specified log_level + let filter = LevelFilter::from_level(log_level.into()); + let (filter, logger_handle) = reload::Layer::new(filter); + tracing_subscriber::registry() + .with(filter) + .with(layer) + .init(); // Logging is globally disabled by default, so globally enable it to the required level. tracing::log::set_max_level(log_level.into()); - tracing::subscriber::set_global_default(subscriber)?; - Ok(()) + logger_handle } diff --git a/catalyst-gateway/bin/src/main.rs b/catalyst-gateway/bin/src/main.rs index 11a5a8bfba..05b436d63d 100644 --- a/catalyst-gateway/bin/src/main.rs +++ b/catalyst-gateway/bin/src/main.rs @@ -3,6 +3,7 @@ use clap::Parser; mod cardano; mod cli; +#[allow(dead_code)] mod event_db; mod logger; mod service; diff --git a/catalyst-gateway/bin/src/service/api/health/inspection_get.rs b/catalyst-gateway/bin/src/service/api/health/inspection_get.rs new file mode 100644 index 0000000000..77031ff07a --- /dev/null +++ b/catalyst-gateway/bin/src/service/api/health/inspection_get.rs @@ -0,0 +1,90 @@ +//! Implementation of the GET /health/inspection endpoint + +use std::sync::Arc; + +use poem::web::Data; +use poem_openapi::{ApiResponse, Enum}; +use tracing::debug; + +use crate::{event_db, logger, service::common::responses::WithErrorResponses, state::State}; + +/// `LogLevel` Open API definition. +#[derive(Debug, Clone, Copy, Enum)] +#[oai(rename_all = "lowercase")] +pub(crate) enum LogLevel { + /// Debug messages + Debug, + /// Informational Messages + Info, + /// Warnings + Warn, + /// Errors + Error, +} + +impl From for logger::LogLevel { + fn from(val: LogLevel) -> Self { + match val { + LogLevel::Debug => logger::LogLevel::Debug, + LogLevel::Info => logger::LogLevel::Info, + LogLevel::Warn => logger::LogLevel::Warn, + LogLevel::Error => logger::LogLevel::Error, + } + } +} + +/// `DeepQueryInspectionFlag` Open API definition. 
+#[derive(Debug, Clone, Copy, Enum)] +#[oai(rename_all = "lowercase")] +pub(crate) enum DeepQueryInspectionFlag { + /// Enable deep query inspection + Enabled, + /// Disable deep query inspection + Disabled, +} + +impl From for event_db::DeepQueryInspectionFlag { + fn from(val: DeepQueryInspectionFlag) -> Self { + match val { + DeepQueryInspectionFlag::Enabled => event_db::DeepQueryInspectionFlag::Enabled, + DeepQueryInspectionFlag::Disabled => event_db::DeepQueryInspectionFlag::Disabled, + } + } +} + +/// Endpoint responses. +#[derive(ApiResponse)] +pub(crate) enum Responses { + /// Service is Started and can serve requests. + #[oai(status = 204)] + NoContent, +} + +/// All responses. +pub(crate) type AllResponses = WithErrorResponses; + +/// # GET /health/inspection +/// +/// Inspection settings endpoint. +pub(crate) async fn endpoint( + state: Data<&Arc>, log_level: Option, + query_inspection: Option, +) -> AllResponses { + if let Some(level) = log_level { + match state.modify_logger_level(level.into()) { + Ok(()) => debug!("successfully set log level to: {:?}", level), + Err(err) => return AllResponses::handle_error(&err), + } + } + + if let Some(inspection_mode) = query_inspection { + let event_db = state.event_db(); + event_db.modify_deep_query(inspection_mode.into()).await; + debug!( + "successfully set deep query inspection mode to: {:?}", + inspection_mode + ); + } + // otherwise everything seems to be A-OK + Responses::NoContent.into() +} diff --git a/catalyst-gateway/bin/src/service/api/health/mod.rs b/catalyst-gateway/bin/src/service/api/health/mod.rs index 0e886a474a..c5b0b12a72 100644 --- a/catalyst-gateway/bin/src/service/api/health/mod.rs +++ b/catalyst-gateway/bin/src/service/api/health/mod.rs @@ -2,10 +2,11 @@ use std::sync::Arc; use poem::web::Data; -use poem_openapi::OpenApi; +use poem_openapi::{param::Query, OpenApi}; use crate::{service::common::tags::ApiTags, state::State}; +mod inspection_get; mod live_get; mod ready_get; mod started_get; @@ -57,4 +58,17 @@ impl HealthApi { async fn live_get(&self) -> live_get::AllResponses { live_get::endpoint().await } + + #[oai( + path = "/inspection", + method = "get", + operation_id = "healthInspection" + )] + /// Options for service inspection. + async fn inspection( + &self, state: Data<&Arc>, log_level: Query>, + query_inspection: Query>, + ) -> inspection_get::AllResponses { + inspection_get::endpoint(state, log_level.0, query_inspection.0).await + } } diff --git a/catalyst-gateway/bin/src/service/common/responses/mod.rs b/catalyst-gateway/bin/src/service/common/responses/mod.rs index fce647e085..21b8ef7223 100644 --- a/catalyst-gateway/bin/src/service/common/responses/mod.rs +++ b/catalyst-gateway/bin/src/service/common/responses/mod.rs @@ -59,7 +59,7 @@ impl WithErrorResponses { err.to_string(), )))) }, - err if err.is::() => { + err if err.is::>() => { WithErrorResponses::Error(ErrorResponses::ServiceUnavailable) }, err => { diff --git a/catalyst-gateway/bin/src/settings.rs b/catalyst-gateway/bin/src/settings.rs index 93e33f470c..0d566bada8 100644 --- a/catalyst-gateway/bin/src/settings.rs +++ b/catalyst-gateway/bin/src/settings.rs @@ -67,6 +67,10 @@ pub(crate) struct ServiceSettings { /// Follower settings. #[clap(flatten)] pub(crate) follower_settings: FollowerSettings, + + /// Enable deep query inspection. + #[clap(long, action = clap::ArgAction::SetTrue)] + pub(crate) deep_query_inspection: bool, } /// Settings specifies `OpenAPI` docs generation. 
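For reviewers: a minimal sketch (not part of the patch) of how call sites are expected to use the new `EventDB` wrappers and the deep query inspection flag introduced above. The function name, SQL text, table names and parameter values below are illustrative placeholders only, inferred from the signatures added in `event_db/mod.rs`; the endpoint mentioned is the new `GET /health/inspection` route.

    // Hypothetical usage sketch; only the `EventDB` API calls are taken from this patch.
    async fn example_usage(event_db: &EventDB) -> anyhow::Result<()> {
        // Toggle deep query inspection at runtime (the same flag is also settable
        // through the new /health/inspection endpoint).
        event_db
            .modify_deep_query(DeepQueryInspectionFlag::Enabled)
            .await;

        // SELECT-style statements go through `query`/`query_one`. With inspection
        // enabled, the statement is first run as `EXPLAIN ANALYZE` inside a
        // rolled-back transaction and the plan is logged at debug level.
        let _rows = event_db
            .query(
                "SELECT slot_no FROM cardano_slot_index WHERE network = $1", // placeholder SQL
                &[&"preprod"],
            )
            .await?;

        // Statements that return no rows go through `modify`; with inspection
        // enabled the `EXPLAIN ANALYZE` transaction is committed instead.
        event_db
            .modify(
                "DELETE FROM config WHERE id = $1", // placeholder SQL
                &[&"cardano"],
            )
            .await?;

        Ok(())
    }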
diff --git a/catalyst-gateway/bin/src/state/mod.rs b/catalyst-gateway/bin/src/state/mod.rs index 55b9a9d0db..31a78c3560 100644 --- a/catalyst-gateway/bin/src/state/mod.rs +++ b/catalyst-gateway/bin/src/state/mod.rs @@ -1,7 +1,19 @@ //! Shared state used by all endpoints. use std::sync::Arc; -use crate::event_db::{establish_connection, EventDB}; +use tracing::level_filters::LevelFilter; +use tracing_subscriber::{reload::Handle, Registry}; + +use crate::{ + event_db::{establish_connection, EventDB}, + logger::LogLevel, +}; + +/// Settings for logger level +pub(crate) struct LoggerSettings { + /// Logger handle for formatting layer. + logger_handle: Handle, +} /// Global State of the service pub(crate) struct State { @@ -13,15 +25,23 @@ pub(crate) struct State { // Private need to get it with a function. event_db: Arc, /* This needs to be obsoleted, we want the DB * to be able to be down. */ + /// Logger settings + logger_settings: Arc, } impl State { /// Create a new global [`State`] - pub(crate) async fn new(database_url: Option) -> anyhow::Result { + pub(crate) async fn new( + database_url: Option, logger_handle: Handle, + ) -> anyhow::Result { // Get a configured pool to the Database, runs schema version check internally. let event_db = Arc::new(establish_connection(database_url).await?); + let logger_settings = Arc::new(LoggerSettings { logger_handle }); - let state = Self { event_db }; + let state = Self { + event_db, + logger_settings, + }; // We don't care if this succeeds or not. // We just try our best to connect to the event DB. @@ -34,4 +54,13 @@ impl State { pub(crate) fn event_db(&self) -> Arc { self.event_db.clone() } + + /// Modify the logger level setting. + /// This will reload the logger. + pub(crate) fn modify_logger_level(&self, level: LogLevel) -> anyhow::Result<()> { + self.logger_settings + .logger_handle + .modify(|f| *f = LevelFilter::from_level(level.into()))?; + Ok(()) + } } diff --git a/catalyst-gateway/tests/Earthfile b/catalyst-gateway/tests/Earthfile index 2713223062..8e3c08596d 100644 --- a/catalyst-gateway/tests/Earthfile +++ b/catalyst-gateway/tests/Earthfile @@ -1,64 +1,5 @@ VERSION 0.8 -package-schemathesis: - FROM python:3.12-alpine3.19 - - ARG tag="latest" - ARG max_examples=1000 - # TODO: https://github.com/input-output-hk/catalyst-voices/issues/465 - ARG max_response_time=1000 - ARG wait_for_schema=15 - ARG workers=2 - ARG schema_version=30 - ARG openapi_spec - - RUN apk add --no-cache gcc musl-dev - RUN python -m pip install schemathesis==3.27.1 - RUN mkdir /results - - VOLUME /results - ENTRYPOINT st run --checks all $openapi_spec \ - --workers=$workers \ - --wait-for-schema=$wait_for_schema \ - --max-response-time=$max_response_time \ - --hypothesis-max-examples=$max_examples \ - --data-generation-method=all \ - --skip-deprecated-operations \ - --force-schema-version=$schema_version \ - --show-trace \ - --force-color \ - --junit-xml=/results/junit-report.xml \ - --cassette-path=/results/cassette.yaml - - SAVE IMAGE schemathesis:$tag - -# test-fuzzer-api - Fuzzy test cat-gateway using openapi specs -test-fuzzer-api: - FROM earthly/dind:alpine-3.19 - RUN apk update && apk add iptables-legacy # workaround for https://github.com/earthly/earthly/issues/3784 - RUN apk add yq zstd - ARG OPENAPI_SPEC="http://127.0.0.1:3030/docs/cat-gateway.json" - - COPY schemathesis-docker-compose.yml . 
- WITH DOCKER \ - --compose schemathesis-docker-compose.yml \ - --load schemathesis:latest=(+package-schemathesis --openapi_spec=$OPENAPI_SPEC) \ - --load event-db:latest=(../event-db+build) \ - --load cat-gateway:latest=(../+package-cat-gateway) \ - --service event-db \ - --service cat-gateway \ - --allow-privileged - RUN docker run --net=host --name=st schemathesis:latest || echo fail > fail && \ - docker-compose logs cat-gateway > ./cat-gateway.log && zstd -9 cat-gateway.log && \ - docker cp st:/results/junit-report.xml junit-report.xml && \ - docker cp st:/results/cassette.yaml cassette.yaml - END - WAIT - SAVE ARTIFACT junit-report.xml AS LOCAL schemathesis.junit-report.xml - SAVE ARTIFACT cat-gateway.log.zst AS LOCAL cat-gateway.log.zst - SAVE ARTIFACT cassette.yaml AS LOCAL cassette.yaml - END - # test-lint-openapi - OpenAPI linting from an artifact # testing whether the OpenAPI generated during build stage follows good practice. test-lint-openapi: diff --git a/catalyst-gateway/tests/api_tests/.gitignore b/catalyst-gateway/tests/api_tests/.gitignore index 40f65ddf37..3b9825d2f9 100644 --- a/catalyst-gateway/tests/api_tests/.gitignore +++ b/catalyst-gateway/tests/api_tests/.gitignore @@ -1,2 +1,3 @@ __pycache__ -junit-report.xml \ No newline at end of file +junit-report.xml +.hypothesis/ \ No newline at end of file diff --git a/catalyst-gateway/tests/api_tests/api_tests/__init__.py b/catalyst-gateway/tests/api_tests/api_tests/__init__.py index e15ed46f89..14d2437930 100644 --- a/catalyst-gateway/tests/api_tests/api_tests/__init__.py +++ b/catalyst-gateway/tests/api_tests/api_tests/__init__.py @@ -22,7 +22,6 @@ def cat_gateway_endpoint_url(endpoint: str): def check_is_live(): resp = requests.get(cat_gateway_endpoint_url("api/health/live")) - print(f"resp: {resp}, code: {resp.status_code}") assert resp.status_code == 204 logger.info("cat-gateway service is LIVE.") diff --git a/catalyst-gateway/tests/schemathesis_tests/.gitignore b/catalyst-gateway/tests/schemathesis_tests/.gitignore new file mode 100644 index 0000000000..3b9825d2f9 --- /dev/null +++ b/catalyst-gateway/tests/schemathesis_tests/.gitignore @@ -0,0 +1,3 @@ +__pycache__ +junit-report.xml +.hypothesis/ \ No newline at end of file diff --git a/catalyst-gateway/tests/schemathesis_tests/Earthfile b/catalyst-gateway/tests/schemathesis_tests/Earthfile new file mode 100644 index 0000000000..b98d1d4f76 --- /dev/null +++ b/catalyst-gateway/tests/schemathesis_tests/Earthfile @@ -0,0 +1,62 @@ +VERSION 0.8 + +package-schemathesis: + FROM python:3.12-alpine3.19 + + ARG tag="latest" + ARG max_examples=1000 + # TODO: https://github.com/input-output-hk/catalyst-voices/issues/465 + ARG max_response_time=3000 + ARG wait_for_schema=15 + ARG workers=2 + ARG schema_version=30 + ARG openapi_spec + + RUN apk add --no-cache gcc musl-dev + RUN python -m pip install schemathesis==3.27.1 + RUN mkdir /results + COPY ./hooks/hooks.py . 
+ + VOLUME /results + ENTRYPOINT export SCHEMATHESIS_HOOKS=hooks \ + && st run --checks all $openapi_spec \ + --workers=$workers \ + --wait-for-schema=$wait_for_schema \ + --max-response-time=$max_response_time \ + --hypothesis-max-examples=$max_examples \ + --data-generation-method=all \ + --skip-deprecated-operations \ + --force-schema-version=$schema_version \ + --show-trace \ + --force-color \ + --junit-xml=/results/junit-report.xml \ + --cassette-path=/results/cassette.yaml + + SAVE IMAGE schemathesis:$tag + +# test-fuzzer-api - Fuzzy test cat-gateway using openapi specs +test-fuzzer-api: + FROM earthly/dind:alpine-3.19 + RUN apk update && apk add iptables-legacy # workaround for https://github.com/earthly/earthly/issues/3784 + RUN apk add yq zstd + ARG OPENAPI_SPEC="http://127.0.0.1:3030/docs/cat-gateway.json" + + COPY schemathesis-docker-compose.yml . + WITH DOCKER \ + --compose schemathesis-docker-compose.yml \ + --load schemathesis:latest=(+package-schemathesis --openapi_spec=$OPENAPI_SPEC) \ + --load event-db:latest=(../../event-db+build) \ + --load cat-gateway:latest=(../../+package-cat-gateway) \ + --service event-db \ + --service cat-gateway \ + --allow-privileged + RUN docker run --net=host --name=st schemathesis:latest || echo fail > fail && \ + docker-compose logs cat-gateway > ./cat-gateway.log && zstd -9 cat-gateway.log && \ + docker cp st:/results/junit-report.xml junit-report.xml && \ + docker cp st:/results/cassette.yaml cassette.yaml + END + WAIT + SAVE ARTIFACT junit-report.xml AS LOCAL schemathesis.junit-report.xml + SAVE ARTIFACT cat-gateway.log.zst AS LOCAL cat-gateway.log.zst + SAVE ARTIFACT cassette.yaml AS LOCAL cassette.yaml + END diff --git a/catalyst-gateway/tests/schemathesis_tests/hooks/hooks.py b/catalyst-gateway/tests/schemathesis_tests/hooks/hooks.py new file mode 100644 index 0000000000..1948e9d6db --- /dev/null +++ b/catalyst-gateway/tests/schemathesis_tests/hooks/hooks.py @@ -0,0 +1,36 @@ +import copy +import schemathesis +import schemathesis.schemas +from typing import Any, Dict + +@schemathesis.hook +def before_load_schema( + context: schemathesis.hooks.HookContext, + raw_schema: Dict[str, Any], + ) -> None: + paths = dict(raw_schema["paths"]) + for endpoint in paths.keys(): + endpoint_data = dict(raw_schema["paths"][endpoint]) + + # Get first method data as reference + ref_info = next(iter(endpoint_data.values())) + + # Create new method data using reference data, replacing 'responses' field + new_info = copy.deepcopy(ref_info) + new_info["responses"] = {"405": {"description": "method not allowed"}} + + # Update endpoint if a method is not declared + if "get" not in endpoint_data: + endpoint_data.update([("get", new_info),]) + if "post" not in endpoint_data: + endpoint_data.update([("post", new_info),]) + if "put" not in endpoint_data: + endpoint_data.update([("put", new_info),]) + if "patch" not in endpoint_data: + endpoint_data.update([("patch", new_info),]) + if "delete" not in endpoint_data: + endpoint_data.update([("delete", new_info),]) + + # Update endpoint + raw_schema["paths"][endpoint] = endpoint_data + diff --git a/catalyst-gateway/tests/schemathesis-docker-compose.yml b/catalyst-gateway/tests/schemathesis_tests/schemathesis-docker-compose.yml similarity index 100% rename from catalyst-gateway/tests/schemathesis-docker-compose.yml rename to catalyst-gateway/tests/schemathesis_tests/schemathesis-docker-compose.yml diff --git a/catalyst_voices/Earthfile b/catalyst_voices/Earthfile index 46b4ed0dc1..d1c95b88b0 100644 --- 
a/catalyst_voices/Earthfile +++ b/catalyst_voices/Earthfile @@ -56,7 +56,8 @@ code-generator: # check-flutter-code-generator - Checks that the code generation is consistent # with the generated code currently in the repo. -check-flutter-code-generator: +# TODO: enable it +flutter-code-generator: FROM +code-generator # Copy generated files in the local file tree to a temporary folder COPY packages/catalyst_voices_services/lib/generated/catalyst_gateway /tmp/repo_generated diff --git a/catalyst_voices/packages/catalyst_voices_assets/assets/images/catalyst_logo_icon.svg b/catalyst_voices/packages/catalyst_voices_assets/assets/images/catalyst_logo_icon.svg new file mode 100644 index 0000000000..99604f3253 --- /dev/null +++ b/catalyst_voices/packages/catalyst_voices_assets/assets/images/catalyst_logo_icon.svg @@ -0,0 +1,60 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/catalyst_voices/packages/catalyst_voices_assets/assets/images/catalyst_logo_icon_white.svg b/catalyst_voices/packages/catalyst_voices_assets/assets/images/catalyst_logo_icon_white.svg new file mode 100644 index 0000000000..9c10d0b2a4 --- /dev/null +++ b/catalyst_voices/packages/catalyst_voices_assets/assets/images/catalyst_logo_icon_white.svg @@ -0,0 +1,17 @@ + + + + + + + + + + + + + + + + + diff --git a/catalyst_voices/packages/catalyst_voices_assets/assets/images/catalyst_logo_white.svg b/catalyst_voices/packages/catalyst_voices_assets/assets/images/catalyst_logo_white.svg new file mode 100644 index 0000000000..51915d6656 --- /dev/null +++ b/catalyst_voices/packages/catalyst_voices_assets/assets/images/catalyst_logo_white.svg @@ -0,0 +1,32 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/catalyst_voices/packages/catalyst_voices_assets/assets/images/fallback_logo.svg b/catalyst_voices/packages/catalyst_voices_assets/assets/images/fallback_logo.svg new file mode 100644 index 0000000000..5f93f5dfa5 --- /dev/null +++ b/catalyst_voices/packages/catalyst_voices_assets/assets/images/fallback_logo.svg @@ -0,0 +1,315 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/catalyst_voices/packages/catalyst_voices_assets/assets/images/fallback_logo_icon.svg b/catalyst_voices/packages/catalyst_voices_assets/assets/images/fallback_logo_icon.svg new file mode 100644 index 0000000000..6eebad4859 --- /dev/null +++ b/catalyst_voices/packages/catalyst_voices_assets/assets/images/fallback_logo_icon.svg @@ -0,0 +1,81 @@ + + + + + + + + + + + + + + + + + + + diff --git a/catalyst_voices/packages/catalyst_voices_assets/lib/generated/assets.gen.dart b/catalyst_voices/packages/catalyst_voices_assets/lib/generated/assets.gen.dart index e2079b0e71..ce2cdbaf47 100644 --- a/catalyst_voices/packages/catalyst_voices_assets/lib/generated/assets.gen.dart +++ b/catalyst_voices/packages/catalyst_voices_assets/lib/generated/assets.gen.dart @@ -18,6 +18,18 @@ class $AssetsImagesGen { SvgGenImage get catalystLogo => const SvgGenImage('assets/images/catalyst_logo.svg'); + /// File path: assets/images/catalyst_logo_icon.svg + SvgGenImage get catalystLogoIcon => + const SvgGenImage('assets/images/catalyst_logo_icon.svg'); + + /// File path: assets/images/catalyst_logo_icon_white.svg + SvgGenImage get catalystLogoIconWhite => + const SvgGenImage('assets/images/catalyst_logo_icon_white.svg'); + + /// File path: 
assets/images/catalyst_logo_white.svg + SvgGenImage get catalystLogoWhite => + const SvgGenImage('assets/images/catalyst_logo_white.svg'); + /// File path: assets/images/coming_soon_bkg.webp AssetGenImage get comingSoonBkg => const AssetGenImage('assets/images/coming_soon_bkg.webp'); @@ -26,9 +38,25 @@ class $AssetsImagesGen { AssetGenImage get dummyCatalystVoices => const AssetGenImage('assets/images/dummy_catalyst_voices.webp'); + /// File path: assets/images/fallback_logo.svg + SvgGenImage get fallbackLogo => + const SvgGenImage('assets/images/fallback_logo.svg'); + + /// File path: assets/images/fallback_logo_icon.svg + SvgGenImage get fallbackLogoIcon => + const SvgGenImage('assets/images/fallback_logo_icon.svg'); + /// List of all assets - List get values => - [catalystLogo, comingSoonBkg, dummyCatalystVoices]; + List get values => [ + catalystLogo, + catalystLogoIcon, + catalystLogoIconWhite, + catalystLogoWhite, + comingSoonBkg, + dummyCatalystVoices, + fallbackLogo, + fallbackLogoIcon + ]; } class VoicesAssets { diff --git a/catalyst_voices/packages/catalyst_voices_assets/pubspec.yaml b/catalyst_voices/packages/catalyst_voices_assets/pubspec.yaml index 6743ad0b9d..b469ea2c33 100644 --- a/catalyst_voices/packages/catalyst_voices_assets/pubspec.yaml +++ b/catalyst_voices/packages/catalyst_voices_assets/pubspec.yaml @@ -17,7 +17,6 @@ dev_dependencies: flutter_gen_runner: ^5.3.2 flutter: - uses-material-design: true generate: true assets: - assets/images/ diff --git a/catalyst_voices/packages/catalyst_voices_brands/lib/src/theme_extensions/brand_assets.dart b/catalyst_voices/packages/catalyst_voices_brands/lib/src/theme_extensions/brand_assets.dart new file mode 100644 index 0000000000..d722ec6065 --- /dev/null +++ b/catalyst_voices/packages/catalyst_voices_brands/lib/src/theme_extensions/brand_assets.dart @@ -0,0 +1,53 @@ +import 'package:catalyst_voices_assets/catalyst_voices_assets.dart'; +import 'package:flutter/material.dart'; + +/// A `ThemeExtension` that holds brand-specific assets for theming purposes. +/// +/// `BrandAssets` is used to encapsulate theme-dependent assets such as the +/// brand's logo and logo icon as SVG images which can be utilized throughout +/// the application wherever it is required. +/// +/// Example usage: +/// ```dart +/// final logo = Theme.of(context).brandAssets.logo; +/// final logoIcon = Theme.of(context).brandAssets.logoIcon; +/// ``` +@immutable +class BrandAssets extends ThemeExtension<BrandAssets> { + final SvgGenImage logo; + final SvgGenImage logoIcon; + + const BrandAssets({ + required this.logo, + required this.logoIcon, + }); + + @override + ThemeExtension<BrandAssets> copyWith({ + SvgGenImage? logo, + SvgGenImage? logoIcon, + }) { + return BrandAssets( + logo: logo ?? this.logo, + logoIcon: logoIcon ?? this.logoIcon, + ); + } + + @override + BrandAssets lerp( + ThemeExtension<BrandAssets>? other, + double t, + ) { + if (other is!
BrandAssets) { + return this; + } else if (t >= 0.5) { + return other; + } else { + return this; + } + } +} + +extension BrandAssetsExtension on ThemeData { + BrandAssets get brandAssets => extension()!; +} diff --git a/catalyst_voices/packages/catalyst_voices_brands/lib/src/themes/voices_color_scheme.dart b/catalyst_voices/packages/catalyst_voices_brands/lib/src/theme_extensions/voices_color_scheme.dart similarity index 96% rename from catalyst_voices/packages/catalyst_voices_brands/lib/src/themes/voices_color_scheme.dart rename to catalyst_voices/packages/catalyst_voices_brands/lib/src/theme_extensions/voices_color_scheme.dart index 2708ae520f..42e01c0493 100644 --- a/catalyst_voices/packages/catalyst_voices_brands/lib/src/themes/voices_color_scheme.dart +++ b/catalyst_voices/packages/catalyst_voices_brands/lib/src/theme_extensions/voices_color_scheme.dart @@ -1,5 +1,10 @@ import 'package:flutter/material.dart'; +/// A `ThemeExtension` that encapsulates a set color properties and extends the +/// standard material color scheme. +/// +/// `VoicesColorScheme` is used to define a comprehensive set of color +/// attributes that can be used in the Voices application to ensure consistency. @immutable class VoicesColorScheme extends ThemeExtension { final Color? textPrimary; diff --git a/catalyst_voices/packages/catalyst_voices_brands/lib/src/themes/catalyst.dart b/catalyst_voices/packages/catalyst_voices_brands/lib/src/themes/catalyst.dart index 885c4efcf6..de9de295e3 100644 --- a/catalyst_voices/packages/catalyst_voices_brands/lib/src/themes/catalyst.dart +++ b/catalyst_voices/packages/catalyst_voices_brands/lib/src/themes/catalyst.dart @@ -1,12 +1,13 @@ import 'package:catalyst_voices_assets/catalyst_voices_assets.dart'; -import 'package:catalyst_voices_assets/generated/colors.gen.dart'; -import 'package:catalyst_voices_brands/src/themes/voices_color_scheme.dart'; +import 'package:catalyst_voices_brands/src/theme_extensions/brand_assets.dart'; +import 'package:catalyst_voices_brands/src/theme_extensions/voices_color_scheme.dart'; import 'package:flutter/material.dart'; import 'package:google_fonts/google_fonts.dart'; ThemeData _buildThemeData( ColorScheme colorScheme, VoicesColorScheme voicesColorScheme, + BrandAssets brandAssets, ) { return ThemeData( textTheme: TextTheme( @@ -113,6 +114,7 @@ ThemeData _buildThemeData( colorScheme: colorScheme, extensions: >[ voicesColorScheme, + brandAssets, ], ); } @@ -219,14 +221,26 @@ const VoicesColorScheme darkVoicesColorScheme = VoicesColorScheme( iconsError: VoicesColors.darkIconsError, ); +final BrandAssets lightBrandAssets = BrandAssets( + logo: VoicesAssets.images.catalystLogo, + logoIcon: VoicesAssets.images.catalystLogoIcon, +); + +final BrandAssets darkBrandAssets = BrandAssets( + logo: VoicesAssets.images.catalystLogoWhite, + logoIcon: VoicesAssets.images.catalystLogoIconWhite, +); + /// [ThemeData] for the `catalyst` brand. final ThemeData catalyst = _buildThemeData( lightColorScheme, lightVoicesColorScheme, + lightBrandAssets, ); /// Dark [ThemeData] for the `catalyst` brand. 
diff --git a/catalyst_voices/packages/catalyst_voices_brands/lib/src/themes/fallback.dart b/catalyst_voices/packages/catalyst_voices_brands/lib/src/themes/fallback.dart
index 7fa296225d..bdb8b42232 100644
--- a/catalyst_voices/packages/catalyst_voices_brands/lib/src/themes/fallback.dart
+++ b/catalyst_voices/packages/catalyst_voices_brands/lib/src/themes/fallback.dart
@@ -1,7 +1,19 @@
+import 'package:catalyst_voices_assets/catalyst_voices_assets.dart';
+import 'package:catalyst_voices_brands/src/theme_extensions/brand_assets.dart';
 import 'package:flutter/material.dart';
 
+final BrandAssets lightBrandAssets = BrandAssets(
+  logo: VoicesAssets.images.fallbackLogo,
+  logoIcon: VoicesAssets.images.fallbackLogoIcon,
+);
+
 /// [ThemeData] for the `fallback` brand.
-final ThemeData fallback = ThemeData.light();
+final ThemeData fallback = ThemeData(
+  colorScheme: ThemeData.light().colorScheme,
+  extensions: <ThemeExtension<dynamic>>[
+    lightBrandAssets,
+  ],
+);
 
 /// Dark [ThemeData] for the `fallback` brand.
 final ThemeData darkFallback = ThemeData.dark();
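Because `darkFallback` stays a plain `ThemeData.dark()` without a `BrandAssets` entry, callers that cannot guarantee which theme is installed may prefer the nullable lookup over the non-null `brandAssets` getter. A minimal sketch (the fallback asset chosen here is illustrative):

```dart
import 'package:catalyst_voices_assets/catalyst_voices_assets.dart';
import 'package:catalyst_voices_brands/src/theme_extensions/brand_assets.dart';
import 'package:flutter/material.dart';

/// Returns the brand logo if the current theme registered [BrandAssets],
/// otherwise falls back to the generated fallback logo.
SvgGenImage resolveLogo(BuildContext context) {
  final assets = Theme.of(context).extension<BrandAssets>();
  return assets?.logo ?? VoicesAssets.images.fallbackLogo;
}
```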
diff --git a/catalyst_voices/packages/catalyst_voices_brands/test/src/catalyst_voices_brands_test.dart b/catalyst_voices/packages/catalyst_voices_brands/test/src/catalyst_voices_brands_test.dart
index eea47f7db2..c99034142f 100644
--- a/catalyst_voices/packages/catalyst_voices_brands/test/src/catalyst_voices_brands_test.dart
+++ b/catalyst_voices/packages/catalyst_voices_brands/test/src/catalyst_voices_brands_test.dart
@@ -1,6 +1,7 @@
 import 'package:catalyst_voices_assets/catalyst_voices_assets.dart';
 import 'package:catalyst_voices_blocs/src/brand/brand_bloc.dart';
 import 'package:catalyst_voices_brands/catalyst_voices_brands.dart';
+import 'package:catalyst_voices_brands/src/theme_extensions/brand_assets.dart';
 import 'package:flutter/material.dart';
 import 'package:flutter_bloc/flutter_bloc.dart';
 import 'package:flutter_test/flutter_test.dart';
@@ -18,6 +19,9 @@ void main() {
         builder: (context) => Scaffold(
           body: Row(
             children: [
+              CatalystSvgPicture.asset(
+                Theme.of(context).brandAssets.logo.path,
+              ),
               MaterialButton(
                 key: catalystKey,
                 color: Theme.of(context).primaryColor,
@@ -127,4 +131,67 @@ void main() {
       );
     });
   });
+  group('Test brand_assets', () {
+    final catalystLogo = CatalystSvgPicture.asset(
+      VoicesAssets.images.catalystLogo.path,
+    );
+    final fallbackLogo = CatalystSvgPicture.asset(
+      VoicesAssets.images.fallbackLogo.path,
+    );
+    testWidgets('Logo from Default theme is applied', (tester) async {
+      await tester.pumpWidget(
+        buildApp(),
+      );
+
+      final logo = find.byType(CatalystSvgPicture).first;
+
+      expect(logo, findsOneWidget);
+      expect(
+        tester.widget<SvgPicture>(logo).bytesLoader,
+        catalystLogo.bytesLoader,
+      );
+    });
+
+    testWidgets('Fallback Logo is applied after switch', (tester) async {
+      await tester.pumpWidget(
+        buildApp(),
+      );
+
+      final logo = find.byType(CatalystSvgPicture).first;
+      final fallbackButton = find.byKey(fallbackKey);
+
+      expect(logo, findsOneWidget);
+      expect(fallbackButton, findsOneWidget);
+
+      await tester.tap(fallbackButton);
+      await tester.pumpAndSettle();
+      expect(
+        tester.widget<SvgPicture>(logo).bytesLoader,
+        fallbackLogo.bytesLoader,
+      );
+    });
+
+    testWidgets('Catalyst Logo is applied after switch', (tester) async {
+      await tester.pumpWidget(
+        buildApp(),
+      );
+
+      final logo = find.byType(CatalystSvgPicture).first;
+      final catalystButton = find.byKey(catalystKey);
+      final fallbackButton = find.byKey(fallbackKey);
+
+      expect(logo, findsOneWidget);
+      expect(catalystButton, findsOneWidget);
+      expect(fallbackButton, findsOneWidget);
+
+      await tester.tap(fallbackButton);
+      await tester.pumpAndSettle();
+      await tester.tap(catalystButton);
+      await tester.pumpAndSettle();
+      expect(
+        tester.widget<SvgPicture>(logo).bytesLoader,
+        catalystLogo.bytesLoader,
+      );
+    });
+  });
 }
diff --git a/catalyst_voices/packages/catalyst_voices_services/lib/generated/catalyst_gateway/cat_gateway_api.enums.swagger.dart b/catalyst_voices/packages/catalyst_voices_services/lib/generated/catalyst_gateway/cat_gateway_api.enums.swagger.dart
index ec8b97b8b5..0b7f02760e 100644
--- a/catalyst_voices/packages/catalyst_voices_services/lib/generated/catalyst_gateway/cat_gateway_api.enums.swagger.dart
+++ b/catalyst_voices/packages/catalyst_voices_services/lib/generated/catalyst_gateway/cat_gateway_api.enums.swagger.dart
@@ -1,6 +1,38 @@
 import 'package:json_annotation/json_annotation.dart';
 import 'package:collection/collection.dart';
 
+enum DeepQueryInspection {
+  @JsonValue(null)
+  swaggerGeneratedUnknown(null),
+
+  @JsonValue('enabled')
+  enabled('enabled'),
+  @JsonValue('disabled')
+  disabled('disabled');
+
+  final String? value;
+
+  const DeepQueryInspection(this.value);
+}
+
+enum LogLevel {
+  @JsonValue(null)
+  swaggerGeneratedUnknown(null),
+
+  @JsonValue('debug')
+  debug('debug'),
+  @JsonValue('info')
+  info('info'),
+  @JsonValue('warn')
+  warn('warn'),
+  @JsonValue('error')
+  error('error');
+
+  final String? value;
+
+  const LogLevel(this.value);
+}
+
 enum Network {
   @JsonValue(null)
   swaggerGeneratedUnknown(null),
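The enums above are converted to and from their wire values by the helpers added to the models file in the next hunk; a small usage sketch (import paths are illustrative):

```dart
import 'cat_gateway_api.enums.swagger.dart' as enums;
import 'cat_gateway_api.models.swagger.dart';

void main() {
  // Serialising: the enums carry their wire value directly.
  final level = logLevelToJson(enums.LogLevel.debug); // 'debug'
  final inspection =
      deepQueryInspectionToJson(enums.DeepQueryInspection.enabled); // 'enabled'

  // Deserialising: unknown values fall back to swaggerGeneratedUnknown.
  final parsed = logLevelFromJson('verbose');
  assert(parsed == enums.LogLevel.swaggerGeneratedUnknown);

  print('$level $inspection $parsed');
}
```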
diff --git a/catalyst_voices/packages/catalyst_voices_services/lib/generated/catalyst_gateway/cat_gateway_api.models.swagger.dart b/catalyst_voices/packages/catalyst_voices_services/lib/generated/catalyst_gateway/cat_gateway_api.models.swagger.dart
index 49881b58a2..f26998df9c 100644
--- a/catalyst_voices/packages/catalyst_voices_services/lib/generated/catalyst_gateway/cat_gateway_api.models.swagger.dart
+++ b/catalyst_voices/packages/catalyst_voices_services/lib/generated/catalyst_gateway/cat_gateway_api.models.swagger.dart
@@ -1468,6 +1468,140 @@ extension $VotingInfoDirectVoterExtension on VotingInfoDirectVoter {
   }
 }
 
+String? deepQueryInspectionNullableToJson(
+    enums.DeepQueryInspection? deepQueryInspection) {
+  return deepQueryInspection?.value;
+}
+
+String? deepQueryInspectionToJson(
+    enums.DeepQueryInspection deepQueryInspection) {
+  return deepQueryInspection.value;
+}
+
+enums.DeepQueryInspection deepQueryInspectionFromJson(
+  Object? deepQueryInspection, [
+  enums.DeepQueryInspection? defaultValue,
+]) {
+  return enums.DeepQueryInspection.values
+          .firstWhereOrNull((e) => e.value == deepQueryInspection) ??
+      defaultValue ??
+      enums.DeepQueryInspection.swaggerGeneratedUnknown;
+}
+
+enums.DeepQueryInspection? deepQueryInspectionNullableFromJson(
+  Object? deepQueryInspection, [
+  enums.DeepQueryInspection? defaultValue,
+]) {
+  if (deepQueryInspection == null) {
+    return null;
+  }
+  return enums.DeepQueryInspection.values
+          .firstWhereOrNull((e) => e.value == deepQueryInspection) ??
+      defaultValue;
+}
+
+String deepQueryInspectionExplodedListToJson(
+    List<enums.DeepQueryInspection>? deepQueryInspection) {
+  return deepQueryInspection?.map((e) => e.value!).join(',') ?? '';
+}
+
+List<String> deepQueryInspectionListToJson(
+    List<enums.DeepQueryInspection>? deepQueryInspection) {
+  if (deepQueryInspection == null) {
+    return [];
+  }
+
+  return deepQueryInspection.map((e) => e.value!).toList();
+}
+
+List<enums.DeepQueryInspection> deepQueryInspectionListFromJson(
+  List? deepQueryInspection, [
+  List<enums.DeepQueryInspection>? defaultValue,
+]) {
+  if (deepQueryInspection == null) {
+    return defaultValue ?? [];
+  }
+
+  return deepQueryInspection
+      .map((e) => deepQueryInspectionFromJson(e.toString()))
+      .toList();
+}
+
+List<enums.DeepQueryInspection>? deepQueryInspectionNullableListFromJson(
+  List? deepQueryInspection, [
+  List<enums.DeepQueryInspection>? defaultValue,
+]) {
+  if (deepQueryInspection == null) {
+    return defaultValue;
+  }
+
+  return deepQueryInspection
+      .map((e) => deepQueryInspectionFromJson(e.toString()))
+      .toList();
+}
+
+String? logLevelNullableToJson(enums.LogLevel? logLevel) {
+  return logLevel?.value;
+}
+
+String? logLevelToJson(enums.LogLevel logLevel) {
+  return logLevel.value;
+}
+
+enums.LogLevel logLevelFromJson(
+  Object? logLevel, [
+  enums.LogLevel? defaultValue,
+]) {
+  return enums.LogLevel.values.firstWhereOrNull((e) => e.value == logLevel) ??
+      defaultValue ??
+      enums.LogLevel.swaggerGeneratedUnknown;
+}
+
+enums.LogLevel? logLevelNullableFromJson(
+  Object? logLevel, [
+  enums.LogLevel? defaultValue,
+]) {
+  if (logLevel == null) {
+    return null;
+  }
+  return enums.LogLevel.values.firstWhereOrNull((e) => e.value == logLevel) ??
+      defaultValue;
+}
+
+String logLevelExplodedListToJson(List<enums.LogLevel>? logLevel) {
+  return logLevel?.map((e) => e.value!).join(',') ?? '';
+}
+
+List<String> logLevelListToJson(List<enums.LogLevel>? logLevel) {
+  if (logLevel == null) {
+    return [];
+  }
+
+  return logLevel.map((e) => e.value!).toList();
+}
+
+List<enums.LogLevel> logLevelListFromJson(
+  List? logLevel, [
+  List<enums.LogLevel>? defaultValue,
+]) {
+  if (logLevel == null) {
+    return defaultValue ?? [];
+  }
+
+  return logLevel.map((e) => logLevelFromJson(e.toString())).toList();
+}
+
+List<enums.LogLevel>? logLevelNullableListFromJson(
+  List? logLevel, [
+  List<enums.LogLevel>? defaultValue,
+]) {
+  if (logLevel == null) {
+    return defaultValue;
+  }
+
+  return logLevel.map((e) => logLevelFromJson(e.toString())).toList();
+}
+
 String? networkNullableToJson(enums.Network? network) {
   return network?.value;
 }
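The two generated client files below expose the new `/api/health/inspection` endpoint; a sketch of calling it through the generated client (the `create` factory and the base URL are assumptions, not shown in this diff):

```dart
import 'cat_gateway_api.enums.swagger.dart' as enums;
import 'cat_gateway_api.swagger.dart';

Future<void> main() async {
  // Assumed: the generated `create` factory and a locally running gateway.
  final api = CatGatewayApi.create(baseUrl: Uri.parse('http://localhost:3030'));

  // Enable debug logging and deep query inspection on the running service.
  final response = await api.apiHealthInspectionGet(
    logLevel: enums.LogLevel.debug,
    queryInspection: enums.DeepQueryInspection.enabled,
  );
  print('inspection options applied: ${response.isSuccessful}');
}
```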
diff --git a/catalyst_voices/packages/catalyst_voices_services/lib/generated/catalyst_gateway/cat_gateway_api.swagger.chopper.dart b/catalyst_voices/packages/catalyst_voices_services/lib/generated/catalyst_gateway/cat_gateway_api.swagger.chopper.dart
index d839455d5e..cbd9f774aa 100644
--- a/catalyst_voices/packages/catalyst_voices_services/lib/generated/catalyst_gateway/cat_gateway_api.swagger.chopper.dart
+++ b/catalyst_voices/packages/catalyst_voices_services/lib/generated/catalyst_gateway/cat_gateway_api.swagger.chopper.dart
@@ -50,6 +50,25 @@ final class _$CatGatewayApi extends CatGatewayApi {
     return client.send<dynamic, dynamic>($request);
   }
 
+  @override
+  Future<Response<dynamic>> _apiHealthInspectionGet({
+    String? logLevel,
+    String? queryInspection,
+  }) {
+    final Uri $url = Uri.parse('/api/health/inspection');
+    final Map<String, dynamic> $params = <String, dynamic>{
+      'log_level': logLevel,
+      'query_inspection': queryInspection,
+    };
+    final Request $request = Request(
+      'GET',
+      $url,
+      client.baseUrl,
+      parameters: $params,
+    );
+    return client.send<dynamic, dynamic>($request);
+  }
+
   @override
   Future<Response<dynamic>> _apiCardanoStakedAdaStakeAddressGet({
     required String? stakeAddress,
diff --git a/catalyst_voices/packages/catalyst_voices_services/lib/generated/catalyst_gateway/cat_gateway_api.swagger.dart b/catalyst_voices/packages/catalyst_voices_services/lib/generated/catalyst_gateway/cat_gateway_api.swagger.dart
index 698049b133..f9212cfa52 100644
--- a/catalyst_voices/packages/catalyst_voices_services/lib/generated/catalyst_gateway/cat_gateway_api.swagger.dart
+++ b/catalyst_voices/packages/catalyst_voices_services/lib/generated/catalyst_gateway/cat_gateway_api.swagger.dart
@@ -75,6 +75,27 @@ abstract class CatGatewayApi extends ChopperService {
   @Get(path: '/api/health/live')
   Future<chopper.Response> _apiHealthLiveGet();
 
+  ///Options for service inspection.
+  ///@param log_level
+  ///@param query_inspection
+  Future<chopper.Response> apiHealthInspectionGet({
+    enums.LogLevel? logLevel,
+    enums.DeepQueryInspection? queryInspection,
+  }) {
+    return _apiHealthInspectionGet(
+        logLevel: logLevel?.value?.toString(),
+        queryInspection: queryInspection?.value?.toString());
+  }
+
+  ///Options for service inspection.
+  ///@param log_level
+  ///@param query_inspection
+  @Get(path: '/api/health/inspection')
+  Future<chopper.Response> _apiHealthInspectionGet({
+    @Query('log_level') String? logLevel,
+    @Query('query_inspection') String? queryInspection,
+  });
+
   ///Get staked ada amount.
   ///@param stake_address The stake address of the user. Should a valid Bech32 encoded address followed by the https://cips.cardano.org/cip/CIP-19/#stake-addresses.
   ///@param network Cardano network type. If omitted network type is identified from the stake address. If specified it must be correspondent to the network type encoded in the stake address. As `preprod` and `preview` network types in the stake address encoded as a `testnet`, to specify `preprod` or `preview` network type use this query parameter.