diff --git a/.github/actions/warm-up-repo/action.yml b/.github/actions/warm-up-repo/action.yml index 39408a7659e..af5b6e87218 100644 --- a/.github/actions/warm-up-repo/action.yml +++ b/.github/actions/warm-up-repo/action.yml @@ -17,7 +17,7 @@ runs: # cache: yarn ## Currently disabled because of frequent timeouts - name: Install WASM tools - uses: taiki-e/install-action@a3df29458daa5c62abace1e6b358018bed57d5b4 # v2.44.43 + uses: taiki-e/install-action@437c908c7e5ee18b63a261286cbe5147219f8a39 # v2.44.44 with: tool: wasm-pack@0.12.1 diff --git a/.github/workflows/bench.yml b/.github/workflows/bench.yml index 545a4ddec73..1db54473a8c 100644 --- a/.github/workflows/bench.yml +++ b/.github/workflows/bench.yml @@ -93,7 +93,7 @@ jobs: - name: Install Rust tools if: steps.benches.outputs.has-rust == 'true' - uses: taiki-e/install-action@a3df29458daa5c62abace1e6b358018bed57d5b4 # v2.44.43 + uses: taiki-e/install-action@437c908c7e5ee18b63a261286cbe5147219f8a39 # v2.44.44 with: tool: just@1.34.0,critcmp@0.1.8 @@ -241,7 +241,7 @@ jobs: - name: Install Rust tools if: steps.benches.outputs.has-rust == 'true' - uses: taiki-e/install-action@a3df29458daa5c62abace1e6b358018bed57d5b4 # v2.44.43 + uses: taiki-e/install-action@437c908c7e5ee18b63a261286cbe5147219f8a39 # v2.44.44 with: tool: just@1.34.0,critcmp@0.1.8 @@ -393,7 +393,7 @@ jobs: [[ ${{ needs.integration-benches.result }} =~ success|skipped ]] - name: Notify Slack on failure - uses: rtCamp/action-slack-notify@65e6fc1ce697e2df8149d9ae9909acc5ec5599ce + uses: rtCamp/action-slack-notify@3154c16259190ff5eb4675d8f012f8dc6ffbd77f if: ${{ failure() && github.event_name == 'merge_group' }} env: SLACK_LINK_NAMES: true diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index 701cffd3be3..8c62bb91cfa 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -100,7 +100,7 @@ jobs: [[ ${{ needs.sourcemaps.result }} =~ success|skipped ]] - name: Notify Slack on failure - uses: rtCamp/action-slack-notify@65e6fc1ce697e2df8149d9ae9909acc5ec5599ce + uses: rtCamp/action-slack-notify@3154c16259190ff5eb4675d8f012f8dc6ffbd77f if: ${{ failure() && github.event_name == 'merge_group' }} env: SLACK_LINK_NAMES: true diff --git a/.github/workflows/hash-backend-cd.yml b/.github/workflows/hash-backend-cd.yml index 8731155b86d..9c1cf35a88f 100644 --- a/.github/workflows/hash-backend-cd.yml +++ b/.github/workflows/hash-backend-cd.yml @@ -522,7 +522,7 @@ jobs: if: ${{ failure() }} steps: - name: Slack Notification - uses: rtCamp/action-slack-notify@65e6fc1ce697e2df8149d9ae9909acc5ec5599ce + uses: rtCamp/action-slack-notify@3154c16259190ff5eb4675d8f012f8dc6ffbd77f env: SLACK_LINK_NAMES: true SLACK_MESSAGE: "Error deploying the HASH backend <@U0143NL4GMP> <@U02NLJY0FGX>" # Notifies C & T diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 9055b1468bb..606ce1d9171 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -105,7 +105,7 @@ jobs: - name: Install Rust tools if: always() && steps.lints.outputs.has-rust == 'true' - uses: taiki-e/install-action@a3df29458daa5c62abace1e6b358018bed57d5b4 # v2.44.43 + uses: taiki-e/install-action@437c908c7e5ee18b63a261286cbe5147219f8a39 # v2.44.44 with: tool: just@1.34.0,cargo-hack@0.6.30,clippy-sarif@0.6.5,sarif-fmt@0.6.5 @@ -342,7 +342,7 @@ jobs: [[ ${{ needs.global.result }} =~ success|skipped ]] - name: Notify Slack on failure - uses: rtCamp/action-slack-notify@65e6fc1ce697e2df8149d9ae9909acc5ec5599ce + uses: 
rtCamp/action-slack-notify@3154c16259190ff5eb4675d8f012f8dc6ffbd77f if: ${{ failure() && github.event_name == 'merge_group' }} env: SLACK_LINK_NAMES: true diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 11b5be34fcd..ad24724e261 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -27,7 +27,7 @@ jobs: NPM_TOKEN: ${{ secrets.NPM_TOKEN }} - name: Notify Slack on failure - uses: rtCamp/action-slack-notify@65e6fc1ce697e2df8149d9ae9909acc5ec5599ce + uses: rtCamp/action-slack-notify@3154c16259190ff5eb4675d8f012f8dc6ffbd77f if: ${{ failure() }} env: SLACK_LINK_NAMES: true diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index be8644f5cbc..af6d05ef804 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -145,7 +145,7 @@ jobs: - name: Install Rust tools if: always() && steps.tests.outputs.has-rust == 'true' - uses: taiki-e/install-action@a3df29458daa5c62abace1e6b358018bed57d5b4 # v2.44.43 + uses: taiki-e/install-action@437c908c7e5ee18b63a261286cbe5147219f8a39 # v2.44.44 with: tool: just@1.34.0,cargo-hack@0.6.30,cargo-nextest@0.9.72,cargo-llvm-cov@0.6.11 @@ -259,7 +259,7 @@ jobs: - name: Install Rust tools if: steps.tests.outputs.has-rust == 'true' - uses: taiki-e/install-action@a3df29458daa5c62abace1e6b358018bed57d5b4 # v2.44.43 + uses: taiki-e/install-action@437c908c7e5ee18b63a261286cbe5147219f8a39 # v2.44.44 with: tool: just@1.34.0,cargo-hack@0.6.30,cargo-nextest@0.9.72,cargo-llvm-cov@0.6.11 @@ -436,7 +436,7 @@ jobs: [[ ${{ needs.publish.result }} =~ success|skipped ]] - name: Notify Slack on failure - uses: rtCamp/action-slack-notify@65e6fc1ce697e2df8149d9ae9909acc5ec5599ce + uses: rtCamp/action-slack-notify@3154c16259190ff5eb4675d8f012f8dc6ffbd77f if: ${{ failure() && github.event_name == 'merge_group' }} env: SLACK_LINK_NAMES: true diff --git a/.github/workflows/tf-apply-hash.yml b/.github/workflows/tf-apply-hash.yml index 542b1ff3800..8136ce3c7dd 100644 --- a/.github/workflows/tf-apply-hash.yml +++ b/.github/workflows/tf-apply-hash.yml @@ -63,7 +63,7 @@ jobs: env: ${{ matrix.env }} - name: Notify Slack on failure - uses: rtCamp/action-slack-notify@65e6fc1ce697e2df8149d9ae9909acc5ec5599ce + uses: rtCamp/action-slack-notify@3154c16259190ff5eb4675d8f012f8dc6ffbd77f if: ${{ failure() }} env: SLACK_LINK_NAMES: true diff --git a/Cargo.lock b/Cargo.lock index 9df3d77c5db..9fec91b4d9a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1045,6 +1045,9 @@ name = "bytes" version = "1.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "428d9aa8fbc0670b7b8d6030a7fadd0f86151cae55e4dbbece15f3780a3dfaf3" +dependencies = [ + "serde", +] [[package]] name = "bytes-utils" @@ -2625,7 +2628,6 @@ dependencies = [ "error-stack", "futures", "harpc-net", - "harpc-service", "harpc-tower", "multiaddr", "thiserror", @@ -2772,7 +2774,6 @@ dependencies = [ "enumflags2", "error-stack", "expect-test", - "harpc-codec", "harpc-types", "proptest", "similar-asserts", diff --git a/apps/hash-ai-worker-ts/package.json b/apps/hash-ai-worker-ts/package.json index b497672ab75..ed9e18d7a03 100644 --- a/apps/hash-ai-worker-ts/package.json +++ b/apps/hash-ai-worker-ts/package.json @@ -103,7 +103,7 @@ "@types/lodash.pickby": "4.6.9", "@types/md5": "2.3.5", "@types/mime-types": "2.1.4", - "@types/node": "20.16.12", + "@types/node": "20.16.13", "@types/papaparse": "5.3.15", "@types/sanitize-html": "2.13.0", "@vitest/coverage-istanbul": "2.1.2", diff --git a/apps/hash-api/package.json b/apps/hash-api/package.json 
index f7d450a105c..1186e2b44a3 100644 --- a/apps/hash-api/package.json +++ b/apps/hash-api/package.json @@ -24,11 +24,11 @@ }, "dependencies": { "@apps/hash-graph": "0.0.0-private", - "@aws-sdk/client-s3": "3.670.0", - "@aws-sdk/client-ses": "3.670.0", - "@aws-sdk/credential-provider-node": "3.670.0", - "@aws-sdk/s3-presigned-post": "3.670.0", - "@aws-sdk/s3-request-presigner": "3.670.0", + "@aws-sdk/client-s3": "3.675.0", + "@aws-sdk/client-ses": "3.675.0", + "@aws-sdk/credential-provider-node": "3.675.0", + "@aws-sdk/s3-presigned-post": "3.675.0", + "@aws-sdk/s3-request-presigner": "3.675.0", "@blockprotocol/core": "0.1.3", "@blockprotocol/type-system": "0.1.2-canary.0", "@graphql-tools/schema": "8.5.1", diff --git a/apps/hash-frontend/package.json b/apps/hash-frontend/package.json index adab49bffea..4de3aa3681d 100644 --- a/apps/hash-frontend/package.json +++ b/apps/hash-frontend/package.json @@ -146,7 +146,7 @@ "eslint": "8.57.0", "graphology-types": "0.24.7", "rimraf": "6.0.1", - "sass": "1.79.5", + "sass": "1.80.3", "typescript": "5.6.3", "webpack": "5.95.0" }, diff --git a/apps/hash-realtime/package.json b/apps/hash-realtime/package.json index df7fadfbd0d..3ae0862d223 100644 --- a/apps/hash-realtime/package.json +++ b/apps/hash-realtime/package.json @@ -24,7 +24,7 @@ "devDependencies": { "@local/eslint-config": "0.0.0-private", "@local/tsconfig": "0.0.0-private", - "@types/node": "20.16.12", + "@types/node": "20.16.13", "@types/set-interval-async": "1.0.3", "eslint": "8.57.0" } diff --git a/apps/hash-search-loader/package.json b/apps/hash-search-loader/package.json index f24d0867922..a7055b1c294 100644 --- a/apps/hash-search-loader/package.json +++ b/apps/hash-search-loader/package.json @@ -24,7 +24,7 @@ "devDependencies": { "@local/eslint-config": "0.0.0-private", "@local/tsconfig": "0.0.0-private", - "@types/node": "20.16.12", + "@types/node": "20.16.13", "eslint": "8.57.0" } } diff --git a/apps/hashdotdev/package.json b/apps/hashdotdev/package.json index b4bcdc215c1..f98764adbed 100644 --- a/apps/hashdotdev/package.json +++ b/apps/hashdotdev/package.json @@ -72,7 +72,7 @@ "@types/gtag.js": "0.0.20", "@types/html-to-text": "9.0.4", "@types/md5": "2.3.5", - "@types/node": "20.16.12", + "@types/node": "20.16.13", "@types/prismjs": "1.26.4", "@types/react": "18.2.68", "@types/react-dom": "18.2.25", diff --git a/apps/plugin-browser/package.json b/apps/plugin-browser/package.json index 3a3243741b2..84fcd53838f 100755 --- a/apps/plugin-browser/package.json +++ b/apps/plugin-browser/package.json @@ -80,7 +80,7 @@ "process": "0.11.10", "react-refresh": "0.14.0", "react-refresh-typescript": "2.0.9", - "sass": "1.79.5", + "sass": "1.80.3", "sass-loader": "13.3.3", "source-map-loader": "3.0.2", "style-loader": "3.3.4", @@ -92,6 +92,6 @@ "webpack": "5.95.0", "webpack-cli": "4.10.0", "webpack-dev-server": "4.15.2", - "zip-webpack-plugin": "4.0.1" + "zip-webpack-plugin": "4.0.2" } } diff --git a/libs/@blockprotocol/graph/package.json b/libs/@blockprotocol/graph/package.json index af64fcac89e..efbed440a0b 100644 --- a/libs/@blockprotocol/graph/package.json +++ b/libs/@blockprotocol/graph/package.json @@ -78,7 +78,7 @@ "@local/eslint-config": "0.0.0-private", "@local/tsconfig": "0.0.0-private", "@types/lodash.isequal": "4.5.8", - "@types/node": "20.16.12", + "@types/node": "20.16.13", "@types/react": "18.2.68", "eslint": "8.57.0", "rimraf": "6.0.1" diff --git a/libs/@blockprotocol/type-system/rust/src/schema/data_type/constraint/mod.rs 
b/libs/@blockprotocol/type-system/rust/src/schema/data_type/constraint/mod.rs index 116102f2f80..e427e097b78 100644 --- a/libs/@blockprotocol/type-system/rust/src/schema/data_type/constraint/mod.rs +++ b/libs/@blockprotocol/type-system/rust/src/schema/data_type/constraint/mod.rs @@ -403,4 +403,106 @@ mod tests { assert_eq!(errors, actual_errors); } + + #[test] + fn intersect_typed_any_of_single() { + check_schema_intersection( + [ + json!({ + "anyOf": [ + { + "type": "string", + "minLength": 8, + "description": "A string with a minimum length of 8 characters", + }, + { + "type": "number", + "minimum": 0, + "description": "A number greater than or equal to 0", + }, + ] + }), + json!({ + "type": "string", + "maxLength": 10, + }), + ], + [json!({ + "anyOf": [ + { + "type": "string", + "minLength": 8, + "maxLength": 10, + "description": "A string with a minimum length of 8 characters", + } + ] + })], + ); + } + + #[test] + fn intersect_typed_any_of_multi() { + check_schema_intersection( + [ + json!({ + "type": "string", + "maxLength": 10, + }), + json!({ + "anyOf": [ + { + "type": "string", + "minLength": 8, + }, + { + "type": "string", + "maxLength": 25, + }, + ] + }), + ], + [json!({ + "anyOf": [ + { + "type": "string", + "minLength": 8, + "maxLength": 10, + }, + { + "type": "string", + "maxLength": 10, + }, + ] + })], + ); + + check_schema_intersection( + [ + json!({ + "type": "string", + "maxLength": 10, + }), + json!({ + "anyOf": [ + { + "type": "string", + "minLength": 8, + }, + { + "type": "string", + "maxLength": 25, + }, + ] + }), + json!({ + "type": "string", + "maxLength": 5, + }), + ], + [json!({ + "type": "string", + "maxLength": 5, + })], + ); + } } diff --git a/libs/@blockprotocol/type-system/rust/src/schema/data_type/constraint/string.rs b/libs/@blockprotocol/type-system/rust/src/schema/data_type/constraint/string.rs index fd8fd23129a..3d844cad777 100644 --- a/libs/@blockprotocol/type-system/rust/src/schema/data_type/constraint/string.rs +++ b/libs/@blockprotocol/type-system/rust/src/schema/data_type/constraint/string.rs @@ -5,20 +5,20 @@ use core::{ use std::{collections::HashSet, sync::OnceLock}; use email_address::EmailAddress; -use error_stack::{Report, ReportSink, ResultExt, bail}; +use error_stack::{Report, ReportSink, ResultExt, TryReportIteratorExt as _, bail, ensure}; use iso8601_duration::{Duration, ParseDurationError}; use regex::Regex; use serde::{Deserialize, Serialize}; -use serde_json::Value as JsonValue; +use serde_json::{Value as JsonValue, json}; use thiserror::Error; use url::{Host, Url}; use uuid::Uuid; use crate::schema::{ - ConstraintError, JsonSchemaValueType, + ConstraintError, JsonSchemaValueType, SingleValueConstraints, data_type::{ closed::ResolveClosedDataTypeError, - constraint::{Constraint, ConstraintValidator}, + constraint::{Constraint, ConstraintValidator, ValueConstraints}, }, }; @@ -228,21 +228,131 @@ pub enum StringSchema { } impl Constraint for StringSchema { + #[expect(clippy::too_many_lines)] fn intersection( self, other: Self, ) -> Result<(Self, Option), Report> { Ok(match (self, other) { - (Self::Constrained(lhs), Self::Constrained(rhs)) => { - let (combined, remainder) = lhs.intersection(rhs)?; - ( - Self::Constrained(combined), - remainder.map(Self::Constrained), - ) + (Self::Constrained(lhs), Self::Constrained(rhs)) => lhs + .intersection(rhs) + .map(|(lhs, rhs)| (Self::Constrained(lhs), rhs.map(Self::Constrained)))?, + (Self::Const { r#const }, Self::Constrained(constraints)) + | (Self::Constrained(constraints), Self::Const { r#const 
}) => {
+                constraints.validate_value(&r#const).change_context(
+                    ResolveClosedDataTypeError::UnsatisfiedConstraint(
+                        json!(r#const),
+                        ValueConstraints::Typed(SingleValueConstraints::String(Self::Constrained(
+                            constraints,
+                        ))),
+                    ),
+                )?;
+
+                (Self::Const { r#const }, None)
+            }
+            (Self::Enum { r#enum }, Self::Constrained(constraints))
+            | (Self::Constrained(constraints), Self::Enum { r#enum }) => {
+                // We use the fast way to filter the values that pass the constraints and collect
+                // them. In most cases this will result in at least one value
+                // passing the constraints.
+                let passed = r#enum
+                    .iter()
+                    .filter(|&value| constraints.is_valid(value))
+                    .cloned()
+                    .collect::<HashSet<_>>();
+
+                match passed.len() {
+                    0 => {
+                        // We now properly capture errors to return them to the caller.
+                        let () = r#enum
+                            .iter()
+                            .map(|value| {
+                                constraints.validate_value(value).change_context(
+                                    ResolveClosedDataTypeError::UnsatisfiedEnumConstraintVariant(
+                                        json!(*value),
+                                    ),
+                                )
+                            })
+                            .try_collect_reports()
+                            .change_context(
+                                ResolveClosedDataTypeError::UnsatisfiedEnumConstraint(
+                                    ValueConstraints::Typed(SingleValueConstraints::String(
+                                        Self::Constrained(constraints.clone()),
+                                    )),
+                                ),
+                            )?;
+
+                        // This should only happen if `enum` is malformed and has no values. This
+                        // should be caught by the schema validation, however, if this still happens
+                        // we return an error as validating an empty enum will always fail.
+                        bail!(ResolveClosedDataTypeError::UnsatisfiedEnumConstraint(
+                            ValueConstraints::Typed(SingleValueConstraints::String(
+                                Self::Constrained(constraints),
+                            )),
+                        ))
+                    }
+                    1 => (
+                        Self::Const {
+                            r#const: passed.into_iter().next().unwrap_or_else(|| {
+                                unreachable!(
+                                    "we have exactly one value in the enum that passed the \
+                                     constraints"
+                                )
+                            }),
+                        },
+                        None,
+                    ),
+                    _ => (Self::Enum { r#enum: passed }, None),
+                }
+            }
+            (Self::Const { r#const: lhs }, Self::Const { r#const: rhs }) => {
+                if lhs == rhs {
+                    (Self::Const { r#const: lhs }, None)
+                } else {
+                    bail!(ResolveClosedDataTypeError::ConflictingConstValues(
+                        json!(lhs),
+                        json!(rhs),
+                    ))
+                }
+            }
+            (Self::Enum { r#enum: lhs }, Self::Enum { r#enum: rhs }) => {
+                let intersection = lhs.intersection(&rhs).cloned().collect::<HashSet<_>>();
+
+                match intersection.len() {
+                    0 => bail!(ResolveClosedDataTypeError::ConflictingEnumValues(
+                        lhs.iter().map(|val| json!(*val)).collect(),
+                        rhs.iter().map(|val| json!(*val)).collect(),
+                    )),
+                    1 => (
+                        Self::Const {
+                            r#const: intersection.into_iter().next().unwrap_or_else(|| {
+                                unreachable!(
+                                    "we have exactly one value in the enum intersection"
+                                )
+                            }),
+                        },
+                        None,
+                    ),
+                    _ => (
+                        Self::Enum {
+                            r#enum: intersection,
+                        },
+                        None,
+                    ),
+                }
+            }
+            (Self::Const { r#const }, Self::Enum { r#enum })
+            | (Self::Enum { r#enum }, Self::Const { r#const }) => {
+                ensure!(
+                    r#enum.contains(&r#const),
+                    ResolveClosedDataTypeError::ConflictingConstEnumValue(
+                        json!(r#const),
+                        r#enum.iter().map(|val| json!(*val)).collect(),
+                    )
+                );
+
+                (Self::Const { r#const }, None)
+            }
-            // TODO: Implement folding for string constraints
-            //   see https://linear.app/hash/issue/H-3428/implement-folding-for-string-constraints
-            (lhs, rhs) => (lhs, Some(rhs)),
         })
     }
 }
@@ -307,7 +417,7 @@ impl ConstraintValidator<JsonValue> for StringSchema {
     }
 }
 
-#[derive(Debug, Clone, Serialize, Deserialize)]
+#[derive(Debug, Default, Clone, Serialize, Deserialize)]
 #[cfg_attr(target_arch = "wasm32", derive(tsify::Tsify))]
 #[serde(rename_all = "camelCase", deny_unknown_fields)]
 pub struct StringConstraints {
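[Editor's note] The match arms above fold `const`/`enum`/constraint pairs into the narrowest surviving schema: enum variants are filtered through the other schema's constraints, a single survivor collapses to a `const`, and no survivor is a conflict. A minimal, self-contained sketch of that folding rule — hypothetical names, not the crate's actual API:

```rust
use std::collections::HashSet;

#[derive(Debug, PartialEq)]
enum Folded {
    Const(String),
    Enum(HashSet<String>),
    Unsatisfiable,
}

/// Filter the enum variants through another schema's validity check, then
/// collapse: none left is a conflict, one left becomes a `const`, several
/// remain an `enum`.
fn fold_enum(values: &HashSet<String>, is_valid: impl Fn(&str) -> bool) -> Folded {
    let passed: HashSet<String> = values
        .iter()
        .filter(|value| is_valid(value.as_str()))
        .cloned()
        .collect();
    match passed.len() {
        0 => Folded::Unsatisfiable,
        1 => Folded::Const(passed.into_iter().next().expect("exactly one value")),
        _ => Folded::Enum(passed),
    }
}

fn main() {
    let values: HashSet<String> = ["foo", "foobar"].map(String::from).into();
    // A `minLength: 5` constraint keeps only "foobar", so the fold yields a const.
    assert_eq!(
        fold_enum(&values, |value| value.len() >= 5),
        Folded::Const("foobar".to_owned())
    );
}
```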
@@ -328,12 +438,48 @@ pub struct StringConstraints {
 impl Constraint for StringConstraints {
     fn intersection(
-        self,
+        mut self,
         other: Self,
     ) -> Result<(Self, Option<Self>), Report<ResolveClosedDataTypeError>> {
-        // TODO: Implement folding for string constraints
-        //   see https://linear.app/hash/issue/H-3428/implement-folding-for-string-constraints
-        Ok((self, Some(other)))
+        let mut remainder = None::<Self>;
+
+        self.min_length = match self.min_length.zip(other.min_length) {
+            Some((lhs, rhs)) => Some(lhs.max(rhs)),
+            None => self.min_length.or(other.min_length),
+        };
+        self.max_length = match self.max_length.zip(other.max_length) {
+            Some((lhs, rhs)) => Some(lhs.min(rhs)),
+            None => self.max_length.or(other.max_length),
+        };
+        match self.format.zip(other.format) {
+            Some((lhs, rhs)) if lhs == rhs => {}
+            Some((_, rhs)) => {
+                remainder.get_or_insert_default().format = Some(rhs);
+            }
+            None => self.format = self.format.or(other.format),
+        };
+        match self.pattern.as_ref().zip(other.pattern.as_ref()) {
+            Some((lhs, rhs)) if lhs.as_str() == rhs.as_str() => {}
+            Some((_, _)) => {
+                remainder.get_or_insert_default().pattern = other.pattern;
+            }
+            None => self.pattern = self.pattern.or(other.pattern),
+        };
+
+        if let Some((min_length, max_length)) = self.min_length.zip(self.max_length) {
+            ensure!(
+                min_length <= max_length,
+                ResolveClosedDataTypeError::UnsatisfiableConstraint(ValueConstraints::Typed(
+                    SingleValueConstraints::String(StringSchema::Constrained(Self {
+                        min_length: Some(min_length),
+                        max_length: Some(max_length),
+                        ..Self::default()
+                    }),)
+                ),)
+            );
+        }
+
+        Ok((self, remainder))
     }
 }
@@ -411,12 +557,16 @@ mod tests {
     use super::*;
     use crate::schema::{
-        JsonSchemaValueType,
+        JsonSchemaValueType, SingleValueConstraints,
         data_type::constraint::{
             ValueConstraints,
-            tests::{check_constraints, check_constraints_error, read_schema},
+            tests::{
+                check_constraints, check_constraints_error, check_schema_intersection,
+                check_schema_intersection_error, intersect_schemas, read_schema,
+            },
         },
     };
+
     #[test]
     fn unconstrained() {
         let string_schema = read_schema(&json!({
@@ -528,4 +678,594 @@ mod tests {
     }))
     .expect_err("Deserialized string schema with mixed properties");
 }
+
+    #[test]
+    fn intersect_default() {
+        check_schema_intersection(
+            [
+                json!({
+                    "type": "string",
+                }),
+                json!({
+                    "type": "string",
+                }),
+            ],
+            [json!({
+                "type": "string",
+            })],
+        );
+    }
+
+    #[test]
+    fn intersect_length_one() {
+        check_schema_intersection(
+            [
+                json!({
+                    "type": "string",
+                    "minLength": 5,
+                    "maxLength": 10,
+                }),
+                json!({
+                    "type": "string",
+                }),
+            ],
+            [json!({
+                "type": "string",
+                "minLength": 5,
+                "maxLength": 10,
+            })],
+        );
+    }
+
+    #[test]
+    fn intersect_length_both() {
+        check_schema_intersection(
+            [
+                json!({
+                    "type": "string",
+                    "minLength": 5,
+                    "maxLength": 10,
+                }),
+                json!({
+                    "type": "string",
+                    "minLength": 7,
+                    "maxLength": 12,
+                }),
+            ],
+            [json!({
+                "type": "string",
+                "minLength": 7,
+                "maxLength": 10,
+            })],
+        );
+    }
+
+    #[test]
+    fn intersect_length_invalid() {
+        check_schema_intersection_error(
+            [
+                json!({
+                    "type": "string",
+                    "minLength": 5,
+                    "maxLength": 10,
+                }),
+                json!({
+                    "type": "string",
+                    "minLength": 12,
+                    "maxLength": 15,
+                }),
+            ],
+            [ResolveClosedDataTypeError::UnsatisfiableConstraint(
+                from_value(json!(
+                    {
+                        "type": "string",
+                        "minLength": 12,
+                        "maxLength": 10,
+                    }
+                ))
+                .expect("Failed to parse schema"),
+            )],
+        );
+    }
+
+    #[test]
+    fn intersect_pattern_one() {
+        check_schema_intersection(
+            [
+                json!({
+                    "type": "string",
+                    "pattern": "^[0-9]{5}$",
+                }),
+                json!({
+                    "type": "string",
+                }),
+            ],
+            [json!({
+                "type": "string",
"pattern": "^[0-9]{5}$", + })], + ); + } + + #[test] + fn intersect_pattern_both_different() { + check_schema_intersection( + [ + json!({ + "type": "string", + "pattern": r"^\d{5}$", + }), + json!({ + "type": "string", + "pattern": "^[0-9]{5}$", + }), + ], + [ + json!({ + "type": "string", + "pattern": r"^\d{5}$", + }), + json!({ + "type": "string", + "pattern": "^[0-9]{5}$", + }), + ], + ); + } + + #[test] + fn intersect_pattern_both_same() { + check_schema_intersection( + [ + json!({ + "type": "string", + "pattern": r"^\d{5}$", + }), + json!({ + "type": "string", + "pattern": r"^\d{5}$", + }), + ], + [json!({ + "type": "string", + "pattern": r"^\d{5}$", + })], + ); + } + + #[test] + fn intersect_format_one() { + check_schema_intersection( + [ + json!({ + "type": "string", + "format": "uri", + }), + json!({ + "type": "string", + }), + ], + [json!({ + "type": "string", + "format": "uri", + })], + ); + } + + #[test] + fn intersect_format_both_different() { + check_schema_intersection( + [ + json!({ + "type": "string", + "format": "uri", + }), + json!({ + "type": "string", + "format": "hostname", + }), + ], + [ + json!({ + "type": "string", + "format": "uri", + }), + json!({ + "type": "string", + "format": "hostname", + }), + ], + ); + } + + #[test] + fn intersect_format_both_same() { + check_schema_intersection( + [ + json!({ + "type": "string", + "format": "uri", + }), + json!({ + "type": "string", + "format": "uri", + }), + ], + [json!({ + "type": "string", + "format": "uri", + })], + ); + } + + #[test] + fn intersect_const_const_same() { + check_schema_intersection( + [ + json!({ + "type": "string", + "const": "foo", + }), + json!({ + "type": "string", + "const": "foo", + }), + ], + [json!({ + "type": "string", + "const": "foo", + })], + ); + } + + #[test] + fn intersect_const_const_different() { + check_schema_intersection_error( + [ + json!({ + "type": "string", + "const": "foo", + }), + json!({ + "type": "string", + "const": "bar", + }), + ], + [ResolveClosedDataTypeError::ConflictingConstValues( + json!("foo"), + json!("bar"), + )], + ); + } + + #[test] + fn intersect_const_enum_compatible() { + check_schema_intersection( + [ + json!({ + "type": "string", + "const": "foo", + }), + json!({ + "type": "string", + "enum": ["foo", "bar"], + }), + ], + [json!({ + "type": "string", + "const": "foo", + })], + ); + } + + #[test] + fn intersect_const_enum_incompatible() { + let report = intersect_schemas([ + json!({ + "type": "string", + "const": "foo", + }), + json!({ + "type": "string", + "enum": ["bar", "baz"], + }), + ]) + .expect_err("Intersected invalid schemas"); + + let Some(ResolveClosedDataTypeError::ConflictingConstEnumValue(lhs, rhs)) = + report.downcast_ref::() + else { + panic!("Expected conflicting const-enum values error"); + }; + assert_eq!(lhs, &json!("foo")); + + assert_eq!(rhs.len(), 2); + assert!(rhs.contains(&json!("bar"))); + assert!(rhs.contains(&json!("baz"))); + } + + #[test] + fn intersect_enum_enum_compatible_multi() { + let intersection = intersect_schemas([ + json!({ + "type": "string", + "enum": ["foo", "bar", "baz"], + }), + json!({ + "type": "string", + "enum": ["foo", "baz", "qux"], + }), + json!({ + "type": "string", + "enum": ["foo", "bar", "qux", "baz"], + }), + ]) + .expect("Intersected invalid constraints") + .into_iter() + .map(|schema| { + from_value::(schema).expect("Failed to deserialize schema") + }) + .collect::>(); + + // We need to manually check the intersection because the order of the enum values is not + // guaranteed. 
+ assert_eq!(intersection.len(), 1); + let SingleValueConstraints::String(StringSchema::Enum { r#enum }) = &intersection[0] else { + panic!("Expected string enum schema"); + }; + assert_eq!(r#enum.len(), 2); + assert!(r#enum.contains("foo")); + assert!(r#enum.contains("baz")); + } + + #[test] + fn intersect_enum_enum_compatible_single() { + check_schema_intersection( + [ + json!({ + "type": "string", + "enum": ["foo", "bar"], + }), + json!({ + "type": "string", + "enum": ["foo", "baz"], + }), + json!({ + "type": "string", + "enum": ["foo", "qux"], + }), + ], + [json!({ + "type": "string", + "const": "foo", + })], + ); + } + + #[test] + fn intersect_enum_enum_incompatible() { + let report = intersect_schemas([ + json!({ + "type": "string", + "enum": ["foo", "bar"], + }), + json!({ + "type": "string", + "enum": ["baz", "qux"], + }), + ]) + .expect_err("Intersected invalid schemas"); + + let Some(ResolveClosedDataTypeError::ConflictingEnumValues(lhs, rhs)) = + report.downcast_ref::() + else { + panic!("Expected conflicting enum values error"); + }; + assert_eq!(lhs.len(), 2); + assert!(lhs.contains(&json!("foo"))); + assert!(lhs.contains(&json!("bar"))); + + assert_eq!(rhs.len(), 2); + assert!(rhs.contains(&json!("baz"))); + assert!(rhs.contains(&json!("qux"))); + } + + #[test] + fn intersect_const_constraint_compatible() { + check_schema_intersection( + [ + json!({ + "type": "string", + "const": "foo", + }), + json!({ + "type": "string", + "minLength": 3, + }), + ], + [json!({ + "type": "string", + "const": "foo", + })], + ); + } + + #[test] + fn intersect_const_constraint_incompatible() { + check_schema_intersection_error( + [ + json!({ + "type": "string", + "const": "foo", + }), + json!({ + "type": "string", + "minLength": 5, + }), + ], + [ResolveClosedDataTypeError::UnsatisfiedConstraint( + json!("foo"), + from_value(json!({ + "type": "string", + "minLength": 5, + })) + .expect("Failed to parse schema"), + )], + ); + } + + #[test] + fn intersect_enum_constraint_compatible_single() { + check_schema_intersection( + [ + json!({ + "type": "string", + "enum": ["foo", "foobar"], + }), + json!({ + "type": "string", + "minLength": 5, + }), + ], + [json!({ + "type": "string", + "const": "foobar", + })], + ); + } + + #[test] + fn intersect_enum_constraint_compatible_multi() { + let intersection = intersect_schemas([ + json!({ + "type": "string", + "enum": ["foo", "foobar", "bar"], + }), + json!({ + "type": "string", + "maxLength": 3, + }), + ]) + .expect("Intersected invalid constraints") + .into_iter() + .map(|schema| { + from_value::(schema).expect("Failed to deserialize schema") + }) + .collect::>(); + + // We need to manually check the intersection because the order of the enum values is not + // guaranteed. 
+ assert_eq!(intersection.len(), 1); + let SingleValueConstraints::String(StringSchema::Enum { r#enum }) = &intersection[0] else { + panic!("Expected string enum schema"); + }; + assert_eq!(r#enum.len(), 2); + assert!(r#enum.contains("foo")); + assert!(r#enum.contains("bar")); + } + + #[test] + fn intersect_enum_constraint_incompatible() { + check_schema_intersection_error( + [ + json!({ + "type": "string", + "enum": ["foo", "bar"], + }), + json!({ + "type": "string", + "minLength": 5, + }), + ], + [ + ResolveClosedDataTypeError::UnsatisfiedEnumConstraint( + from_value(json!({ + "type": "string", + "minLength": 5, + })) + .expect("Failed to parse schema"), + ), + ResolveClosedDataTypeError::UnsatisfiedEnumConstraintVariant(json!("foo")), + ResolveClosedDataTypeError::UnsatisfiedEnumConstraintVariant(json!("bar")), + ], + ); + } + + #[test] + fn intersect_mixed() { + check_schema_intersection( + [ + json!({ + "type": "string", + "minLength": 5, + }), + json!({ + "type": "string", + "pattern": "^[0-9]{5}$", + }), + json!({ + "type": "string", + "minLength": 8, + }), + json!({ + "type": "string", + "format": "uri", + }), + json!({ + "type": "string", + "maxLength": 10, + }), + json!({ + "type": "string", + "pattern": r"^\d{5}$", + }), + ], + [ + json!({ + "type": "string", + "minLength": 8, + "maxLength": 10, + "pattern": "^[0-9]{5}$", + "format": "uri", + }), + json!({ + "type": "string", + "pattern": r"^\d{5}$", + }), + ], + ); + + check_schema_intersection( + [ + json!({ + "type": "string", + "minLength": 2, + }), + json!({ + "type": "string", + "maxLength": 8, + }), + json!({ + "type": "string", + "format": "hostname", + }), + json!({ + "type": "string", + "maxLength": 10, + }), + json!({ + "type": "string", + "pattern": "^[a-z]{3}$", + }), + json!({ + "type": "string", + "enum": ["foo", "foobar"], + }), + ], + [json!({ + "type": "string", + "const": "foo", + })], + ); + } } diff --git a/libs/@blockprotocol/type-system/typescript/package.json b/libs/@blockprotocol/type-system/typescript/package.json index 5089aaaeb42..9ae1369c166 100644 --- a/libs/@blockprotocol/type-system/typescript/package.json +++ b/libs/@blockprotocol/type-system/typescript/package.json @@ -59,7 +59,7 @@ "@rollup/plugin-node-resolve": "15.3.0", "@rollup/plugin-typescript": "12.1.1", "@rollup/plugin-wasm": "6.2.2", - "@types/node": "20.16.12", + "@types/node": "20.16.13", "@types/react": "18.2.68", "@vitest/coverage-istanbul": "2.1.2", "eslint": "8.57.0", diff --git a/libs/@local/harpc/client/Cargo.toml b/libs/@local/harpc/client/Cargo.toml index d51dcc46c58..367a596353b 100644 --- a/libs/@local/harpc/client/Cargo.toml +++ b/libs/@local/harpc/client/Cargo.toml @@ -18,7 +18,6 @@ tower = { workspace = true, public = true } # Private workspace dependencies error-stack = { workspace = true } harpc-net = { workspace = true } -harpc-service = { workspace = true } # Private third-party dependencies bytes = { workspace = true } diff --git a/libs/@local/harpc/client/package.json b/libs/@local/harpc/client/package.json index 90854a29ab9..9f7dca41e65 100644 --- a/libs/@local/harpc/client/package.json +++ b/libs/@local/harpc/client/package.json @@ -6,7 +6,6 @@ "dependencies": { "@rust/error-stack": "0.5.0", "@rust/harpc-net": "0.0.0-private", - "@rust/harpc-service": "0.0.0-private", "@rust/harpc-tower": "0.0.0-private" } } diff --git a/libs/@local/harpc/codec/src/decode.rs b/libs/@local/harpc/codec/src/decode.rs index 2c79f0c48cb..d23877ca709 100644 --- a/libs/@local/harpc/codec/src/decode.rs +++ 
b/libs/@local/harpc/codec/src/decode.rs
@@ -1,5 +1,4 @@
-use bytes::{Buf, Bytes};
-use error_stack::{Context, Report};
+use bytes::Buf;
 use futures_core::{Stream, TryStream};
 
 pub trait Decoder {
@@ -15,45 +14,3 @@ pub trait Decoder {
         T: serde::de::DeserializeOwned,
         S: TryStream + Send;
 }
-
-pub trait ErrorDecoder {
-    type Error;
-
-    /// Type of recovery information.
-    ///
-    /// This type represents recovery information used when error encoding fails.
-    /// During error encoding with `serde`, failures can occur. To prevent a complete
-    /// encoding process failure and ensure some error information reaches the user,
-    /// we encode any serialization errors as recovery errors.
-    ///
-    /// Unlike regular error encoding, recovery error encoding is guaranteed to succeed.
-    /// The codec itself, not the user, determines the recovery type, this is often just a simple
-    /// [`Display`] representation of the serialization error.
-    ///
-    /// [`Display`]: core::fmt::Display
-    type Recovery;
-
-    /// Decodes an error from a stream of bytes.
-    ///
-    /// # Errors
-    ///
-    /// Returns `Self::Error` if decoding fails.
-    fn decode_error<E>(self, bytes: Bytes) -> Result<E, Self::Error>
-    where
-        E: serde::de::DeserializeOwned;
-
-    /// Decodes a report from a stream of bytes.
-    ///
-    /// # Errors
-    ///
-    /// Returns `Self::Error` if decoding fails.
-    fn decode_report<C>(self, bytes: Bytes) -> Result<Report<C>, Self::Error>
-    where
-        C: Context;
-
-    /// Decodes recovery information from a stream of bytes.
-    ///
-    /// # Errors
-    ///
-    /// Returns `Self::Error` if decoding fails.
-    fn decode_recovery(self, bytes: Bytes) -> Self::Recovery;
-}
diff --git a/libs/@local/harpc/codec/src/encode.rs b/libs/@local/harpc/codec/src/encode.rs
index 7607d6481cc..31dd83320a0 100644
--- a/libs/@local/harpc/codec/src/encode.rs
+++ b/libs/@local/harpc/codec/src/encode.rs
@@ -1,11 +1,6 @@
-use core::error::Error;
-
 use bytes::Buf;
-use error_stack::{Context, Report};
 use futures_core::Stream;
 
-use crate::error::EncodedError;
-
 pub trait Encoder {
     type Buf: Buf;
     type Error;
@@ -19,26 +14,3 @@ pub trait Encoder {
         S: Stream + Send,
         T: serde::Serialize;
 }
-
-/// Encode an error into a byte stream.
-///
-/// # Contract
-///
-/// Implementors of this trait must ensure that each network error is preceded by a 1-byte tag.
-///
-/// The tag `0xFF` is reserved for errors that happen during encoding.
-pub trait ErrorEncoder {
-    /// Encode a network error.
-    ///
-    /// The tag for a network error is `0x00`.
-    fn encode_error<E>(self, error: E) -> EncodedError
-    where
-        E: Error + serde::Serialize;
-
-    /// Encode a report.
-    ///
-    /// the tag for a report is `0x01`.
-    fn encode_report<C>(self, report: Report<C>) -> EncodedError
-    where
-        C: Context;
-}
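[Editor's note] The removed `ErrorEncoder`/`ErrorDecoder` traits serialized errors through serde behind a 1-byte tag; the `error.rs` rewrite that follows replaces them with a `NetworkError` that just captures an `ErrorCode` plus the error's `Display` output. A hedged sketch of the `provide`-based code lookup that makes this work — nightly `error_generic_member_access`, with `ErrorCode` stubbed locally rather than taken from `harpc_types`, mirroring the `ExampleError` test type later in this diff:

```rust
#![feature(error_generic_member_access)]

use core::error::{request_value, Error, Request};
use core::fmt;

/// Stand-in for `harpc_types::error_code::ErrorCode` (hypothetical here).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct ErrorCode(u16);

#[derive(Debug)]
struct LimitReached;

impl fmt::Display for LimitReached {
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt.write_str("transaction limit reached")
    }
}

impl Error for LimitReached {
    fn provide<'a>(&'a self, request: &mut Request<'a>) {
        // The capture path asks for an `ErrorCode` by type; errors that
        // provide none fall back to an internal-server-error code.
        request.provide_value(ErrorCode(0x08));
    }
}

fn main() {
    let code = request_value::<ErrorCode>(&LimitReached).unwrap_or(ErrorCode(500));
    assert_eq!(code, ErrorCode(0x08));
}
```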
diff --git a/libs/@local/harpc/codec/src/error.rs b/libs/@local/harpc/codec/src/error.rs
index a3c9ad1fbae..af39625ba61 100644
--- a/libs/@local/harpc/codec/src/error.rs
+++ b/libs/@local/harpc/codec/src/error.rs
@@ -1,223 +1,185 @@
-use core::marker::PhantomData;
+use core::{
+    error::Error,
+    fmt::{self, Debug, Display, Write},
+};
 
-use bytes::{Buf, BufMut, Bytes, BytesMut};
+use bytes::{BufMut, Bytes, BytesMut};
+use error_stack::Report;
 use harpc_types::error_code::ErrorCode;
 
-use self::kind::ErrorKind;
+fn error_request_error_code<E>(error: &E) -> ErrorCode
+where
+    E: core::error::Error,
+{
+    core::error::request_ref(error)
+        .copied()
+        .or_else(|| core::error::request_value(error))
+        .unwrap_or(ErrorCode::INTERNAL_SERVER_ERROR)
+}
+
+fn report_request_error_code<C>(report: &Report<C>) -> ErrorCode {
+    report
+        .request_ref()
+        .next()
+        .copied()
+        .or_else(|| report.request_value().next())
+        .unwrap_or(ErrorCode::INTERNAL_SERVER_ERROR)
+}
 
-/// An error that is has been fully encoded and can be sent or received over the network.
-///
-/// Essentially a compiled version of a `NetworkError` or `Report` into it's wire format.
-///
-/// An `EncodedError` is constructed through the `ErrorBuffer`.
-#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
-pub struct EncodedError {
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct NetworkError {
     code: ErrorCode,
     bytes: Bytes,
 }
 
-impl EncodedError {
-    pub fn new(code: ErrorCode, bytes: Bytes) -> Option<Self> {
-        let &first = bytes.first()?;
-
-        kind::Tag::variants()
-            .into_iter()
-            .any(|tag| tag as u8 == first)
-            .then(|| Self { code, bytes })
-    }
-
-    pub const fn code(&self) -> ErrorCode {
-        self.code
-    }
-
-    pub const fn bytes(&self) -> &Bytes {
-        &self.bytes
-    }
+impl NetworkError {
+    #[expect(
+        clippy::cast_possible_truncation,
+        clippy::big_endian_bytes,
+        reason = "numbers are always encoded in big-endian in our encoding scheme"
+    )]
+    fn capture_display(value: &impl Display) -> Bytes {
+        let mut buffer = BytesMut::new();
+        buffer.put_u32(0);
 
-    pub fn into_parts(self) -> (ErrorCode, Bytes) {
-        (self.code, self.bytes)
-    }
-}
+        write!(&mut buffer, "{value}").unwrap_or_else(|_error| {
+            unreachable!("`BytesMut` has a capacity of `usize::MAX`");
+        });
 
-pub trait NetworkError {
-    fn code(&self) -> ErrorCode;
-}
+        // The length is not necessarily needed if we already have the total message, although it is
+        // absolutely necessary for the `NetworkError` to be able to be deserialized in a streaming
+        // fashion.
+        let length = buffer.len() - 4;
+        debug_assert!(
+            u32::try_from(length).is_ok(),
+            "debug message should be smaller than 4GiB",
+        );
+        let length = length as u32;
 
-impl<T> NetworkError for T
-where
-    T: core::error::Error,
-{
-    fn code(&self) -> ErrorCode {
-        core::error::request_ref::<ErrorCode>(self)
-            .copied()
-            .or_else(|| core::error::request_value::<ErrorCode>(self))
-            .unwrap_or(ErrorCode::INTERNAL_SERVER_ERROR)
-    }
-}
+        buffer[..4].copy_from_slice(&length.to_be_bytes());
 
-pub mod kind {
-    #[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
-    #[repr(u8)]
-    pub enum Tag {
-        NetworkError = 0x01,
-        Report = 0x02,
-        Recovery = 0xFF,
+        buffer.freeze()
     }
 
-    impl Tag {
-        pub(crate) fn variants() -> impl IntoIterator<Item = Self> {
-            [Self::NetworkError, Self::Report, Self::Recovery]
-        }
-
-        #[must_use]
-        pub const fn from_u8(value: u8) -> Option<Self> {
-            match value {
-                0x01 => Some(Self::NetworkError),
-                0x02 => Some(Self::Report),
-                0xFF => Some(Self::Recovery),
-                _ => None,
-            }
+    #[must_use]
+    pub fn capture_error<E>(error: &E) -> Self
+    where
+        E: core::error::Error,
+    {
+        Self {
+            code: error_request_error_code(error),
+            bytes: Self::capture_display(error),
         }
     }
 
-    pub trait ErrorKind {
-        fn tag() -> Tag;
+    #[must_use]
+    pub fn capture_report<C>(report: &Report<C>) -> Self {
+        Self {
+            code: report_request_error_code(report),
+            bytes: Self::capture_display(report),
+        }
     }
 
-    pub struct NetworkError {
-        _private: (),
-    }
-    impl ErrorKind for NetworkError {
-        fn tag() -> Tag {
-            Tag::NetworkError
-        }
+    pub const fn code(&self) -> ErrorCode {
+        self.code
     }
 
-    pub struct Report {
-        _private: (),
+    pub const fn bytes(&self) -> &Bytes {
+        &self.bytes
     }
-    impl ErrorKind for Report {
-        fn tag() -> Tag {
-            Tag::Report
-        }
+    pub fn into_bytes(self) -> Bytes {
+        self.bytes
    }
 
-    pub struct Recovery {
-        _private: (),
+    pub fn into_parts(self) -> (ErrorCode, Bytes) {
+        (self.code, self.bytes)
     }
-    impl ErrorKind for Recovery {
-        fn tag() -> Tag {
-            Tag::Recovery
+    /// Constructs a `NetworkError` from an `ErrorCode` and `Bytes`.
+    ///
+    /// # Errors
+    ///
+    /// This function will return an error if the length encoded in the first 4 bytes
+    /// of the `bytes` parameter does not match the actual length of the remaining data.
+ #[expect( + clippy::big_endian_bytes, + clippy::panic_in_result_fn, + clippy::missing_panics_doc, + reason = "numbers are always encoded in big-endian in our encoding scheme" + )] + pub fn try_from_parts(code: ErrorCode, bytes: Bytes) -> Result { + let slice = bytes.as_ref(); + if slice.len() < 4 { + return Err(bytes); } - } -} -pub struct ErrorBuffer { - kind: PhantomData *const T>, - buffer: BytesMut, -} + // assert only exists to elide bounds checks and satisfy clippy + assert!(slice.len() >= 4); -impl ErrorBuffer -where - T: ErrorKind, -{ - fn new() -> Self { - let mut buffer = BytesMut::new(); - buffer.put_u8(T::tag() as u8); + let expected_length = u32::from_be_bytes([slice[0], slice[1], slice[2], slice[3]]) as usize; + let actual_length = bytes.len() - 4; - Self { - kind: PhantomData, - buffer, + if actual_length != expected_length { + return Err(bytes); } - } - #[must_use] - pub fn finish(self, code: ErrorCode) -> EncodedError { - EncodedError { - code, - bytes: self.buffer.freeze(), - } + Ok(Self { code, bytes }) } } -impl ErrorBuffer { - #[must_use] - pub fn error() -> Self { - Self::new() - } -} +impl Display for NetworkError { + fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { + // First 4 bytes are always the length of the message + let message = &self.bytes[4..]; -impl ErrorBuffer { - #[must_use] - pub fn report() -> Self { - Self::new() + if let Ok(message) = core::str::from_utf8(message) { + Display::fmt(message, fmt) + } else { + Debug::fmt(&message, fmt) + } } } -impl ErrorBuffer { - #[must_use] - pub fn recovery() -> Self { - Self::new() +impl Error for NetworkError { + fn provide<'a>(&'a self, request: &mut core::error::Request<'a>) { + request.provide_value(self.code); } } -impl Buf for ErrorBuffer { - fn remaining(&self) -> usize { - self.buffer.remaining() - } - - fn chunk(&self) -> &[u8] { - self.buffer.chunk() - } - - fn advance(&mut self, cnt: usize) { - self.buffer.advance(cnt); - } - - // These methods are specialized in the underlying `Bytes` implementation, relay them as well - - fn copy_to_bytes(&mut self, len: usize) -> Bytes { - self.buffer.copy_to_bytes(len) - } -} +#[cfg(test)] +mod test { + use super::NetworkError; -#[expect( - unsafe_code, - reason = "delegating to the underlying `BytesMut` implementation" -)] -// SAFETY: we are delegating to the underlying `BytesMut` implementation -unsafe impl BufMut for ErrorBuffer { - fn remaining_mut(&self) -> usize { - self.buffer.remaining_mut() - } + #[derive(Debug, thiserror::Error)] + #[error("example message")] + struct ExampleError; - unsafe fn advance_mut(&mut self, cnt: usize) { - // SAFETY: This is safe, as we are delegating to the underlying `BytesMut` implementation - unsafe { - self.buffer.advance_mut(cnt); - } - } + #[expect( + clippy::big_endian_bytes, + reason = "numbers are always encoded in big-endian in our encoding scheme" + )] + #[test] + fn properly_encodes_length() { + let error = NetworkError::capture_error(&ExampleError); + let value = error.into_bytes(); - fn chunk_mut(&mut self) -> &mut bytes::buf::UninitSlice { - self.buffer.chunk_mut() + assert_eq!(value[0..4], 15_u32.to_be_bytes()); + assert_eq!(value[4..], *b"example message"); } - // These methods are specialized in the underlying `BytesMut` implementation, relay them as well + // if we encode and decode the error, we should get the same error back + #[test] + fn encode_decode() { + let error = NetworkError::capture_error(&ExampleError); - fn put(&mut self, src: B) - where - Self: Sized, - { - self.buffer.put(src); - } 
+ let code = error.code(); + let value = error.bytes(); - fn put_slice(&mut self, src: &[u8]) { - self.buffer.put_slice(src); - } + let decoded = + NetworkError::try_from_parts(code, value.clone()).expect("encode/decode should work"); - fn put_bytes(&mut self, val: u8, cnt: usize) { - self.buffer.put_bytes(val, cnt); + assert_eq!(decoded.code(), code); + assert_eq!(decoded.bytes(), value); } } diff --git a/libs/@local/harpc/codec/src/json.rs b/libs/@local/harpc/codec/src/json.rs index fa2c94b0be5..76496e93e2d 100644 --- a/libs/@local/harpc/codec/src/json.rs +++ b/libs/@local/harpc/codec/src/json.rs @@ -1,5 +1,4 @@ use core::{ - error::Error, pin::Pin, task::{Context, Poll, ready}, }; @@ -8,14 +7,9 @@ use bytes::{Buf, BufMut, Bytes, BytesMut}; use error_stack::{Report, ResultExt}; use futures_core::{Stream, TryStream}; use futures_util::stream::{self, StreamExt}; -use harpc_types::error_code::ErrorCode; use serde::de::DeserializeOwned; -use crate::{ - decode::{Decoder, ErrorDecoder}, - encode::{Encoder, ErrorEncoder}, - error::{EncodedError, ErrorBuffer, NetworkError}, -}; +use crate::{decode::Decoder, encode::Encoder}; #[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, thiserror::Error)] pub enum JsonError { @@ -128,12 +122,12 @@ where loop { let mut this = self.as_mut().project(); - // we use an option here to avoid repeated polling of the inner stream once it has + // We use an option here to avoid repeated polling of the inner stream once it has // returned `None`, as that would lead to potentially undefined behavior. let inner = this.inner.as_mut().as_pin_mut(); let Some(inner) = inner else { - // the underlying stream has already returned `None`, we now only flush the + // The underlying stream has already returned `None`, we now only flush the // remaining buffer. 
if let Some(value) = Self::poll_item(self.as_mut(), 0) { return Poll::Ready(Some(value.change_context(JsonError::Decode))); @@ -183,85 +177,6 @@ where } } -#[derive(Debug, serde::Serialize, serde::Deserialize)] -struct JsonErrorRepr { - message: String, - details: T, -} - -impl ErrorEncoder for JsonCodec { - fn encode_error(self, error: E) -> EncodedError - where - E: Error + serde::Serialize, - { - let code = error.code(); - - let buffer = ErrorBuffer::error(); - let mut writer = buffer.writer(); - - if let Err(error) = serde_json::to_writer(&mut writer, &JsonErrorRepr { - message: error.to_string(), - details: error, - }) { - let mut buffer = ErrorBuffer::recovery(); - let error = error.to_string(); - buffer.put(error.as_bytes()); - - return buffer.finish(ErrorCode::INTERNAL_SERVER_ERROR); - }; - - writer.into_inner().finish(code) - } - - fn encode_report(self, report: Report) -> EncodedError - where - C: error_stack::Context, - { - let buffer = ErrorBuffer::error(); - let mut writer = buffer.writer(); - - if let Err(error) = serde_json::to_writer(&mut writer, &report) { - let mut buffer = ErrorBuffer::recovery(); - let error = error.to_string(); - buffer.put(error.as_bytes()); - - return buffer.finish(ErrorCode::INTERNAL_SERVER_ERROR); - }; - - let code = report - .request_ref() - .next() - .copied() - .or_else(|| report.request_value().next()) - .unwrap_or(ErrorCode::INTERNAL_SERVER_ERROR); - - writer.into_inner().finish(code) - } -} - -impl ErrorDecoder for JsonCodec { - type Error = serde_json::Error; - type Recovery = Box; - - fn decode_error(self, bytes: Bytes) -> Result - where - E: serde::de::DeserializeOwned, - { - serde_json::from_slice::>(&bytes).map(|error| error.details) - } - - fn decode_report(self, _: Bytes) -> Result, Self::Error> - where - C: error_stack::Context, - { - unimplemented!("unable to deserialize reports") - } - - fn decode_recovery(self, bytes: Bytes) -> Self::Recovery { - Box::from(String::from_utf8_lossy(&bytes)) - } -} - #[cfg(test)] mod tests { use core::future::ready; @@ -437,7 +352,7 @@ mod tests { .await .expect("should have a value") .expect_err("should be an error"); - assert_eq!(error.to_string(), "underlying stream returned an error"); + assert_eq!(error.to_string(), "unable to decode JSON value"); assert!(decoder.next().await.is_none()); } diff --git a/libs/@local/harpc/codec/src/lib.rs b/libs/@local/harpc/codec/src/lib.rs index 3a100e173ab..2731eb74be1 100644 --- a/libs/@local/harpc/codec/src/lib.rs +++ b/libs/@local/harpc/codec/src/lib.rs @@ -8,7 +8,6 @@ use bytes::Buf; use futures_core::{Stream, TryStream}; -use self::{decode::ErrorDecoder, encode::ErrorEncoder}; use crate::{decode::Decoder, encode::Encoder}; pub mod decode; @@ -18,10 +17,8 @@ pub mod error; pub mod json; pub trait Codec: Encoder + Decoder {} -pub trait ErrorCodec: ErrorEncoder + ErrorDecoder {} impl Codec for T where T: Encoder + Decoder {} -impl ErrorCodec for T where T: ErrorEncoder + ErrorDecoder {} pub struct SplitCodec { pub encoder: E, diff --git a/libs/@local/harpc/net/Cargo.toml b/libs/@local/harpc/net/Cargo.toml index 20356ee51cc..86005f26775 100644 --- a/libs/@local/harpc/net/Cargo.toml +++ b/libs/@local/harpc/net/Cargo.toml @@ -51,6 +51,7 @@ tokio-util = { workspace = true, features = ["codec", "compat", "rt", "tracing"] tracing = { workspace = true } [dev-dependencies] +bytes = { workspace = true, features = ["serde"] } harpc-codec = { workspace = true, features = ["json"] } harpc-types = { workspace = true } harpc-wire-protocol = { workspace = true, features = 
["test-utils"] } diff --git a/libs/@local/harpc/net/src/session/server/connection/mod.rs b/libs/@local/harpc/net/src/session/server/connection/mod.rs index 2968e04f412..e14a6733087 100644 --- a/libs/@local/harpc/net/src/session/server/connection/mod.rs +++ b/libs/@local/harpc/net/src/session/server/connection/mod.rs @@ -7,7 +7,7 @@ use core::{error::Error, fmt::Debug, future}; use std::io; use futures::{FutureExt, Sink, Stream, StreamExt, stream}; -use harpc_codec::encode::ErrorEncoder; +use harpc_codec::error::NetworkError; use harpc_types::response_kind::ResponseKind; use harpc_wire_protocol::{ request::{Request, body::RequestBody, id::RequestId}, @@ -61,7 +61,7 @@ where } } -pub(crate) struct ConnectionTask { +pub(crate) struct ConnectionTask { pub peer: PeerId, pub session: SessionId, @@ -70,19 +70,19 @@ pub(crate) struct ConnectionTask { pub events: broadcast::Sender, pub config: SessionConfig, - pub encoder: E, + pub _permit: OwnedSemaphorePermit, } -impl ConnectionTask -where - E: ErrorEncoder + Clone + Send + Sync + 'static, -{ - async fn respond_error(&self, id: RequestId, error: T, tx: &mpsc::Sender) +impl ConnectionTask { + async fn respond_error(&self, id: RequestId, error: &E, tx: &mpsc::Sender) where - T: Error + serde::Serialize + Send + Sync, + E: Error + Sync, { - let (code, bytes) = self.encoder.clone().encode_error(error).into_parts(); + let error = NetworkError::capture_error(error); + + let code = error.code(); + let bytes = error.into_bytes(); let mut writer = ResponseWriter::new( WriterOptions { no_delay: false }, @@ -127,7 +127,7 @@ where Err(error) => { tracing::warn!("transaction limit reached, dropping transaction"); - self.respond_error(request_id, error, &tx).await; + self.respond_error(request_id, &error, &tx).await; return; } }; @@ -153,7 +153,7 @@ where // be processed, this is also known as the "graceful shutdown" phase. 
tracing::info!("supervisor has been dropped, dropping transaction"); - self.respond_error(request_id, ConnectionGracefulShutdownError, &tx) + self.respond_error(request_id, &ConnectionGracefulShutdownError, &tx) .await; return; } @@ -165,7 +165,7 @@ where self.transactions.release(request_id).await; - self.respond_error(request_id, InstanceTransactionLimitReachedError, &tx) + self.respond_error(request_id, &InstanceTransactionLimitReachedError, &tx) .await; return; } @@ -192,7 +192,7 @@ where } RequestBody::Frame(_) => { if let Err(error) = self.transactions.send(request).await { - self.respond_error(request_id, error, &tx).await; + self.respond_error(request_id, &error, &tx).await; } } } diff --git a/libs/@local/harpc/net/src/session/server/connection/test.rs b/libs/@local/harpc/net/src/session/server/connection/test.rs index 73888aeaefa..96197e7dd9f 100644 --- a/libs/@local/harpc/net/src/session/server/connection/test.rs +++ b/libs/@local/harpc/net/src/session/server/connection/test.rs @@ -3,9 +3,8 @@ use alloc::sync::Arc; use core::{assert_matches::assert_matches, num::NonZero, time::Duration}; use std::io; -use bytes::{Buf, Bytes}; +use bytes::Bytes; use futures::{StreamExt, prelude::sink::SinkExt}; -use harpc_codec::json::JsonCodec; use harpc_types::{error_code::ErrorCode, response_kind::ResponseKind}; use harpc_wire_protocol::{ flags::BitFlagsOp, @@ -101,7 +100,7 @@ impl Setup { output: output_tx, events: events_tx, config, - encoder: JsonCodec, + _permit: permit, }; @@ -488,12 +487,8 @@ async fn transaction_limit_reached_connection() { assert_eq!(response.header.request_id, mock_request_id(0x02)); assert!(response.header.flags.contains(ResponseFlag::EndOfResponse)); - let mut bytes = response.body.payload().as_bytes().clone(); - assert_eq!(*bytes.first().expect("should have a byte"), 0x01); - bytes.advance(1); - - let error = String::from_utf8(bytes.to_vec()).expect("should be utf8"); - insta::assert_snapshot!(error, @r###"{"message":"transaction limit per connection has been reached, the transaction has been dropped. The limit is 1","details":{"limit":1}}"###); + let bytes = response.body.payload().as_bytes().clone(); + insta::assert_debug_snapshot!(bytes, @r###"b"\0\0\0ctransaction limit per connection has been reached, the transaction has been dropped. 
The limit is 1""###); } #[tokio::test] @@ -530,12 +525,8 @@ async fn transaction_limit_reached_instance() { ); assert!(response.header.flags.contains(ResponseFlag::EndOfResponse)); - let mut bytes = response.body.payload().as_bytes().clone(); - assert_eq!(*bytes.first().expect("should have a byte"), 0x01); - bytes.advance(1); - - let error = String::from_utf8(bytes.to_vec()).expect("should be utf8"); - insta::assert_snapshot!(error, @r###"{"message":"transaction has been dropped, because the server is unable to process more transactions","details":null}"###); + let bytes = response.body.payload().as_bytes().clone(); + insta::assert_debug_snapshot!(bytes, @r###"b"\0\0\0Wtransaction has been dropped, because the server is unable to process more transactions""###); } #[tokio::test] @@ -880,12 +871,8 @@ async fn transaction_request_buffer_limit_reached() { let response = responses.pop().expect("should have a response"); assert!(response.header.flags.contains(ResponseFlag::EndOfResponse)); - let mut bytes = response.body.payload().as_bytes().clone(); - assert_eq!(*bytes.first().expect("should have a byte"), 0x01); - bytes.advance(1); - - let error = String::from_utf8(bytes.to_vec()).expect("should be utf8"); - insta::assert_snapshot!(error, @r###"{"message":"transaction has been dropped, because it is unable to receive more request packets","details":null}"###); + let bytes = response.body.payload().as_bytes().clone(); + insta::assert_debug_snapshot!(bytes, @r###"b"\0\0\0Rtransaction has been dropped, because it is unable to receive more request packets""###); assert_matches!( response.body, @@ -983,12 +970,8 @@ async fn transaction_send_output_closed() { }) ); - let mut bytes = response.body.payload().as_bytes().clone(); - assert_eq!(*bytes.first().expect("should have a byte"), 0x01); - bytes.advance(1); - - let error = String::from_utf8(bytes.to_vec()).expect("should be utf8"); - insta::assert_snapshot!(error, @r###"{"message":"The connection is in the graceful shutdown state and no longer accepts any new transactions","details":null}"###); + let bytes = response.body.payload().as_bytes().clone(); + insta::assert_debug_snapshot!(bytes, @r###"b"\0\0\0[The connection is in the graceful shutdown state and no longer accepts any new transactions""###); stream .send(Ok(make_request_frame(RequestFlag::EndOfRequest, "hello"))) diff --git a/libs/@local/harpc/net/src/session/server/mod.rs b/libs/@local/harpc/net/src/session/server/mod.rs index 626eff2afde..57b39b13e0e 100644 --- a/libs/@local/harpc/net/src/session/server/mod.rs +++ b/libs/@local/harpc/net/src/session/server/mod.rs @@ -14,7 +14,6 @@ use core::{ use error_stack::{Report, ResultExt}; use futures::{Stream, stream::FusedStream}; -use harpc_codec::encode::ErrorEncoder; use libp2p::Multiaddr; use tokio::sync::{Semaphore, broadcast, mpsc}; use tokio_util::task::TaskTracker; @@ -144,9 +143,8 @@ impl FusedStream for EventStream { /// /// The session layer is responsible for accepting incoming connections, and splitting them up into /// dedicated sessions, these sessions are then used to form transactions. 
-pub struct SessionLayer { +pub struct SessionLayer { config: SessionConfig, - encoder: E, events: broadcast::Sender, @@ -155,18 +153,15 @@ pub struct SessionLayer { tasks: TaskTracker, } -impl SessionLayer -where - E: ErrorEncoder + Clone + Send + Sync + 'static, -{ - pub fn new(config: SessionConfig, transport: TransportLayer, encoder: E) -> Self { +impl SessionLayer { + #[must_use] + pub fn new(config: SessionConfig, transport: TransportLayer) -> Self { let tasks = transport.tasks().clone(); let (events, _) = broadcast::channel(config.event_buffer_size.get()); Self { config, - encoder, events, @@ -220,7 +215,7 @@ where )), output, events: self.events.clone(), - encoder: self.encoder, + _transport: self.transport, }; diff --git a/libs/@local/harpc/net/src/session/server/task.rs b/libs/@local/harpc/net/src/session/server/task.rs index 1437a47e12a..0a3e23af39e 100644 --- a/libs/@local/harpc/net/src/session/server/task.rs +++ b/libs/@local/harpc/net/src/session/server/task.rs @@ -1,7 +1,6 @@ use alloc::sync::Arc; use futures::{FutureExt, StreamExt}; -use harpc_codec::encode::ErrorEncoder; use tokio::{ select, sync::{Semaphore, TryAcquireError, broadcast, mpsc}, @@ -20,7 +19,7 @@ use crate::{ }, }; -pub(crate) struct Task { +pub(crate) struct Task { pub id: SessionIdProducer, pub config: SessionConfig, @@ -29,16 +28,12 @@ pub(crate) struct Task { pub output: mpsc::Sender, pub events: broadcast::Sender, - pub encoder: E, // significant because of the Drop, if dropped this will stop the task automatically pub _transport: TransportLayer, } -impl Task -where - E: ErrorEncoder + Clone + Send + Sync + 'static, -{ +impl Task { #[expect( clippy::integer_division_remainder_used, reason = "required for select! macro" @@ -110,7 +105,7 @@ where transactions: TransactionCollection::new(self.config, cancel.clone()), output: self.output.clone(), events: self.events.clone(), - encoder: self.encoder.clone(), + _permit: permit, }; diff --git a/libs/@local/harpc/net/src/session/server/test.rs b/libs/@local/harpc/net/src/session/server/test.rs index 101013e9fa5..acd03b55ad8 100644 --- a/libs/@local/harpc/net/src/session/server/test.rs +++ b/libs/@local/harpc/net/src/session/server/test.rs @@ -9,7 +9,6 @@ use std::io::{self, ErrorKind}; use bytes::{Bytes, BytesMut}; use error_stack::{Report, ResultExt}; use futures::{SinkExt, Stream, StreamExt}; -use harpc_codec::json::JsonCodec; use harpc_types::{ procedure::{ProcedureDescriptor, ProcedureId}, service::{ServiceDescriptor, ServiceId}, @@ -97,7 +96,7 @@ async fn session_map( config: SessionConfig, address: Multiaddr, map_transport: impl FnOnce(&TransportLayer) -> T + Send, - map_layer: impl FnOnce(&SessionLayer) -> U + Send, + map_layer: impl FnOnce(&SessionLayer) -> U + Send, ) -> (ListenStream, T, U, impl Drop) where T: Send, @@ -107,7 +106,7 @@ where let transport_data = map_transport(&transport); - let layer = SessionLayer::new(config, transport, JsonCodec); + let layer = SessionLayer::new(config, transport); let layer_data = map_layer(&layer); diff --git a/libs/@local/harpc/net/src/session/server/transaction/mod.rs b/libs/@local/harpc/net/src/session/server/transaction/mod.rs index 505346d1c1e..2b228c64066 100644 --- a/libs/@local/harpc/net/src/session/server/transaction/mod.rs +++ b/libs/@local/harpc/net/src/session/server/transaction/mod.rs @@ -9,7 +9,7 @@ use core::{ use bytes::Bytes; use futures::{Sink, Stream, StreamExt, stream::FusedStream}; -use harpc_codec::error::EncodedError; +use harpc_codec::error::NetworkError; use harpc_types::{ 
procedure::ProcedureDescriptor, response_kind::ResponseKind, service::ServiceDescriptor, }; @@ -37,9 +37,9 @@ struct TransactionSendDelegateTask

{ config: SessionConfig, // TODO: consider switching to `tachyonix` crate for better performance (not yet tested) - // as well as more predictable buffering behavioud. `PollSender` is prone to just buffer + // as well as more predictable buffering behavior. `PollSender` is prone to just buffer // everything before sending, which might not be the best idea in this scenario. - rx: mpsc::Receiver>, + rx: mpsc::Receiver>, tx: mpsc::Sender, permit: Arc

, @@ -106,7 +106,8 @@ where } } Err(error) => { - let (code, bytes) = error.into_parts(); + let code = error.code(); + let bytes = error.into_bytes(); writer = ResponseWriter::new( WriterOptions { @@ -133,7 +134,7 @@ where pub(crate) struct TransactionTask

{ config: SessionConfig, - response_rx: mpsc::Receiver>, + response_rx: mpsc::Receiver>, response_tx: mpsc::Sender, permit: Arc

, @@ -213,7 +214,7 @@ pub struct Transaction { context: TransactionContext, request: tachyonix::Receiver, - response: mpsc::Sender>, + response: mpsc::Sender>, permit: Arc, } @@ -376,7 +377,7 @@ impl FusedStream for TransactionStream { } } -type SinkItem = Result; +type SinkItem = Result; pin_project_lite::pin_project! { #[must_use = "sinks do nothing unless polled"] diff --git a/libs/@local/harpc/net/src/session/server/transaction/test.rs b/libs/@local/harpc/net/src/session/server/transaction/test.rs index d6b97dea403..bf5faba1f94 100644 --- a/libs/@local/harpc/net/src/session/server/transaction/test.rs +++ b/libs/@local/harpc/net/src/session/server/transaction/test.rs @@ -1,8 +1,13 @@ -use alloc::sync::Arc; -use core::{num::NonZero, time::Duration}; +use alloc::{borrow::Cow, sync::Arc}; +use core::{ + error::Error, + fmt::{self, Display}, + num::NonZero, + time::Duration, +}; -use bytes::{BufMut, Bytes}; -use harpc_codec::error::{EncodedError, ErrorBuffer}; +use bytes::Bytes; +use harpc_codec::error::NetworkError; use harpc_types::{ error_code::ErrorCode, procedure::{ProcedureDescriptor, ProcedureId}, @@ -74,7 +79,7 @@ impl ServerTransactionPermit for StaticTransactionPermit { fn setup_send( no_delay: bool, ) -> ( - mpsc::Sender>, + mpsc::Sender>, mpsc::Receiver, JoinHandle<()>, ) { @@ -431,15 +436,34 @@ async fn send_delay_empty_bytes() { }); } +#[derive(Debug, Clone, PartialEq, Eq)] +struct ExampleError { + code: ErrorCode, + message: Cow<'static, str>, +} + +impl Display for ExampleError { + fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { + Display::fmt(&self.message, fmt) + } +} + +impl Error for ExampleError { + fn provide<'a>(&'a self, request: &mut core::error::Request<'a>) { + request.provide_value(self.code); + } +} + #[tokio::test] async fn send_delay_error_immediate() { let (bytes_tx, mut response_rx, handle) = setup_send(false); let code = ErrorCode::new(NonZero::new(0xFF_FF).expect("infallible")); - let mut error = ErrorBuffer::error(); - error.put_slice(&[0, 1, 2, 3, 4, 5, 6, 7]); - let error = error.finish(code); + let error = NetworkError::capture_error(&ExampleError { + message: Cow::Borrowed("immediate delay"), + code, + }); // send an error message let payload = error.bytes().clone(); @@ -482,11 +506,12 @@ async fn send_delay_error_delayed() { let code = ErrorCode::new(NonZero::new(0xFF_FF).expect("infallible")); - let mut buffer = ErrorBuffer::error(); - buffer.put_slice(&[0, 1, 2, 3, 4, 5, 6, 7]); + let error = NetworkError::capture_error(&ExampleError { + message: Cow::Borrowed("delayed error"), + code, + }); // send an error message - let error = buffer.finish(code); let payload_err = error.bytes().clone(); bytes_tx @@ -528,10 +553,11 @@ async fn send_delay_error_multiple() { // fully buffered. 
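The `ExampleError` introduced for these tests publishes its `ErrorCode` through the nightly `core::error::Request` machinery (`Error::provide`), which is presumably how `NetworkError::capture_error` recovers the code without downcasting. A sketch of that round trip, assuming the nightly `error_generic_member_access` feature this code already relies on; `error_code_is_provided` is an illustrative test, not part of the diff:

#[test]
fn error_code_is_provided() {
    let error = ExampleError {
        message: Cow::Borrowed("example"),
        code: ErrorCode::new(NonZero::new(0xFF_FF).expect("infallible")),
    };

    // `provide` publishes the code; `request_value` pulls it back out by type.
    assert_eq!(
        core::error::request_value::<ErrorCode>(&error),
        Some(error.code)
    );
}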
let code = ErrorCode::new(NonZero::new(0xFF_FF).expect("infallible")); - let mut buffer = ErrorBuffer::error(); - buffer.put_slice(&vec![1; Payload::MAX_SIZE + 8]); + let error = NetworkError::capture_error(&ExampleError { + message: Cow::Owned("1".repeat(Payload::MAX_SIZE + 8)), + code, + }); - let error = buffer.finish(code); let payload_err = error.bytes().clone(); for _ in 0..4 { @@ -577,10 +603,11 @@ async fn send_delay_error_interspersed() { let code = ErrorCode::new(NonZero::new(0xFF_FF).expect("infallible")); - let mut buffer = ErrorBuffer::error(); - buffer.put_slice(&vec![1; Payload::MAX_SIZE + 8]); + let error = NetworkError::capture_error(&ExampleError { + message: Cow::Owned("1".repeat(Payload::MAX_SIZE + 8)), + code, + }); - let error = buffer.finish(code); let payload_err = error.bytes().clone(); for _ in 0..4 { @@ -636,10 +663,11 @@ async fn send_delay_error_interspersed_small() { let code = ErrorCode::new(NonZero::new(0xFF_FF).expect("infallible")); - let mut buffer = ErrorBuffer::error(); - buffer.put_slice(&[1; 8]); + let error = NetworkError::capture_error(&ExampleError { + message: Cow::Borrowed("interspersed delayed errors"), + code, + }); - let error = buffer.finish(code); let payload_err = error.bytes().clone(); for _ in 0..4 { @@ -680,10 +708,11 @@ async fn send_delay_error_split_large() { // if we have a large payload we split it into multiple frames let code = ErrorCode::new(NonZero::new(0xFF_FF).expect("infallible")); - let mut buffer = ErrorBuffer::error(); - buffer.put_slice(&vec![1; Payload::MAX_SIZE + 8]); + let error = NetworkError::capture_error(&ExampleError { + message: Cow::Owned("1".repeat(Payload::MAX_SIZE + 8)), + code, + }); - let error = buffer.finish(code); let payload_err = error.bytes().clone(); bytes_tx @@ -973,10 +1002,11 @@ async fn send_no_delay_error_immediate() { let code = ErrorCode::new(NonZero::new(0xFF_FF).expect("infallible")); - let mut buffer = ErrorBuffer::error(); - buffer.put_slice(b"error"); + let error = NetworkError::capture_error(&ExampleError { + message: Cow::Borrowed("error"), + code, + }); - let error = buffer.finish(code); let payload_err = error.bytes().clone(); bytes_tx @@ -1015,9 +1045,11 @@ async fn send_no_delay_error_delayed() { let code = ErrorCode::new(NonZero::new(0xFF_FF).expect("infallible")); - let mut buffer = ErrorBuffer::error(); - buffer.put_slice(b"error"); - let error = buffer.finish(code); + let error = NetworkError::capture_error(&ExampleError { + message: Cow::Borrowed("error"), + code, + }); + let payload_err = error.bytes().clone(); bytes_tx @@ -1067,9 +1099,11 @@ async fn send_no_delay_error_multiple() { let code = ErrorCode::new(NonZero::new(0xFF_FF).expect("infallible")); - let mut buffer = ErrorBuffer::error(); - buffer.put_slice(b"error"); - let error = buffer.finish(code); + let error = NetworkError::capture_error(&ExampleError { + message: Cow::Borrowed("error"), + code, + }); + let payload_err = error.bytes().clone(); bytes_tx @@ -1122,9 +1156,11 @@ async fn send_no_delay_error_interspersed() { let payload_ok = Bytes::from_static(b"ok"); let code = ErrorCode::new(NonZero::new(0xFF_FF).expect("infallible")); - let mut buffer = ErrorBuffer::error(); - buffer.put_slice(b"error"); - let error = buffer.finish(code); + let error = NetworkError::capture_error(&ExampleError { + message: Cow::Borrowed("error"), + code, + }); + let payload_err = error.bytes().clone(); for _ in 0..3 { @@ -1176,9 +1212,11 @@ async fn send_no_delay_error_split_large() { let code = 
ErrorCode::new(NonZero::new(0xFF_FF).expect("infallible")); - let mut buffer = ErrorBuffer::error(); - buffer.put_slice(&vec![0; Payload::MAX_SIZE + 8]); - let error = buffer.finish(code); + let error = NetworkError::capture_error(&ExampleError { + message: Cow::Owned("0".repeat(Payload::MAX_SIZE + 8)), + code, + }); + let payload_err = error.bytes().clone(); bytes_tx diff --git a/libs/@local/harpc/net/src/session/test.rs b/libs/@local/harpc/net/src/session/test.rs index 6f07e3b5eab..62260b41f0c 100644 --- a/libs/@local/harpc/net/src/session/test.rs +++ b/libs/@local/harpc/net/src/session/test.rs @@ -4,7 +4,6 @@ use core::{iter, net::Ipv4Addr, time::Duration}; use bytes::Bytes; use error_stack::{Report, ResultExt}; use futures::{prelude::stream, sink::SinkExt, stream::StreamExt}; -use harpc_codec::json::JsonCodec; use harpc_types::{ procedure::{ProcedureDescriptor, ProcedureId}, service::{ServiceDescriptor, ServiceId}, @@ -103,13 +102,13 @@ fn server( transport_config: TransportConfig, session_config: server::SessionConfig, transport: impl Transport, -) -> (server::SessionLayer, impl Drop) { +) -> (server::SessionLayer, impl Drop) { let cancel = CancellationToken::new(); let transport_layer = TransportLayer::start(transport_config, transport, cancel.clone()) .expect("failed to start transport layer"); - let session_layer = server::SessionLayer::new(session_config, transport_layer, JsonCodec); + let session_layer = server::SessionLayer::new(session_config, transport_layer); (session_layer, cancel.drop_guard()) } @@ -146,12 +145,6 @@ where .await .expect("should be able to listen on TCP"); - // Give the swarm some time to acquire the external address - // This is necessary for CI, as otherwise the tests are a bit flaky. - // TODO: Implement waiting for server to be ready - // see https://linear.app/hash/issue/H-2837 - tokio::time::sleep(tokio::time::Duration::from_millis(50)).await; - let address = server_ipc .external_addresses() .await @@ -309,10 +302,6 @@ async fn echo_concurrent( .await .expect("should be able to listen on TCP"); - // Give the swarm some time to acquire the external address - // This is necessary for CI, as otherwise the tests are a bit flaky. - // TODO: `listen_on` should wait until the transport layer has acquired said address. 
- // see https://linear.app/hash/issue/H-2837 tokio::time::sleep(tokio::time::Duration::from_millis(50)).await; let address = server_ipc diff --git a/libs/@local/harpc/net/src/transport/ipc.rs b/libs/@local/harpc/net/src/transport/ipc.rs index 35cf438e276..419389a8612 100644 --- a/libs/@local/harpc/net/src/transport/ipc.rs +++ b/libs/@local/harpc/net/src/transport/ipc.rs @@ -1,5 +1,5 @@ use error_stack::{Result, ResultExt}; -use libp2p::{Multiaddr, PeerId, core::transport::ListenerId}; +use libp2p::{Multiaddr, PeerId}; use libp2p_stream::Control; use tokio::sync::{mpsc, oneshot}; @@ -39,7 +39,7 @@ impl TransportLayerIpc { .change_context(IpcError::Swarm) } - pub(super) async fn listen_on(&self, address: Multiaddr) -> Result { + pub(super) async fn listen_on(&self, address: Multiaddr) -> Result { let (tx, rx) = oneshot::channel(); self.tx diff --git a/libs/@local/harpc/net/src/transport/mod.rs b/libs/@local/harpc/net/src/transport/mod.rs index 831c528e9fe..305bdd5f065 100644 --- a/libs/@local/harpc/net/src/transport/mod.rs +++ b/libs/@local/harpc/net/src/transport/mod.rs @@ -14,8 +14,7 @@ use alloc::sync::Arc; use error_stack::{Result, ResultExt}; use futures::stream::StreamExt; use libp2p::{ - Multiaddr, PeerId, StreamProtocol, core::transport::ListenerId, metrics, - tcp::tokio::Transport as TokioTcpTransport, + Multiaddr, PeerId, StreamProtocol, metrics, tcp::tokio::Transport as TokioTcpTransport, }; use libp2p_core::transport::MemoryTransport; use tokio::io::BufStream; @@ -183,7 +182,7 @@ impl TransportLayer { /// /// If the background task cannot be reached, crashes while processing the request, or the /// multiaddr is not supported by the transport. - pub async fn listen_on(&self, address: Multiaddr) -> Result { + pub async fn listen_on(&self, address: Multiaddr) -> Result { self.ipc .listen_on(address) .await diff --git a/libs/@local/harpc/net/src/transport/task.rs b/libs/@local/harpc/net/src/transport/task.rs index 046d058e750..74e2f5b5170 100644 --- a/libs/@local/harpc/net/src/transport/task.rs +++ b/libs/@local/harpc/net/src/transport/task.rs @@ -32,8 +32,8 @@ use super::{ }; type SenderPeerId = oneshot::Sender>; -type SenderListenerId = - oneshot::Sender>>; +type SenderListenOn = + oneshot::Sender>>; pub(crate) enum Command { IssueControl { @@ -45,7 +45,7 @@ pub(crate) enum Command { }, ListenOn { address: Multiaddr, - tx: SenderListenerId, + tx: SenderListenOn, }, ExternalAddresses { tx: oneshot::Sender>, @@ -63,9 +63,11 @@ pub(crate) struct TransportTask { ipc: TransportLayerIpc, peers: HashMap, - peers_waiting: HashMap>, peers_address_lookup: HashMap, + + listeners: HashMap>, + listeners_waiting: HashMap>, } impl TransportTask { @@ -108,10 +110,10 @@ impl TransportTask { // 3-10% slower than using yamux. // // Another alternative would be using QUIC, this has a massive performance penalty - // of ~50% as well as is unable to be used with js as `nodejs` does not support QUIC - // yet. + // of ~50% as well as is unable to be used with JavaScript as `nodejs` does not + // support QUIC yet. // - // As a compromise we're using `yamux 0.12` in `WindowUpdateMode::OnReceive` mode + // As a compromise we're using `yamux 0.12` in `WindowUpdateMode::OnReceive` mode // with a buffer that is 16x higher than the default (as default) with a value of // 16MiB. 
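With this change `listen_on` resolves to the first concrete listen address instead of a `ListenerId`, so callers can bind to an unspecified address and read back what the swarm actually acquired. A hedged usage sketch (inside an async context, assuming a started `TransportLayer` named `layer`, as in the tests added further below):

// Bind to any interface on an OS-assigned port; the returned Multiaddr is
// the concrete address (real IP, non-zero port) once the listener is ready.
let address: Multiaddr = layer
    .listen_on("/ip4/0.0.0.0/tcp/0".parse().expect("valid multiaddr"))
    .await
    .expect("transport should be able to listen");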
let yamux: yamux::Config = config.yamux.into(); @@ -158,6 +160,9 @@ impl TransportTask { peers: HashMap::new(), peers_waiting: HashMap::new(), peers_address_lookup: HashMap::new(), + + listeners: HashMap::new(), + listeners_waiting: HashMap::new(), }) } @@ -206,6 +211,34 @@ } } + fn handle_listen_on(&mut self, address: Multiaddr, tx: SenderListenOn) { + tracing::debug!(%address, "starting to listen on address"); + + match self.swarm.listen_on(address) { + Ok(id) => { + if let Some(addresses) = self.listeners.get(&id) { + let address = addresses[0].clone(); + + Self::send_ipc_response(tx, Ok(address)); + } else { + let entry = self.listeners_waiting.entry(id); + + match entry { + Entry::Occupied(mut entry) => { + entry.get_mut().push(tx); + } + Entry::Vacant(entry) => { + entry.insert(vec![tx]); + } + } + } + } + Err(error) => { + Self::send_ipc_response(tx, Err(error)); + } + } + } + fn handle_command(&mut self, command: Command) { match command { Command::IssueControl { tx } => { @@ -213,12 +246,8 @@ Self::send_ipc_response(tx, control); } - Command::LookupPeer { address: addr, tx } => self.handle_dial(addr, tx), - Command::ListenOn { address, tx } => { - let result = self.swarm.listen_on(address); - - Self::send_ipc_response(tx, result); - } + Command::LookupPeer { address, tx } => self.handle_dial(address, tx), + Command::ListenOn { address, tx } => self.handle_listen_on(address, tx), Command::ExternalAddresses { tx } => { let addresses = self.swarm.listeners().cloned().collect(); @@ -269,6 +298,25 @@ } } + fn handle_new_listen_addr(&mut self, address: Multiaddr, listener_id: ListenerId) { + tracing::info!(%address, "listening on address"); + + if let Some(senders) = self.listeners_waiting.remove(&listener_id) { + for tx in senders { + Self::send_ipc_response(tx, Ok(address.clone())); + } + } + + match self.listeners.entry(listener_id) { + Entry::Occupied(mut entry) => { + entry.get_mut().push(address); + } + Entry::Vacant(entry) => { + entry.insert(vec![address]); + } + } + } + fn handle_event(&mut self, event: SwarmEvent) { tracing::debug!(?event, "received swarm event"); @@ -283,8 +331,11 @@ } match event { - SwarmEvent::NewListenAddr { address, ..
} => { - tracing::info!(%address, "listening on address"); + SwarmEvent::NewListenAddr { + address, + listener_id, + } => { + self.handle_new_listen_addr(address, listener_id); } SwarmEvent::NewExternalAddrOfPeer { peer_id, address } => { self.handle_new_external_address_of_peer(peer_id, address); diff --git a/libs/@local/harpc/net/src/transport/test.rs b/libs/@local/harpc/net/src/transport/test.rs index cd2a11a7b3d..7eae16a1afd 100644 --- a/libs/@local/harpc/net/src/transport/test.rs +++ b/libs/@local/harpc/net/src/transport/test.rs @@ -22,8 +22,10 @@ use harpc_wire_protocol::{ }; use libp2p::{ Multiaddr, TransportError, core::transport::MemoryTransport, multiaddr, swarm::DialError, + tcp::tokio::Transport, }; use libp2p_stream::OpenStreamError; +use multiaddr::multiaddr; use tokio_util::sync::CancellationToken; use super::{TransportConfig, TransportLayer}; @@ -80,6 +82,17 @@ pub(crate) fn layer() -> (TransportLayer, impl Drop) { (layer, cancel.drop_guard()) } +pub(crate) fn layer_tcp() -> (TransportLayer, impl Drop) { + let transport = Transport::default(); + let config = TransportConfig::default(); + let cancel = CancellationToken::new(); + + let layer = TransportLayer::start(config, transport, cancel.clone()) + .expect("should be able to create swarm"); + + (layer, cancel.drop_guard()) +} + #[tokio::test] async fn lookup_peer() { let (server, _guard_server) = layer(); @@ -479,3 +492,36 @@ async fn listen_on() { .await .expect("memory transport should be able to listen on memory address"); } + +#[tokio::test] +async fn listen_on_duplicate_address() { + let (layer, _guard) = layer(); + + let address = memory_address(); + + layer + .listen_on(address.clone()) + .await + .expect("memory transport should be able to listen on memory address"); + + let _error = layer + .listen_on(address) + .await + .expect_err("should not be able to listen on the same address twice"); +} + +#[tokio::test] +async fn listen_on_tcp_unspecified() { + let (layer, _guard) = layer_tcp(); + + let address = multiaddr![Ip4(Ipv4Addr::UNSPECIFIED), Tcp(0_u16)]; + + let chosen = layer + .listen_on(address) + .await + .expect("tcp transport should be able to listen on an unspecified TCP address"); + + let protocol: Vec<_> = chosen.iter().collect(); + assert_matches!(protocol[0], multiaddr::Protocol::Ip4(addr) if addr != Ipv4Addr::UNSPECIFIED); + assert_matches!(protocol[1], multiaddr::Protocol::Tcp(port) if port != 0); +} diff --git a/libs/@local/harpc/server/examples/account.rs b/libs/@local/harpc/server/examples/account.rs index 33d71fbb9fc..1825200aff5 100644 --- a/libs/@local/harpc/server/examples/account.rs +++ b/libs/@local/harpc/server/examples/account.rs @@ -52,7 +52,6 @@ use harpc_types::{ version::Version, }; use multiaddr::multiaddr; -use serde::de::IgnoredAny; use tower::{ServiceBuilder, ServiceExt as _}; use uuid::Uuid; @@ -310,15 +309,14 @@ where } async fn server() { - let server = - Server::new(ServerConfig::default(), JsonCodec).expect("should be able to start service"); + let server = Server::new(ServerConfig::default()).expect("should be able to start service"); let router = RouterBuilder::new::<()>(JsonCodec) - .with_builder(|builder, codec| { + .with_builder(|builder| { builder .layer(BoxedResponseLayer::new()) - .layer(HandleReportLayer::new(*codec)) - .layer(HandleBodyReportLayer::new(*codec)) + .layer(HandleReportLayer::new()) + .layer(HandleBodyReportLayer::new()) }) .register(AccountServerDelegate { service: AccountServiceImpl, @@ -351,9 +349,7 @@ async fn client() { let connection = ServiceBuilder::new()
.layer(MapRequestBodyLayer::new(|req| ready(StreamBody::new(req)))) - .layer(MapResponseBodyLayer::new(|res| { - ready(PackError::<_, _, IgnoredAny>::new(res, JsonCodec)) - })) + .layer(MapResponseBodyLayer::new(|res| ready(PackError::new(res)))) .service(connection); for _ in 0..16 { diff --git a/libs/@local/harpc/server/src/lib.rs b/libs/@local/harpc/server/src/lib.rs index 6056e740be0..d212ff94a04 100644 --- a/libs/@local/harpc/server/src/lib.rs +++ b/libs/@local/harpc/server/src/lib.rs @@ -17,7 +17,6 @@ use core::{ task::{Context, Poll}, }; use error_stack::{Report, ResultExt}; use futures::{Stream, StreamExt, stream::FusedStream}; -use harpc_codec::encode::ErrorEncoder; use harpc_net::{ session::server::{EventStream, ListenStream, SessionConfig, SessionLayer, Transaction}, transport::{TransportConfig, TransportLayer}, @@ -69,28 +68,25 @@ impl FusedStream for TransactionStream { } } -pub struct Server { - session: SessionLayer, +pub struct Server { + session: SessionLayer, guard: DropGuard, } -impl Server -where - E: ErrorEncoder + Clone + Send + Sync + 'static, -{ +impl Server { /// Creates a new server instance with the given configuration. /// /// # Errors /// /// This function will return an error if: /// - The transport layer fails to start. - pub fn new(config: ServerConfig, encoder: E) -> Result> { + pub fn new(config: ServerConfig) -> Result> { let token = CancellationToken::new(); let transport = TransportLayer::tcp(config.transport, token.clone()) .change_context(ServerError::StartTransportLayer)?; - let session = SessionLayer::new(config.session, transport, encoder); + let session = SessionLayer::new(config.session, transport); Ok(Self { session, @@ -99,6 +95,7 @@ where } /// Returns the event stream for this server. + #[must_use] pub fn events(&self) -> EventStream { self.session.events() } diff --git a/libs/@local/harpc/server/src/route.rs b/libs/@local/harpc/server/src/route.rs index 8e714c1812d..fad3ed05216 100644 --- a/libs/@local/harpc/server/src/route.rs +++ b/libs/@local/harpc/server/src/route.rs @@ -3,7 +3,7 @@ use core::future::ready; use bytes::Bytes; use frunk::{HCons, HNil}; use futures::FutureExt; -use harpc_codec::encode::ErrorEncoder; +use harpc_codec::error::NetworkError; use harpc_tower::{ body::{Body, controlled::Controlled, full::Full}, request::Request, @@ -67,22 +67,21 @@ impl Handler { /// /// [`Router`]: crate::router::Router /// [`Steer`]: https://docs.rs/tower/latest/tower/steer/struct.Steer.html -pub trait Route { type ResponseBody: Body, Error = !>; type Future: Future>; - fn call(&self, request: Request, codec: C) -> Self::Future + fn call(&self, request: Request) -> Self::Future where - ReqBody: Body + Send + Sync, - C: ErrorEncoder + Send + Sync; + ReqBody: Body + Send + Sync; } // The clone requirement might seem odd here, but is the same as in axum's router implementation.
// see: https://docs.rs/axum/latest/src/axum/routing/route.rs.html#45 -impl Route for HCons, Tail> +impl Route for HCons, Tail> where Svc: Service, Response = Response, Error = !> + Clone, - Tail: Route, + Tail: Route, ResBody: Body, Error = !>, { // cannot use `impl Future` here, as it would require additional constraints on the associated @@ -99,10 +98,9 @@ where >; type ResponseBody = harpc_tower::either::Either; - fn call(&self, request: Request, codec: C) -> Self::Future + fn call(&self, request: Request) -> Self::Future where ReqBody: Body + Send + Sync, - C: ErrorEncoder + Send + Sync, { let requirement = self.head.version.into_requirement(); @@ -119,21 +117,20 @@ where } else { futures::future::Either::Right( self.tail - .call(request, codec) + .call(request) .map(|response| response.map_body(harpc_tower::either::Either::Right)), ) } } } -impl Route for HNil { +impl Route for HNil { type Future = core::future::Ready>; type ResponseBody = Controlled>; - fn call(&self, request: Request, codec: C) -> Self::Future + fn call(&self, request: Request) -> Self::Future where ReqBody: Body + Send + Sync, - C: ErrorEncoder + Send + Sync, { let error = NotFound { service: request.service().id, @@ -142,7 +139,7 @@ impl Route for HNil { let session = request.session(); - let error = codec.encode_error(error); + let error = NetworkError::capture_error(&error); ready(Response::from_error(Parts::new(session), error)) } diff --git a/libs/@local/harpc/server/src/router.rs b/libs/@local/harpc/server/src/router.rs index 0dd60ded0ab..d438143bf97 100644 --- a/libs/@local/harpc/server/src/router.rs +++ b/libs/@local/harpc/server/src/router.rs @@ -6,7 +6,6 @@ use core::{ use frunk::{HCons, HNil}; use futures::{FutureExt, Stream}; -use harpc_codec::encode::ErrorEncoder; use harpc_net::session::server::SessionEvent; use harpc_service::delegate::ServiceDelegate; use harpc_tower::{ @@ -62,11 +61,11 @@ type ServiceHandler = Handler< RouterBuilder { pub fn with_builder( self, - builder: impl FnOnce(ServiceBuilder, &C) -> ServiceBuilder, + builder: impl FnOnce(ServiceBuilder) -> ServiceBuilder, ) -> RouterBuilder { RouterBuilder { routes: self.routes, - builder: builder(self.builder, &self.codec), + builder: builder(self.builder), session: self.session, codec: self.codec, cancel: self.cancel, @@ -128,28 +127,24 @@ impl RouterBuilder { .with_cancellation_token(self.cancel.child_token()) } - pub fn build(self) -> Router + pub fn build(self) -> Router where R: Send + Sync + 'static, - C: ErrorEncoder + Clone + Send + Sync + 'static, { Router { routes: Arc::new(self.routes), - codec: self.codec, } } } -pub struct RouterService { +pub struct RouterService { routes: Arc, - codec: C, } -impl Service> for RouterService +impl Service> for RouterService where - R: Route, + R: Route, ReqBody: Body + Send + Sync, - C: ErrorEncoder + Clone + Send + Sync + 'static, { type Error = !; type Response = Response; @@ -161,24 +156,18 @@ where } fn call(&mut self, req: Request) -> Self::Future { - let codec = self.codec.clone(); - - self.routes.call(req, codec).map(Ok) + self.routes.call(req).map(Ok) } } -pub struct Router { +pub struct Router { routes: Arc, - codec: C, } -impl Service<()> for Router -where - C: Clone, -{ +impl Service<()> for Router { type Error = !; type Future = Ready>; - type Response = PackService, C>; + type Response = PackService>; fn poll_ready(&mut self, _: &mut Context<'_>) -> Poll> { Poll::Ready(Ok(())) @@ -186,11 +175,10 @@ where fn call(&mut self, (): ()) -> Self::Future { let routes = 
Arc::clone(&self.routes); - let codec = self.codec.clone(); - let layer = PackLayer::new(codec.clone()); + let layer = PackLayer::new(); - future::ready(Ok(layer.layer(RouterService { routes, codec }))) + future::ready(Ok(layer.layer(RouterService { routes }))) } } diff --git a/libs/@local/harpc/server/src/serve.rs b/libs/@local/harpc/server/src/serve.rs index 38f0dc92950..d6936269da9 100644 --- a/libs/@local/harpc/server/src/serve.rs +++ b/libs/@local/harpc/server/src/serve.rs @@ -2,7 +2,7 @@ use core::future::poll_fn; use bytes::Bytes; use futures::{Stream, StreamExt}; -use harpc_codec::error::EncodedError; +use harpc_codec::error::NetworkError; use harpc_net::session::server::Transaction; use harpc_tower::{ body::server::request::RequestBody, @@ -20,7 +20,7 @@ where M: MakeService< (), Request, - Response: futures::Stream> + Send, + Response: futures::Stream> + Send, Error = !, Service: tower::Service, Future: Send> + Send + 'static, MakeError = !, diff --git a/libs/@local/harpc/tower/Cargo.toml b/libs/@local/harpc/tower/Cargo.toml index 246e3370b0c..718aacc6821 100644 --- a/libs/@local/harpc/tower/Cargo.toml +++ b/libs/@local/harpc/tower/Cargo.toml @@ -39,7 +39,7 @@ tokio-util = { workspace = true, features = ["time"] } tower-test = { workspace = true } tokio-test = { workspace = true } harpc-codec = { workspace = true, features = ["json"] } -insta.workspace = true +insta = { workspace = true } serde = { workspace = true, features = ["unstable"] } [lints] diff --git a/libs/@local/harpc/tower/src/body/encode_error.rs b/libs/@local/harpc/tower/src/body/encode_error.rs index 8c37b0cfe28..0de13570488 100644 --- a/libs/@local/harpc/tower/src/body/encode_error.rs +++ b/libs/@local/harpc/tower/src/body/encode_error.rs @@ -5,7 +5,7 @@ use core::{ }; use bytes::Bytes; -use harpc_codec::encode::ErrorEncoder; +use harpc_codec::error::NetworkError; use harpc_types::response_kind::ResponseKind; use super::{Body, full::Full}; @@ -51,26 +51,23 @@ pin_project_lite::pin_project! { /// /// While this method ensures safe error handling, it means that any data in the inner body /// after an error will not be processed. 
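The size-hint assertions updated below follow directly from the length-prefixed layout: the expected value drops from the old JSON encodings (40 and 57 bytes) to the 4-byte prefix plus the message bytes. A worked check of the new constant (an illustration, not code from the crate):

// 4-byte length prefix + len("test error") == 14, matching the updated
// SizeHint::with_exact(14) assertions in encode_error.rs and encode_report.rs.
assert_eq!(4 + "test error".len(), 14);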
- pub struct EncodeError { + pub struct EncodeError { #[pin] state: State, - encoder: E, } } -impl EncodeError { - pub const fn new(inner: B, encoder: E) -> Self { +impl EncodeError { + pub const fn new(inner: B) -> Self { Self { state: State::Inner { inner }, - encoder, } } } -impl Body for EncodeError +impl Body for EncodeError where - B: Body, - E: ErrorEncoder + Clone, + B: Body, { type Control = Either; type Data = Either; @@ -94,7 +91,7 @@ where Some(Ok(frame.map_data(Either::Left).map_control(Either::Left))) } Some(Err(error)) => { - let error = this.encoder.clone().encode_error(error); + let error = NetworkError::capture_error(&error); let (code, data) = error.into_parts(); let inner = Controlled::new(ResponseKind::Err(code), Full::new(data)); @@ -139,7 +136,6 @@ mod test { use core::assert_matches::assert_matches; use bytes::Bytes; - use harpc_codec::json::JsonCodec; use harpc_types::{error_code::ErrorCode, response_kind::ResponseKind}; use insta::assert_debug_snapshot; @@ -158,7 +154,7 @@ mod test { #[test] fn encode_error() { let inner = StaticBody::::new([Err(TestError)]); - let mut body = EncodeError::new(inner, JsonCodec); + let mut body = EncodeError::new(inner); let frame = poll_frame_unpin(&mut body) .expect("should be ready") @@ -180,7 +176,7 @@ mod test { insta::assert_debug_snapshot!(frame, @r###" Data( Right( - b"\x01{\"message\":\"test error\",\"details\":null}", + b"\0\0\0\ntest error", ), ) "###); @@ -193,7 +189,7 @@ mod test { fn passthrough_data() { let inner = StaticBody::::new([Ok(Frame::new_data(Bytes::from_static(b"test data")))]); - let mut body = EncodeError::new(inner, JsonCodec); + let mut body = EncodeError::new(inner); let frame = poll_frame_unpin(&mut body) .expect("should be ready") @@ -209,7 +205,7 @@ mod test { #[test] fn passthrough_control() { let inner = StaticBody::::new([Ok(Frame::new_control(2_i32))]); - let mut body = EncodeError::new(inner, JsonCodec); + let mut body = EncodeError::new(inner); let frame = poll_frame_unpin(&mut body) .expect("should be ready") @@ -227,7 +223,7 @@ mod test { const DATA: &[u8] = b"test data"; let inner = StaticBody::::new([Ok(Frame::new_data(Bytes::from_static(DATA)))]); - let mut body = EncodeError::new(inner, JsonCodec); + let mut body = EncodeError::new(inner); assert_eq!(body.size_hint(), SizeHint::with_exact(DATA.len() as u64)); @@ -244,7 +240,7 @@ mod test { Err(TestError), Ok(Frame::new_data(Bytes::from_static(DATA))), ]); - let mut body = EncodeError::new(inner, JsonCodec); + let mut body = EncodeError::new(inner); // no error yet, so the size hint should be the size of the data assert_eq!(body.size_hint(), SizeHint::with_exact(DATA.len() as u64)); @@ -252,8 +248,8 @@ mod test { let _frame = poll_frame_unpin(&mut body); // we now have an error, therefore the size hint should include the error - // `40` is taken from the serialization in the unit test above - assert_eq!(body.size_hint(), SizeHint::with_exact(40)); + // `14` is taken from the serialization in the unit test above + assert_eq!(body.size_hint(), SizeHint::with_exact(14)); let _frame = poll_frame_unpin(&mut body); @@ -265,7 +261,7 @@ mod test { fn passthrough_state() { let inner = StaticBody::::new([Ok(Frame::new_data(Bytes::from_static(b"test data")))]); - let mut body = EncodeError::new(inner, JsonCodec); + let mut body = EncodeError::new(inner); assert_eq!(body.state(), None); @@ -277,7 +273,7 @@ mod test { #[test] fn state_on_error() { let inner = StaticBody::::new([Err(TestError)]); - let mut body = EncodeError::new(inner, JsonCodec); + 
let mut body = EncodeError::new(inner); assert_eq!(body.state(), None); @@ -296,7 +292,7 @@ mod test { Err(TestError), Ok(Frame::new_data(Bytes::from_static(b"test data"))), ]); - let mut body = EncodeError::new(inner, JsonCodec); + let mut body = EncodeError::new(inner); assert_eq!(body.state(), None); @@ -313,7 +309,7 @@ mod test { #[test] fn size_hint_and_state_with_empty_body() { let inner = StaticBody::::new([]); - let body = EncodeError::new(inner, JsonCodec); + let body = EncodeError::new(inner); assert_eq!(body.size_hint(), SizeHint::with_exact(0)); assert_eq!(body.state(), Some(BodyState::Complete)); @@ -328,7 +324,7 @@ mod test { Ok(Frame::new_control(())), Ok(Frame::new_data(Bytes::from_static(b"data3"))), ]); - let mut body = EncodeError::new(inner, JsonCodec); + let mut body = EncodeError::new(inner); let frame = poll_frame_unpin(&mut body) .expect("should be ready") @@ -354,7 +350,7 @@ mod test { assert_debug_snapshot!(frame, @r###" Data( Right( - b"\x01{\"message\":\"test error\",\"details\":null}", + b"\0\0\0\ntest error", ), ) "###); diff --git a/libs/@local/harpc/tower/src/body/encode_report.rs b/libs/@local/harpc/tower/src/body/encode_report.rs index 5d18de6af91..17bf13f2977 100644 --- a/libs/@local/harpc/tower/src/body/encode_report.rs +++ b/libs/@local/harpc/tower/src/body/encode_report.rs @@ -1,11 +1,12 @@ use core::{ + error::Error, pin::Pin, task::{Context, Poll, ready}, }; use bytes::Bytes; use error_stack::Report; -use harpc_codec::encode::ErrorEncoder; +use harpc_codec::error::NetworkError; use harpc_types::response_kind::ResponseKind; use super::{Body, full::Full}; @@ -52,27 +53,24 @@ pin_project_lite::pin_project! { /// While this method ensures safe error handling, it means that any data in the inner body /// after an error will not be processed. // We need a separate type for this because of the `Error` bound, `Report` could implement `Error`, in that case we would have a conflicting implementation. 
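As the comment above notes, a separate body type is needed because `Report` does not implement `Error` (and a blanket implementation over both would conflict), so reports get their own capture path. A sketch of the two entry points as this diff uses them, assuming the `TestError` defined in these test modules; `capture_both_ways` is an illustrative helper, not part of the change:

use error_stack::Report;
use harpc_codec::error::NetworkError;

fn capture_both_ways() -> (NetworkError, NetworkError) {
    // Plain `Error` values go through `capture_error` ...
    let from_error = NetworkError::capture_error(&TestError);

    // ... while `error_stack` reports go through `capture_report`.
    let report: Report<TestError> = Report::new(TestError);
    let from_report = NetworkError::capture_report(&report);

    (from_error, from_report)
}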
- pub struct EncodeReport { + pub struct EncodeReport { #[pin] state: State, - encoder: E, } } -impl EncodeReport { - pub const fn new(inner: B, encoder: E) -> Self { +impl EncodeReport { + pub const fn new(inner: B) -> Self { Self { state: State::Inner { inner }, - encoder, } } } -impl Body for EncodeReport +impl Body for EncodeReport where B: Body>, - E: ErrorEncoder + Clone, - C: error_stack::Context, + C: Error + Send + Sync + 'static, { type Control = Either; type Data = Either; @@ -96,7 +94,7 @@ where Some(Ok(frame.map_data(Either::Left).map_control(Either::Left))) } Some(Err(error)) => { - let error = this.encoder.clone().encode_report(error); + let error = NetworkError::capture_report(&error); let (code, data) = error.into_parts(); let inner = Controlled::new(ResponseKind::Err(code), Full::new(data)); @@ -142,7 +140,6 @@ mod test { use bytes::Bytes; use error_stack::Report; - use harpc_codec::json::JsonCodec; use harpc_types::{error_code::ErrorCode, response_kind::ResponseKind}; use insta::assert_debug_snapshot; @@ -161,7 +158,7 @@ mod test { #[test] fn encode_error() { let inner = StaticBody::>::new([Err(Report::new(TestError))]); - let mut body = EncodeReport::new(inner, JsonCodec); + let mut body = EncodeReport::new(inner); let frame = poll_frame_unpin(&mut body) .expect("should be ready") @@ -183,7 +180,7 @@ mod test { insta::assert_debug_snapshot!(frame, @r###" Data( Right( - b"\x01[{\"context\":\"test error\",\"attachments\":[],\"sources\":[]}]", + b"\0\0\0\ntest error", ), ) "###); @@ -197,7 +194,7 @@ mod test { let inner = StaticBody::>::new([Ok(Frame::new_data( Bytes::from_static(b"test data"), ))]); - let mut body = EncodeReport::new(inner, JsonCodec); + let mut body = EncodeReport::new(inner); let frame = poll_frame_unpin(&mut body) .expect("should be ready") @@ -214,7 +211,7 @@ mod test { fn passthrough_control() { let inner = StaticBody::>::new([Ok(Frame::new_control(2_i32))]); - let mut body = EncodeReport::new(inner, JsonCodec); + let mut body = EncodeReport::new(inner); let frame = poll_frame_unpin(&mut body) .expect("should be ready") @@ -234,7 +231,7 @@ mod test { let inner = StaticBody::>::new([Ok(Frame::new_data( Bytes::from_static(DATA), ))]); - let mut body = EncodeReport::new(inner, JsonCodec); + let mut body = EncodeReport::new(inner); assert_eq!(body.size_hint(), SizeHint::with_exact(DATA.len() as u64)); @@ -251,7 +248,7 @@ mod test { Err(Report::new(TestError)), Ok(Frame::new_data(Bytes::from_static(DATA))), ]); - let mut body = EncodeReport::new(inner, JsonCodec); + let mut body = EncodeReport::new(inner); // no error yet, so the size hint should be the size of the data assert_eq!(body.size_hint(), SizeHint::with_exact(DATA.len() as u64)); @@ -259,8 +256,8 @@ mod test { let _frame = poll_frame_unpin(&mut body); // we now have an error, therefore the size hint should include the error - // `40` is taken from the serialization in the unit test above - assert_eq!(body.size_hint(), SizeHint::with_exact(57)); + // `14` is taken from the serialization in the unit test above + assert_eq!(body.size_hint(), SizeHint::with_exact(14)); let _frame = poll_frame_unpin(&mut body); @@ -273,7 +270,7 @@ mod test { let inner = StaticBody::>::new([Ok(Frame::new_data( Bytes::from_static(b"test data"), ))]); - let mut body = EncodeReport::new(inner, JsonCodec); + let mut body = EncodeReport::new(inner); assert_eq!(body.state(), None); @@ -285,7 +282,7 @@ mod test { #[test] fn state_on_error() { let inner = StaticBody::>::new([Err(Report::new(TestError))]); - let mut body = 
EncodeReport::new(inner, JsonCodec); + let mut body = EncodeReport::new(inner); assert_eq!(body.state(), None); @@ -304,7 +301,7 @@ mod test { Err(Report::new(TestError)), Ok(Frame::new_data(Bytes::from_static(b"test data"))), ]); - let mut body = EncodeReport::new(inner, JsonCodec); + let mut body = EncodeReport::new(inner); assert_eq!(body.state(), None); @@ -321,7 +318,7 @@ mod test { #[test] fn size_hint_and_state_with_empty_body() { let inner = StaticBody::>::new([]); - let body = EncodeReport::new(inner, JsonCodec); + let body = EncodeReport::new(inner); assert_eq!(body.size_hint(), SizeHint::with_exact(0)); assert_eq!(body.state(), Some(BodyState::Complete)); @@ -336,7 +333,7 @@ mod test { Ok(Frame::new_control(())), Ok(Frame::new_data(Bytes::from_static(b"data3"))), ]); - let mut body = EncodeReport::new(inner, JsonCodec); + let mut body = EncodeReport::new(inner); let frame = poll_frame_unpin(&mut body) .expect("should be ready") @@ -362,7 +359,7 @@ mod test { assert_debug_snapshot!(frame, @r###" Data( Right( - b"\x01[{\"context\":\"test error\",\"attachments\":[],\"sources\":[]}]", + b"\0\0\0\ntest error", ), ) "###); diff --git a/libs/@local/harpc/tower/src/layer/body_error.rs b/libs/@local/harpc/tower/src/layer/body_error.rs index 4a0cee272f0..9d10f5f9229 100644 --- a/libs/@local/harpc/tower/src/layer/body_error.rs +++ b/libs/@local/harpc/tower/src/layer/body_error.rs @@ -4,7 +4,6 @@ use core::{ }; use futures::TryFutureExt; -use harpc_codec::encode::ErrorEncoder; use harpc_types::response_kind::ResponseKind; use tower::{Layer, Service}; @@ -15,48 +14,44 @@ use crate::{ }; #[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord)] -pub struct HandleBodyErrorLayer { - encoder: E, +pub struct HandleBodyErrorLayer { + _private: (), } -impl HandleBodyErrorLayer { - pub const fn new(encoder: E) -> Self { - Self { encoder } +impl HandleBodyErrorLayer { + #[expect( + clippy::new_without_default, + reason = "layer construction should be explicit and we might add fields in the future" + )] + #[must_use] + pub const fn new() -> Self { + Self { _private: () } } } -impl Layer for HandleBodyErrorLayer -where - E: Clone, -{ - type Service = HandleBodyErrorService; +impl Layer for HandleBodyErrorLayer { + type Service = HandleBodyErrorService; fn layer(&self, inner: S) -> Self::Service { - HandleBodyErrorService { - inner, - encoder: self.encoder.clone(), - } + HandleBodyErrorService { inner } } } #[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord)] -pub struct HandleBodyErrorService { +pub struct HandleBodyErrorService { inner: S, - - encoder: E, } -impl Service> for HandleBodyErrorService +impl Service> for HandleBodyErrorService where S: Service, Response = Response> + Clone + Send, - E: ErrorEncoder + Clone, ReqBody: Body, // the extra bounds here are not strictly required, but they help to make the error messages // more expressive during compilation ResBody: Body, Error: Error + serde::Serialize>, { type Error = S::Error; - type Response = Response>; + type Response = Response>; type Future = impl Future>; @@ -65,10 +60,8 @@ where } fn call(&mut self, req: Request) -> Self::Future { - let encoder = self.encoder.clone(); - self.inner .call(req) - .map_ok(|res| res.map_body(|body| EncodeError::new(body, encoder))) + .map_ok(|res| res.map_body(|body| EncodeError::new(body))) } } diff --git a/libs/@local/harpc/tower/src/layer/body_report.rs b/libs/@local/harpc/tower/src/layer/body_report.rs index 32c2fa644d5..8476ed0edc4 100644 --- 
a/libs/@local/harpc/tower/src/layer/body_report.rs +++ b/libs/@local/harpc/tower/src/layer/body_report.rs @@ -2,7 +2,6 @@ use core::task::{Context, Poll}; use error_stack::Report; use futures::TryFutureExt; -use harpc_codec::encode::ErrorEncoder; use harpc_types::response_kind::ResponseKind; use tower::{Layer, Service}; @@ -13,49 +12,45 @@ use crate::{ }; #[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord)] -pub struct HandleBodyReportLayer { - encoder: E, +pub struct HandleBodyReportLayer { + _private: (), } -impl HandleBodyReportLayer { - pub const fn new(encoder: E) -> Self { - Self { encoder } +impl HandleBodyReportLayer { + #[expect( + clippy::new_without_default, + reason = "layer construction should be explicit and we might add fields in the future" + )] + #[must_use] + pub const fn new() -> Self { + Self { _private: () } } } -impl Layer for HandleBodyReportLayer -where - E: Clone, -{ - type Service = HandleBodyErrorService; +impl Layer for HandleBodyReportLayer { + type Service = HandleBodyErrorService; fn layer(&self, inner: S) -> Self::Service { - HandleBodyErrorService { - inner, - encoder: self.encoder.clone(), - } + HandleBodyErrorService { inner } } } #[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord)] -pub struct HandleBodyErrorService { +pub struct HandleBodyErrorService { inner: S, - - encoder: E, } -impl Service> for HandleBodyErrorService +impl Service> for HandleBodyErrorService where S: Service, Response = Response> + Clone + Send, - E: ErrorEncoder + Clone, ReqBody: Body, - // the extra bounds here are not strictly required, but they help to make the error messages + // The extra bounds here are not strictly required, but they help to make the error messages // more expressive during compilation ResBody: Body, Error = Report>, C: error_stack::Context, { type Error = S::Error; - type Response = Response>; + type Response = Response>; type Future = impl Future>; @@ -64,10 +59,8 @@ where } fn call(&mut self, req: Request) -> Self::Future { - let encoder = self.encoder.clone(); - self.inner .call(req) - .map_ok(|res| res.map_body(|body| EncodeReport::new(body, encoder))) + .map_ok(|res| res.map_body(|body| EncodeReport::new(body))) } } diff --git a/libs/@local/harpc/tower/src/layer/decode_error.rs b/libs/@local/harpc/tower/src/layer/decode_error.rs index c0f3d25d211..f630e48615b 100644 --- a/libs/@local/harpc/tower/src/layer/decode_error.rs +++ b/libs/@local/harpc/tower/src/layer/decode_error.rs @@ -1,61 +1,50 @@ use core::{ - marker::PhantomData, mem, task::{Context, Poll}, }; -use harpc_codec::decode::ErrorDecoder; use harpc_types::response_kind::ResponseKind; use tower::{Layer, Service}; use crate::{body::Body, net::pack_error::PackError, request::Request, response::Response}; #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct DecodeErrorLayer { - codec: C, - _marker: PhantomData *const E>, +pub struct DecodeErrorLayer { + _private: (), } -impl DecodeErrorLayer { - pub const fn new(codec: C) -> DecodeErrorLayer { - DecodeErrorLayer { - codec, - _marker: PhantomData, - } +impl DecodeErrorLayer { + #[expect( + clippy::new_without_default, + reason = "layer construction should be explicit and we might add fields in the future" + )] + #[must_use] + pub const fn new() -> Self { + Self { _private: () } } } -impl Layer for DecodeErrorLayer -where - C: Clone, -{ - type Service = DecodeErrorService; +impl Layer for DecodeErrorLayer { + type Service = DecodeErrorService; fn layer(&self, inner: S) -> Self::Service { - DecodeErrorService { - inner, - 
codec: self.codec.clone(), - _marker: PhantomData, - } + DecodeErrorService { inner } } } #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct DecodeErrorService { +pub struct DecodeErrorService { inner: S, - codec: C, - _marker: PhantomData *const E>, } -// takes a stream of `Body` and turns it into `Result` -impl Service> for DecodeErrorService +// Takes a stream of `Body` and turns it into `Result` +impl Service> for DecodeErrorService where S: Service, Response = Response> + Clone, ResBody: Body>, - C: ErrorDecoder + Clone, { type Error = S::Error; - type Response = Response>; + type Response = Response>; type Future = impl Future>; @@ -64,8 +53,6 @@ where } fn call(&mut self, req: Request) -> Self::Future { - let codec = self.codec.clone(); - // see: https://docs.rs/tower/latest/tower/trait.Service.html#be-careful-when-cloning-inner-services let clone = self.inner.clone(); let mut inner = mem::replace(&mut self.inner, clone); @@ -74,7 +61,7 @@ where let response = inner.call(req).await?; let (parts, body) = response.into_parts(); - let body = PackError::<_, _, E>::new(body, codec); + let body = PackError::new(body); let response = Response::from_parts(parts, body); Ok(response) diff --git a/libs/@local/harpc/tower/src/layer/error.rs b/libs/@local/harpc/tower/src/layer/error.rs index 3eb5ffc2270..7b2805154c4 100644 --- a/libs/@local/harpc/tower/src/layer/error.rs +++ b/libs/@local/harpc/tower/src/layer/error.rs @@ -4,7 +4,7 @@ use core::{ }; use bytes::Bytes; -use harpc_codec::encode::ErrorEncoder; +use harpc_codec::error::NetworkError; use harpc_types::response_kind::ResponseKind; use tower::{Layer, Service, ServiceExt}; @@ -17,41 +17,37 @@ use crate::{ }; #[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord)] -pub struct HandleErrorLayer { - encoder: E, +pub struct HandleErrorLayer { + _private: (), } -impl HandleErrorLayer { - pub const fn new(encoder: E) -> Self { - Self { encoder } +impl HandleErrorLayer { + #[expect( + clippy::new_without_default, + reason = "layer construction should be explicit and we might add fields in the future" + )] + #[must_use] + pub const fn new() -> Self { + Self { _private: () } } } -impl Layer for HandleErrorLayer -where - E: Clone, -{ - type Service = HandleErrorService; +impl Layer for HandleErrorLayer { + type Service = HandleErrorService; fn layer(&self, inner: S) -> Self::Service { - HandleErrorService { - inner, - encoder: self.encoder.clone(), - } + HandleErrorService { inner } } } -pub struct HandleErrorService { +pub struct HandleErrorService { inner: S, - - encoder: E, } -impl Service> for HandleErrorService +impl Service> for HandleErrorService where S: Service, Response = Response> + Clone + Send, - S::Error: Error + serde::Serialize, - E: ErrorEncoder + Clone, + S::Error: Error, ReqBody: Body, ResBody: Body>, { @@ -68,8 +64,6 @@ where } fn call(&mut self, req: Request) -> Self::Future { - let encoder = self.encoder.clone(); - let clone = self.inner.clone(); let inner = core::mem::replace(&mut self.inner, clone); @@ -79,7 +73,7 @@ where match inner.oneshot(req).await { Ok(response) => Ok(response.map_body(Either::Left)), Err(error) => { - let error = encoder.encode_error(error); + let error = NetworkError::capture_error(&error); Ok(Response::from_error( Parts { @@ -102,8 +96,7 @@ pub(crate) mod test { fmt::{self, Debug, Display}, }; - use bytes::{Buf, Bytes}; - use harpc_codec::json::JsonCodec; + use bytes::Bytes; use harpc_net::test_utils::mock_session_id; use harpc_types::{ error_code::ErrorCode, @@ -112,7 +105,6 @@ 
pub(crate) mod test { service::{ServiceDescriptor, ServiceId}, version::Version, }; - use insta::assert_snapshot; use tokio_test::{assert_pending, assert_ready}; use tower::{Layer, ServiceExt}; use tower_test::mock::spawn_with; @@ -224,7 +216,7 @@ pub(crate) mod test { let (mut service, mut handle) = spawn_with(|mock| { let mock = mock.map_err(BoxedError::from); - HandleErrorLayer::new(JsonCodec).layer(mock) + HandleErrorLayer::new().layer(mock) }); assert_pending!(handle.poll_request()); @@ -258,18 +250,12 @@ pub(crate) mod test { assert_eq!(control, ResponseKind::Err(ErrorCode::INTERNAL_SERVER_ERROR)); let Ok(frame) = body.frame().await.expect("frame should be present"); - let mut data = frame + let data = frame .into_data() .expect("should be data frame") .into_inner(); - let &first = data.first().expect("should be present"); - assert_eq!(first, 0x01); - data.advance(1); - - let output = String::from_utf8(data.to_vec()).expect("should be utf-8"); - - assert_snapshot!(output, @r###"{"message":"generic error","details":"generic error"}"###); + insta::assert_debug_snapshot!(data, @r###"b"\0\0\0\rgeneric error""###); } #[tokio::test] @@ -277,7 +263,7 @@ pub(crate) mod test { let (mut service, mut handle) = spawn_with(|mock| { let mock = mock.map_err(BoxedError::from); - HandleErrorLayer::new(JsonCodec).layer(mock) + HandleErrorLayer::new().layer(mock) }); assert_pending!(handle.poll_request()); diff --git a/libs/@local/harpc/tower/src/layer/report.rs b/libs/@local/harpc/tower/src/layer/report.rs index 4b282c9893f..0606f82f693 100644 --- a/libs/@local/harpc/tower/src/layer/report.rs +++ b/libs/@local/harpc/tower/src/layer/report.rs @@ -1,8 +1,11 @@ -use core::task::{Context, Poll}; +use core::{ + error::Error, + task::{Context, Poll}, +}; use bytes::Bytes; use error_stack::Report; -use harpc_codec::encode::ErrorEncoder; +use harpc_codec::error::NetworkError; use harpc_types::response_kind::ResponseKind; use tower::{Layer, Service, ServiceExt}; @@ -15,42 +18,38 @@ use crate::{ }; #[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord)] -pub struct HandleReportLayer { - encoder: E, +pub struct HandleReportLayer { + _private: (), } -impl HandleReportLayer { - pub const fn new(encoder: E) -> Self { - Self { encoder } +impl HandleReportLayer { + #[expect( + clippy::new_without_default, + reason = "layer construction should be explicit and we might add fields in the future" + )] + #[must_use] + pub const fn new() -> Self { + Self { _private: () } } } -impl Layer for HandleReportLayer -where - E: Clone, -{ - type Service = HandleReportService; +impl Layer for HandleReportLayer { + type Service = HandleReportService; fn layer(&self, inner: S) -> Self::Service { - HandleReportService { - inner, - encoder: self.encoder.clone(), - } + HandleReportService { inner } } } #[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord)] -pub struct HandleReportService { +pub struct HandleReportService { inner: S, - - encoder: E, } -impl Service> for HandleReportService +impl Service> for HandleReportService where S: Service, Error = Report, Response = Response> + Clone + Send, - E: ErrorEncoder + Clone, - C: error_stack::Context, + C: Error + Send + Sync + 'static, ReqBody: Body, ResBody: Body>, { @@ -67,8 +66,6 @@ where } fn call(&mut self, req: Request) -> Self::Future { - let encoder = self.encoder.clone(); - let clone = self.inner.clone(); let inner = core::mem::replace(&mut self.inner, clone); @@ -78,7 +75,7 @@ where match inner.oneshot(req).await { Ok(response) => 
Ok(response.map_body(Either::Left)), Err(report) => { - let error = encoder.encode_report(report); + let error = NetworkError::capture_report(&report); Ok(Response::from_error( Parts { @@ -96,12 +93,9 @@ where #[cfg(test)] mod test { - - use bytes::{Buf, Bytes}; + use bytes::Bytes; use error_stack::Report; - use harpc_codec::json::JsonCodec; use harpc_types::{error_code::ErrorCode, response_kind::ResponseKind}; - use insta::assert_snapshot; use tokio_test::{assert_pending, assert_ready}; use tower::{Layer, Service, ServiceExt}; use tower_test::mock::{self, spawn_with}; @@ -140,7 +134,7 @@ mod test { spawn_with(|service| { let service = service.map_err(|error| Report::from(BoxedError::from(error))); - HandleReportLayer::new(JsonCodec).layer(service) + HandleReportLayer::new().layer(service) }) } @@ -183,17 +177,12 @@ mod test { ); let Ok(frame) = body.frame().await.expect("frame should be present"); - let mut data = frame + let data = frame .into_data() .expect("should be data frame") .into_inner(); - let &first = data.first().expect("should be present"); - assert_eq!(first, 0x01); - data.advance(1); - - let output = String::from_utf8(data.to_vec()).expect("should be valid utf8"); - assert_snapshot!(output, @r###"[{"context":"generic error","attachments":[],"sources":[{"context":"generic error","attachments":[],"sources":[]}]}]"###); + insta::assert_debug_snapshot!(data, @r###"b"\0\0\0\rgeneric error""###); } #[tokio::test] diff --git a/libs/@local/harpc/tower/src/net/pack.rs b/libs/@local/harpc/tower/src/net/pack.rs index 3f6cf24afc5..cc0c17e9121 100644 --- a/libs/@local/harpc/tower/src/net/pack.rs +++ b/libs/@local/harpc/tower/src/net/pack.rs @@ -7,7 +7,7 @@ use core::{ use bytes::{Buf, BufMut, Bytes, BytesMut}; use futures::{FutureExt, Stream}; -use harpc_codec::{encode::ErrorEncoder, error::EncodedError}; +use harpc_codec::error::NetworkError; use harpc_types::{error_code::ErrorCode, response_kind::ResponseKind}; use tower::{Layer, Service}; @@ -17,25 +17,15 @@ use crate::{ response::Response, }; -#[derive( - Debug, - Copy, - Clone, - PartialEq, - Eq, - PartialOrd, - Ord, - Hash, - derive_more::Display, - serde::Serialize, - serde::Deserialize, -)] -#[display("invalid error tag")] -struct InvalidTagError; - -impl Error for InvalidTagError { +#[derive(Debug, Clone, PartialEq, Eq, derive_more::Display)] +#[display("incomplete transaction error returned, message: {bytes:?}")] +struct DecodeTransactionError { + bytes: Bytes, +} + +impl Error for DecodeTransactionError { fn provide<'a>(&'a self, request: &mut core::error::Request<'a>) { - request.provide_value(ErrorCode::PACK_INVALID_ERROR_TAG); + request.provide_value(ErrorCode::PARTIAL_TRANSACTION_ERROR); } } @@ -45,27 +35,28 @@ struct PartialTransactionError { } impl PartialTransactionError { - fn finish(self, encoder: impl ErrorEncoder) -> EncodedError { - EncodedError::new(self.code, self.bytes.freeze()) - .map_or_else(|| encoder.encode_error(InvalidTagError), |error| error) + fn finish(self) -> NetworkError { + NetworkError::try_from_parts(self.code, self.bytes.freeze()).unwrap_or_else(|error| { + let error = DecodeTransactionError { bytes: error }; + + NetworkError::capture_error(&error) + }) } } pin_project_lite::pin_project! 
{ - pub struct Pack { + pub struct Pack { #[pin] inner: B, - encoder: E, error: Option, exhausted: bool, } } -impl Pack { - pub const fn new(inner: B, encoder: E) -> Self { +impl Pack { + pub const fn new(inner: B) -> Self { Self { inner, - encoder, error: None, exhausted: false, } @@ -76,15 +67,14 @@ impl Pack { } } -impl Pack +impl Pack where B: Body, Error = !>, - E: ErrorEncoder + Clone, { fn poll( self: Pin<&mut Self>, cx: &mut Context<'_>, - ) -> ControlFlow>>> { + ) -> ControlFlow>>> { let this = self.project(); let Poll::Ready(next) = this.inner.poll_frame(cx) else { // simple propagation @@ -96,10 +86,8 @@ where let error = this.error.take(); *this.exhausted = true; - let encoder = this.encoder.clone(); - ControlFlow::Break(Poll::Ready( - error.map(|error| error.finish(encoder)).map(Err), + error.map(PartialTransactionError::finish).map(Err), )) } Some(Ok(Frame::Data(data))) => { @@ -118,7 +106,6 @@ where } Some(Ok(Frame::Control(control))) => { let kind = *control.as_ref(); - let encoder = this.encoder.clone(); match kind { ResponseKind::Err(code) => { @@ -132,9 +119,7 @@ where .map_or_else( || ControlFlow::Continue(()), |active| { - ControlFlow::Break(Poll::Ready(Some(Err( - active.finish(encoder) - )))) + ControlFlow::Break(Poll::Ready(Some(Err(active.finish())))) }, ) } @@ -143,9 +128,7 @@ where // if we wouldn't do that we would concatenate valid values to the error this.error.take().map_or_else( || ControlFlow::Continue(()), - |error| { - ControlFlow::Break(Poll::Ready(Some(Err(error.finish(encoder))))) - }, + |error| ControlFlow::Break(Poll::Ready(Some(Err(error.finish())))), ) } } @@ -154,12 +137,11 @@ where } } -impl Stream for Pack +impl Stream for Pack where B: Body, Error = !>, - E: ErrorEncoder + Clone, { - type Item = Result; + type Item = Result; fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { if self.exhausted { @@ -174,42 +156,39 @@ where } } -pub struct PackLayer { - encoder: C, +pub struct PackLayer { + _private: (), } -impl PackLayer { - pub const fn new(encoder: E) -> Self { - Self { encoder } +impl PackLayer { + #[expect( + clippy::new_without_default, + reason = "layer construction should be explicit and we might add fields in the future" + )] + #[must_use] + pub const fn new() -> Self { + Self { _private: () } } } -impl Layer for PackLayer -where - E: Clone, -{ - type Service = PackService; +impl Layer for PackLayer { + type Service = PackService; fn layer(&self, inner: S) -> Self::Service { - PackService { - inner, - encoder: self.encoder.clone(), - } + PackService { inner } } } -pub struct PackService { +pub struct PackService { inner: S, - encoder: E, } -impl Service> for PackService +impl Service> for PackService where S: Service, Response = Response>, - C: ErrorEncoder + Clone, { type Error = S::Error; - type Response = Pack; + type Response = Pack; type Future = impl Future>; @@ -218,26 +197,41 @@ where } fn call(&mut self, req: Request) -> Self::Future { - let encoder = self.encoder.clone(); - self.inner .call(req) - .map(|result| result.map(|response| Pack::new(response.into_body(), encoder))) + .map(|result| result.map(|response| Pack::new(response.into_body()))) } } #[cfg(test)] mod test { - use bytes::{BufMut, Bytes}; + use alloc::borrow::Cow; + use core::error::Error; + + use bytes::Bytes; use futures::{StreamExt, stream}; - use harpc_codec::{encode::ErrorEncoder, error::ErrorBuffer, json::JsonCodec}; + use harpc_codec::error::NetworkError; use harpc_types::{error_code::ErrorCode, response_kind::ResponseKind}; + use 
super::DecodeTransactionError; use crate::{ body::{Frame, stream::StreamBody}, - net::pack::{InvalidTagError, Pack}, + net::pack::Pack, }; + #[derive(Debug, Clone, PartialEq, Eq, derive_more::Display)] + #[display("{message}")] + struct ExampleError { + message: Cow<'static, str>, + code: ErrorCode, + } + + impl Error for ExampleError { + fn provide<'a>(&'a self, request: &mut core::error::Request<'a>) { + request.provide_value(self.code); + } + } + #[tokio::test] async fn trailing_error() { let iter = stream::iter([ @@ -246,15 +240,18 @@ mod test { Ok(Frame::Control(ResponseKind::Err( ErrorCode::INTERNAL_SERVER_ERROR, ))), - Ok(Frame::Data(Bytes::from_static(b"\x01world" as &[_]))), + Ok(Frame::Data(Bytes::from_static( + b"\x00\x00\x00\x05world" as &[_], + ))), ]); - let pack = Pack::new(StreamBody::new(iter), JsonCodec); + let pack = Pack::new(StreamBody::new(iter)); let values = pack.collect::>().await; - let mut buffer = ErrorBuffer::error(); - buffer.put_slice(b"world"); - let error = buffer.finish(ErrorCode::INTERNAL_SERVER_ERROR); + let error = NetworkError::capture_error(&ExampleError { + message: Cow::Borrowed("world"), + code: ErrorCode::INTERNAL_SERVER_ERROR, + }); assert_eq!(values, [ Ok(Bytes::from_static(b"hello" as &[_])), @@ -263,19 +260,45 @@ mod test { } #[tokio::test] - async fn invalid_error_tag() { + async fn invalid_error_too_short() { let iter = stream::iter([ Result::<_, !>::Ok(Frame::Control(ResponseKind::Err( ErrorCode::INTERNAL_SERVER_ERROR, ))), - Ok(Frame::Data(Bytes::from_static(b"hello " as &[_]))), + Ok(Frame::Data(Bytes::from_static( + b"\x00\x00\x00\xFFhello " as &[_], + ))), + Ok(Frame::Data(Bytes::from_static(b"world" as &[_]))), + ]); + + let pack = Pack::new(StreamBody::new(iter)); + let values = pack.collect::>().await; + + let error = NetworkError::capture_error(&DecodeTransactionError { + bytes: Bytes::from_static(b"\x00\x00\x00\xFFhello world" as &[_]), + }); + + assert_eq!(values, [Err(error)]); + } + + #[tokio::test] + async fn invalid_error_too_long() { + let iter = stream::iter([ + Result::<_, !>::Ok(Frame::Control(ResponseKind::Err( + ErrorCode::INTERNAL_SERVER_ERROR, + ))), + Ok(Frame::Data(Bytes::from_static( + b"\x00\x00\x00\x05hello " as &[_], + ))), Ok(Frame::Data(Bytes::from_static(b"world" as &[_]))), ]); - let pack = Pack::new(StreamBody::new(iter), JsonCodec); + let pack = Pack::new(StreamBody::new(iter)); let values = pack.collect::>().await; - let error = JsonCodec.encode_error(InvalidTagError); + let error = NetworkError::capture_error(&DecodeTransactionError { + bytes: Bytes::from_static(b"\x00\x00\x00\x05hello world" as &[_]), + }); assert_eq!(values, [Err(error)]); } @@ -286,16 +309,19 @@ mod test { Result::<_, !>::Ok(Frame::Control(ResponseKind::Err( ErrorCode::INTERNAL_SERVER_ERROR, ))), - Ok(Frame::Data(Bytes::from_static(b"\x01hello " as &[_]))), + Ok(Frame::Data(Bytes::from_static( + b"\x00\x00\x00\x0Bhello " as &[_], + ))), Ok(Frame::Data(Bytes::from_static(b"world" as &[_]))), ]); - let pack = Pack::new(StreamBody::new(iter), JsonCodec); + let pack = Pack::new(StreamBody::new(iter)); let values = pack.collect::>().await; - let mut buffer = ErrorBuffer::error(); - buffer.put_slice(b"hello world"); - let error = buffer.finish(ErrorCode::INTERNAL_SERVER_ERROR); + let error = NetworkError::capture_error(&ExampleError { + message: Cow::Borrowed("hello world"), + code: ErrorCode::INTERNAL_SERVER_ERROR, + }); assert_eq!(values, [Err(error)]); } @@ -306,7 +332,7 @@ mod test { Bytes::from_static(b"hello" as &[_]), ))]); - let pack 
= Pack::new(StreamBody::new(iter), JsonCodec); + let pack = Pack::new(StreamBody::new(iter)); let values = pack.collect::>().await; assert_eq!(values, [Ok(Bytes::from_static(b"hello" as &[_]))]); @@ -320,7 +346,7 @@ mod test { Ok(Frame::Data(Bytes::from_static(b"world" as &[_]))), ]); - let pack = Pack::new(StreamBody::new(iter), JsonCodec); + let pack = Pack::new(StreamBody::new(iter)); let values = pack.collect::>().await; assert_eq!(values, [ @@ -335,24 +361,30 @@ mod test { Result::<_, !>::Ok(Frame::Control(ResponseKind::Err( ErrorCode::INTERNAL_SERVER_ERROR, ))), - Ok(Frame::Data(Bytes::from_static(b"\x01hello " as &[_]))), + Ok(Frame::Data(Bytes::from_static( + b"\x00\x00\x00\x0Bhello " as &[_], + ))), Ok(Frame::Data(Bytes::from_static(b"world" as &[_]))), Ok(Frame::Control(ResponseKind::Err( ErrorCode::INTERNAL_SERVER_ERROR, ))), - Ok(Frame::Data(Bytes::from_static(b"\x01steven" as &[_]))), + Ok(Frame::Data(Bytes::from_static( + b"\x00\x00\x00\x06steven" as &[_], + ))), ]); - let pack = Pack::new(StreamBody::new(iter), JsonCodec); + let pack = Pack::new(StreamBody::new(iter)); let values = pack.collect::>().await; - let mut buffer = ErrorBuffer::error(); - buffer.put_slice(b"hello world"); - let error1 = buffer.finish(ErrorCode::INTERNAL_SERVER_ERROR); + let error1 = NetworkError::capture_error(&ExampleError { + message: Cow::Borrowed("hello world"), + code: ErrorCode::INTERNAL_SERVER_ERROR, + }); - let mut buffer = ErrorBuffer::error(); - buffer.put_slice(b"steven"); - let error2 = buffer.finish(ErrorCode::INTERNAL_SERVER_ERROR); + let error2 = NetworkError::capture_error(&ExampleError { + message: Cow::Borrowed("steven"), + code: ErrorCode::INTERNAL_SERVER_ERROR, + }); assert_eq!(values, [Err(error1), Err(error2)]); } @@ -367,7 +399,7 @@ mod test { Ok(Frame::Data(Bytes::from_static(b"steven" as &[_]))), ]); - let pack = Pack::new(StreamBody::new(iter), JsonCodec); + let pack = Pack::new(StreamBody::new(iter)); let values = pack.collect::>().await; assert_eq!(values, [ @@ -386,15 +418,18 @@ mod test { Ok(Frame::Control(ResponseKind::Err( ErrorCode::INTERNAL_SERVER_ERROR, ))), - Ok(Frame::Data(Bytes::from_static(b"\x01steven" as &[_]))), + Ok(Frame::Data(Bytes::from_static( + b"\x00\x00\x00\x06steven" as &[_], + ))), ]); - let pack = Pack::new(StreamBody::new(iter), JsonCodec); + let pack = Pack::new(StreamBody::new(iter)); let values = pack.collect::>().await; - let mut buffer = ErrorBuffer::error(); - buffer.put_slice(b"steven"); - let error = buffer.finish(ErrorCode::INTERNAL_SERVER_ERROR); + let error = NetworkError::capture_error(&ExampleError { + message: Cow::Borrowed("steven"), + code: ErrorCode::INTERNAL_SERVER_ERROR, + }); assert_eq!(values, [ Ok(Bytes::from_static(b"hello " as &[_])), @@ -409,18 +444,21 @@ mod test { Result::<_, !>::Ok(Frame::Control(ResponseKind::Err( ErrorCode::INTERNAL_SERVER_ERROR, ))), - Ok(Frame::Data(Bytes::from_static(b"\x01hello " as &[_]))), + Ok(Frame::Data(Bytes::from_static( + b"\x00\x00\x00\x0Bhello " as &[_], + ))), Ok(Frame::Data(Bytes::from_static(b"world" as &[_]))), Ok(Frame::Control(ResponseKind::Ok)), Ok(Frame::Data(Bytes::from_static(b"steven" as &[_]))), ]); - let pack = Pack::new(StreamBody::new(iter), JsonCodec); + let pack = Pack::new(StreamBody::new(iter)); let values = pack.collect::>().await; - let mut buffer = ErrorBuffer::error(); - buffer.put_slice(b"hello world"); - let error = buffer.finish(ErrorCode::INTERNAL_SERVER_ERROR); + let error = NetworkError::capture_error(&ExampleError { + message: Cow::Borrowed("hello world"), 
+ code: ErrorCode::INTERNAL_SERVER_ERROR, + }); assert_eq!(values, [ Err(error), @@ -434,10 +472,12 @@ mod test { ResponseKind::Err(ErrorCode::INTERNAL_SERVER_ERROR), ))]); - let pack = Pack::new(StreamBody::new(iter), JsonCodec); + let pack = Pack::new(StreamBody::new(iter)); let values = pack.collect::>().await; - let error = JsonCodec.encode_error(InvalidTagError); + let error = NetworkError::capture_error(&DecodeTransactionError { + bytes: Bytes::new(), + }); assert_eq!(values, [Err(error)]); } diff --git a/libs/@local/harpc/tower/src/net/pack_error.rs b/libs/@local/harpc/tower/src/net/pack_error.rs index 02b6d68b2ec..a7b82a5a040 100644 --- a/libs/@local/harpc/tower/src/net/pack_error.rs +++ b/libs/@local/harpc/tower/src/net/pack_error.rs @@ -1,81 +1,79 @@ use core::{ + error::Error, fmt::Debug, - marker::PhantomData, ops::ControlFlow, pin::Pin, task::{Context, Poll}, }; -use bytes::{Buf, BufMut, BytesMut}; +use bytes::{BufMut, Bytes, BytesMut}; use futures::Stream; -use harpc_codec::{decode::ErrorDecoder, error::kind}; +use harpc_codec::error::NetworkError; use harpc_types::{error_code::ErrorCode, response_kind::ResponseKind}; use crate::body::{Body, Frame}; +#[derive(Debug, Clone, PartialEq, Eq, derive_more::Display)] +#[display("incomplete network error returned, message: {bytes:?}")] +struct DecodeNetworkError { + bytes: Bytes, +} + +impl Error for DecodeNetworkError { + fn provide<'a>(&'a self, request: &mut core::error::Request<'a>) { + request.provide_value(ErrorCode::PARTIAL_NETWORK_ERROR); + } +} + struct PartialResponseError { - _code: ErrorCode, + code: ErrorCode, bytes: BytesMut, } impl PartialResponseError { - fn finish(self, decoder: D) -> E - where - E: serde::de::DeserializeOwned, - D: ErrorDecoder + Send, - { - let mut buffer = self.bytes.freeze(); - let tag = buffer.get_u8(); - - let tag = kind::Tag::from_u8(tag).expect("should have a correct tag"); - match tag { - kind::Tag::NetworkError => decoder - .decode_error(buffer) - .expect("should be able to decode error"), - kind::Tag::Report => { - unimplemented!("to be reworked"); - } - kind::Tag::Recovery => { - unimplemented!("to be reworked"); + fn finish(self) -> NetworkError { + let buffer = self.bytes.freeze(); + + let error = NetworkError::try_from_parts(self.code, buffer); + match error { + Ok(error) => error, + Err(bytes) => { + let error = DecodeNetworkError { bytes }; + + NetworkError::capture_error(&error) } } } } pin_project_lite::pin_project! 
{ - pub struct PackError { + pub struct PackError { #[pin] body: B, - decoder: D, error: Option, exhausted: bool, - _marker: PhantomData *const E>, } } -impl PackError { - pub fn new(body: B, decoder: D) -> Self { +impl PackError { + pub const fn new(body: B) -> Self { Self { body, - decoder, error: None, exhausted: false, - _marker: PhantomData, } } } -impl PackError +impl PackError where B: Body, Error = !>, - D: ErrorDecoder + Clone + Send, - E: serde::de::DeserializeOwned, { #[expect(clippy::type_complexity, reason = "type is complex due to polling")] fn poll( self: Pin<&mut Self>, cx: &mut Context<'_>, - ) -> ControlFlow>>> { + ) -> ControlFlow>>> { let this = self.project(); let Poll::Ready(next) = this.body.poll_frame(cx) else { // simple propagation @@ -92,8 +90,7 @@ where return ControlFlow::Break(Poll::Ready(None)); }; - let decoder = this.decoder.clone(); - let error = error.finish::(decoder); + let error = error.finish(); ControlFlow::Break(Poll::Ready(Some(Err(error)))) } Some(Ok(Frame::Data(data))) => { @@ -112,7 +109,7 @@ where // if we have a previous error, finish said error and return it, otherwise // wait for the next frame to populate it let error = this.error.replace(PartialResponseError { - _code: code, + code, bytes: BytesMut::new(), }); @@ -120,8 +117,7 @@ where return ControlFlow::Continue(()); }; - let decoder = this.decoder.clone(); - let error = error.finish::(decoder); + let error = error.finish(); ControlFlow::Break(Poll::Ready(Some(Err(error)))) } @@ -134,8 +130,7 @@ where return ControlFlow::Continue(()); }; - let decoder = this.decoder.clone(); - let error = error.finish::(decoder); + let error = error.finish(); ControlFlow::Break(Poll::Ready(Some(Err(error)))) } @@ -145,13 +140,11 @@ where } } -impl Stream for PackError +impl Stream for PackError where B: Body, Error = !>, - D: ErrorDecoder + Clone + Send, - E: serde::de::DeserializeOwned, { - type Item = Result; + type Item = Result; fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { if self.exhausted { diff --git a/libs/@local/harpc/tower/src/response.rs b/libs/@local/harpc/tower/src/response.rs index 11028d7cd60..189a292eb0d 100644 --- a/libs/@local/harpc/tower/src/response.rs +++ b/libs/@local/harpc/tower/src/response.rs @@ -1,6 +1,6 @@ use bytes::Bytes; use futures::{Stream, TryStreamExt, stream::MapOk}; -use harpc_codec::error::EncodedError; +use harpc_codec::error::NetworkError; use harpc_net::session::server::SessionId; use harpc_types::response_kind::ResponseKind; @@ -78,7 +78,7 @@ impl Response { } impl Response>> { - pub fn from_error(parts: Parts, error: EncodedError) -> Self { + pub fn from_error(parts: Parts, error: NetworkError) -> Self { let (code, bytes) = error.into_parts(); Self { diff --git a/libs/@local/harpc/types/src/error_code.rs b/libs/@local/harpc/types/src/error_code.rs index 44c2b75f238..8bb0cf18740 100644 --- a/libs/@local/harpc/types/src/error_code.rs +++ b/libs/@local/harpc/types/src/error_code.rs @@ -112,14 +112,20 @@ define_error_code_consts! { ], /// Errors that occur due to malformed payloads in the tower layer. 0xFF_10 => [ - /// Encoded error encountered an invalid error tag. + /// The encoded transaction error encountered is either partially encoded or has too many bytes. /// - /// The returned payload for an encoded error does not have a valid error tag to distinguish - /// between the different error encodings and could therefore not be properly encoded. + /// The returned payload for an encoded error does not have the correct length.
/// /// This is a fault in the implementation of the server, either in the `codec` or /// the `tower` layer. - PACK_INVALID_ERROR_TAG + PARTIAL_TRANSACTION_ERROR, + /// The encoded network error encountered is either partially encoded or has too many bytes. + /// + /// The returned payload for an encoded error does not have the correct length. + /// + /// This is a fault in the implementation of the server, either in the `codec` or + /// the `tower` layer. + PARTIAL_NETWORK_ERROR ], /// Generic server errors. 0xFF_F0 => [ diff --git a/libs/@local/harpc/wire-protocol/Cargo.toml b/libs/@local/harpc/wire-protocol/Cargo.toml index cf0b5cd8d6e..7153b429c0d 100644 --- a/libs/@local/harpc/wire-protocol/Cargo.toml +++ b/libs/@local/harpc/wire-protocol/Cargo.toml @@ -12,7 +12,6 @@ authors.workspace = true # Public workspace dependencies error-stack = { workspace = true, public = true } harpc-types = { workspace = true, public = true } -harpc-codec = { workspace = true, public = true } # Public third-party dependencies bytes = { workspace = true, public = true } diff --git a/libs/@local/harpc/wire-protocol/package.json b/libs/@local/harpc/wire-protocol/package.json index f0a1ed891a4..d0eb0e81665 100644 --- a/libs/@local/harpc/wire-protocol/package.json +++ b/libs/@local/harpc/wire-protocol/package.json @@ -11,7 +11,6 @@ }, "dependencies": { "@rust/error-stack": "0.5.0", - "@rust/harpc-codec": "0.0.0-private", "@rust/harpc-types": "0.0.0-private" } } diff --git a/libs/@local/hash-backend-utils/package.json b/libs/@local/hash-backend-utils/package.json index 907f09b87bf..97d05483ccf 100644 --- a/libs/@local/hash-backend-utils/package.json +++ b/libs/@local/hash-backend-utils/package.json @@ -23,8 +23,8 @@ "test:unit": "vitest --run" }, "dependencies": { - "@aws-sdk/client-s3": "3.670.0", - "@aws-sdk/s3-request-presigner": "3.670.0", + "@aws-sdk/client-s3": "3.675.0", + "@aws-sdk/s3-request-presigner": "3.675.0", "@blockprotocol/core": "0.1.3", "@blockprotocol/graph": "0.4.0-canary.0", "@blockprotocol/type-system": "0.1.2-canary.0", @@ -59,7 +59,7 @@ "@local/eslint-config": "0.0.0-private", "@local/tsconfig": "0.0.0-private", "@types/dotenv-flow": "3.3.3", - "@types/node": "20.16.12", + "@types/node": "20.16.13", "@types/wait-on": "5.3.4", "@vitest/coverage-istanbul": "2.1.2", "eslint": "8.57.0", diff --git a/libs/@local/hash-graph-client/typescript/package.json b/libs/@local/hash-graph-client/typescript/package.json index ca2a96e4b0d..250746dcc98 100644 --- a/libs/@local/hash-graph-client/typescript/package.json +++ b/libs/@local/hash-graph-client/typescript/package.json @@ -12,15 +12,15 @@ "codegen": "redocly bundle -o openapi.bundle.json ../../../../apps/hash-graph/libs/api/openapi/openapi.json && JAVA_OPTS='-Dlog.level=warn' openapi-generator-cli generate && rm openapi.bundle.json && fix-esm-import-path *.ts" }, "dependencies": { - "@openapitools/openapi-generator-cli": "2.14.0", + "@openapitools/openapi-generator-cli": "2.14.1", "axios": "1.7.7" }, "devDependencies": { "@local/eslint-config": "0.0.0-private", "@local/tsconfig": "0.0.0-private", - "@redocly/cli": "1.25.7", + "@redocly/cli": "1.25.8", "@rust/graph-api": "0.0.0-private", - "@types/node": "20.16.12", + "@types/node": "20.16.13", "eslint": "8.57.0", "fix-esm-import-path": "1.10.1", "rimraf": "6.0.1", diff --git a/libs/@local/hash-isomorphic-utils/package.json b/libs/@local/hash-isomorphic-utils/package.json index 8cc5bc4308e..963f89b2c0f 100644 --- a/libs/@local/hash-isomorphic-utils/package.json +++
b/libs/@local/hash-isomorphic-utils/package.json @@ -64,7 +64,7 @@ "@local/tsconfig": "0.0.0-private", "@temporalio/workflow": "1.11.3", "@types/lodash-es": "4.17.12", - "@types/node": "20.16.12", + "@types/node": "20.16.13", "@types/pluralize": "0.0.33", "@vitest/coverage-istanbul": "2.1.2", "eslint": "8.57.0", diff --git a/libs/@local/internal-api-client/typescript/package.json b/libs/@local/internal-api-client/typescript/package.json index 5d0f1abed61..33e5992aed1 100644 --- a/libs/@local/internal-api-client/typescript/package.json +++ b/libs/@local/internal-api-client/typescript/package.json @@ -16,8 +16,8 @@ }, "devDependencies": { "@local/eslint-config": "0.0.0-private", - "@openapitools/openapi-generator-cli": "2.14.0", - "@types/node": "20.16.12", + "@openapitools/openapi-generator-cli": "2.14.1", + "@types/node": "20.16.13", "eslint": "8.57.0", "rimraf": "6.0.1", "typescript": "5.6.3" diff --git a/libs/@local/status/typescript/package.json b/libs/@local/status/typescript/package.json index 82ed938223d..08e4937ddc6 100644 --- a/libs/@local/status/typescript/package.json +++ b/libs/@local/status/typescript/package.json @@ -25,7 +25,7 @@ "@local/eslint-config": "0.0.0-private", "@local/tsconfig": "0.0.0-private", "@types/lodash-es": "4.17.12", - "@types/node": "20.16.12", + "@types/node": "20.16.13", "@types/yargs": "17.0.33", "eslint": "8.57.0", "quicktype": "16.0.43", diff --git a/package.json b/package.json index 704ef59d2a7..fdfe1e4b94c 100644 --- a/package.json +++ b/package.json @@ -115,7 +115,7 @@ "prettier-plugin-packagejson": "2.5.3", "prettier-plugin-sh": "0.14.0", "suppress-exit-code": "3.2.0", - "turbo": "2.2.1", + "turbo": "2.2.3", "wait-on": "8.0.1", "yarn-deduplicate": "6.0.2" }, diff --git a/yarn.lock b/yarn.lock index 2779eaf28c4..9d26d0d845e 100644 --- a/yarn.lock +++ b/yarn.lock @@ -552,18 +552,18 @@ "@smithy/util-utf8" "^3.0.0" tslib "^2.6.2" -"@aws-sdk/client-s3@3.670.0": - version "3.670.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/client-s3/-/client-s3-3.670.0.tgz#53e9ba24ee2dacb94bed9dfab50521108d3b6fa0" - integrity sha512-8Pwu1K+PgbYpXDaGKNy5hEbRH5FXHlfXJOhtV4oEDroL7ngix3ZUVWN9oIVVSDK02y1oQS1jCSEGUiUiauzb0g== +"@aws-sdk/client-s3@3.675.0": + version "3.675.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/client-s3/-/client-s3-3.675.0.tgz#1588a70afec26be3cc9a7577fa3f37d768951222" + integrity sha512-WKPc9fwFsD0SrWmrj0MdMHE+hQ0YAIGLqACmTnL1yW76qAwjIlFa9TAhR8f29aVCQodt/I6HDf9dHX/F+GyDFg== dependencies: "@aws-crypto/sha1-browser" "5.2.0" "@aws-crypto/sha256-browser" "5.2.0" "@aws-crypto/sha256-js" "5.2.0" - "@aws-sdk/client-sso-oidc" "3.670.0" - "@aws-sdk/client-sts" "3.670.0" + "@aws-sdk/client-sso-oidc" "3.675.0" + "@aws-sdk/client-sts" "3.675.0" "@aws-sdk/core" "3.667.0" - "@aws-sdk/credential-provider-node" "3.670.0" + "@aws-sdk/credential-provider-node" "3.675.0" "@aws-sdk/middleware-bucket-endpoint" "3.667.0" "@aws-sdk/middleware-expect-continue" "3.667.0" "@aws-sdk/middleware-flexible-checksums" "3.669.0" @@ -571,14 +571,14 @@ "@aws-sdk/middleware-location-constraint" "3.667.0" "@aws-sdk/middleware-logger" "3.667.0" "@aws-sdk/middleware-recursion-detection" "3.667.0" - "@aws-sdk/middleware-sdk-s3" "3.669.0" + "@aws-sdk/middleware-sdk-s3" "3.674.0" "@aws-sdk/middleware-ssec" "3.667.0" "@aws-sdk/middleware-user-agent" "3.669.0" "@aws-sdk/region-config-resolver" "3.667.0" - "@aws-sdk/signature-v4-multi-region" "3.669.0" + "@aws-sdk/signature-v4-multi-region" "3.674.0" "@aws-sdk/types" "3.667.0" "@aws-sdk/util-endpoints" "3.667.0" - 
"@aws-sdk/util-user-agent-browser" "3.670.0" + "@aws-sdk/util-user-agent-browser" "3.675.0" "@aws-sdk/util-user-agent-node" "3.669.0" "@aws-sdk/xml-builder" "3.662.0" "@smithy/config-resolver" "^3.0.9" @@ -616,17 +616,17 @@ "@smithy/util-waiter" "^3.1.6" tslib "^2.6.2" -"@aws-sdk/client-ses@3.670.0": - version "3.670.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/client-ses/-/client-ses-3.670.0.tgz#d8f9b67acfab927091a3a35135b447e8dfa8d6aa" - integrity sha512-WMmdNa/oGBN4zXhFhlFg3b8TflnNhZRU193aGbMEggLtUy7f+UhUTdVSDwfKgEU8zWPR6sbLGv2TyoEor723Yw== +"@aws-sdk/client-ses@3.675.0": + version "3.675.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/client-ses/-/client-ses-3.675.0.tgz#127b53810b6cc32da1cd5d2d93553f81f45004b6" + integrity sha512-4/OyFFpHMIahDc063vk4viETLtNPjopcUpwmWMtV8rhOns8KjJ2b1tvpvV7lNYT53mUm+g3fhYok9McHFDeeMA== dependencies: "@aws-crypto/sha256-browser" "5.2.0" "@aws-crypto/sha256-js" "5.2.0" - "@aws-sdk/client-sso-oidc" "3.670.0" - "@aws-sdk/client-sts" "3.670.0" + "@aws-sdk/client-sso-oidc" "3.675.0" + "@aws-sdk/client-sts" "3.675.0" "@aws-sdk/core" "3.667.0" - "@aws-sdk/credential-provider-node" "3.670.0" + "@aws-sdk/credential-provider-node" "3.675.0" "@aws-sdk/middleware-host-header" "3.667.0" "@aws-sdk/middleware-logger" "3.667.0" "@aws-sdk/middleware-recursion-detection" "3.667.0" @@ -634,7 +634,7 @@ "@aws-sdk/region-config-resolver" "3.667.0" "@aws-sdk/types" "3.667.0" "@aws-sdk/util-endpoints" "3.667.0" - "@aws-sdk/util-user-agent-browser" "3.670.0" + "@aws-sdk/util-user-agent-browser" "3.675.0" "@aws-sdk/util-user-agent-node" "3.669.0" "@smithy/config-resolver" "^3.0.9" "@smithy/core" "^2.4.8" @@ -755,15 +755,15 @@ "@smithy/util-utf8" "^3.0.0" tslib "^2.6.2" -"@aws-sdk/client-sso-oidc@3.670.0": - version "3.670.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/client-sso-oidc/-/client-sso-oidc-3.670.0.tgz#9696dd19d6c0018fa398a6efd4aabbc97b22e1a7" - integrity sha512-4qDK2L36Q4J1lfemaHHd9ZxqKRaos3STp44qPAHf/8QyX6Uk5sXgZNVO2yWM7SIEtVKwwBh/fZAsdBkGPBfZcw== +"@aws-sdk/client-sso-oidc@3.675.0": + version "3.675.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/client-sso-oidc/-/client-sso-oidc-3.675.0.tgz#a30650a462afcf0386adb26e99283d4989b9bbf4" + integrity sha512-4kEcaa2P/BFz+xy5tagbtzM08gbjHXyYqW+n6SJuUFK7N6bZNnA4cu1hVgHcqOqk8Dbwv7fiseGT0x3Hhqjwqg== dependencies: "@aws-crypto/sha256-browser" "5.2.0" "@aws-crypto/sha256-js" "5.2.0" "@aws-sdk/core" "3.667.0" - "@aws-sdk/credential-provider-node" "3.670.0" + "@aws-sdk/credential-provider-node" "3.675.0" "@aws-sdk/middleware-host-header" "3.667.0" "@aws-sdk/middleware-logger" "3.667.0" "@aws-sdk/middleware-recursion-detection" "3.667.0" @@ -771,7 +771,7 @@ "@aws-sdk/region-config-resolver" "3.667.0" "@aws-sdk/types" "3.667.0" "@aws-sdk/util-endpoints" "3.667.0" - "@aws-sdk/util-user-agent-browser" "3.670.0" + "@aws-sdk/util-user-agent-browser" "3.675.0" "@aws-sdk/util-user-agent-node" "3.669.0" "@smithy/config-resolver" "^3.0.9" "@smithy/core" "^2.4.8" @@ -888,10 +888,10 @@ "@smithy/util-utf8" "^3.0.0" tslib "^2.6.2" -"@aws-sdk/client-sso@3.670.0": - version "3.670.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/client-sso/-/client-sso-3.670.0.tgz#5e4cdaa60ace04fe3f4df0618d7ed558ce40abc3" - integrity sha512-J+oz6uSsDvk4pimMDnKJb1wsV216zTrejvMTIL4RhUD1QPIVVOpteTdUShcjZUIZnkcJZGI+cym/SFK0kuzTpg== +"@aws-sdk/client-sso@3.675.0": + version "3.675.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/client-sso/-/client-sso-3.675.0.tgz#4e400ef0141ee2e19b64c9948af7a27697a3f0cc" + integrity 
sha512-2goBCEr4acZJ1YJ69eWPTsIfZUbO7enog+lBA5kZShDiwovqzwYSHSlf6OGz4ETs2xT1n7n+QfKY0p+TluTfEw== dependencies: "@aws-crypto/sha256-browser" "5.2.0" "@aws-crypto/sha256-js" "5.2.0" @@ -903,7 +903,7 @@ "@aws-sdk/region-config-resolver" "3.667.0" "@aws-sdk/types" "3.667.0" "@aws-sdk/util-endpoints" "3.667.0" - "@aws-sdk/util-user-agent-browser" "3.670.0" + "@aws-sdk/util-user-agent-browser" "3.675.0" "@aws-sdk/util-user-agent-node" "3.669.0" "@smithy/config-resolver" "^3.0.9" "@smithy/core" "^2.4.8" @@ -1024,16 +1024,16 @@ "@smithy/util-utf8" "^3.0.0" tslib "^2.6.2" -"@aws-sdk/client-sts@3.670.0": - version "3.670.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/client-sts/-/client-sts-3.670.0.tgz#fa90f49dafcd9e350f74b8eb22768f4e23814da7" - integrity sha512-bExrNo8ZVWorS3cjMZKQnA2HWqDmAzcZoSN/cPVoPFNkHwdl1lzPxvcLzmhpIr48JHgKfybBjrbluDZfIYeEog== +"@aws-sdk/client-sts@3.675.0": + version "3.675.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/client-sts/-/client-sts-3.675.0.tgz#8efcff1270d1f10e7dafa469f88fb71dcfd70178" + integrity sha512-zgjyR4GyuONeDGJBKNt9lFJ8HfDX7rpxZZVR7LSXr9lUkjf6vUGgD2k/K4UAoOTWCKKCor6TA562ezGlA8su6Q== dependencies: "@aws-crypto/sha256-browser" "5.2.0" "@aws-crypto/sha256-js" "5.2.0" - "@aws-sdk/client-sso-oidc" "3.670.0" + "@aws-sdk/client-sso-oidc" "3.675.0" "@aws-sdk/core" "3.667.0" - "@aws-sdk/credential-provider-node" "3.670.0" + "@aws-sdk/credential-provider-node" "3.675.0" "@aws-sdk/middleware-host-header" "3.667.0" "@aws-sdk/middleware-logger" "3.667.0" "@aws-sdk/middleware-recursion-detection" "3.667.0" @@ -1041,7 +1041,7 @@ "@aws-sdk/region-config-resolver" "3.667.0" "@aws-sdk/types" "3.667.0" "@aws-sdk/util-endpoints" "3.667.0" - "@aws-sdk/util-user-agent-browser" "3.670.0" + "@aws-sdk/util-user-agent-browser" "3.675.0" "@aws-sdk/util-user-agent-node" "3.669.0" "@smithy/config-resolver" "^3.0.9" "@smithy/core" "^2.4.8" @@ -1235,16 +1235,16 @@ "@smithy/types" "^3.3.0" tslib "^2.6.2" -"@aws-sdk/credential-provider-ini@3.670.0": - version "3.670.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.670.0.tgz#2157bc5fc0014ef3da72ac30b26df259a8443c83" - integrity sha512-TB1gacUj75leaTt2JsCTzygDSIk4ksv9uZoR7VenlgFPRktyOeT+fapwIVBeB2Qg7b9uxAY2K5XkKstDZyBEEw== +"@aws-sdk/credential-provider-ini@3.675.0": + version "3.675.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.675.0.tgz#031b75d26ab8e2921c8945a905f6ca7c2005e15e" + integrity sha512-kCBlC6grpbpCvgowk9T4JHZxJ88VfN0r77bDZClcadFRAKQ8UHyO02zhgFCfUdnU1lNv1mr3ngEcGN7XzJlYWA== dependencies: "@aws-sdk/core" "3.667.0" "@aws-sdk/credential-provider-env" "3.667.0" "@aws-sdk/credential-provider-http" "3.667.0" "@aws-sdk/credential-provider-process" "3.667.0" - "@aws-sdk/credential-provider-sso" "3.670.0" + "@aws-sdk/credential-provider-sso" "3.675.0" "@aws-sdk/credential-provider-web-identity" "3.667.0" "@aws-sdk/types" "3.667.0" "@smithy/credential-provider-imds" "^3.2.4" @@ -1289,16 +1289,16 @@ "@smithy/types" "^3.3.0" tslib "^2.6.2" -"@aws-sdk/credential-provider-node@3.670.0": - version "3.670.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-node/-/credential-provider-node-3.670.0.tgz#bf64e00d29db5ae758c518aa26c5f05e39b1d4e4" - integrity sha512-zwNrRYzubk4CaZ7zebeDhxsm8QtNWkbGKopZPOaZSnd5uqUGRcmx4ccVRngWUK68XDP44aEUWC8iU5Pc7btpHQ== +"@aws-sdk/credential-provider-node@3.675.0": + version "3.675.0" + resolved 
"https://registry.yarnpkg.com/@aws-sdk/credential-provider-node/-/credential-provider-node-3.675.0.tgz#25ebe731279dbc1f165e2fb5f7648bae43b7c693" + integrity sha512-VO1WVZCDmAYu4sY/6qIBzdm5vJTxLhWKJWvL5kVFfSe8WiNNoHlTqYYUK9vAm/JYpIgFLTefPbIc5W4MK7o6Pg== dependencies: "@aws-sdk/credential-provider-env" "3.667.0" "@aws-sdk/credential-provider-http" "3.667.0" - "@aws-sdk/credential-provider-ini" "3.670.0" + "@aws-sdk/credential-provider-ini" "3.675.0" "@aws-sdk/credential-provider-process" "3.667.0" - "@aws-sdk/credential-provider-sso" "3.670.0" + "@aws-sdk/credential-provider-sso" "3.675.0" "@aws-sdk/credential-provider-web-identity" "3.667.0" "@aws-sdk/types" "3.667.0" "@smithy/credential-provider-imds" "^3.2.4" @@ -1367,12 +1367,12 @@ "@smithy/types" "^3.3.0" tslib "^2.6.2" -"@aws-sdk/credential-provider-sso@3.670.0": - version "3.670.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.670.0.tgz#04186708752f211592cbb5dd0ae674aac12799f1" - integrity sha512-5PkA8BOy4q57Vhe9AESoHKZ7vjRbElNPKjXA4qC01xY+DitClRFz4O3B9sMzFp0PHlz9nDVSXXKgq0yzF/nAag== +"@aws-sdk/credential-provider-sso@3.675.0": + version "3.675.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.675.0.tgz#d9bf80e25cd7756e959747804484340071ac3e83" + integrity sha512-p/EE2c0ebSgRhg1Fe1OH2+xNl7j1P4DTc7kZy1mX1NJ72fkqnGgBuf1vk5J9RmiRpbauPNMlm+xohjkGS7iodA== dependencies: - "@aws-sdk/client-sso" "3.670.0" + "@aws-sdk/client-sso" "3.675.0" "@aws-sdk/core" "3.667.0" "@aws-sdk/token-providers" "3.667.0" "@aws-sdk/types" "3.667.0" @@ -1570,10 +1570,10 @@ "@smithy/types" "^3.5.0" tslib "^2.6.2" -"@aws-sdk/middleware-sdk-s3@3.669.0": - version "3.669.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-sdk-s3/-/middleware-sdk-s3-3.669.0.tgz#421d89c19c513bb8c130cf8f0f40b68838a17a95" - integrity sha512-b2QUQ7DcIcVCUFhvmFEDI90BemvQhO0ntIajllLqQSy88PSNdLDCVx5mIzfxaaK/1tdY/UsEDRRm1kMQHJDQpg== +"@aws-sdk/middleware-sdk-s3@3.674.0": + version "3.674.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-sdk-s3/-/middleware-sdk-s3-3.674.0.tgz#ed80913d38ada26ce7ad184cbb77892f5b29ef99" + integrity sha512-IvXnWrKy4mO+I44kLYHd6Wlw+FdB4sg1jvHCmnZo1KNaAFIA3x1iXgOaZynKoBdEmol3xfr2uDbeXUQvIwoIgg== dependencies: "@aws-sdk/core" "3.667.0" "@aws-sdk/types" "3.667.0" @@ -1678,12 +1678,12 @@ "@smithy/util-middleware" "^3.0.7" tslib "^2.6.2" -"@aws-sdk/s3-presigned-post@3.670.0": - version "3.670.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/s3-presigned-post/-/s3-presigned-post-3.670.0.tgz#0c91740964a770fba539c47cb2f49b1c076a9fa0" - integrity sha512-HmZeps/k3GqNHeqaEEoWGg2Keq8+/93q1kp6DpEDM2ufUYLBCrWsQ1yjHZ8IYtajmF9vh+kjeCQCgUKHsb1N6A== +"@aws-sdk/s3-presigned-post@3.675.0": + version "3.675.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/s3-presigned-post/-/s3-presigned-post-3.675.0.tgz#0ecdf07de4f4fdfee9ade22b8686d47cac0479f0" + integrity sha512-KKlSEHILDSBs+OWntIJOsYOKYxTDiHH+VPPZWUmdKQjnH5BtnAlR8mEAVvmwuWRN8qBwGpPRqPU07YeT+lj0OQ== dependencies: - "@aws-sdk/client-s3" "3.670.0" + "@aws-sdk/client-s3" "3.675.0" "@aws-sdk/types" "3.667.0" "@aws-sdk/util-format-url" "3.667.0" "@smithy/middleware-endpoint" "^3.1.4" @@ -1693,12 +1693,12 @@ "@smithy/util-utf8" "^3.0.0" tslib "^2.6.2" -"@aws-sdk/s3-request-presigner@3.670.0": - version "3.670.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/s3-request-presigner/-/s3-request-presigner-3.670.0.tgz#3b4f9585eba3f6d3c8c1fc2a3fe02db453d25af6" - integrity 
sha512-cTTQWJhmCRNH9NZbTqAhx3aXmC+p7gYzj7kNOyzdTA6D4jJOQZekxFFEQ0T2BPFYPR9Elk77W0yK+Nv9wMVD4g== +"@aws-sdk/s3-request-presigner@3.675.0": + version "3.675.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/s3-request-presigner/-/s3-request-presigner-3.675.0.tgz#0019c3a6b1405e5bca9c933e949bfa80ee7c19d5" + integrity sha512-/2KWrFjB2FWTKV8nKK1gbufY1IX9GZy4yXVVKjdLxMpM0O6JIg79S0KGvkEZtCZW4SKen0sExsCU5Dsc1RMfwA== dependencies: - "@aws-sdk/signature-v4-multi-region" "3.669.0" + "@aws-sdk/signature-v4-multi-region" "3.674.0" "@aws-sdk/types" "3.667.0" "@aws-sdk/util-format-url" "3.667.0" "@smithy/middleware-endpoint" "^3.1.4" @@ -1707,12 +1707,12 @@ "@smithy/types" "^3.5.0" tslib "^2.6.2" -"@aws-sdk/signature-v4-multi-region@3.669.0": - version "3.669.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/signature-v4-multi-region/-/signature-v4-multi-region-3.669.0.tgz#d5ed320cf39143af8f85462295975dab3b4bc285" - integrity sha512-TVwlWAxfBHnFjnfTBQWUhzVJzjwVhkq1+KR0JZV7JrfqeyBOdZjAaV9ie3VNY9HUouecq1fDuKaSwe4JiWQsHg== +"@aws-sdk/signature-v4-multi-region@3.674.0": + version "3.674.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/signature-v4-multi-region/-/signature-v4-multi-region-3.674.0.tgz#03e37865cd09bed5b047d2b80457ed26e41101bb" + integrity sha512-VMQWbtcbg4FV/fILrODADV21pPg9AghuEzQlW2kH0hCtacvBwFl7eBxIiCBLLtkNple+CVPJvyBcqOZdBkEv/w== dependencies: - "@aws-sdk/middleware-sdk-s3" "3.669.0" + "@aws-sdk/middleware-sdk-s3" "3.674.0" "@aws-sdk/types" "3.667.0" "@smithy/protocol-http" "^4.1.4" "@smithy/signature-v4" "^4.2.0" @@ -1858,10 +1858,10 @@ bowser "^2.11.0" tslib "^2.6.2" -"@aws-sdk/util-user-agent-browser@3.670.0": - version "3.670.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.670.0.tgz#44504d56d035beace4688db5b7e0c02230290f0e" - integrity sha512-iRynWWazqEcCKwGMcQcywKTDLdLvqts1Yx474U64I9OKQXXwhOwhXbF5CAPSRta86lkVNAVYJa/0Bsv45pNn1A== +"@aws-sdk/util-user-agent-browser@3.675.0": + version "3.675.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.675.0.tgz#ad5371e0d4f68733e3dd04d455d99ee99609dbd9" + integrity sha512-HW4vGfRiX54RLcsYjLuAhcBBJ6lRVEZd7njfGpAwBB9s7BH8t48vrpYbyA5XbbqbTvXfYBnugQCUw9HWjEa1ww== dependencies: "@aws-sdk/types" "3.667.0" "@smithy/types" "^3.5.0" @@ -6020,10 +6020,10 @@ chalk "^4" fast-levenshtein "^3.0.0" -"@openapitools/openapi-generator-cli@2.14.0": - version "2.14.0" - resolved "https://registry.yarnpkg.com/@openapitools/openapi-generator-cli/-/openapi-generator-cli-2.14.0.tgz#016f509e7b13325524039d10d428c793503a4a04" - integrity sha512-k+ioQLtXLXgNbhQbp1UOxtaUnnYTWwAPev88hP5qauFA+eq4NyeQGNojknFssXg2x0VT0TUGmU3PZ2DiQ70IVg== +"@openapitools/openapi-generator-cli@2.14.1": + version "2.14.1" + resolved "https://registry.yarnpkg.com/@openapitools/openapi-generator-cli/-/openapi-generator-cli-2.14.1.tgz#74babae6544497ce34d8bc59ee876e206a7f09de" + integrity sha512-AHh5hcM5HoyLCHYdJesIAa53RGrmoaKw/RnIHpFCdumHEUNzqVmi7rHj0pWs4to1v99vbHE6zWp69WGOaa8rOg== dependencies: "@nestjs/axios" "3.0.3" "@nestjs/common" "10.4.3" @@ -6037,9 +6037,9 @@ console.table "0.10.0" fs-extra "10.1.0" glob "9.3.5" - https-proxy-agent "7.0.5" inquirer "8.2.6" lodash "4.17.21" + proxy-agent "6.4.0" reflect-metadata "0.1.13" rxjs "7.8.1" tslib "2.7.0" @@ -7459,12 +7459,12 @@ require-from-string "^2.0.2" uri-js-replace "^1.0.1" -"@redocly/cli@1.25.7": - version "1.25.7" - resolved "https://registry.yarnpkg.com/@redocly/cli/-/cli-1.25.7.tgz#18f6992552c524389eab4c8d2d9b60730d4073f5" - 
integrity sha512-JXsx1IEr4l7qjBQ7ry1Xzj6vpg8xjdKBzcuf0J8DP9Jaupc9g5tS5uVFaLupal1h64m1kJpscX7a8TPzKdoeRA== +"@redocly/cli@1.25.8": + version "1.25.8" + resolved "https://registry.yarnpkg.com/@redocly/cli/-/cli-1.25.8.tgz#fecd62d9ee1d564e6f0e1522f2c5648f514ce02b" + integrity sha512-oVFN3rpGFqupx57ZS0mF2B8grnk3i0xjTQrrMm1oftF3GEf7yTg5JzwnWi8KKRWuxin4qI7j+Id5AKgNQNmTKA== dependencies: - "@redocly/openapi-core" "1.25.7" + "@redocly/openapi-core" "1.25.8" abort-controller "^3.0.0" chokidar "^3.5.1" colorette "^1.2.0" @@ -7489,10 +7489,10 @@ resolved "https://registry.yarnpkg.com/@redocly/config/-/config-0.12.1.tgz#7b905a17d710244550ef826542d0db164d5ace02" integrity sha512-RW3rSirfsPdr0uvATijRDU3f55SuZV3m7/ppdTDvGw4IB0cmeZRkFmqTrchxMqWP50Gfg1tpHnjdxUCNo0E2qg== -"@redocly/openapi-core@1.25.7", "@redocly/openapi-core@^1.4.0": - version "1.25.7" - resolved "https://registry.yarnpkg.com/@redocly/openapi-core/-/openapi-core-1.25.7.tgz#34fb7fb3c2b534aaeb12019e2820af6908b39227" - integrity sha512-qidGKk4Bq0Ud0O8gRuXnDSLwVopwrf5+roNvpkvdQPVIHFSYJ5dscJkThdsn7OW8bNqahumQPWWczEh9l93FZw== +"@redocly/openapi-core@1.25.8", "@redocly/openapi-core@^1.4.0": + version "1.25.8" + resolved "https://registry.yarnpkg.com/@redocly/openapi-core/-/openapi-core-1.25.8.tgz#a3aff052b1d9d2db8ba86263ec994bbc85f6b8f1" + integrity sha512-eKKRqo2RYo7UIoDvIgcUB9ynhOjIWJnILXFz+VDevYeOBKd/CxvC0KbNRnuOrFqG3ip6363R/ONal2MyvuVrjg== dependencies: "@redocly/ajv" "^8.11.2" "@redocly/config" "^0.12.1" @@ -11038,10 +11038,10 @@ "@types/node" "*" form-data "^4.0.0" -"@types/node@*", "@types/node@20.16.12", "@types/node@>=13.7.0", "@types/node@^20.11.20", "@types/node@^20.12.7": - version "20.16.12" - resolved "https://registry.yarnpkg.com/@types/node/-/node-20.16.12.tgz#61cc9be049584b472fa31e465aa0ab3c090dac56" - integrity sha512-LfPFB0zOeCeCNQV3i+67rcoVvoN5n0NVuR2vLG0O5ySQMgchuZlC4lgz546ZOJyDtj5KIgOxy+lacOimfqZAIA== +"@types/node@*", "@types/node@20.16.13", "@types/node@>=13.7.0", "@types/node@^20.11.20", "@types/node@^20.12.7": + version "20.16.13" + resolved "https://registry.yarnpkg.com/@types/node/-/node-20.16.13.tgz#148c152d757dc73f8d65f0f6f078f39050b85b0c" + integrity sha512-GjQ7im10B0labo8ZGXDGROUl9k0BNyDgzfGpb4g/cl+4yYDWVKcozANF4FGr4/p0O/rAkQClM6Wiwkije++1Tg== dependencies: undici-types "~6.19.2" @@ -19428,14 +19428,6 @@ https-proxy-agent@5.0.0: agent-base "6" debug "4" -https-proxy-agent@7.0.5, https-proxy-agent@^7.0.0, https-proxy-agent@^7.0.1, https-proxy-agent@^7.0.2, https-proxy-agent@^7.0.3, https-proxy-agent@^7.0.4, https-proxy-agent@^7.0.5: - version "7.0.5" - resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-7.0.5.tgz#9e8b5013873299e11fab6fd548405da2d6c602b2" - integrity sha512-1e4Wqeblerz+tMKPIq2EMGiiWW1dIjZOksyHWSUm1rmuvw/how9hBHZ38lAGj5ID4Ik6EdkOw7NmWPy6LAwalw== - dependencies: - agent-base "^7.0.2" - debug "4" - https-proxy-agent@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-4.0.0.tgz#702b71fb5520a132a66de1f67541d9e62154d82b" @@ -19452,6 +19444,14 @@ https-proxy-agent@^5.0.0, https-proxy-agent@^5.0.1: agent-base "6" debug "4" +https-proxy-agent@^7.0.0, https-proxy-agent@^7.0.1, https-proxy-agent@^7.0.2, https-proxy-agent@^7.0.3, https-proxy-agent@^7.0.4, https-proxy-agent@^7.0.5: + version "7.0.5" + resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-7.0.5.tgz#9e8b5013873299e11fab6fd548405da2d6c602b2" + integrity sha512-1e4Wqeblerz+tMKPIq2EMGiiWW1dIjZOksyHWSUm1rmuvw/how9hBHZ38lAGj5ID4Ik6EdkOw7NmWPy6LAwalw== + 
dependencies: + agent-base "^7.0.2" + debug "4" + httpyac@6.15.1: version "6.15.1" resolved "https://registry.yarnpkg.com/httpyac/-/httpyac-6.15.1.tgz#fad6d93e04c1457fdf6855c2a85ac72a23ad7567" @@ -26037,7 +26037,7 @@ proxy-addr@~2.0.7: forwarded "0.2.0" ipaddr.js "1.9.1" -proxy-agent@^6.4.0: +proxy-agent@6.4.0, proxy-agent@^6.4.0: version "6.4.0" resolved "https://registry.yarnpkg.com/proxy-agent/-/proxy-agent-6.4.0.tgz#b4e2dd51dee2b377748aef8d45604c2d7608652d" integrity sha512-u0piLU+nCOHMgGjRbimiXmA9kM/L9EHh3zL81xCdp7m+Y2pHIsnmbdDoEDoAz5geaonNR6q6+yOPQs6n4T6sBQ== @@ -27746,10 +27746,10 @@ sass-lookup@^5.0.1: dependencies: commander "^10.0.1" -sass@1.79.5, sass@^1.52.3: - version "1.79.5" - resolved "https://registry.yarnpkg.com/sass/-/sass-1.79.5.tgz#646c627601cd5f84c64f7b1485b9292a313efae4" - integrity sha512-W1h5kp6bdhqFh2tk3DsI771MoEJjvrSY/2ihJRJS4pjIyfJCw0nTsxqhnrUzaLMOJjFchj8rOvraI/YUVjtx5g== +sass@1.80.3, sass@^1.52.3: + version "1.80.3" + resolved "https://registry.yarnpkg.com/sass/-/sass-1.80.3.tgz#3f63dd527647d2b3de35f36acb971bda80517423" + integrity sha512-ptDWyVmDMVielpz/oWy3YP3nfs7LpJTHIJZboMVs8GEC9eUmtZTZhMHlTW98wY4aEorDfjN38+Wr/XjskFWcfA== dependencies: "@parcel/watcher" "^2.4.1" chokidar "^4.0.0" @@ -30180,47 +30180,47 @@ tunnel-agent@^0.6.0: dependencies: safe-buffer "^5.0.1" -turbo-darwin-64@2.2.1: - version "2.2.1" - resolved "https://registry.yarnpkg.com/turbo-darwin-64/-/turbo-darwin-64-2.2.1.tgz#6341720f463c955a54715ac52fec54bf44330a4a" - integrity sha512-jltMdSQ+7rQDVaorjW729PCw6fwAn1MgZSdoa0Gil7GZCOF3SnR/ok0uJw6G5mdm6F5XM8ZTlz+mdGzBLuBRaA== +turbo-darwin-64@2.2.3: + version "2.2.3" + resolved "https://registry.yarnpkg.com/turbo-darwin-64/-/turbo-darwin-64-2.2.3.tgz#f0ced75ed031091e52851cbe8bb05d21a161a22b" + integrity sha512-Rcm10CuMKQGcdIBS3R/9PMeuYnv6beYIHqfZFeKWVYEWH69sauj4INs83zKMTUiZJ3/hWGZ4jet9AOwhsssLyg== -turbo-darwin-arm64@2.2.1: - version "2.2.1" - resolved "https://registry.yarnpkg.com/turbo-darwin-arm64/-/turbo-darwin-arm64-2.2.1.tgz#9f9c493541d2cc651d4b51b06f1f308c3b1535ab" - integrity sha512-RHW0c1NonsJXXlutlZeunmhLanf0/WbeizFfYgWuTEaJE4MbbhyD/RG4Fm/7iob5kxQ4Es2TzfDPqyMqpIO0GA== +turbo-darwin-arm64@2.2.3: + version "2.2.3" + resolved "https://registry.yarnpkg.com/turbo-darwin-arm64/-/turbo-darwin-arm64-2.2.3.tgz#0b4741383ab5070d8383891a65861a8869cc7202" + integrity sha512-+EIMHkuLFqUdJYsA3roj66t9+9IciCajgj+DVek+QezEdOJKcRxlvDOS2BUaeN8kEzVSsNiAGnoysFWYw4K0HA== -turbo-linux-64@2.2.1: - version "2.2.1" - resolved "https://registry.yarnpkg.com/turbo-linux-64/-/turbo-linux-64-2.2.1.tgz#b89c22c7c502d919032197d8b09d7279f64412cb" - integrity sha512-RasrjV+i2B90hoR8r6B2Btf2/ebNT5MJbhkpY0G1EN06E1IkjCKfAXj/1Dwmjy9+Zo0NC2r69L3HxRrtpar8jQ== +turbo-linux-64@2.2.3: + version "2.2.3" + resolved "https://registry.yarnpkg.com/turbo-linux-64/-/turbo-linux-64-2.2.3.tgz#2b339db50c12bc52ce99139c156d5555717a209d" + integrity sha512-UBhJCYnqtaeOBQLmLo8BAisWbc9v9daL9G8upLR+XGj6vuN/Nz6qUAhverN4Pyej1g4Nt1BhROnj6GLOPYyqxQ== -turbo-linux-arm64@2.2.1: - version "2.2.1" - resolved "https://registry.yarnpkg.com/turbo-linux-arm64/-/turbo-linux-arm64-2.2.1.tgz#c18a21b086f1966ac0fda37b6c97dfdb6bfd6f1d" - integrity sha512-LNkUUJuu1gNkhlo7Ky/zilXEiajLoGlWLiKT1XV5neEf+x1s+aU9Hzd/+HhSVMiyI8l7z6zLbrM1a6+v4co/SQ== +turbo-linux-arm64@2.2.3: + version "2.2.3" + resolved "https://registry.yarnpkg.com/turbo-linux-arm64/-/turbo-linux-arm64-2.2.3.tgz#a4daf6e0872a4e2652e2d05d68ad18cee5b10e94" + integrity sha512-hJYT9dN06XCQ3jBka/EWvvAETnHRs3xuO/rb5bESmDfG+d9yQjeTMlhRXKrr4eyIMt6cLDt1LBfyi+6CQ+VAwQ== 
-turbo-windows-64@2.2.1: - version "2.2.1" - resolved "https://registry.yarnpkg.com/turbo-windows-64/-/turbo-windows-64-2.2.1.tgz#27cd73eb91844092747069a6146be0360db2ca7d" - integrity sha512-Mn5tlFrLzlQ6tW6wTWNlyT1osXuDUg0VT1VAjRpmRXlK2Zi3oKVVG0rs0nkkq4rmuheryD1xyuGPN9nFKbAn/A== +turbo-windows-64@2.2.3: + version "2.2.3" + resolved "https://registry.yarnpkg.com/turbo-windows-64/-/turbo-windows-64-2.2.3.tgz#d44b3385948bd0f2ef5c2d53391f142bdd467b18" + integrity sha512-NPrjacrZypMBF31b4HE4ROg4P3nhMBPHKS5WTpMwf7wydZ8uvdEHpESVNMOtqhlp857zbnKYgP+yJF30H3N2dQ== -turbo-windows-arm64@2.2.1: - version "2.2.1" - resolved "https://registry.yarnpkg.com/turbo-windows-arm64/-/turbo-windows-arm64-2.2.1.tgz#7da48f2394b67a7650720f9a41acdc7cd969dc3a" - integrity sha512-bvYOJ3SMN00yiem+uAqwRMbUMau/KiMzJYxnD0YkFo6INc08z8gZi5g0GLZAR7g/L3JegktX3UQW2cJvryjvLg== +turbo-windows-arm64@2.2.3: + version "2.2.3" + resolved "https://registry.yarnpkg.com/turbo-windows-arm64/-/turbo-windows-arm64-2.2.3.tgz#d0625ec53f467013a6f259f87f7fc4ae8670aaa4" + integrity sha512-fnNrYBCqn6zgKPKLHu4sOkihBI/+0oYFr075duRxqUZ+1aLWTAGfHZLgjVeLh3zR37CVzuerGIPWAEkNhkWEIw== -turbo@2.2.1: - version "2.2.1" - resolved "https://registry.yarnpkg.com/turbo/-/turbo-2.2.1.tgz#65e7d433a10a8e28901bac76dcd0b0f7ae315103" - integrity sha512-clZFkh6U6NpsLKBVZYRjlZjRTfju1Z5STqvFVaOGu5443uM75alJe1nCYH9pQ9YJoiOvXAqA2rDHWN5kLS9JMg== +turbo@2.2.3: + version "2.2.3" + resolved "https://registry.yarnpkg.com/turbo/-/turbo-2.2.3.tgz#0f45612d62526c98c75da0682aa8c26b902b5e07" + integrity sha512-5lDvSqIxCYJ/BAd6rQGK/AzFRhBkbu4JHVMLmGh/hCb7U3CqSnr5Tjwfy9vc+/5wG2DJ6wttgAaA7MoCgvBKZQ== optionalDependencies: - turbo-darwin-64 "2.2.1" - turbo-darwin-arm64 "2.2.1" - turbo-linux-64 "2.2.1" - turbo-linux-arm64 "2.2.1" - turbo-windows-64 "2.2.1" - turbo-windows-arm64 "2.2.1" + turbo-darwin-64 "2.2.3" + turbo-darwin-arm64 "2.2.3" + turbo-linux-64 "2.2.3" + turbo-linux-arm64 "2.2.3" + turbo-windows-64 "2.2.3" + turbo-windows-arm64 "2.2.3" tweetnacl@^0.14.3, tweetnacl@~0.14.0: version "0.14.5" @@ -32227,10 +32227,10 @@ zip-stream@^4.1.0: compress-commons "^4.1.2" readable-stream "^3.6.0" -zip-webpack-plugin@4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/zip-webpack-plugin/-/zip-webpack-plugin-4.0.1.tgz#95f09716ecf73e53d949443017cfb03afe597dd3" - integrity sha512-G041Q4qUaog44Ynit6gs4o+o3JIv0WWfOLvc8Q3IxvPfuqd2KBHhpJWAXUB9Cm1JcWHTIOp9vS3oGMWa1p1Ehw== +zip-webpack-plugin@4.0.2: + version "4.0.2" + resolved "https://registry.yarnpkg.com/zip-webpack-plugin/-/zip-webpack-plugin-4.0.2.tgz#81bbb67c08ae4819c8ffae054b74c8eddb5593c5" + integrity sha512-t7GEF8q8l2zcTtrMes5j6/RpkBgIw8ARZmB5jWNksLua1mNa0bVaIVXOU8cFPXnbcFsHjgrLEyRMvLT1HcUiEA== dependencies: yazl "^2.5.1"
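
The tower-layer changes in this patch replace the tag-prefixed, JSON-encoded error payloads with `NetworkError`, whose byte layout can be read directly off the new test vectors (`b"\x00\x00\x00\x05world"`, `b"\0\0\0\rgeneric error"`): a big-endian `u32` length prefix followed by the UTF-8 message, with the `ErrorCode` carried out of band in the `ResponseKind::Err` control frame. Below is a minimal sketch of that framing, assuming only the `bytes` crate; the function names are illustrative, not the actual `harpc-codec` API:

```rust
use bytes::{Buf, BufMut, Bytes, BytesMut};

/// Encode a message as a big-endian `u32` length prefix followed by the raw
/// message bytes. Illustrative name; not the actual `harpc-codec` API.
fn encode_error_payload(message: &str) -> Bytes {
    let mut buffer = BytesMut::with_capacity(4 + message.len());
    buffer.put_u32(message.len() as u32); // `put_u32` writes big-endian
    buffer.put_slice(message.as_bytes());
    buffer.freeze()
}

/// Decode a payload, handing the untouched bytes back on a length mismatch,
/// mirroring how `NetworkError::try_from_parts` appears to signal malformed
/// payloads in this diff.
fn decode_error_payload(mut bytes: Bytes) -> Result<Bytes, Bytes> {
    if bytes.len() < 4 {
        return Err(bytes);
    }
    let length = u32::from_be_bytes([bytes[0], bytes[1], bytes[2], bytes[3]]) as usize;
    if bytes.len() - 4 != length {
        return Err(bytes); // too short and too long are both rejected
    }
    bytes.advance(4); // drop the prefix, keep the message
    Ok(bytes)
}

fn main() {
    assert_eq!(
        encode_error_payload("world"),
        Bytes::from_static(b"\x00\x00\x00\x05world"),
    );
    // `\r` is 0x0D = 13, the length of "generic error" (see the new snapshot).
    assert_eq!(
        decode_error_payload(Bytes::from_static(b"\0\0\0\rgeneric error")),
        Ok(Bytes::from_static(b"generic error")),
    );
    // Declared length 0xFF with an 11-byte message, as in the
    // `invalid_error_too_short` test: rejected with the bytes intact.
    assert!(decode_error_payload(Bytes::from_static(b"\x00\x00\x00\xFFhello world")).is_err());
}
```

Under this reading, a declared length that disagrees with the actual payload in either direction maps to the new `PARTIAL_TRANSACTION_ERROR` / `PARTIAL_NETWORK_ERROR` codes that replace the removed `PACK_INVALID_ERROR_TAG`.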
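The new error types in the patch (`DecodeTransactionError`, `DecodeNetworkError`, the test-only `ExampleError`) all attach their `ErrorCode` through the nightly `core::error::Request` machinery rather than through a codec. A small self-contained sketch of that round trip follows; that `NetworkError::capture_error` recovers the code via `request_value` and the message via `Display` is an assumption here, and the `ErrorCode` stand-in is hypothetical:

```rust
#![feature(error_generic_member_access)] // already required by the crates in this diff
use core::error::{self, Error, Request};
use core::fmt;

/// Hypothetical stand-in for `harpc_types::error_code::ErrorCode`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct ErrorCode(u16);

#[derive(Debug)]
struct ExampleError;

impl fmt::Display for ExampleError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("example error")
    }
}

impl Error for ExampleError {
    // The same `provide` pattern as `DecodeTransactionError` and
    // `DecodeNetworkError` in the patch: attach the code as a typed value.
    fn provide<'a>(&'a self, request: &mut Request<'a>) {
        request.provide_value(ErrorCode(500));
    }
}

fn main() {
    let err = ExampleError;
    // Plausibly what `capture_error` does internally: recover the code
    // without downcasting, and take the message from `Display`.
    let code: Option<ErrorCode> = error::request_value(&err);
    assert_eq!(code, Some(ErrorCode(500)));
    assert_eq!(err.to_string(), "example error");
}
```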
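The `Pack` tests in the patch all exercise one buffering rule: after a `ResponseKind::Err` control frame, data frames are accumulated into the pending error rather than yielded, and the pending error is finished at the next control frame or at end of stream. A synchronous restatement of that rule under simplified types (the real `Pack` is an async `Stream` yielding `Result<Bytes, NetworkError>` and keeps the `ErrorCode` alongside the buffer):

```rust
use bytes::{BufMut, Bytes, BytesMut};

// Simplified stand-ins: the real frames carry ResponseKind and an ErrorCode.
enum Frame {
    Data(Bytes),
    ErrControl, // ResponseKind::Err(code)
    OkControl,  // ResponseKind::Ok
}

fn pack(frames: Vec<Frame>) -> Vec<Result<Bytes, Bytes>> {
    let mut out = Vec::new();
    let mut pending: Option<BytesMut> = None; // buffered error transaction

    for frame in frames {
        match frame {
            // Inside an error transaction data is buffered, otherwise yielded.
            Frame::Data(data) => match pending.as_mut() {
                Some(buffer) => buffer.put(data),
                None => out.push(Ok(data)),
            },
            // A new error finishes the previous one and starts buffering anew.
            Frame::ErrControl => {
                if let Some(buffer) = pending.replace(BytesMut::new()) {
                    out.push(Err(buffer.freeze()));
                }
            }
            // An Ok control frame finishes the pending error so that valid
            // values are not concatenated onto it (see the comment in `poll`).
            Frame::OkControl => {
                if let Some(buffer) = pending.take() {
                    out.push(Err(buffer.freeze()));
                }
            }
        }
    }
    // End of stream finishes whatever error is still pending.
    if let Some(buffer) = pending.take() {
        out.push(Err(buffer.freeze()));
    }
    out
}

fn main() {
    // Mirrors the `trailing_error` test: one Ok item, then a buffered error.
    let values = pack(vec![
        Frame::Data(Bytes::from_static(b"hello")),
        Frame::ErrControl,
        Frame::Data(Bytes::from_static(b"\x00\x00\x00\x05world")),
    ]);
    assert_eq!(values, [
        Ok(Bytes::from_static(b"hello")),
        Err(Bytes::from_static(b"\x00\x00\x00\x05world")),
    ]);
}
```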