Skip to content

Commit

Permalink
Merge master
Browse files Browse the repository at this point in the history
  • Loading branch information
ameba23 committed Oct 28, 2024
2 parents 944f931 + c1a2233 commit 14014a0
Show file tree
Hide file tree
Showing 31 changed files with 354 additions and 357 deletions.
2 changes: 2 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -17,11 +17,13 @@ At the moment this project **does not** adhere to
`AttestationQueue` config types were removed from the Attestation pallet.
- In [#1068](https://github.com/entropyxyz/entropy-core/pull/1068) an extra type `PckCertChainVerifier`
was added to the staking extension pallet's `Config` trait.
- In [#1134](https://github.com/entropyxyz/entropy-core/pull/1134/) the `no-sync` option was removed

### Changed
- Use correct key rotation endpoint in OCW ([#1104](https://github.com/entropyxyz/entropy-core/pull/1104))
- Change attestation flow to be pull based ([#1109](https://github.com/entropyxyz/entropy-core/pull/1109/))
- Handle PCK certificates ([#1068](https://github.com/entropyxyz/entropy-core/pull/1068))
- Remove declare synced ([#1134](https://github.com/entropyxyz/entropy-core/pull/1134/))

## [0.3.0-rc.1](https://github.com/entropyxyz/entropy-core/compare/release/v0.2.0...release/v0.3.0-rc.1) - 2024-10-04

Expand Down
14 changes: 7 additions & 7 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

35 changes: 26 additions & 9 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,16 @@ Our blockchain node is written with [Substrate](https://substrate.io/) using [Su

## Documentation

You can build the API documentation for Entropy by invoking `cargo doc --no-deps --open`.
There is also [high level documentation for Entropy available here](https://docs.entropy.xyz).
- High level introduction to Entropy: [docs.entropy.xyz](https://docs.entropy.xyz)
- API documentation for the `entropy-tss` crate: [docs.rs/entropy-tss](https://docs.rs/entropy-tss/latest/entropy_tss/index.html)

You can also build the API docs yourself:
1. [Install the dependencies](#building-from-source)
2. Invoke
```bash
cargo doc --no-deps --open
```


## Getting Started

Expand Down Expand Up @@ -45,16 +53,25 @@ This repository provides a [Docker Compose](https://docs.docker.com/compose/) co

### Building from source

To build from source, you will need some development tooling installed on your local machine.

**Do this** to build Entropy from source.

1. [Install Rust](https://www.rust-lang.org/tools/install) and [Substrate dependencies for your Operating System](https://docs.substrate.io/install/).
1. Building the chain node and threshold signature scheme (TSS) server binaries can be done by running:
Dependencies you will need to build locally:
1. [Install Rust](https://www.rust-lang.org/tools/install)
1. [Install Substrate dependencies](https://docs.substrate.io/install/)
1. Add Rust components
```sh
rustup target add wasm32-unknown-unknown
rustup component add rust-src
```
1. Install `wasm-pack`
```sh
cargo build --release
curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh
```

Build the chain node and threshold signature scheme (TSS) server binaries by running:

```sh
cargo build --release
```

### Run: Single-Node Development Chain

Spinning up a local Entropy node for development and basic testing can be done with:
Expand Down
2 changes: 1 addition & 1 deletion crates/client/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ anyhow ="1.0.91"

# Only for the browser
js-sys={ version="0.3.72", optional=true }
tokio ={ version="1.40", features=["time"] }
tokio ={ version="1.41", features=["time"] }

[dev-dependencies]
serial_test ="3.1.1"
Expand Down
Binary file modified crates/client/entropy_metadata.scale
Binary file not shown.
2 changes: 1 addition & 1 deletion crates/kvdb/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ chacha20poly1305={ version="0.9", features=["alloc"], default-features=false }
synedrion ={ version="0.2.0-beta.0" }

# Async
tokio ={ version="1.40", features=["macros", "sync", "fs", "rt-multi-thread", "io-util"] }
tokio ={ version="1.41", features=["macros", "sync", "fs", "rt-multi-thread", "io-util"] }
tracing={ version="0.1", default-features=false }

# Misc
Expand Down
2 changes: 1 addition & 1 deletion crates/protocol/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ synedrion ={ version="0.2.0-beta.0" }
serde ={ version="1.0", features=["derive"], default-features=false }
subxt ={ version="0.35.3", default-features=false }
sp-core ={ version="31.0.0", default-features=false, features=["full_crypto", "serde"] }
tokio ={ version="1.40", features=["sync", "rt", "macros"] }
tokio ={ version="1.41", features=["sync", "rt", "macros"] }
x25519-dalek ={ version="2.0.1", features=["static_secrets"] }
futures ="0.3"
hex ="0.4.3"
Expand Down
3 changes: 2 additions & 1 deletion crates/protocol/src/execute_protocol.rs
Original file line number Diff line number Diff line change
Expand Up @@ -334,6 +334,7 @@ pub async fn execute_reshare(
chans: Channels,
threshold_pair: &sr25519::Pair,
inputs: KeyResharingInputs<KeyParams, PartyId>,
verifiers: &BTreeSet<PartyId>,
aux_info_option: Option<AuxInfo<KeyParams, PartyId>>,
) -> Result<
(ThresholdKeyShare<KeyParams, PartyId>, AuxInfo<KeyParams, PartyId>),
Expand All @@ -350,7 +351,7 @@ pub async fn execute_reshare(
&mut OsRng,
SynedrionSessionId::from_seed(session_id_hash.as_slice()),
pair,
&inputs.new_holders,
verifiers,
inputs.clone(),
)
.map_err(ProtocolExecutionErr::SessionCreation)?;
Expand Down
3 changes: 2 additions & 1 deletion crates/protocol/tests/helpers/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -144,7 +144,8 @@ pub async fn server(
new_threshold: old_key.threshold(),
};

let new_keyshare = execute_reshare(session_id, channels, &pair, inputs, None).await?;
let new_keyshare =
execute_reshare(session_id, channels, &pair, inputs, &party_ids, None).await?;
Ok(ProtocolOutput::Reshare(new_keyshare.0))
},
SessionId::Dkg { .. } => {
Expand Down
2 changes: 1 addition & 1 deletion crates/shared/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ edition ='2021'

[dependencies]
codec ={ package="parity-scale-codec", version="3.0.0", default-features=false }
scale-info ={ version='2.11.4', default-features=false, features=['derive'] }
scale-info ={ version='2.11.5', default-features=false, features=['derive'] }
serde ={ version="1.0", default-features=false, features=["derive"] }
serde_derive="1.0.147"
strum ="0.26.3"
Expand Down
8 changes: 4 additions & 4 deletions crates/shared/src/types.rs
Original file line number Diff line number Diff line change
Expand Up @@ -37,8 +37,8 @@ pub type BlockNumber = u32;
#[derive(Clone, Encode, Decode, Debug, Eq, PartialEq, TypeInfo)]
pub struct ValidatorInfo {
pub x25519_public_key: X25519PublicKey,
pub ip_address: codec::alloc::vec::Vec<u8>,
pub tss_account: codec::alloc::vec::Vec<u8>,
pub ip_address: Vec<u8>,
pub tss_account: Vec<u8>,
}

/// Offchain worker message for initiating the initial jumpstart DKG
Expand All @@ -55,8 +55,8 @@ pub struct OcwMessageDkg {
#[cfg_attr(feature = "std", derive(Serialize, Deserialize))]
#[derive(Clone, Encode, Decode, Debug, Eq, PartialEq, TypeInfo)]
pub struct OcwMessageReshare {
// Stash address of new signer
pub new_signer: Vec<u8>,
// Stash addresses of new signers
pub new_signers: Vec<Vec<u8>>,
pub block_number: BlockNumber,
}

Expand Down
2 changes: 1 addition & 1 deletion crates/test-cli/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ colored ="2.0.4"
subxt ="0.35.3"
sp-core ="31.0.0"
anyhow ="1.0.91"
tokio ={ version="1.40", features=["macros", "rt-multi-thread", "io-util", "process"] }
tokio ={ version="1.41", features=["macros", "rt-multi-thread", "io-util", "process"] }
hex ="0.4.3"
bincode ="1.3.3"
x25519-dalek ="2.0.1"
Expand Down
2 changes: 1 addition & 1 deletion crates/testing-utils/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ sp-core={ version="31.0.0", default-features=false }
parity-scale-codec="3.6.12"
lazy_static="1.5.0"
hex-literal="0.4.1"
tokio={ version="1.40", features=["macros", "fs", "rt-multi-thread", "io-util", "process"] }
tokio={ version="1.41", features=["macros", "fs", "rt-multi-thread", "io-util", "process"] }
axum={ version="0.7.7" }
entropy-shared={ version="0.3.0-rc.1", path="../shared" }
entropy-kvdb={ version="0.3.0-rc.1", path="../kvdb", default-features=false }
Expand Down
2 changes: 1 addition & 1 deletion crates/threshold-signature-server/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ backoff ={ version="0.4.0", features=["tokio"] }

# Async
futures="0.3"
tokio ={ version="1.40", features=["macros", "fs", "rt-multi-thread", "io-util", "process", "sync"] }
tokio ={ version="1.41", features=["macros", "fs", "rt-multi-thread", "io-util", "process", "sync"] }

# HTTP
reqwest={ version="0.12.8", features=["json", "stream"] }
Expand Down
10 changes: 0 additions & 10 deletions crates/threshold-signature-server/src/helpers/launch.rs
Original file line number Diff line number Diff line change
Expand Up @@ -153,16 +153,6 @@ pub async fn load_kv_store(
#[derive(Parser, Debug, Clone)]
#[command(about, version)]
pub struct StartupArgs {
/// Indicates that a Threshold server **should not** ask its peers for key-share data.
///
/// This is useful to avoid in cases where:
///
/// - The network is being bootstrapped and peers don't have any useful data yet.
///
/// - There is outdated information about peers (e.g, outdated IP addresses coming from the
/// on-chain registry) and we don't want to sync outdated key-shares.
#[arg(short = 's', long = "no-sync")]
pub no_sync: bool,
/// Use the developer key Bob.
#[arg(short = 'b', long = "bob")]
pub bob: bool,
Expand Down
3 changes: 2 additions & 1 deletion crates/threshold-signature-server/src/signing_client/api.rs
Original file line number Diff line number Diff line change
Expand Up @@ -205,7 +205,8 @@ pub async fn do_proactive_refresh(
.await?;

let result =
execute_reshare(session_id, channels, signer.signer(), inputs, Some(aux_info)).await?;
execute_reshare(session_id, channels, signer.signer(), inputs, &party_ids, Some(aux_info))
.await?;
Ok(result)
}

Expand Down
53 changes: 29 additions & 24 deletions crates/threshold-signature-server/src/validator/api.rs
Original file line number Diff line number Diff line change
Expand Up @@ -106,7 +106,7 @@ pub async fn new_reshare(
.map_err(|e| ValidatorErr::UserError(e.to_string()))?;

let old_holder: Option<OldHolder<KeyParams, PartyId>> =
if data.new_signer == my_stash_address.encode() {
if data.new_signers.contains(&my_stash_address.encode()) {
None
} else {
let kvdb_result = app_state.kv_store.kv().get(&hex::encode(NETWORK_PARENT_KEY)).await?;
Expand All @@ -116,14 +116,15 @@ pub async fn new_reshare(
Some(OldHolder { key_share: key_share.0 })
};

let party_ids: BTreeSet<PartyId> =
// new_holders -> From chain next_signers (old_holders (currently forced to be t) + new_holders)
// also acts as verifiers as is everyone in the party
let new_holders: BTreeSet<PartyId> =
validators_info.iter().cloned().map(|x| PartyId::new(x.tss_account)).collect();

let pruned_old_holders =
prune_old_holders(&api, &rpc, data.new_signer, validators_info.clone()).await?;

// old holders -> next_signers - new_signers (will be at least t)
let old_holders =
&prune_old_holders(&api, &rpc, data.new_signers, validators_info.clone()).await?;
let old_holders: BTreeSet<PartyId> =
pruned_old_holders.into_iter().map(|x| PartyId::new(x.tss_account)).collect();
old_holders.iter().map(|x| PartyId::new(x.tss_account.clone())).collect();

let new_holder = NewHolder {
verifying_key: decoded_verifying_key,
Expand All @@ -139,7 +140,7 @@ pub async fn new_reshare(
let inputs = KeyResharingInputs {
old_holder,
new_holder: Some(new_holder),
new_holders: party_ids.clone(),
new_holders: new_holders.clone(),
new_threshold: threshold as usize,
};

Expand All @@ -157,7 +158,6 @@ pub async fn new_reshare(
converted_validator_info.push(validator_info.clone());
tss_accounts.push(validator_info.tss_account.clone());
}

let channels = get_channels(
&app_state.listener_state,
converted_validator_info,
Expand All @@ -169,7 +169,8 @@ pub async fn new_reshare(
.await?;

let (new_key_share, aux_info) =
execute_reshare(session_id.clone(), channels, signer.signer(), inputs, None).await?;
execute_reshare(session_id.clone(), channels, signer.signer(), inputs, &new_holders, None)
.await?;

let serialized_key_share = key_serialize(&(new_key_share, aux_info))
.map_err(|_| ProtocolErr::KvSerialize("Kv Serialize Error".to_string()))?;
Expand Down Expand Up @@ -273,8 +274,8 @@ pub async fn validate_new_reshare(
.await?
.ok_or_else(|| ValidatorErr::ChainFetch("Not Currently in a reshare"))?;

if reshare_data.new_signer != chain_data.new_signer
|| chain_data.block_number != reshare_data.block_number
if chain_data.block_number != reshare_data.block_number.saturating_sub(1)
|| chain_data.new_signers != reshare_data.new_signers
{
return Err(ValidatorErr::InvalidData);
}
Expand Down Expand Up @@ -365,20 +366,24 @@ pub fn check_forbidden_key(key: &str) -> Result<(), ValidatorErr> {
pub async fn prune_old_holders(
api: &OnlineClient<EntropyConfig>,
rpc: &LegacyRpcMethods<EntropyConfig>,
new_signer: Vec<u8>,
new_signers: Vec<Vec<u8>>,
validators_info: Vec<ValidatorInfo>,
) -> Result<Vec<ValidatorInfo>, ValidatorErr> {
Ok(if !new_signer.is_empty() {
let address_slice: &[u8; 32] = &new_signer.clone().try_into().unwrap();
let new_signer_address = AccountId32(*address_slice);
let new_signer_info = &get_validators_info(api, rpc, vec![new_signer_address])
.await
.map_err(|e| ValidatorErr::UserError(e.to_string()))?[0];
validators_info
.iter()
.filter(|x| x.tss_account != new_signer_info.tss_account)
.cloned()
.collect()
Ok(if !new_signers.is_empty() {
let mut filtered_validators_info = vec![];
for new_signer in new_signers {
let address_slice: &[u8; 32] = &new_signer.clone().try_into().unwrap();
let new_signer_address = AccountId32(*address_slice);
let new_signer_info = &get_validators_info(api, rpc, vec![new_signer_address])
.await
.map_err(|e| ValidatorErr::UserError(e.to_string()))?[0];
filtered_validators_info = validators_info
.iter()
.filter(|x| x.tss_account != new_signer_info.tss_account)
.cloned()
.collect::<Vec<_>>();
}
filtered_validators_info
} else {
validators_info.clone()
})
Expand Down
Loading

0 comments on commit 14014a0

Please sign in to comment.